// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/interpreter/bytecode-generator.h"

#include "src/api-inl.h"
#include "src/ast/ast-source-ranges.h"
#include "src/ast/scopes.h"
#include "src/builtins/builtins-constructor.h"
#include "src/code-stubs.h"
#include "src/compiler.h"
#include "src/interpreter/bytecode-flags.h"
#include "src/interpreter/bytecode-jump-table.h"
#include "src/interpreter/bytecode-label.h"
#include "src/interpreter/bytecode-register-allocator.h"
#include "src/interpreter/control-flow-builders.h"
#include "src/objects-inl.h"
#include "src/objects/debug-objects.h"
#include "src/objects/literal-objects-inl.h"
#include "src/parsing/parse-info.h"
#include "src/parsing/token.h"
#include "src/unoptimized-compilation-info.h"

namespace v8 {
namespace internal {
namespace interpreter {

// Scoped class tracking context objects created by the visitor. Represents
// mutations of the context chain within the function body, allowing pushing
// and popping of the current {context_register} during visitation.
class BytecodeGenerator::ContextScope BASE_EMBEDDED {
 public:
  ContextScope(BytecodeGenerator* generator, Scope* scope)
      : generator_(generator),
        scope_(scope),
        outer_(generator_->execution_context()),
        register_(Register::current_context()),
        depth_(0) {
    DCHECK(scope->NeedsContext() || outer_ == nullptr);
    if (outer_) {
      depth_ = outer_->depth_ + 1;

      // Push the outer context into a new context register.
      Register outer_context_reg =
          generator_->register_allocator()->NewRegister();
      outer_->set_register(outer_context_reg);
      generator_->builder()->PushContext(outer_context_reg);
    }
    generator_->set_execution_context(this);
  }

  ~ContextScope() {
    if (outer_) {
      DCHECK_EQ(register_.index(), Register::current_context().index());
      generator_->builder()->PopContext(outer_->reg());
      outer_->set_register(register_);
    }
    generator_->set_execution_context(outer_);
  }

  // Returns the depth of the given |scope| for the current execution context.
  int ContextChainDepth(Scope* scope) {
    return scope_->ContextChainLength(scope);
  }

  // Returns the execution context at |depth| in the current context chain if
  // it is a function local execution context, otherwise returns nullptr.
  ContextScope* Previous(int depth) {
    if (depth > depth_) {
      return nullptr;
    }

    ContextScope* previous = this;
    for (int i = depth; i > 0; --i) {
      previous = previous->outer_;
    }
    return previous;
  }

  Register reg() const { return register_; }

 private:
  const BytecodeArrayBuilder* builder() const { return generator_->builder(); }

  void set_register(Register reg) { register_ = reg; }

  BytecodeGenerator* generator_;
  Scope* scope_;
  ContextScope* outer_;
  Register register_;
  int depth_;
};

// Scoped class for tracking control statements entered by the
// visitor. The pattern derives AstGraphBuilder::ControlScope.
class BytecodeGenerator::ControlScope BASE_EMBEDDED {
 public:
  explicit ControlScope(BytecodeGenerator* generator)
      : generator_(generator), outer_(generator->execution_control()),
        context_(generator->execution_context()) {
    generator_->set_execution_control(this);
  }
  virtual ~ControlScope() { generator_->set_execution_control(outer()); }

  void Break(Statement* stmt) {
    PerformCommand(CMD_BREAK, stmt, kNoSourcePosition);
  }
  void Continue(Statement* stmt) {
    PerformCommand(CMD_CONTINUE, stmt, kNoSourcePosition);
  }
  void ReturnAccumulator(int source_position = kNoSourcePosition) {
    PerformCommand(CMD_RETURN, nullptr, source_position);
  }
  void AsyncReturnAccumulator(int source_position = kNoSourcePosition) {
    PerformCommand(CMD_ASYNC_RETURN, nullptr, source_position);
  }

  class DeferredCommands;

 protected:
  enum Command {
    CMD_BREAK,
    CMD_CONTINUE,
    CMD_RETURN,
    CMD_ASYNC_RETURN,
    CMD_RETHROW
  };
  static constexpr bool CommandUsesAccumulator(Command command) {
    return command != CMD_BREAK && command != CMD_CONTINUE;
  }

  void PerformCommand(Command command, Statement* statement,
                      int source_position);
  virtual bool Execute(Command command, Statement* statement,
                       int source_position) = 0;

  // Helper to pop the context chain to a depth expected by this control scope.
  // Note that it is the responsibility of each individual {Execute} method to
  // trigger this when commands are handled and control-flow continues locally.
  void PopContextToExpectedDepth();

  BytecodeGenerator* generator() const { return generator_; }
  ControlScope* outer() const { return outer_; }
  ContextScope* context() const { return context_; }

 private:
  BytecodeGenerator* generator_;
  ControlScope* outer_;
  ContextScope* context_;

  DISALLOW_COPY_AND_ASSIGN(ControlScope);
};

// Helper class for a try-finally control scope. It can record intercepted
// control-flow commands that cause entry into a finally-block, and re-apply
// them after again leaving that block. Special tokens are used to identify
// paths going through the finally-block to dispatch after leaving the block.
class BytecodeGenerator::ControlScope::DeferredCommands final {
 public:
  DeferredCommands(BytecodeGenerator* generator, Register token_register,
                   Register result_register)
      : generator_(generator),
        deferred_(generator->zone()),
        token_register_(token_register),
        result_register_(result_register),
        return_token_(-1),
        async_return_token_(-1),
        rethrow_token_(-1) {}

  // One recorded control-flow command.
  struct Entry {
    Command command;       // The command type being applied on this path.
    Statement* statement;  // The target statement for the command or {nullptr}.
    int token;             // A token identifying this particular path.
  };

  // Records a control-flow command while entering the finally-block. This also
  // generates a new dispatch token that identifies one particular path. This
  // expects the result to be in the accumulator.
  void RecordCommand(Command command, Statement* statement) {
    int token = GetTokenForCommand(command, statement);

    DCHECK_LT(token, deferred_.size());
    DCHECK_EQ(deferred_[token].command, command);
    DCHECK_EQ(deferred_[token].statement, statement);
    DCHECK_EQ(deferred_[token].token, token);

    if (CommandUsesAccumulator(command)) {
      builder()->StoreAccumulatorInRegister(result_register_);
    }
    builder()->LoadLiteral(Smi::FromInt(token));
    builder()->StoreAccumulatorInRegister(token_register_);
    if (!CommandUsesAccumulator(command)) {
      // If we're not saving the accumulator in the result register, shove a
      // harmless value there instead so that it is still considered "killed" in
      // the liveness analysis. Normally we would LdaUndefined first, but the
      // Smi token value is just as good, and by reusing it we save a bytecode.
      builder()->StoreAccumulatorInRegister(result_register_);
    }
  }

  // Records the dispatch token to be used to identify the re-throw path when
  // the finally-block has been entered through the exception handler. This
  // expects the exception to be in the accumulator.
  void RecordHandlerReThrowPath() {
    // The accumulator contains the exception object.
    RecordCommand(CMD_RETHROW, nullptr);
  }

  // Records the dispatch token to be used to identify the implicit fall-through
  // path at the end of a try-block into the corresponding finally-block.
  void RecordFallThroughPath() {
    builder()->LoadLiteral(Smi::FromInt(-1));
    builder()->StoreAccumulatorInRegister(token_register_);
    // Since we're not saving the accumulator in the result register, shove a
    // harmless value there instead so that it is still considered "killed" in
    // the liveness analysis. Normally we would LdaUndefined first, but the Smi
    // token value is just as good, and by reusing it we save a bytecode.
    builder()->StoreAccumulatorInRegister(result_register_);
  }

  // Applies all recorded control-flow commands after the finally-block again.
  // This generates a dynamic dispatch on the token from the entry point.
  void ApplyDeferredCommands() {
    if (deferred_.size() == 0) return;

    BytecodeLabel fall_through;

    if (deferred_.size() == 1) {
      // For a single entry, just jump to the fallthrough if we don't match the
      // entry token.
      const Entry& entry = deferred_[0];

      builder()
          ->LoadLiteral(Smi::FromInt(entry.token))
          .CompareReference(token_register_)
          .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &fall_through);

      if (CommandUsesAccumulator(entry.command)) {
        builder()->LoadAccumulatorWithRegister(result_register_);
      }
      execution_control()->PerformCommand(entry.command, entry.statement,
                                          kNoSourcePosition);
    } else {
      // For multiple entries, build a jump table and switch on the token,
      // jumping to the fallthrough if none of them match.

      BytecodeJumpTable* jump_table =
          builder()->AllocateJumpTable(static_cast<int>(deferred_.size()), 0);
      builder()
          ->LoadAccumulatorWithRegister(token_register_)
          .SwitchOnSmiNoFeedback(jump_table)
          .Jump(&fall_through);
      for (const Entry& entry : deferred_) {
        builder()->Bind(jump_table, entry.token);

        if (CommandUsesAccumulator(entry.command)) {
          builder()->LoadAccumulatorWithRegister(result_register_);
        }
        execution_control()->PerformCommand(entry.command, entry.statement,
                                            kNoSourcePosition);
      }
    }

    builder()->Bind(&fall_through);
  }

  BytecodeArrayBuilder* builder() { return generator_->builder(); }
  ControlScope* execution_control() { return generator_->execution_control(); }

 private:
  int GetTokenForCommand(Command command, Statement* statement) {
    switch (command) {
      case CMD_RETURN:
        return GetReturnToken();
      case CMD_ASYNC_RETURN:
        return GetAsyncReturnToken();
      case CMD_RETHROW:
        return GetRethrowToken();
      default:
        // TODO(leszeks): We could also search for entries with the same
        // command and statement.
        return GetNewTokenForCommand(command, statement);
    }
  }

  int GetReturnToken() {
    if (return_token_ == -1) {
      return_token_ = GetNewTokenForCommand(CMD_RETURN, nullptr);
    }
    return return_token_;
  }

  int GetAsyncReturnToken() {
    if (async_return_token_ == -1) {
      async_return_token_ = GetNewTokenForCommand(CMD_ASYNC_RETURN, nullptr);
    }
    return async_return_token_;
  }

  int GetRethrowToken() {
    if (rethrow_token_ == -1) {
      rethrow_token_ = GetNewTokenForCommand(CMD_RETHROW, nullptr);
    }
    return rethrow_token_;
  }

  int GetNewTokenForCommand(Command command, Statement* statement) {
    int token = static_cast<int>(deferred_.size());
    deferred_.push_back({command, statement, token});
    return token;
  }

  BytecodeGenerator* generator_;
  ZoneVector<Entry> deferred_;
  Register token_register_;
  Register result_register_;

  // Tokens for commands that don't need a statement.
  int return_token_;
  int async_return_token_;
  int rethrow_token_;
};

// Scoped class for dealing with control flow reaching the function level.
class BytecodeGenerator::ControlScopeForTopLevel final
    : public BytecodeGenerator::ControlScope {
 public:
  explicit ControlScopeForTopLevel(BytecodeGenerator* generator)
      : ControlScope(generator) {}

 protected:
  bool Execute(Command command, Statement* statement,
               int source_position) override {
    switch (command) {
      case CMD_BREAK:  // We should never see break/continue in top-level.
      case CMD_CONTINUE:
        UNREACHABLE();
      case CMD_RETURN:
        // No need to pop contexts, execution leaves the method body.
        generator()->BuildReturn(source_position);
        return true;
      case CMD_ASYNC_RETURN:
        // No need to pop contexts, execution leaves the method body.
        generator()->BuildAsyncReturn(source_position);
        return true;
      case CMD_RETHROW:
        // No need to pop contexts, execution leaves the method body.
        generator()->BuildReThrow();
        return true;
    }
    return false;
  }
};

// Scoped class for enabling break inside blocks and switch blocks.
class BytecodeGenerator::ControlScopeForBreakable final
    : public BytecodeGenerator::ControlScope {
 public:
  ControlScopeForBreakable(BytecodeGenerator* generator,
                           BreakableStatement* statement,
                           BreakableControlFlowBuilder* control_builder)
      : ControlScope(generator),
        statement_(statement),
        control_builder_(control_builder) {}

 protected:
  bool Execute(Command command, Statement* statement,
               int source_position) override {
    control_builder_->set_needs_continuation_counter();
    if (statement != statement_) return false;
    switch (command) {
      case CMD_BREAK:
        PopContextToExpectedDepth();
        control_builder_->Break();
        return true;
      case CMD_CONTINUE:
      case CMD_RETURN:
      case CMD_ASYNC_RETURN:
      case CMD_RETHROW:
        break;
    }
    return false;
  }

 private:
  Statement* statement_;
  BreakableControlFlowBuilder* control_builder_;
};

// Scoped class for enabling 'break' and 'continue' in iteration
// constructs, e.g. do...while, while..., for...
class BytecodeGenerator::ControlScopeForIteration final
    : public BytecodeGenerator::ControlScope {
 public:
  ControlScopeForIteration(BytecodeGenerator* generator,
                           IterationStatement* statement,
                           LoopBuilder* loop_builder)
      : ControlScope(generator),
        statement_(statement),
        loop_builder_(loop_builder) {
    generator->loop_depth_++;
  }
  ~ControlScopeForIteration() { generator()->loop_depth_--; }

 protected:
  bool Execute(Command command, Statement* statement,
               int source_position) override {
    if (statement != statement_) return false;
    switch (command) {
      case CMD_BREAK:
        PopContextToExpectedDepth();
        loop_builder_->Break();
        return true;
      case CMD_CONTINUE:
        PopContextToExpectedDepth();
        loop_builder_->Continue();
        return true;
      case CMD_RETURN:
      case CMD_ASYNC_RETURN:
      case CMD_RETHROW:
        break;
    }
    return false;
  }

 private:
  Statement* statement_;
  LoopBuilder* loop_builder_;
};

// Scoped class for enabling 'throw' in try-catch constructs.
class BytecodeGenerator::ControlScopeForTryCatch final
    : public BytecodeGenerator::ControlScope {
 public:
  ControlScopeForTryCatch(BytecodeGenerator* generator,
                          TryCatchBuilder* try_catch_builder)
      : ControlScope(generator) {}

 protected:
  bool Execute(Command command, Statement* statement,
               int source_position) override {
    switch (command) {
      case CMD_BREAK:
      case CMD_CONTINUE:
      case CMD_RETURN:
      case CMD_ASYNC_RETURN:
        break;
      case CMD_RETHROW:
        // No need to pop contexts, execution re-enters the method body via the
        // stack unwinding mechanism which itself restores contexts correctly.
        generator()->BuildReThrow();
        return true;
    }
    return false;
  }
};

// Scoped class for enabling control flow through try-finally constructs.
class BytecodeGenerator::ControlScopeForTryFinally final
    : public BytecodeGenerator::ControlScope {
 public:
  ControlScopeForTryFinally(BytecodeGenerator* generator,
                            TryFinallyBuilder* try_finally_builder,
                            DeferredCommands* commands)
      : ControlScope(generator),
        try_finally_builder_(try_finally_builder),
        commands_(commands) {}

 protected:
  bool Execute(Command command, Statement* statement,
               int source_position) override {
    switch (command) {
      case CMD_BREAK:
      case CMD_CONTINUE:
      case CMD_RETURN:
      case CMD_ASYNC_RETURN:
      case CMD_RETHROW:
        PopContextToExpectedDepth();
        // We don't record the source_position here, since the return bytecode
        // is not generated here but later, as part of the finally block. Each
        // return bytecode generated in the finally block gets its own source
        // position, either from the corresponding return statement or from
        // the end of the function if no return statement is present.
        commands_->RecordCommand(command, statement);
        try_finally_builder_->LeaveTry();
        return true;
    }
    return false;
  }

 private:
  TryFinallyBuilder* try_finally_builder_;
  DeferredCommands* commands_;
};

// Allocates and fetches the coverage indices tracking n-ary logical
// expressions.
class BytecodeGenerator::NaryCodeCoverageSlots {
 public:
  NaryCodeCoverageSlots(BytecodeGenerator* generator, NaryOperation* expr)
      : generator_(generator) {
    if (generator_->block_coverage_builder_ == nullptr) return;
    for (size_t i = 0; i < expr->subsequent_length(); i++) {
      coverage_slots_.push_back(
          generator_->AllocateNaryBlockCoverageSlotIfEnabled(expr, i));
    }
  }

  int GetSlotFor(size_t subsequent_expr_index) const {
    if (generator_->block_coverage_builder_ == nullptr) {
      return BlockCoverageBuilder::kNoCoverageArraySlot;
    }
    DCHECK(coverage_slots_.size() > subsequent_expr_index);
    return coverage_slots_[subsequent_expr_index];
  }

 private:
  BytecodeGenerator* generator_;
  std::vector<int> coverage_slots_;
};

void BytecodeGenerator::ControlScope::PerformCommand(Command command,
                                                     Statement* statement,
                                                     int source_position) {
  ControlScope* current = this;
  do {
    if (current->Execute(command, statement, source_position)) {
      return;
    }
    current = current->outer();
  } while (current != nullptr);
  UNREACHABLE();
}

void BytecodeGenerator::ControlScope::PopContextToExpectedDepth() {
  // Pop context to the expected depth. Note that this can in fact pop multiple
  // contexts at once because the {PopContext} bytecode takes a saved register.
  if (generator()->execution_context() != context()) {
    generator()->builder()->PopContext(context()->reg());
  }
}

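// Scoped class that remembers the register allocator's next free register on
// entry and releases every register allocated after that point when it is
// destroyed.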
class BytecodeGenerator::RegisterAllocationScope final {
 public:
  explicit RegisterAllocationScope(BytecodeGenerator* generator)
      : generator_(generator),
        outer_next_register_index_(
            generator->register_allocator()->next_register_index()) {}

  ~RegisterAllocationScope() {
    generator_->register_allocator()->ReleaseRegisters(
        outer_next_register_index_);
  }

 private:
  BytecodeGenerator* generator_;
  int outer_next_register_index_;

  DISALLOW_COPY_AND_ASSIGN(RegisterAllocationScope);
};

// Scoped base class for determining how the result of an expression will be
// used.
class BytecodeGenerator::ExpressionResultScope {
 public:
  ExpressionResultScope(BytecodeGenerator* generator, Expression::Context kind)
      : generator_(generator),
        outer_(generator->execution_result()),
        allocator_(generator),
        kind_(kind),
        type_hint_(TypeHint::kAny) {
    generator_->set_execution_result(this);
  }

  virtual ~ExpressionResultScope() {
    generator_->set_execution_result(outer_);
  }

  bool IsEffect() const { return kind_ == Expression::kEffect; }
  bool IsValue() const { return kind_ == Expression::kValue; }
  bool IsTest() const { return kind_ == Expression::kTest; }

  TestResultScope* AsTest() {
    DCHECK(IsTest());
    return reinterpret_cast<TestResultScope*>(this);
  }

  // Specify expression always returns a Boolean result value.
  void SetResultIsBoolean() {
    DCHECK_EQ(type_hint_, TypeHint::kAny);
    type_hint_ = TypeHint::kBoolean;
  }

  void SetResultIsString() {
    DCHECK_EQ(type_hint_, TypeHint::kAny);
    type_hint_ = TypeHint::kString;
  }

  TypeHint type_hint() const { return type_hint_; }

 private:
  BytecodeGenerator* generator_;
  ExpressionResultScope* outer_;
  RegisterAllocationScope allocator_;
  Expression::Context kind_;
  TypeHint type_hint_;

  DISALLOW_COPY_AND_ASSIGN(ExpressionResultScope);
};

// Scoped class used when the result of the current expression is not
// expected to produce a result.
class BytecodeGenerator::EffectResultScope final
    : public ExpressionResultScope {
 public:
  explicit EffectResultScope(BytecodeGenerator* generator)
      : ExpressionResultScope(generator, Expression::kEffect) {}
};

// Scoped class used when the result of the current expression to be
// evaluated should go into the interpreter's accumulator.
class BytecodeGenerator::ValueResultScope final : public ExpressionResultScope {
 public:
  explicit ValueResultScope(BytecodeGenerator* generator)
      : ExpressionResultScope(generator, Expression::kValue) {}
};

// Scoped class used when the result of the current expression to be
// evaluated is only tested with jumps to two branches.
class BytecodeGenerator::TestResultScope final : public ExpressionResultScope {
 public:
  TestResultScope(BytecodeGenerator* generator, BytecodeLabels* then_labels,
                  BytecodeLabels* else_labels, TestFallthrough fallthrough)
      : ExpressionResultScope(generator, Expression::kTest),
        result_consumed_by_test_(false),
        fallthrough_(fallthrough),
        then_labels_(then_labels),
        else_labels_(else_labels) {}

  // Used when code special cases for TestResultScope and consumes any
  // possible value by testing and jumping to a then/else label.
  void SetResultConsumedByTest() {
    result_consumed_by_test_ = true;
  }
  bool result_consumed_by_test() { return result_consumed_by_test_; }

  // Inverts the control flow of the operation, swapping the then and else
  // labels and the fallthrough.
  void InvertControlFlow() {
    std::swap(then_labels_, else_labels_);
    fallthrough_ = inverted_fallthrough();
  }

  BytecodeLabel* NewThenLabel() { return then_labels_->New(); }
  BytecodeLabel* NewElseLabel() { return else_labels_->New(); }

  BytecodeLabels* then_labels() const { return then_labels_; }
  BytecodeLabels* else_labels() const { return else_labels_; }

  void set_then_labels(BytecodeLabels* then_labels) {
    then_labels_ = then_labels;
  }
  void set_else_labels(BytecodeLabels* else_labels) {
    else_labels_ = else_labels;
  }

  TestFallthrough fallthrough() const { return fallthrough_; }
  TestFallthrough inverted_fallthrough() const {
    switch (fallthrough_) {
      case TestFallthrough::kThen:
        return TestFallthrough::kElse;
      case TestFallthrough::kElse:
        return TestFallthrough::kThen;
      default:
        return TestFallthrough::kNone;
    }
  }
  void set_fallthrough(TestFallthrough fallthrough) {
    fallthrough_ = fallthrough;
  }

 private:
  bool result_consumed_by_test_;
  TestFallthrough fallthrough_;
  BytecodeLabels* then_labels_;
  BytecodeLabels* else_labels_;

  DISALLOW_COPY_AND_ASSIGN(TestResultScope);
};

// Used to build a list of global declaration initial value pairs.
class BytecodeGenerator::GlobalDeclarationsBuilder final : public ZoneObject {
 public:
  explicit GlobalDeclarationsBuilder(Zone* zone)
      : declarations_(0, zone),
        constant_pool_entry_(0),
        has_constant_pool_entry_(false) {}

  void AddFunctionDeclaration(const AstRawString* name, FeedbackSlot slot,
                              FeedbackSlot literal_slot,
                              FunctionLiteral* func) {
    DCHECK(!slot.IsInvalid());
    declarations_.push_back(Declaration(name, slot, literal_slot, func));
  }

  void AddUndefinedDeclaration(const AstRawString* name, FeedbackSlot slot) {
    DCHECK(!slot.IsInvalid());
    declarations_.push_back(Declaration(name, slot, nullptr));
  }

  Handle<FixedArray> AllocateDeclarations(UnoptimizedCompilationInfo* info,
                                          Handle<Script> script,
                                          Isolate* isolate) {
    DCHECK(has_constant_pool_entry_);
    int array_index = 0;
    Handle<FixedArray> data = isolate->factory()->NewFixedArray(
        static_cast<int>(declarations_.size() * 4), TENURED);
    for (const Declaration& declaration : declarations_) {
      FunctionLiteral* func = declaration.func;
      Handle<Object> initial_value;
      if (func == nullptr) {
        initial_value = isolate->factory()->undefined_value();
      } else {
        initial_value = Compiler::GetSharedFunctionInfo(func, script, isolate);
      }

      // Return a null handle if any initial values can't be created. Caller
      // will set stack overflow.
      if (initial_value.is_null()) return Handle<FixedArray>();

      data->set(array_index++, *declaration.name->string());
      data->set(array_index++, Smi::FromInt(declaration.slot.ToInt()));
      Object* undefined_or_literal_slot;
      if (declaration.literal_slot.IsInvalid()) {
        undefined_or_literal_slot = ReadOnlyRoots(isolate).undefined_value();
      } else {
        undefined_or_literal_slot =
            Smi::FromInt(declaration.literal_slot.ToInt());
      }
      data->set(array_index++, undefined_or_literal_slot);
      data->set(array_index++, *initial_value);
    }
    return data;
  }

  size_t constant_pool_entry() {
    DCHECK(has_constant_pool_entry_);
    return constant_pool_entry_;
  }

  void set_constant_pool_entry(size_t constant_pool_entry) {
    DCHECK(!empty());
    DCHECK(!has_constant_pool_entry_);
    constant_pool_entry_ = constant_pool_entry;
    has_constant_pool_entry_ = true;
  }

  bool empty() { return declarations_.empty(); }

 private:
  struct Declaration {
    Declaration() : slot(FeedbackSlot::Invalid()), func(nullptr) {}
    Declaration(const AstRawString* name, FeedbackSlot slot,
                FeedbackSlot literal_slot, FunctionLiteral* func)
        : name(name), slot(slot), literal_slot(literal_slot), func(func) {}
    Declaration(const AstRawString* name, FeedbackSlot slot,
                FunctionLiteral* func)
        : name(name),
          slot(slot),
          literal_slot(FeedbackSlot::Invalid()),
          func(func) {}

    const AstRawString* name;
    FeedbackSlot slot;
    FeedbackSlot literal_slot;
    FunctionLiteral* func;
  };
  ZoneVector<Declaration> declarations_;
  size_t constant_pool_entry_;
  bool has_constant_pool_entry_;
};

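// Scoped class tracking the innermost Scope the visitor is currently in; the
// previous current scope is restored when this object goes out of scope.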
class BytecodeGenerator::CurrentScope final {
 public:
  CurrentScope(BytecodeGenerator* generator, Scope* scope)
      : generator_(generator), outer_scope_(generator->current_scope()) {
    if (scope != nullptr) {
      DCHECK_EQ(outer_scope_, scope->outer_scope());
      generator_->set_current_scope(scope);
    }
  }
  ~CurrentScope() {
    if (outer_scope_ != generator_->current_scope()) {
      generator_->set_current_scope(outer_scope_);
    }
  }

 private:
  BytecodeGenerator* generator_;
  Scope* outer_scope_;
};

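// Cache keyed on a feedback slot kind plus the variable, AST node, or name the
// slot was requested for, mapping to an already-allocated feedback slot so that
// equivalent accesses can share one. Lookups return an invalid slot on a miss.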
class BytecodeGenerator::FeedbackSlotCache : public ZoneObject {
 public:
  explicit FeedbackSlotCache(Zone* zone) : map_(zone) {}

  void Put(FeedbackSlotKind slot_kind, Variable* variable, FeedbackSlot slot) {
    PutImpl(slot_kind, 0, variable, slot);
  }
  void Put(FeedbackSlotKind slot_kind, AstNode* node, FeedbackSlot slot) {
    PutImpl(slot_kind, 0, node, slot);
  }
  void Put(FeedbackSlotKind slot_kind, int variable_index,
           const AstRawString* name, FeedbackSlot slot) {
    PutImpl(slot_kind, variable_index, name, slot);
  }

  FeedbackSlot Get(FeedbackSlotKind slot_kind, Variable* variable) const {
    return GetImpl(slot_kind, 0, variable);
  }
  FeedbackSlot Get(FeedbackSlotKind slot_kind, AstNode* node) const {
    return GetImpl(slot_kind, 0, node);
  }
  FeedbackSlot Get(FeedbackSlotKind slot_kind, int variable_index,
                   const AstRawString* name) const {
    return GetImpl(slot_kind, variable_index, name);
  }

 private:
  typedef std::tuple<FeedbackSlotKind, int, const void*> Key;

  void PutImpl(FeedbackSlotKind slot_kind, int index, const void* node,
               FeedbackSlot slot) {
    Key key = std::make_tuple(slot_kind, index, node);
    auto entry = std::make_pair(key, slot);
    map_.insert(entry);
  }

  FeedbackSlot GetImpl(FeedbackSlotKind slot_kind, int index,
                       const void* node) const {
    Key key = std::make_tuple(slot_kind, index, node);
    auto iter = map_.find(key);
    if (iter != map_.end()) {
      return iter->second;
    }
    return FeedbackSlot();
  }

  ZoneMap<Key, FeedbackSlot> map_;
};

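// Bundles the register holding an iterator object with the register holding
// its "next" method, together with the iterator's type (normal or async).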
class BytecodeGenerator::IteratorRecord final {
 public:
  IteratorRecord(Register object_register, Register next_register,
                 IteratorType type = IteratorType::kNormal)
      : type_(type), object_(object_register), next_(next_register) {
    DCHECK(object_.is_valid() && next_.is_valid());
  }

  inline IteratorType type() const { return type_; }
  inline Register object() const { return object_; }
  inline Register next() const { return next_; }

 private:
  IteratorType type_;
  Register object_;
  Register next_;
};

#ifdef DEBUG

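// Debug-only helper returning whether |literal| is contained in the list of
// eagerly compiled inner function literals.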
static bool IsInEagerLiterals(
    FunctionLiteral* literal,
    const ZoneVector<FunctionLiteral*>& eager_literals) {
  for (FunctionLiteral* eager_literal : eager_literals) {
    if (literal == eager_literal) return true;
  }
  return false;
}

#endif  // DEBUG

BytecodeGenerator::BytecodeGenerator(
    UnoptimizedCompilationInfo* info,
    const AstStringConstants* ast_string_constants,
    ZoneVector<FunctionLiteral*>* eager_inner_literals)
    : zone_(info->zone()),
      builder_(zone(), info->num_parameters_including_this(),
               info->scope()->num_stack_slots(), info->feedback_vector_spec(),
               info->SourcePositionRecordingMode()),
      info_(info),
      ast_string_constants_(ast_string_constants),
      closure_scope_(info->scope()),
      current_scope_(info->scope()),
      eager_inner_literals_(eager_inner_literals),
      feedback_slot_cache_(new (zone()) FeedbackSlotCache(zone())),
      globals_builder_(new (zone()) GlobalDeclarationsBuilder(zone())),
      block_coverage_builder_(nullptr),
      global_declarations_(0, zone()),
      function_literals_(0, zone()),
      native_function_literals_(0, zone()),
      object_literals_(0, zone()),
      array_literals_(0, zone()),
      class_literals_(0, zone()),
      template_objects_(0, zone()),
      execution_control_(nullptr),
      execution_context_(nullptr),
      execution_result_(nullptr),
      incoming_new_target_or_generator_(),
      dummy_feedback_slot_(),
      generator_jump_table_(nullptr),
      suspend_count_(0),
      loop_depth_(0),
      catch_prediction_(HandlerTable::UNCAUGHT) {
  DCHECK_EQ(closure_scope(), closure_scope()->GetClosureScope());
  if (info->has_source_range_map()) {
    block_coverage_builder_ = new (zone())
        BlockCoverageBuilder(zone(), builder(), info->source_range_map());
  }
}

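// Turns the accumulated bytecode into a heap-allocated BytecodeArray, first
// allocating deferred constant pool entries and, if block coverage is enabled,
// the coverage info object.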
Handle<BytecodeArray> BytecodeGenerator::FinalizeBytecode(
    Isolate* isolate, Handle<Script> script) {
  DCHECK(ThreadId::Current().Equals(isolate->thread_id()));

  AllocateDeferredConstants(isolate, script);

  if (block_coverage_builder_) {
    info()->set_coverage_info(
        isolate->factory()->NewCoverageInfo(block_coverage_builder_->slots()));
    if (FLAG_trace_block_coverage) {
      info()->coverage_info()->Print(info()->literal()->GetDebugName());
    }
  }

  if (HasStackOverflow()) return Handle<BytecodeArray>();
  Handle<BytecodeArray> bytecode_array = builder()->ToBytecodeArray(isolate);

  if (incoming_new_target_or_generator_.is_valid()) {
    bytecode_array->set_incoming_new_target_or_generator_register(
        incoming_new_target_or_generator_);
  }

  return bytecode_array;
}

void BytecodeGenerator::AllocateDeferredConstants(Isolate* isolate,
                                                  Handle<Script> script) {
  // Build global declaration pair arrays.
  for (GlobalDeclarationsBuilder* globals_builder : global_declarations_) {
    Handle<FixedArray> declarations =
        globals_builder->AllocateDeclarations(info(), script, isolate);
    if (declarations.is_null()) return SetStackOverflow();
    builder()->SetDeferredConstantPoolEntry(
        globals_builder->constant_pool_entry(), declarations);
  }

  // Find or build shared function infos.
  for (std::pair<FunctionLiteral*, size_t> literal : function_literals_) {
    FunctionLiteral* expr = literal.first;
    Handle<SharedFunctionInfo> shared_info =
        Compiler::GetSharedFunctionInfo(expr, script, isolate);
    if (shared_info.is_null()) return SetStackOverflow();
    builder()->SetDeferredConstantPoolEntry(literal.second, shared_info);
  }

  // Find or build shared function infos for the native function templates.
  for (std::pair<NativeFunctionLiteral*, size_t> literal :
       native_function_literals_) {
    NativeFunctionLiteral* expr = literal.first;
    v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);

    // Compute the function template for the native function.
    v8::Local<v8::FunctionTemplate> info =
        expr->extension()->GetNativeFunctionTemplate(
            v8_isolate, Utils::ToLocal(expr->name()));
    DCHECK(!info.IsEmpty());

    Handle<SharedFunctionInfo> shared_info =
        FunctionTemplateInfo::GetOrCreateSharedFunctionInfo(
            isolate, Utils::OpenHandle(*info), expr->name());
    DCHECK(!shared_info.is_null());
    builder()->SetDeferredConstantPoolEntry(literal.second, shared_info);
  }

  // Build object literal constant properties
  for (std::pair<ObjectLiteral*, size_t> literal : object_literals_) {
    ObjectLiteral* object_literal = literal.first;
    if (object_literal->properties_count() > 0) {
      // If constant properties is an empty fixed array, we've already added it
      // to the constant pool when visiting the object literal.
      Handle<ObjectBoilerplateDescription> constant_properties =
          object_literal->GetOrBuildBoilerplateDescription(isolate);

      builder()->SetDeferredConstantPoolEntry(literal.second,
                                              constant_properties);
    }
  }

  // Build array literal constant elements
  for (std::pair<ArrayLiteral*, size_t> literal : array_literals_) {
    ArrayLiteral* array_literal = literal.first;
    Handle<ArrayBoilerplateDescription> constant_elements =
        array_literal->GetOrBuildBoilerplateDescription(isolate);
    builder()->SetDeferredConstantPoolEntry(literal.second, constant_elements);
  }

  // Build class literal boilerplates.
  for (std::pair<ClassLiteral*, size_t> literal : class_literals_) {
    ClassLiteral* class_literal = literal.first;
    Handle<ClassBoilerplate> class_boilerplate =
        ClassBoilerplate::BuildClassBoilerplate(isolate, class_literal);
    builder()->SetDeferredConstantPoolEntry(literal.second, class_boilerplate);
  }

  // Build template literals.
  for (std::pair<GetTemplateObject*, size_t> literal : template_objects_) {
    GetTemplateObject* get_template_object = literal.first;
    Handle<TemplateObjectDescription> description =
        get_template_object->GetOrBuildDescription(isolate);
    builder()->SetDeferredConstantPoolEntry(literal.second, description);
  }
}

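// Main entry point: walks the AST of the function being compiled and emits
// bytecode for it. Heap allocation and handle creation are disallowed here;
// heap-dependent work is deferred to FinalizeBytecode().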
GenerateBytecode(uintptr_t stack_limit)1026 void BytecodeGenerator::GenerateBytecode(uintptr_t stack_limit) {
1027   DisallowHeapAllocation no_allocation;
1028   DisallowHandleAllocation no_handles;
1029   DisallowHandleDereference no_deref;
1030 
1031   InitializeAstVisitor(stack_limit);
1032 
1033   // Initialize the incoming context.
1034   ContextScope incoming_context(this, closure_scope());
1035 
1036   // Initialize control scope.
1037   ControlScopeForTopLevel control(this);
1038 
1039   RegisterAllocationScope register_scope(this);
1040 
1041   AllocateTopLevelRegisters();
1042 
1043   if (info()->literal()->CanSuspend()) {
1044     BuildGeneratorPrologue();
1045   }
1046 
1047   if (closure_scope()->NeedsContext()) {
1048     // Push a new inner context scope for the function.
1049     BuildNewLocalActivationContext();
1050     ContextScope local_function_context(this, closure_scope());
1051     BuildLocalActivationContextInitialization();
1052     GenerateBytecodeBody();
1053   } else {
1054     GenerateBytecodeBody();
1055   }
1056 
1057   // Check that we are not falling off the end.
1058   DCHECK(!builder()->RequiresImplicitReturn());
1059 }
1060 
GenerateBytecodeBody()1061 void BytecodeGenerator::GenerateBytecodeBody() {
1062   // Build the arguments object if it is used.
1063   VisitArgumentsObject(closure_scope()->arguments());
1064 
1065   // Build rest arguments array if it is used.
1066   Variable* rest_parameter = closure_scope()->rest_parameter();
1067   VisitRestArgumentsArray(rest_parameter);
1068 
1069   // Build assignment to the function name or {.this_function}
1070   // variables if used.
1071   VisitThisFunctionVariable(closure_scope()->function_var());
1072   VisitThisFunctionVariable(closure_scope()->this_function_var());
1073 
1074   // Build assignment to {new.target} variable if it is used.
1075   VisitNewTargetVariable(closure_scope()->new_target_var());
1076 
1077   // Create a generator object if necessary and initialize the
1078   // {.generator_object} variable.
1079   if (info()->literal()->CanSuspend()) {
1080     BuildGeneratorObjectVariableInitialization();
1081   }
1082 
1083   // Emit tracing call if requested to do so.
1084   if (FLAG_trace) builder()->CallRuntime(Runtime::kTraceEnter);
1085 
1086   // Emit type profile call.
1087   if (info()->collect_type_profile()) {
1088     feedback_spec()->AddTypeProfileSlot();
1089     int num_parameters = closure_scope()->num_parameters();
1090     for (int i = 0; i < num_parameters; i++) {
1091       Register parameter(builder()->Parameter(i));
1092       builder()->LoadAccumulatorWithRegister(parameter).CollectTypeProfile(
1093           closure_scope()->parameter(i)->initializer_position());
1094     }
1095   }
1096 
1097   // Visit declarations within the function scope.
1098   VisitDeclarations(closure_scope()->declarations());
1099 
1100   // Emit initializing assignments for module namespace imports (if any).
1101   VisitModuleNamespaceImports();
1102 
1103   // Perform a stack-check before the body.
1104   builder()->StackCheck(info()->literal()->start_position());
1105 
1106   // The derived constructor case is handled in VisitCallSuper.
1107   if (IsBaseConstructor(function_kind()) &&
1108       info()->literal()->requires_instance_fields_initializer()) {
1109     BuildInstanceFieldInitialization(Register::function_closure(),
1110                                      builder()->Receiver());
1111   }
1112 
1113   // Visit statements in the function body.
1114   VisitStatements(info()->literal()->body());
1115 
1116   // Emit an implicit return instruction in case control flow can fall off the
1117   // end of the function without an explicit return being present on all paths.
1118   if (builder()->RequiresImplicitReturn()) {
1119     builder()->LoadUndefined();
1120     BuildReturn();
1121   }
1122 }
1123 
AllocateTopLevelRegisters()1124 void BytecodeGenerator::AllocateTopLevelRegisters() {
1125   if (info()->literal()->CanSuspend()) {
1126     // Either directly use generator_object_var or allocate a new register for
1127     // the incoming generator object.
1128     Variable* generator_object_var = closure_scope()->generator_object_var();
1129     if (generator_object_var->location() == VariableLocation::LOCAL) {
1130       incoming_new_target_or_generator_ =
1131           GetRegisterForLocalVariable(generator_object_var);
1132     } else {
1133       incoming_new_target_or_generator_ = register_allocator()->NewRegister();
1134     }
1135   } else if (closure_scope()->new_target_var()) {
1136     // Either directly use new_target_var or allocate a new register for
1137     // the incoming new target object.
1138     Variable* new_target_var = closure_scope()->new_target_var();
1139     if (new_target_var->location() == VariableLocation::LOCAL) {
1140       incoming_new_target_or_generator_ =
1141           GetRegisterForLocalVariable(new_target_var);
1142     } else {
1143       incoming_new_target_or_generator_ = register_allocator()->NewRegister();
1144     }
1145   }
1146 }
1147 
BuildGeneratorPrologue()1148 void BytecodeGenerator::BuildGeneratorPrologue() {
1149   DCHECK_GT(info()->literal()->suspend_count(), 0);
1150   DCHECK(generator_object().is_valid());
1151   generator_jump_table_ =
1152       builder()->AllocateJumpTable(info()->literal()->suspend_count(), 0);
1153 
1154   // If the generator is not undefined, this is a resume, so perform state
1155   // dispatch.
1156   builder()->SwitchOnGeneratorState(generator_object(), generator_jump_table_);
1157 
1158   // Otherwise, fall-through to the ordinary function prologue, after which we
1159   // will run into the generator object creation and other extra code inserted
1160   // by the parser.
1161 }
1162 
VisitBlock(Block * stmt)1163 void BytecodeGenerator::VisitBlock(Block* stmt) {
1164   // Visit declarations and statements.
1165   CurrentScope current_scope(this, stmt->scope());
1166   if (stmt->scope() != nullptr && stmt->scope()->NeedsContext()) {
1167     BuildNewLocalBlockContext(stmt->scope());
1168     ContextScope scope(this, stmt->scope());
1169     VisitBlockDeclarationsAndStatements(stmt);
1170   } else {
1171     VisitBlockDeclarationsAndStatements(stmt);
1172   }
1173 }
1174 
VisitBlockDeclarationsAndStatements(Block * stmt)1175 void BytecodeGenerator::VisitBlockDeclarationsAndStatements(Block* stmt) {
1176   BlockBuilder block_builder(builder(), block_coverage_builder_, stmt);
1177   ControlScopeForBreakable execution_control(this, stmt, &block_builder);
1178   if (stmt->scope() != nullptr) {
1179     VisitDeclarations(stmt->scope()->declarations());
1180   }
1181   VisitStatements(stmt->statements());
1182 }
1183 
VisitVariableDeclaration(VariableDeclaration * decl)1184 void BytecodeGenerator::VisitVariableDeclaration(VariableDeclaration* decl) {
1185   Variable* variable = decl->proxy()->var();
1186   switch (variable->location()) {
1187     case VariableLocation::UNALLOCATED: {
1188       DCHECK(!variable->binding_needs_init());
1189       FeedbackSlot slot =
1190           GetCachedLoadGlobalICSlot(NOT_INSIDE_TYPEOF, variable);
1191       globals_builder()->AddUndefinedDeclaration(variable->raw_name(), slot);
1192       break;
1193     }
1194     case VariableLocation::LOCAL:
1195       if (variable->binding_needs_init()) {
1196         Register destination(builder()->Local(variable->index()));
1197         builder()->LoadTheHole().StoreAccumulatorInRegister(destination);
1198       }
1199       break;
1200     case VariableLocation::PARAMETER:
1201       if (variable->binding_needs_init()) {
1202         Register destination(builder()->Parameter(variable->index()));
1203         builder()->LoadTheHole().StoreAccumulatorInRegister(destination);
1204       }
1205       break;
1206     case VariableLocation::CONTEXT:
1207       if (variable->binding_needs_init()) {
1208         DCHECK_EQ(0, execution_context()->ContextChainDepth(variable->scope()));
1209         builder()->LoadTheHole().StoreContextSlot(execution_context()->reg(),
1210                                                   variable->index(), 0);
1211       }
1212       break;
1213     case VariableLocation::LOOKUP: {
1214       DCHECK_EQ(VariableMode::kVar, variable->mode());
1215       DCHECK(!variable->binding_needs_init());
1216 
1217       Register name = register_allocator()->NewRegister();
1218 
1219       builder()
1220           ->LoadLiteral(variable->raw_name())
1221           .StoreAccumulatorInRegister(name)
1222           .CallRuntime(Runtime::kDeclareEvalVar, name);
1223       break;
1224     }
1225     case VariableLocation::MODULE:
1226       if (variable->IsExport() && variable->binding_needs_init()) {
1227         builder()->LoadTheHole();
1228         BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
1229       }
1230       // Nothing to do for imports.
1231       break;
1232   }
1233 }
1234 
VisitFunctionDeclaration(FunctionDeclaration * decl)1235 void BytecodeGenerator::VisitFunctionDeclaration(FunctionDeclaration* decl) {
1236   Variable* variable = decl->proxy()->var();
1237   DCHECK(variable->mode() == VariableMode::kLet ||
1238          variable->mode() == VariableMode::kVar);
1239   switch (variable->location()) {
1240     case VariableLocation::UNALLOCATED: {
1241       FeedbackSlot slot =
1242           GetCachedLoadGlobalICSlot(NOT_INSIDE_TYPEOF, variable);
1243       FeedbackSlot literal_slot = GetCachedCreateClosureSlot(decl->fun());
1244       globals_builder()->AddFunctionDeclaration(variable->raw_name(), slot,
1245                                                 literal_slot, decl->fun());
1246       AddToEagerLiteralsIfEager(decl->fun());
1247       break;
1248     }
1249     case VariableLocation::PARAMETER:
1250     case VariableLocation::LOCAL: {
1251       VisitForAccumulatorValue(decl->fun());
1252       BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
1253       break;
1254     }
1255     case VariableLocation::CONTEXT: {
1256       DCHECK_EQ(0, execution_context()->ContextChainDepth(variable->scope()));
1257       VisitForAccumulatorValue(decl->fun());
1258       builder()->StoreContextSlot(execution_context()->reg(), variable->index(),
1259                                   0);
1260       break;
1261     }
1262     case VariableLocation::LOOKUP: {
1263       RegisterList args = register_allocator()->NewRegisterList(2);
1264       builder()
1265           ->LoadLiteral(variable->raw_name())
1266           .StoreAccumulatorInRegister(args[0]);
1267       VisitForAccumulatorValue(decl->fun());
1268       builder()->StoreAccumulatorInRegister(args[1]).CallRuntime(
1269           Runtime::kDeclareEvalFunction, args);
1270       break;
1271     }
1272     case VariableLocation::MODULE:
1273       DCHECK_EQ(variable->mode(), VariableMode::kLet);
1274       DCHECK(variable->IsExport());
1275       VisitForAccumulatorValue(decl->fun());
1276       BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
1277       break;
1278   }
1279   DCHECK_IMPLIES(decl->fun()->ShouldEagerCompile(),
1280                  IsInEagerLiterals(decl->fun(), *eager_inner_literals_));
1281 }
1282 
VisitModuleNamespaceImports()1283 void BytecodeGenerator::VisitModuleNamespaceImports() {
1284   if (!closure_scope()->is_module_scope()) return;
1285 
1286   RegisterAllocationScope register_scope(this);
1287   Register module_request = register_allocator()->NewRegister();
1288 
1289   ModuleDescriptor* descriptor = closure_scope()->AsModuleScope()->module();
1290   for (auto entry : descriptor->namespace_imports()) {
1291     builder()
1292         ->LoadLiteral(Smi::FromInt(entry->module_request))
1293         .StoreAccumulatorInRegister(module_request)
1294         .CallRuntime(Runtime::kGetModuleNamespace, module_request);
1295     Variable* var = closure_scope()->LookupLocal(entry->local_name);
1296     DCHECK_NOT_NULL(var);
1297     BuildVariableAssignment(var, Token::INIT, HoleCheckMode::kElided);
1298   }
1299 }
1300 
VisitDeclarations(Declaration::List * declarations)1301 void BytecodeGenerator::VisitDeclarations(Declaration::List* declarations) {
1302   RegisterAllocationScope register_scope(this);
1303   DCHECK(globals_builder()->empty());
1304   for (Declaration* decl : *declarations) {
1305     RegisterAllocationScope register_scope(this);
1306     Visit(decl);
1307   }
1308   if (globals_builder()->empty()) return;
1309 
1310   globals_builder()->set_constant_pool_entry(
1311       builder()->AllocateDeferredConstantPoolEntry());
1312   int encoded_flags = DeclareGlobalsEvalFlag::encode(info()->is_eval()) |
1313                       DeclareGlobalsNativeFlag::encode(info()->is_native());
1314 
1315   // Emit code to declare globals.
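  // (Arguments: the deferred declarations array from the constant pool, the
  // encoded eval/native flags, and the current closure.)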
1316   RegisterList args = register_allocator()->NewRegisterList(3);
1317   builder()
1318       ->LoadConstantPoolEntry(globals_builder()->constant_pool_entry())
1319       .StoreAccumulatorInRegister(args[0])
1320       .LoadLiteral(Smi::FromInt(encoded_flags))
1321       .StoreAccumulatorInRegister(args[1])
1322       .MoveRegister(Register::function_closure(), args[2])
1323       .CallRuntime(Runtime::kDeclareGlobals, args);
1324 
1325   // Push and reset globals builder.
1326   global_declarations_.push_back(globals_builder());
1327   globals_builder_ = new (zone()) GlobalDeclarationsBuilder(zone());
1328 }
1329 
1330 void BytecodeGenerator::VisitStatements(ZonePtrList<Statement>* statements) {
1331   for (int i = 0; i < statements->length(); i++) {
1332     // Allocate an outer register allocation scope for the statement.
1333     RegisterAllocationScope allocation_scope(this);
1334     Statement* stmt = statements->at(i);
1335     Visit(stmt);
1336     if (stmt->IsJump()) break;
1337   }
1338 }
1339 
1340 void BytecodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
1341   builder()->SetStatementPosition(stmt);
1342   VisitForEffect(stmt->expression());
1343 }
1344 
1345 void BytecodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
1346 }
1347 
1348 void BytecodeGenerator::VisitIfStatement(IfStatement* stmt) {
1349   ConditionalControlFlowBuilder conditional_builder(
1350       builder(), block_coverage_builder_, stmt);
1351   builder()->SetStatementPosition(stmt);
1352 
1353   if (stmt->condition()->ToBooleanIsTrue()) {
1354     // Generate the then block unconditionally as the condition is always true.
1355     conditional_builder.Then();
1356     Visit(stmt->then_statement());
1357   } else if (stmt->condition()->ToBooleanIsFalse()) {
1358     // Generate the else block unconditionally if it exists.
1359     if (stmt->HasElseStatement()) {
1360       conditional_builder.Else();
1361       Visit(stmt->else_statement());
1362     }
1363   } else {
1364     // TODO(oth): If then statement is BreakStatement or
1365     // ContinueStatement we can reduce number of generated
1366     // jump/jump_ifs here. See BasicLoops test.
1367     VisitForTest(stmt->condition(), conditional_builder.then_labels(),
1368                  conditional_builder.else_labels(), TestFallthrough::kThen);
1369 
1370     conditional_builder.Then();
1371     Visit(stmt->then_statement());
1372 
1373     if (stmt->HasElseStatement()) {
1374       conditional_builder.JumpToEnd();
1375       conditional_builder.Else();
1376       Visit(stmt->else_statement());
1377     }
1378   }
1379 }
1380 
1381 void BytecodeGenerator::VisitSloppyBlockFunctionStatement(
1382     SloppyBlockFunctionStatement* stmt) {
1383   Visit(stmt->statement());
1384 }
1385 
1386 void BytecodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
1387   AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
1388   builder()->SetStatementPosition(stmt);
1389   execution_control()->Continue(stmt->target());
1390 }
1391 
1392 void BytecodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
1393   AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
1394   builder()->SetStatementPosition(stmt);
1395   execution_control()->Break(stmt->target());
1396 }
1397 
1398 void BytecodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
1399   AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
1400   builder()->SetStatementPosition(stmt);
1401   VisitForAccumulatorValue(stmt->expression());
1402   if (stmt->is_async_return()) {
1403     execution_control()->AsyncReturnAccumulator(stmt->end_position());
1404   } else {
1405     execution_control()->ReturnAccumulator(stmt->end_position());
1406   }
1407 }
1408 
1409 void BytecodeGenerator::VisitWithStatement(WithStatement* stmt) {
1410   builder()->SetStatementPosition(stmt);
1411   VisitForAccumulatorValue(stmt->expression());
1412   BuildNewLocalWithContext(stmt->scope());
1413   VisitInScope(stmt->statement(), stmt->scope());
1414 }
1415 
1416 void BytecodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1417   // We need this scope because we visit for register values. We have to
1418   // maintain an execution result scope where registers can be allocated.
1419   ZonePtrList<CaseClause>* clauses = stmt->cases();
1420   SwitchBuilder switch_builder(builder(), block_coverage_builder_, stmt,
1421                                clauses->length());
1422   ControlScopeForBreakable scope(this, stmt, &switch_builder);
1423   int default_index = -1;
1424 
1425   builder()->SetStatementPosition(stmt);
1426 
1427   // Keep the switch value in a register until a case matches.
1428   Register tag = VisitForRegisterValue(stmt->tag());
1429   FeedbackSlot slot = clauses->length() > 0
1430                           ? feedback_spec()->AddCompareICSlot()
1431                           : FeedbackSlot::Invalid();
1432 
1433   // Iterate over all cases and create nodes for label comparison.
1434   for (int i = 0; i < clauses->length(); i++) {
1435     CaseClause* clause = clauses->at(i);
1436 
1437     // The default clause is not a test; just remember its index.
1438     if (clause->is_default()) {
1439       default_index = i;
1440       continue;
1441     }
1442 
1443     // Perform label comparison as if via '===' with tag.
1444     VisitForAccumulatorValue(clause->label());
1445     builder()->CompareOperation(Token::Value::EQ_STRICT, tag,
1446                                 feedback_index(slot));
1447     switch_builder.Case(ToBooleanMode::kAlreadyBoolean, i);
1448   }
1449 
1450   if (default_index >= 0) {
1451     // Emit default jump if there is a default case.
1452     switch_builder.DefaultAt(default_index);
1453   } else {
1454     // Otherwise, if we reach this point none of the cases matched, so jump
1455     // to the end.
1456     switch_builder.Break();
1457   }
1458 
1459   // Iterate over all cases and create the case bodies.
1460   for (int i = 0; i < clauses->length(); i++) {
1461     CaseClause* clause = clauses->at(i);
1462     switch_builder.SetCaseTarget(i, clause);
1463     VisitStatements(clause->statements());
1464   }
1465 }
1466 
1467 void BytecodeGenerator::VisitIterationBody(IterationStatement* stmt,
1468                                            LoopBuilder* loop_builder) {
1469   loop_builder->LoopBody();
1470   ControlScopeForIteration execution_control(this, stmt, loop_builder);
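  // The stack check at the top of each iteration also acts as an interrupt
  // check, so long-running loops can be interrupted.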
1471   builder()->StackCheck(stmt->position());
1472   Visit(stmt->body());
1473   loop_builder->BindContinueTarget();
1474 }
1475 
1476 void BytecodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
1477   LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
1478   if (stmt->cond()->ToBooleanIsFalse()) {
1479     VisitIterationBody(stmt, &loop_builder);
1480   } else if (stmt->cond()->ToBooleanIsTrue()) {
1481     loop_builder.LoopHeader();
1482     VisitIterationBody(stmt, &loop_builder);
1483     loop_builder.JumpToHeader(loop_depth_);
1484   } else {
1485     loop_builder.LoopHeader();
1486     VisitIterationBody(stmt, &loop_builder);
1487     builder()->SetExpressionAsStatementPosition(stmt->cond());
1488     BytecodeLabels loop_backbranch(zone());
1489     VisitForTest(stmt->cond(), &loop_backbranch, loop_builder.break_labels(),
1490                  TestFallthrough::kThen);
1491     loop_backbranch.Bind(builder());
1492     loop_builder.JumpToHeader(loop_depth_);
1493   }
1494 }
1495 
1496 void BytecodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
1497   LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
1498 
1499   if (stmt->cond()->ToBooleanIsFalse()) {
1500     // If the condition is false there is no need to generate the loop.
1501     return;
1502   }
1503 
1504   loop_builder.LoopHeader();
1505   if (!stmt->cond()->ToBooleanIsTrue()) {
1506     builder()->SetExpressionAsStatementPosition(stmt->cond());
1507     BytecodeLabels loop_body(zone());
1508     VisitForTest(stmt->cond(), &loop_body, loop_builder.break_labels(),
1509                  TestFallthrough::kThen);
1510     loop_body.Bind(builder());
1511   }
1512   VisitIterationBody(stmt, &loop_builder);
1513   loop_builder.JumpToHeader(loop_depth_);
1514 }
1515 
1516 void BytecodeGenerator::VisitForStatement(ForStatement* stmt) {
1517   LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
1518 
1519   if (stmt->init() != nullptr) {
1520     Visit(stmt->init());
1521   }
1522   if (stmt->cond() && stmt->cond()->ToBooleanIsFalse()) {
1523     // If the condition is known to be false there is no need to generate the
1524     // body, next or condition blocks. The init block above is still emitted.
1525     return;
1526   }
1527 
1528   loop_builder.LoopHeader();
1529   if (stmt->cond() && !stmt->cond()->ToBooleanIsTrue()) {
1530     builder()->SetExpressionAsStatementPosition(stmt->cond());
1531     BytecodeLabels loop_body(zone());
1532     VisitForTest(stmt->cond(), &loop_body, loop_builder.break_labels(),
1533                  TestFallthrough::kThen);
1534     loop_body.Bind(builder());
1535   }
1536   VisitIterationBody(stmt, &loop_builder);
1537   if (stmt->next() != nullptr) {
1538     builder()->SetStatementPosition(stmt->next());
1539     Visit(stmt->next());
1540   }
1541   loop_builder.JumpToHeader(loop_depth_);
1542 }
1543 
1544 void BytecodeGenerator::VisitForInAssignment(Expression* expr) {
1545   DCHECK(expr->IsValidReferenceExpression());
1546 
1547   // Evaluate assignment starting with the value to be stored in the
1548   // accumulator.
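  // (For example, in 'for (obj.key in source) { ... }' the current enumerated
  // name is in the accumulator and is stored into obj.key below.)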
1549   Property* property = expr->AsProperty();
1550   LhsKind assign_type = Property::GetAssignType(property);
1551   switch (assign_type) {
1552     case VARIABLE: {
1553       VariableProxy* proxy = expr->AsVariableProxy();
1554       BuildVariableAssignment(proxy->var(), Token::ASSIGN,
1555                               proxy->hole_check_mode());
1556       break;
1557     }
1558     case NAMED_PROPERTY: {
1559       RegisterAllocationScope register_scope(this);
1560       Register value = register_allocator()->NewRegister();
1561       builder()->StoreAccumulatorInRegister(value);
1562       Register object = VisitForRegisterValue(property->obj());
1563       const AstRawString* name =
1564           property->key()->AsLiteral()->AsRawPropertyName();
1565       builder()->LoadAccumulatorWithRegister(value);
1566       FeedbackSlot slot = GetCachedStoreICSlot(property->obj(), name);
1567       builder()->StoreNamedProperty(object, name, feedback_index(slot),
1568                                     language_mode());
1569       builder()->LoadAccumulatorWithRegister(value);
1570       break;
1571     }
1572     case KEYED_PROPERTY: {
1573       RegisterAllocationScope register_scope(this);
1574       Register value = register_allocator()->NewRegister();
1575       builder()->StoreAccumulatorInRegister(value);
1576       Register object = VisitForRegisterValue(property->obj());
1577       Register key = VisitForRegisterValue(property->key());
1578       builder()->LoadAccumulatorWithRegister(value);
1579       FeedbackSlot slot = feedback_spec()->AddKeyedStoreICSlot(language_mode());
1580       builder()->StoreKeyedProperty(object, key, feedback_index(slot),
1581                                     language_mode());
1582       builder()->LoadAccumulatorWithRegister(value);
1583       break;
1584     }
1585     case NAMED_SUPER_PROPERTY: {
1586       RegisterAllocationScope register_scope(this);
1587       RegisterList args = register_allocator()->NewRegisterList(4);
1588       builder()->StoreAccumulatorInRegister(args[3]);
1589       SuperPropertyReference* super_property =
1590           property->obj()->AsSuperPropertyReference();
1591       VisitForRegisterValue(super_property->this_var(), args[0]);
1592       VisitForRegisterValue(super_property->home_object(), args[1]);
1593       builder()
1594           ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
1595           .StoreAccumulatorInRegister(args[2])
1596           .CallRuntime(StoreToSuperRuntimeId(), args);
1597       break;
1598     }
1599     case KEYED_SUPER_PROPERTY: {
1600       RegisterAllocationScope register_scope(this);
1601       RegisterList args = register_allocator()->NewRegisterList(4);
1602       builder()->StoreAccumulatorInRegister(args[3]);
1603       SuperPropertyReference* super_property =
1604           property->obj()->AsSuperPropertyReference();
1605       VisitForRegisterValue(super_property->this_var(), args[0]);
1606       VisitForRegisterValue(super_property->home_object(), args[1]);
1607       VisitForRegisterValue(property->key(), args[2]);
1608       builder()->CallRuntime(StoreKeyedToSuperRuntimeId(), args);
1609       break;
1610     }
1611   }
1612 }
1613 
1614 void BytecodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1615   if (stmt->subject()->IsNullLiteral() ||
1616       stmt->subject()->IsUndefinedLiteral()) {
1617     // ForIn generates lots of code; skip it if it wouldn't produce any effects.
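    // (e.g. 'for (let k in null) {}' or 'for (let k in undefined) {}' iterate
    // zero times, so no bytecode needs to be emitted.)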
1618     return;
1619   }
1620 
1621   BytecodeLabel subject_null_label, subject_undefined_label;
1622   FeedbackSlot slot = feedback_spec()->AddForInSlot();
1623 
1624   // Prepare the state for executing ForIn.
1625   builder()->SetExpressionAsStatementPosition(stmt->subject());
1626   VisitForAccumulatorValue(stmt->subject());
1627   builder()->JumpIfUndefined(&subject_undefined_label);
1628   builder()->JumpIfNull(&subject_null_label);
1629   Register receiver = register_allocator()->NewRegister();
1630   builder()->ToObject(receiver);
1631 
1632   // Used as kRegTriple and kRegPair in ForInPrepare and ForInNext.
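  // (The triple holds the enum cache state: cache type, cache array and cache
  // length; ForInNext only consumes the first two as a pair.)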
1633   RegisterList triple = register_allocator()->NewRegisterList(3);
1634   Register cache_length = triple[2];
1635   builder()->ForInEnumerate(receiver);
1636   builder()->ForInPrepare(triple, feedback_index(slot));
1637 
1638   // Set up loop counter
1639   Register index = register_allocator()->NewRegister();
1640   builder()->LoadLiteral(Smi::kZero);
1641   builder()->StoreAccumulatorInRegister(index);
1642 
1643   // The loop
1644   {
1645     LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
1646     loop_builder.LoopHeader();
1647     builder()->SetExpressionAsStatementPosition(stmt->each());
1648     builder()->ForInContinue(index, cache_length);
1649     loop_builder.BreakIfFalse(ToBooleanMode::kAlreadyBoolean);
1650     builder()->ForInNext(receiver, index, triple.Truncate(2),
1651                          feedback_index(slot));
1652     loop_builder.ContinueIfUndefined();
1653     VisitForInAssignment(stmt->each());
1654     VisitIterationBody(stmt, &loop_builder);
1655     builder()->ForInStep(index);
1656     builder()->StoreAccumulatorInRegister(index);
1657     loop_builder.JumpToHeader(loop_depth_);
1658   }
1659   builder()->Bind(&subject_null_label);
1660   builder()->Bind(&subject_undefined_label);
1661 }
1662 
1663 void BytecodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
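  // Note: by this point the parser has desugared 'for (x of y) body' into the
  // sub-expressions visited below: assign_iterator/assign_next obtain the
  // iterator and its next method, next_result/result_done call .next() and
  // test .done, and assign_each binds the current value to x.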
1664   LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
1665 
1666   builder()->SetExpressionAsStatementPosition(stmt->assign_iterator());
1667   VisitForEffect(stmt->assign_iterator());
1668   VisitForEffect(stmt->assign_next());
1669 
1670   loop_builder.LoopHeader();
1671   builder()->SetExpressionAsStatementPosition(stmt->next_result());
1672   VisitForEffect(stmt->next_result());
1673   TypeHint type_hint = VisitForAccumulatorValue(stmt->result_done());
1674   loop_builder.BreakIfTrue(ToBooleanModeFromTypeHint(type_hint));
1675 
1676   VisitForEffect(stmt->assign_each());
1677   VisitIterationBody(stmt, &loop_builder);
1678   loop_builder.JumpToHeader(loop_depth_);
1679 }
1680 
1681 void BytecodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
1682   // Update catch prediction tracking. The updated catch_prediction value lasts
1683   // until the end of the try_block in the AST node, and does not apply to the
1684   // catch_block.
1685   HandlerTable::CatchPrediction outer_catch_prediction = catch_prediction();
1686   set_catch_prediction(stmt->GetCatchPrediction(outer_catch_prediction));
1687 
1688   TryCatchBuilder try_control_builder(builder(), block_coverage_builder_, stmt,
1689                                       catch_prediction());
1690 
1691   // Preserve the context in a dedicated register, so that it can be restored
1692   // when the handler is entered by the stack-unwinding machinery.
1693   // TODO(mstarzinger): Be smarter about register allocation.
1694   Register context = register_allocator()->NewRegister();
1695   builder()->MoveRegister(Register::current_context(), context);
1696 
1697   // Evaluate the try-block inside a control scope. This simulates a handler
1698   // that is intercepting 'throw' control commands.
1699   try_control_builder.BeginTry(context);
1700   {
1701     ControlScopeForTryCatch scope(this, &try_control_builder);
1702     Visit(stmt->try_block());
1703     set_catch_prediction(outer_catch_prediction);
1704   }
1705   try_control_builder.EndTry();
1706 
1707   if (stmt->scope()) {
1708     // Create a catch scope that binds the exception.
1709     BuildNewLocalCatchContext(stmt->scope());
1710     builder()->StoreAccumulatorInRegister(context);
1711   }
1712 
1713   // If requested, clear message object as we enter the catch block.
1714   if (stmt->ShouldClearPendingException(outer_catch_prediction)) {
1715     builder()->LoadTheHole().SetPendingMessage();
1716   }
1717 
1718   // Load the catch context into the accumulator.
1719   builder()->LoadAccumulatorWithRegister(context);
1720 
1721   // Evaluate the catch-block.
1722   if (stmt->scope()) {
1723     VisitInScope(stmt->catch_block(), stmt->scope());
1724   } else {
1725     VisitBlock(stmt->catch_block());
1726   }
1727   try_control_builder.EndCatch();
1728 }
1729 
1730 void BytecodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
1731   // We can't know whether the finally block will override ("catch") an
1732   // exception thrown in the try block, so we just adopt the outer prediction.
1733   TryFinallyBuilder try_control_builder(builder(), block_coverage_builder_,
1734                                         stmt, catch_prediction());
1735 
1736   // We keep a record of all paths that enter the finally-block to be able to
1737   // dispatch to the correct continuation point after the statements in the
1738   // finally-block have been evaluated.
1739   //
1740   // The try-finally construct can enter the finally-block in three ways:
1741   // 1. By exiting the try-block normally, falling through at the end.
1742   // 2. By exiting the try-block with a function-local control flow transfer
1743   //    (i.e. through break/continue/return statements).
1744   // 3. By exiting the try-block with a thrown exception.
1745   //
1746   // The result register semantics depend on how the block was entered:
1747   //  - ReturnStatement: It represents the return value being returned.
1748   //  - ThrowStatement: It represents the exception being thrown.
1749   //  - BreakStatement/ContinueStatement: Undefined and not used.
1750   //  - Falling through into finally-block: Undefined and not used.
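  //
  // For example, in 'try { if (c) return f(); } finally { cleanup(); }' the
  // finally-block is reached by falling through (case 1), via the return
  // statement (case 2), or via an exception thrown from f() (case 3).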
1751   Register token = register_allocator()->NewRegister();
1752   Register result = register_allocator()->NewRegister();
1753   ControlScope::DeferredCommands commands(this, token, result);
1754 
1755   // Preserve the context in a dedicated register, so that it can be restored
1756   // when the handler is entered by the stack-unwinding machinery.
1757   // TODO(mstarzinger): Be smarter about register allocation.
1758   Register context = register_allocator()->NewRegister();
1759   builder()->MoveRegister(Register::current_context(), context);
1760 
1761   // Evaluate the try-block inside a control scope. This simulates a handler
1762   // that is intercepting all control commands.
1763   try_control_builder.BeginTry(context);
1764   {
1765     ControlScopeForTryFinally scope(this, &try_control_builder, &commands);
1766     Visit(stmt->try_block());
1767   }
1768   try_control_builder.EndTry();
1769 
1770   // Record fall-through and exception cases.
1771   commands.RecordFallThroughPath();
1772   try_control_builder.LeaveTry();
1773   try_control_builder.BeginHandler();
1774   commands.RecordHandlerReThrowPath();
1775 
1776   // Pending message object is saved on entry.
1777   try_control_builder.BeginFinally();
1778   Register message = context;  // Reuse register.
1779 
1780   // Clear message object as we enter the finally block.
1781   builder()->LoadTheHole().SetPendingMessage().StoreAccumulatorInRegister(
1782       message);
1783 
1784   // Evaluate the finally-block.
1785   Visit(stmt->finally_block());
1786   try_control_builder.EndFinally();
1787 
1788   // Pending message object is restored on exit.
1789   builder()->LoadAccumulatorWithRegister(message).SetPendingMessage();
1790 
1791   // Dynamic dispatch after the finally-block.
1792   commands.ApplyDeferredCommands();
1793 }
1794 
1795 void BytecodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
1796   builder()->SetStatementPosition(stmt);
1797   builder()->Debugger();
1798 }
1799 
1800 void BytecodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
1801   DCHECK(expr->scope()->outer_scope() == current_scope());
1802   uint8_t flags = CreateClosureFlags::Encode(
1803       expr->pretenure(), closure_scope()->is_function_scope());
1804   size_t entry = builder()->AllocateDeferredConstantPoolEntry();
1805   FeedbackSlot slot = GetCachedCreateClosureSlot(expr);
1806   builder()->CreateClosure(entry, feedback_index(slot), flags);
1807   function_literals_.push_back(std::make_pair(expr, entry));
1808   AddToEagerLiteralsIfEager(expr);
1809 }
1810 
1811 void BytecodeGenerator::AddToEagerLiteralsIfEager(FunctionLiteral* literal) {
1812   if (eager_inner_literals_ && literal->ShouldEagerCompile()) {
1813     DCHECK(!IsInEagerLiterals(literal, *eager_inner_literals_));
1814     eager_inner_literals_->push_back(literal);
1815   }
1816 }
1817 
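// One-shot bytecode is code that is expected to run only once (top-level code
// or an IIFE such as '(function() { ... })()', outside of any loop). For such
// code the literal-creation paths below prefer runtime calls that skip
// allocation-site tracking.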
1818 bool BytecodeGenerator::ShouldOptimizeAsOneShot() const {
1819   if (!FLAG_enable_one_shot_optimization) return false;
1820 
1821   if (loop_depth_ > 0) return false;
1822 
1823   return info()->literal()->is_top_level() || info()->literal()->is_iife();
1824 }
1825 
1826 void BytecodeGenerator::BuildClassLiteral(ClassLiteral* expr) {
1827   size_t class_boilerplate_entry =
1828       builder()->AllocateDeferredConstantPoolEntry();
1829   class_literals_.push_back(std::make_pair(expr, class_boilerplate_entry));
1830 
1831   VisitDeclarations(expr->scope()->declarations());
1832   Register class_constructor = register_allocator()->NewRegister();
1833 
1834   {
1835     RegisterAllocationScope register_scope(this);
1836     RegisterList args = register_allocator()->NewGrowableRegisterList();
1837 
1838     Register class_boilerplate = register_allocator()->GrowRegisterList(&args);
1839     Register class_constructor_in_args =
1840         register_allocator()->GrowRegisterList(&args);
1841     Register super_class = register_allocator()->GrowRegisterList(&args);
1842     DCHECK_EQ(ClassBoilerplate::kFirstDynamicArgumentIndex,
1843               args.register_count());
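    // Fixed arguments so far: class boilerplate, class constructor and super
    // class. Computed keys and method values are appended below as dynamic
    // arguments to the DefineClass runtime call.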
1844 
1845     VisitForAccumulatorValueOrTheHole(expr->extends());
1846     builder()->StoreAccumulatorInRegister(super_class);
1847 
1848     VisitFunctionLiteral(expr->constructor());
1849     builder()
1850         ->StoreAccumulatorInRegister(class_constructor)
1851         .MoveRegister(class_constructor, class_constructor_in_args)
1852         .LoadConstantPoolEntry(class_boilerplate_entry)
1853         .StoreAccumulatorInRegister(class_boilerplate);
1854 
1855     // Evaluate computed names and method values to store into the literal.
1856     for (int i = 0; i < expr->properties()->length(); i++) {
1857       ClassLiteral::Property* property = expr->properties()->at(i);
1858       if (property->is_computed_name()) {
1859         DCHECK_NE(property->kind(), ClassLiteral::Property::PRIVATE_FIELD);
1860         Register key = register_allocator()->GrowRegisterList(&args);
1861 
1862         BuildLoadPropertyKey(property, key);
1863         if (property->is_static()) {
1864           // The static 'prototype' property is read-only. The non-computed
1865           // property name case is handled in the parser. Since this is the
1866           // only case where we need to check for an own read-only property,
1867           // we special-case it here rather than checking every property.
1868 
1869           FeedbackSlot slot = GetDummyCompareICSlot();
1870           BytecodeLabel done;
1871           builder()
1872               ->LoadLiteral(ast_string_constants()->prototype_string())
1873               .CompareOperation(Token::Value::EQ_STRICT, key,
1874                                 feedback_index(slot))
1875               .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &done)
1876               .CallRuntime(Runtime::kThrowStaticPrototypeError)
1877               .Bind(&done);
1878         }
1879 
1880         if (property->kind() == ClassLiteral::Property::PUBLIC_FIELD) {
1881           // Initialize field's name variable with the computed name.
1882           DCHECK_NOT_NULL(property->computed_name_var());
1883           builder()->LoadAccumulatorWithRegister(key);
1884           BuildVariableAssignment(property->computed_name_var(), Token::INIT,
1885                                   HoleCheckMode::kElided);
1886         }
1887       }
1888 
1889       if (property->kind() == ClassLiteral::Property::PUBLIC_FIELD) {
1890         // We don't compute the field's value here; that is done in the
1891         // initializer function.
1892         continue;
1893       } else if (property->kind() == ClassLiteral::Property::PRIVATE_FIELD) {
1894         builder()->CallRuntime(Runtime::kCreatePrivateFieldSymbol);
1895         DCHECK_NOT_NULL(property->private_field_name_var());
1896         BuildVariableAssignment(property->private_field_name_var(), Token::INIT,
1897                                 HoleCheckMode::kElided);
1898         continue;
1899       }
1900 
1901       Register value = register_allocator()->GrowRegisterList(&args);
1902       VisitForRegisterValue(property->value(), value);
1903     }
1904 
1905     builder()->CallRuntime(Runtime::kDefineClass, args);
1906   }
1907   Register prototype = register_allocator()->NewRegister();
1908   builder()->StoreAccumulatorInRegister(prototype);
1909 
1910   // Assign to class variable.
1911   if (expr->class_variable() != nullptr) {
1912     DCHECK(expr->class_variable()->IsStackLocal() ||
1913            expr->class_variable()->IsContextSlot());
1914     builder()->LoadAccumulatorWithRegister(class_constructor);
1915     BuildVariableAssignment(expr->class_variable(), Token::INIT,
1916                             HoleCheckMode::kElided);
1917   }
1918 
1919   if (expr->instance_fields_initializer_function() != nullptr) {
1920     Register initializer =
1921         VisitForRegisterValue(expr->instance_fields_initializer_function());
1922 
1923     if (FunctionLiteral::NeedsHomeObject(
1924             expr->instance_fields_initializer_function())) {
1925       FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
1926       builder()->LoadAccumulatorWithRegister(prototype).StoreHomeObjectProperty(
1927           initializer, feedback_index(slot), language_mode());
1928     }
1929 
1930     FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
1931     builder()
1932         ->LoadAccumulatorWithRegister(initializer)
1933         .StoreClassFieldsInitializer(class_constructor, feedback_index(slot))
1934         .LoadAccumulatorWithRegister(class_constructor);
1935   }
1936 
1937   if (expr->static_fields_initializer() != nullptr) {
1938     RegisterList args = register_allocator()->NewRegisterList(1);
1939     Register initializer =
1940         VisitForRegisterValue(expr->static_fields_initializer());
1941 
1942     if (FunctionLiteral::NeedsHomeObject(expr->static_fields_initializer())) {
1943       FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
1944       builder()
1945           ->LoadAccumulatorWithRegister(class_constructor)
1946           .StoreHomeObjectProperty(initializer, feedback_index(slot),
1947                                    language_mode());
1948     }
1949 
1950     builder()
1951         ->MoveRegister(class_constructor, args[0])
1952         .CallProperty(initializer, args,
1953                       feedback_index(feedback_spec()->AddCallICSlot()));
1954   }
1955   builder()->LoadAccumulatorWithRegister(class_constructor);
1956 }
1957 
1958 void BytecodeGenerator::VisitClassLiteral(ClassLiteral* expr) {
1959   CurrentScope current_scope(this, expr->scope());
1960   DCHECK_NOT_NULL(expr->scope());
1961   if (expr->scope()->NeedsContext()) {
1962     BuildNewLocalBlockContext(expr->scope());
1963     ContextScope scope(this, expr->scope());
1964     BuildClassLiteral(expr);
1965   } else {
1966     BuildClassLiteral(expr);
1967   }
1968 }
1969 
1970 void BytecodeGenerator::VisitInitializeClassFieldsStatement(
1971     InitializeClassFieldsStatement* expr) {
1972   RegisterList args = register_allocator()->NewRegisterList(3);
1973   Register constructor = args[0], key = args[1], value = args[2];
1974   builder()->MoveRegister(builder()->Receiver(), constructor);
1975 
1976   for (int i = 0; i < expr->fields()->length(); i++) {
1977     ClassLiteral::Property* property = expr->fields()->at(i);
1978 
1979     if (property->is_computed_name()) {
1980       DCHECK_EQ(property->kind(), ClassLiteral::Property::PUBLIC_FIELD);
1981       Variable* var = property->computed_name_var();
1982       DCHECK_NOT_NULL(var);
1983       // The computed name is already evaluated and stored in a
1984       // variable at class definition time.
1985       BuildVariableLoad(var, HoleCheckMode::kElided);
1986       builder()->StoreAccumulatorInRegister(key);
1987     } else if (property->kind() == ClassLiteral::Property::PRIVATE_FIELD) {
1988       Variable* private_field_name_var = property->private_field_name_var();
1989       DCHECK_NOT_NULL(private_field_name_var);
1990       BuildVariableLoad(private_field_name_var, HoleCheckMode::kElided);
1991       builder()->StoreAccumulatorInRegister(key);
1992     } else {
1993       BuildLoadPropertyKey(property, key);
1994     }
1995 
1996     VisitForRegisterValue(property->value(), value);
1997     VisitSetHomeObject(value, constructor, property);
1998 
1999     Runtime::FunctionId function_id =
2000         property->kind() == ClassLiteral::Property::PUBLIC_FIELD
2001             ? Runtime::kCreateDataProperty
2002             : Runtime::kAddPrivateField;
2003     builder()->CallRuntime(function_id, args);
2004   }
2005 }
2006 
2007 void BytecodeGenerator::BuildInstanceFieldInitialization(Register constructor,
2008                                                          Register instance) {
2009   RegisterList args = register_allocator()->NewRegisterList(1);
2010   Register initializer = register_allocator()->NewRegister();
2011 
2012   FeedbackSlot slot = feedback_spec()->AddLoadICSlot();
2013   BytecodeLabel done;
2014 
2015   builder()
2016       ->LoadClassFieldsInitializer(constructor, feedback_index(slot))
2017       // TODO(gsathya): This jump can be elided for the base
2018       // constructor and derived constructor. This is only required
2019       // when called from an arrow function.
2020       .JumpIfUndefined(&done)
2021       .StoreAccumulatorInRegister(initializer)
2022       .MoveRegister(instance, args[0])
2023       .CallProperty(initializer, args,
2024                     feedback_index(feedback_spec()->AddCallICSlot()))
2025       .Bind(&done);
2026 }
2027 
2028 void BytecodeGenerator::VisitNativeFunctionLiteral(
2029     NativeFunctionLiteral* expr) {
2030   size_t entry = builder()->AllocateDeferredConstantPoolEntry();
2031   FeedbackSlot slot = feedback_spec()->AddCreateClosureSlot();
2032   builder()->CreateClosure(entry, feedback_index(slot), NOT_TENURED);
2033   native_function_literals_.push_back(std::make_pair(expr, entry));
2034 }
2035 
2036 void BytecodeGenerator::VisitDoExpression(DoExpression* expr) {
2037   VisitBlock(expr->block());
2038   VisitVariableProxy(expr->result());
2039 }
2040 
2041 void BytecodeGenerator::VisitConditional(Conditional* expr) {
2042   ConditionalControlFlowBuilder conditional_builder(
2043       builder(), block_coverage_builder_, expr);
2044 
2045   if (expr->condition()->ToBooleanIsTrue()) {
2046     // Generate the then block unconditionally as the condition is always true.
2047     conditional_builder.Then();
2048     VisitForAccumulatorValue(expr->then_expression());
2049   } else if (expr->condition()->ToBooleanIsFalse()) {
2050     // Generate the else block unconditionally if it exists.
2051     conditional_builder.Else();
2052     VisitForAccumulatorValue(expr->else_expression());
2053   } else {
2054     VisitForTest(expr->condition(), conditional_builder.then_labels(),
2055                  conditional_builder.else_labels(), TestFallthrough::kThen);
2056 
2057     conditional_builder.Then();
2058     VisitForAccumulatorValue(expr->then_expression());
2059     conditional_builder.JumpToEnd();
2060 
2061     conditional_builder.Else();
2062     VisitForAccumulatorValue(expr->else_expression());
2063   }
2064 }
2065 
2066 void BytecodeGenerator::VisitLiteral(Literal* expr) {
2067   if (execution_result()->IsEffect()) return;
2068   switch (expr->type()) {
2069     case Literal::kSmi:
2070       builder()->LoadLiteral(expr->AsSmiLiteral());
2071       break;
2072     case Literal::kHeapNumber:
2073       builder()->LoadLiteral(expr->AsNumber());
2074       break;
2075     case Literal::kUndefined:
2076       builder()->LoadUndefined();
2077       break;
2078     case Literal::kBoolean:
2079       builder()->LoadBoolean(expr->ToBooleanIsTrue());
2080       execution_result()->SetResultIsBoolean();
2081       break;
2082     case Literal::kNull:
2083       builder()->LoadNull();
2084       break;
2085     case Literal::kTheHole:
2086       builder()->LoadTheHole();
2087       break;
2088     case Literal::kString:
2089       builder()->LoadLiteral(expr->AsRawString());
2090       execution_result()->SetResultIsString();
2091       break;
2092     case Literal::kSymbol:
2093       builder()->LoadLiteral(expr->AsSymbol());
2094       break;
2095     case Literal::kBigInt:
2096       builder()->LoadLiteral(expr->AsBigInt());
2097       break;
2098   }
2099 }
2100 
2101 void BytecodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
2102   // Materialize a regular expression literal.
2103   builder()->CreateRegExpLiteral(
2104       expr->raw_pattern(), feedback_index(feedback_spec()->AddLiteralSlot()),
2105       expr->flags());
2106 }
2107 
2108 void BytecodeGenerator::BuildCreateObjectLiteral(Register literal,
2109                                                  uint8_t flags, size_t entry) {
2110   if (ShouldOptimizeAsOneShot()) {
2111     RegisterList args = register_allocator()->NewRegisterList(2);
2112     builder()
2113         ->LoadConstantPoolEntry(entry)
2114         .StoreAccumulatorInRegister(args[0])
2115         .LoadLiteral(Smi::FromInt(flags))
2116         .StoreAccumulatorInRegister(args[1])
2117         .CallRuntime(Runtime::kCreateObjectLiteralWithoutAllocationSite, args)
2118         .StoreAccumulatorInRegister(literal);
2119 
2120   } else {
2121     // TODO(cbruni): Directly generate runtime call for literals we cannot
2122     // optimize once the CreateShallowObjectLiteral stub is in sync with the TF
2123     // optimizations.
2124     int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
2125     builder()->CreateObjectLiteral(entry, literal_index, flags, literal);
2126   }
2127 }
2128 
2129 void BytecodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
2130   expr->InitDepthAndFlags();
2131 
2132   // Fast path for the empty object literal which doesn't need an
2133   // AllocationSite.
2134   if (expr->IsEmptyObjectLiteral()) {
2135     DCHECK(expr->IsFastCloningSupported());
2136     builder()->CreateEmptyObjectLiteral();
2137     return;
2138   }
2139 
2140   // Deep-copy the literal boilerplate.
2141   uint8_t flags = CreateObjectLiteralFlags::Encode(
2142       expr->ComputeFlags(), expr->IsFastCloningSupported());
2143 
2144   Register literal = register_allocator()->NewRegister();
2145 
2146   // Create literal object.
2147   int property_index = 0;
2148   bool clone_object_spread =
2149       expr->properties()->first()->kind() == ObjectLiteral::Property::SPREAD;
2150   if (clone_object_spread) {
2151     // Avoid the slow path for spreads in the following common cases:
2152     //   1) `let obj = { ...source }`
2153     //   2) `let obj = { ...source, override: 1 }`
2154     //   3) `let obj = { ...source, ...overrides }`
2155     RegisterAllocationScope register_scope(this);
2156     Expression* property = expr->properties()->first()->value();
2157     Register from_value = VisitForRegisterValue(property);
2158 
2159     BytecodeLabels clone_object(zone());
2160     builder()->JumpIfUndefined(clone_object.New());
2161     builder()->JumpIfNull(clone_object.New());
2162     builder()->ToObject(from_value);
2163 
2164     clone_object.Bind(builder());
2165     int clone_index = feedback_index(feedback_spec()->AddCloneObjectSlot());
2166     builder()->CloneObject(from_value, flags, clone_index);
2167     builder()->StoreAccumulatorInRegister(literal);
2168     property_index++;
2169   } else {
2170     size_t entry;
2171     // If the constant properties are an empty fixed array, use a cached empty
2172     // fixed array to ensure it's only added to the constant pool once.
2173     if (expr->properties_count() == 0) {
2174       entry = builder()->EmptyObjectBoilerplateDescriptionConstantPoolEntry();
2175     } else {
2176       entry = builder()->AllocateDeferredConstantPoolEntry();
2177       object_literals_.push_back(std::make_pair(expr, entry));
2178     }
2179     BuildCreateObjectLiteral(literal, flags, entry);
2180   }
2181 
2182   // Store computed values into the literal.
2183   AccessorTable accessor_table(zone());
2184   for (; property_index < expr->properties()->length(); property_index++) {
2185     ObjectLiteral::Property* property = expr->properties()->at(property_index);
2186     if (property->is_computed_name()) break;
2187     if (!clone_object_spread && property->IsCompileTimeValue()) continue;
2188 
2189     RegisterAllocationScope inner_register_scope(this);
2190     Literal* key = property->key()->AsLiteral();
2191     switch (property->kind()) {
2192       case ObjectLiteral::Property::SPREAD:
2193         UNREACHABLE();
2194       case ObjectLiteral::Property::CONSTANT:
2195       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2196         DCHECK(clone_object_spread || !property->value()->IsCompileTimeValue());
2197         V8_FALLTHROUGH;
2198       case ObjectLiteral::Property::COMPUTED: {
2199         // It is safe to use [[Put]] here because the boilerplate already
2200         // contains computed properties with an uninitialized value.
2201         if (key->IsStringLiteral()) {
2202           DCHECK(key->IsPropertyName());
2203           if (property->emit_store()) {
2204             builder()->SetExpressionPosition(property->value());
2205             VisitForAccumulatorValue(property->value());
2206             FeedbackSlot slot = feedback_spec()->AddStoreOwnICSlot();
2207             if (FunctionLiteral::NeedsHomeObject(property->value())) {
2208               RegisterAllocationScope register_scope(this);
2209               Register value = register_allocator()->NewRegister();
2210               builder()->StoreAccumulatorInRegister(value);
2211               builder()->StoreNamedOwnProperty(
2212                   literal, key->AsRawPropertyName(), feedback_index(slot));
2213               VisitSetHomeObject(value, literal, property);
2214             } else {
2215               builder()->StoreNamedOwnProperty(
2216                   literal, key->AsRawPropertyName(), feedback_index(slot));
2217             }
2218           } else {
2219             builder()->SetExpressionPosition(property->value());
2220             VisitForEffect(property->value());
2221           }
2222         } else {
2223           RegisterList args = register_allocator()->NewRegisterList(4);
2224 
2225           builder()->MoveRegister(literal, args[0]);
2226           builder()->SetExpressionPosition(property->key());
2227           VisitForRegisterValue(property->key(), args[1]);
2228           builder()->SetExpressionPosition(property->value());
2229           VisitForRegisterValue(property->value(), args[2]);
2230           if (property->emit_store()) {
2231             builder()
2232                 ->LoadLiteral(Smi::FromEnum(LanguageMode::kSloppy))
2233                 .StoreAccumulatorInRegister(args[3])
2234                 .CallRuntime(Runtime::kSetProperty, args);
2235             Register value = args[2];
2236             VisitSetHomeObject(value, literal, property);
2237           }
2238         }
2239         break;
2240       }
2241       case ObjectLiteral::Property::PROTOTYPE: {
2242         // __proto__:null is handled by CreateObjectLiteral.
2243         if (property->IsNullPrototype()) break;
2244         DCHECK(property->emit_store());
2245         DCHECK(!property->NeedsSetFunctionName());
2246         RegisterList args = register_allocator()->NewRegisterList(2);
2247         builder()->MoveRegister(literal, args[0]);
2248         builder()->SetExpressionPosition(property->value());
2249         VisitForRegisterValue(property->value(), args[1]);
2250         builder()->CallRuntime(Runtime::kInternalSetPrototype, args);
2251         break;
2252       }
2253       case ObjectLiteral::Property::GETTER:
2254         if (property->emit_store()) {
2255           accessor_table.lookup(key)->second->getter = property;
2256         }
2257         break;
2258       case ObjectLiteral::Property::SETTER:
2259         if (property->emit_store()) {
2260           accessor_table.lookup(key)->second->setter = property;
2261         }
2262         break;
2263     }
2264   }
2265 
2266   // Define accessors, using only a single call to the runtime for each pair of
2267   // corresponding getters and setters.
2268   for (AccessorTable::Iterator it = accessor_table.begin();
2269        it != accessor_table.end(); ++it) {
2270     RegisterAllocationScope inner_register_scope(this);
2271     RegisterList args = register_allocator()->NewRegisterList(5);
2272     builder()->MoveRegister(literal, args[0]);
2273     VisitForRegisterValue(it->first, args[1]);
2274     VisitObjectLiteralAccessor(literal, it->second->getter, args[2]);
2275     VisitObjectLiteralAccessor(literal, it->second->setter, args[3]);
2276     builder()
2277         ->LoadLiteral(Smi::FromInt(NONE))
2278         .StoreAccumulatorInRegister(args[4])
2279         .CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, args);
2280   }
2281 
2282   // Object literals have two parts. The "static" part on the left contains no
2283   // computed property names, and so we can compute its map ahead of time; see
2284   // Runtime_CreateObjectLiteralBoilerplate. The second "dynamic" part starts
2285   // with the first computed property name and continues with all properties to
2286   // its right. All the code from above initializes the static component of the
2287   // object literal, and arranges for the map of the result to reflect the
2288   // static order in which the keys appear. For the dynamic properties, we
2289   // compile them into a series of "SetOwnProperty" runtime calls. This will
2290   // preserve insertion order.
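  //
  // For example, in 'let o = { a: 1, [k]: 2, b: 3 }' the property 'a' belongs
  // to the static part, while '[k]' and everything after it ('b') is defined
  // here with per-property stores.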
2291   for (; property_index < expr->properties()->length(); property_index++) {
2292     ObjectLiteral::Property* property = expr->properties()->at(property_index);
2293     RegisterAllocationScope inner_register_scope(this);
2294 
2295     if (property->IsPrototype()) {
2296       // __proto__:null is handled by CreateObjectLiteral.
2297       if (property->IsNullPrototype()) continue;
2298       DCHECK(property->emit_store());
2299       DCHECK(!property->NeedsSetFunctionName());
2300       RegisterList args = register_allocator()->NewRegisterList(2);
2301       builder()->MoveRegister(literal, args[0]);
2302       builder()->SetExpressionPosition(property->value());
2303       VisitForRegisterValue(property->value(), args[1]);
2304       builder()->CallRuntime(Runtime::kInternalSetPrototype, args);
2305       continue;
2306     }
2307 
2308     switch (property->kind()) {
2309       case ObjectLiteral::Property::CONSTANT:
2310       case ObjectLiteral::Property::COMPUTED:
2311       case ObjectLiteral::Property::MATERIALIZED_LITERAL: {
2312         Register key = register_allocator()->NewRegister();
2313         BuildLoadPropertyKey(property, key);
2314         builder()->SetExpressionPosition(property->value());
2315         Register value = VisitForRegisterValue(property->value());
2316         VisitSetHomeObject(value, literal, property);
2317 
2318         DataPropertyInLiteralFlags data_property_flags =
2319             DataPropertyInLiteralFlag::kNoFlags;
2320         if (property->NeedsSetFunctionName()) {
2321           data_property_flags |= DataPropertyInLiteralFlag::kSetFunctionName;
2322         }
2323 
2324         FeedbackSlot slot =
2325             feedback_spec()->AddStoreDataPropertyInLiteralICSlot();
2326         builder()
2327             ->LoadAccumulatorWithRegister(value)
2328             .StoreDataPropertyInLiteral(literal, key, data_property_flags,
2329                                         feedback_index(slot));
2330         break;
2331       }
2332       case ObjectLiteral::Property::GETTER:
2333       case ObjectLiteral::Property::SETTER: {
2334         RegisterList args = register_allocator()->NewRegisterList(4);
2335         builder()->MoveRegister(literal, args[0]);
2336         BuildLoadPropertyKey(property, args[1]);
2337         builder()->SetExpressionPosition(property->value());
2338         VisitForRegisterValue(property->value(), args[2]);
2339         VisitSetHomeObject(args[2], literal, property);
2340         builder()
2341             ->LoadLiteral(Smi::FromInt(NONE))
2342             .StoreAccumulatorInRegister(args[3]);
2343         Runtime::FunctionId function_id =
2344             property->kind() == ObjectLiteral::Property::GETTER
2345                 ? Runtime::kDefineGetterPropertyUnchecked
2346                 : Runtime::kDefineSetterPropertyUnchecked;
2347         builder()->CallRuntime(function_id, args);
2348         break;
2349       }
2350       case ObjectLiteral::Property::SPREAD: {
2351         RegisterList args = register_allocator()->NewRegisterList(2);
2352         builder()->MoveRegister(literal, args[0]);
2353         builder()->SetExpressionPosition(property->value());
2354         VisitForRegisterValue(property->value(), args[1]);
2355         builder()->CallRuntime(Runtime::kCopyDataProperties, args);
2356         break;
2357       }
2358       case ObjectLiteral::Property::PROTOTYPE:
2359         UNREACHABLE();  // Handled specially above.
2360         break;
2361     }
2362   }
2363 
2364   builder()->LoadAccumulatorWithRegister(literal);
2365 }
2366 
2367 void BytecodeGenerator::BuildArrayLiteralElementsInsertion(
2368     Register array, int first_spread_index, ZonePtrList<Expression>* elements,
2369     bool skip_constants) {
2370   DCHECK_LT(first_spread_index, elements->length());
2371 
2372   Register index = register_allocator()->NewRegister();
2373   int array_index = 0;
2374 
2375   ZonePtrList<Expression>::iterator iter = elements->begin();
2376   ZonePtrList<Expression>::iterator first_spread_or_end =
2377       first_spread_index >= 0 ? elements->begin() + first_spread_index
2378                               : elements->end();
2379 
2380   // Evaluate subexpressions and store them into the array.
2381   FeedbackSlot keyed_store_slot;
2382   for (; iter != first_spread_or_end; ++iter, array_index++) {
2383     Expression* subexpr = *iter;
2384     DCHECK(!subexpr->IsSpread());
2385     if (skip_constants && subexpr->IsCompileTimeValue()) continue;
2386     if (keyed_store_slot.IsInvalid()) {
2387       keyed_store_slot = feedback_spec()->AddKeyedStoreICSlot(language_mode());
2388     }
2389     builder()
2390         ->LoadLiteral(Smi::FromInt(array_index))
2391         .StoreAccumulatorInRegister(index);
2392     VisitForAccumulatorValue(subexpr);
2393     builder()->StoreKeyedProperty(
2394         array, index, feedback_index(keyed_store_slot), language_mode());
2395   }
2396   if (iter != elements->end()) {
2397     builder()->LoadLiteral(array_index).StoreAccumulatorInRegister(index);
2398 
2399     // Handle the first spread element and everything that follows.
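    // For example, in '[a, b, ...rest, c]' the elements 'a' and 'b' were
    // stored above at known indices; from '...rest' onwards elements are
    // appended with StoreInArrayLiteral using a dynamically incremented index.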
2400     FeedbackSlot element_slot = feedback_spec()->AddStoreInArrayLiteralICSlot();
2401     FeedbackSlot index_slot = feedback_spec()->AddBinaryOpICSlot();
2402     // TODO(neis): Only create length_slot when there are holes.
2403     FeedbackSlot length_slot =
2404         feedback_spec()->AddStoreICSlot(LanguageMode::kStrict);
2405     for (; iter != elements->end(); ++iter) {
2406       Expression* subexpr = *iter;
2407       if (subexpr->IsSpread()) {
2408         BuildArrayLiteralSpread(subexpr->AsSpread(), array, index, index_slot,
2409                                 element_slot);
2410       } else if (!subexpr->IsTheHoleLiteral()) {
2411         // literal[index++] = subexpr
2412         VisitForAccumulatorValue(subexpr);
2413         builder()
2414             ->StoreInArrayLiteral(array, index, feedback_index(element_slot))
2415             .LoadAccumulatorWithRegister(index)
2416             .UnaryOperation(Token::INC, feedback_index(index_slot))
2417             .StoreAccumulatorInRegister(index);
2418       } else {
2419         // literal.length = ++index
2420         auto length = ast_string_constants()->length_string();
2421         builder()
2422             ->LoadAccumulatorWithRegister(index)
2423             .UnaryOperation(Token::INC, feedback_index(index_slot))
2424             .StoreAccumulatorInRegister(index)
2425             .StoreNamedProperty(array, length, feedback_index(length_slot),
2426                                 LanguageMode::kStrict);
2427       }
2428     }
2429   }
2430   builder()->LoadAccumulatorWithRegister(array);
2431 }
2432 
2433 void BytecodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
2434   expr->InitDepthAndFlags();
2435   uint8_t flags = CreateArrayLiteralFlags::Encode(
2436       expr->IsFastCloningSupported(), expr->ComputeFlags());
2437 
2438   bool is_empty = expr->is_empty();
2439   bool optimize_as_one_shot = ShouldOptimizeAsOneShot();
2440   size_t entry;
2441   if (is_empty && optimize_as_one_shot) {
2442     entry = builder()->EmptyArrayBoilerplateDescriptionConstantPoolEntry();
2443   } else if (!is_empty) {
2444     entry = builder()->AllocateDeferredConstantPoolEntry();
2445     array_literals_.push_back(std::make_pair(expr, entry));
2446   }
2447 
2448   if (optimize_as_one_shot) {
2449     // Create the array literal without any allocation sites.
2450     RegisterAllocationScope register_scope(this);
2451     RegisterList args = register_allocator()->NewRegisterList(2);
2452     builder()
2453         ->LoadConstantPoolEntry(entry)
2454         .StoreAccumulatorInRegister(args[0])
2455         .LoadLiteral(Smi::FromInt(flags))
2456         .StoreAccumulatorInRegister(args[1])
2457         .CallRuntime(Runtime::kCreateArrayLiteralWithoutAllocationSite, args);
2458   } else if (is_empty) {
2459     // Empty array literal fast-path.
2460     int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
2461     DCHECK(expr->IsFastCloningSupported());
2462     builder()->CreateEmptyArrayLiteral(literal_index);
2463     return;
2464   } else {
2465     // Deep-copy the literal boilerplate
2466     int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
2467     builder()->CreateArrayLiteral(entry, literal_index, flags);
2468   }
2469 
2470   Register literal = register_allocator()->NewRegister();
2471   builder()->StoreAccumulatorInRegister(literal);
2472   // Insert all elements except the constant ones, since they are already there.
2473   BuildArrayLiteralElementsInsertion(literal, expr->first_spread_index(),
2474                                      expr->values(), true);
2475 }
2476 
2477 void BytecodeGenerator::BuildArrayLiteralSpread(Spread* spread, Register array,
2478                                                 Register index,
2479                                                 FeedbackSlot index_slot,
2480                                                 FeedbackSlot element_slot) {
2481   RegisterAllocationScope register_scope(this);
2482   Register value = register_allocator()->NewRegister();
2483 
2484   builder()->SetExpressionAsStatementPosition(spread->expression());
2485   IteratorRecord iterator =
2486       BuildGetIteratorRecord(spread->expression(), IteratorType::kNormal);
2487 
2488   LoopBuilder loop_builder(builder(), nullptr, nullptr);
2489   loop_builder.LoopHeader();
2490 
2491   // Call the iterator's .next() method. Break from the loop if the `done`
2492   // property is truthy, otherwise load the value from the iterator result and
2493   // append it to the array.
2494   BuildIteratorNext(iterator, value);
2495   builder()->LoadNamedProperty(
2496       value, ast_string_constants()->done_string(),
2497       feedback_index(feedback_spec()->AddLoadICSlot()));
2498   loop_builder.BreakIfTrue(ToBooleanMode::kConvertToBoolean);
2499 
2500   loop_builder.LoopBody();
2501   builder()
2502       // value = value.value
2503       ->LoadNamedProperty(value, ast_string_constants()->value_string(),
2504                           feedback_index(feedback_spec()->AddLoadICSlot()))
2505       .StoreAccumulatorInRegister(value)
2506       // array[index] = value
2507       .StoreInArrayLiteral(array, index, feedback_index(element_slot))
2508       // index++
2509       .LoadAccumulatorWithRegister(index)
2510       .UnaryOperation(Token::INC, feedback_index(index_slot))
2511       .StoreAccumulatorInRegister(index);
2512   loop_builder.BindContinueTarget();
2513   loop_builder.JumpToHeader(loop_depth_);
2514 }
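
// For illustration, the loop emitted above for a spread element in an array
// literal behaves roughly like the following sketch:
//
//   // [first, ...rest]
//   while (true) {
//     let result = iterator.next();
//     if (result.done) break;
//     array[index++] = result.value;
//   }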
2515 
VisitStoreInArrayLiteral(StoreInArrayLiteral * expr)2516 void BytecodeGenerator::VisitStoreInArrayLiteral(StoreInArrayLiteral* expr) {
2517   builder()->SetExpressionAsStatementPosition(expr);
2518   RegisterAllocationScope register_scope(this);
2519   Register array = register_allocator()->NewRegister();
2520   Register index = register_allocator()->NewRegister();
2521   VisitForRegisterValue(expr->array(), array);
2522   VisitForRegisterValue(expr->index(), index);
2523   VisitForAccumulatorValue(expr->value());
2524   builder()->StoreInArrayLiteral(
2525       array, index,
2526       feedback_index(feedback_spec()->AddStoreInArrayLiteralICSlot()));
2527 }
2528 
VisitVariableProxy(VariableProxy * proxy)2529 void BytecodeGenerator::VisitVariableProxy(VariableProxy* proxy) {
2530   builder()->SetExpressionPosition(proxy);
2531   BuildVariableLoad(proxy->var(), proxy->hole_check_mode());
2532 }
2533 
BuildVariableLoad(Variable * variable,HoleCheckMode hole_check_mode,TypeofMode typeof_mode)2534 void BytecodeGenerator::BuildVariableLoad(Variable* variable,
2535                                           HoleCheckMode hole_check_mode,
2536                                           TypeofMode typeof_mode) {
2537   switch (variable->location()) {
2538     case VariableLocation::LOCAL: {
2539       Register source(builder()->Local(variable->index()));
2540       // We need to load the variable into the accumulator, even when in a
2541       // VisitForRegisterScope, in order to avoid register aliasing if
2542       // subsequent expressions assign to the same variable.
2543       builder()->LoadAccumulatorWithRegister(source);
2544       if (hole_check_mode == HoleCheckMode::kRequired) {
2545         BuildThrowIfHole(variable);
2546       }
2547       break;
2548     }
2549     case VariableLocation::PARAMETER: {
2550       Register source;
2551       if (variable->IsReceiver()) {
2552         source = builder()->Receiver();
2553       } else {
2554         source = builder()->Parameter(variable->index());
2555       }
2556       // We need to load the variable into the accumulator, even when in a
2557       // VisitForRegisterScope, in order to avoid register aliasing if
2558       // subsequent expressions assign to the same variable.
2559       builder()->LoadAccumulatorWithRegister(source);
2560       if (hole_check_mode == HoleCheckMode::kRequired) {
2561         BuildThrowIfHole(variable);
2562       }
2563       break;
2564     }
2565     case VariableLocation::UNALLOCATED: {
2566       // The global identifier "undefined" is immutable. Everything
2567       // else could be reassigned. For performance, we do a pointer comparison
2568       // rather than checking if the raw_name is really "undefined".
2569       if (variable->raw_name() == ast_string_constants()->undefined_string()) {
2570         builder()->LoadUndefined();
2571       } else {
2572         FeedbackSlot slot = GetCachedLoadGlobalICSlot(typeof_mode, variable);
2573         builder()->LoadGlobal(variable->raw_name(), feedback_index(slot),
2574                               typeof_mode);
2575       }
2576       break;
2577     }
2578     case VariableLocation::CONTEXT: {
2579       int depth = execution_context()->ContextChainDepth(variable->scope());
2580       ContextScope* context = execution_context()->Previous(depth);
2581       Register context_reg;
2582       if (context) {
2583         context_reg = context->reg();
2584         depth = 0;
2585       } else {
2586         context_reg = execution_context()->reg();
2587       }
2588 
2589       BytecodeArrayBuilder::ContextSlotMutability mutability =
2590           (variable->maybe_assigned() == kNotAssigned)
2591               ? BytecodeArrayBuilder::kImmutableSlot
2592               : BytecodeArrayBuilder::kMutableSlot;
2593 
2594       builder()->LoadContextSlot(context_reg, variable->index(), depth,
2595                                  mutability);
2596       if (hole_check_mode == HoleCheckMode::kRequired) {
2597         BuildThrowIfHole(variable);
2598       }
2599       break;
2600     }
2601     case VariableLocation::LOOKUP: {
2602       switch (variable->mode()) {
2603         case VariableMode::kDynamicLocal: {
2604           Variable* local_variable = variable->local_if_not_shadowed();
2605           int depth =
2606               execution_context()->ContextChainDepth(local_variable->scope());
2607           builder()->LoadLookupContextSlot(variable->raw_name(), typeof_mode,
2608                                            local_variable->index(), depth);
2609           if (hole_check_mode == HoleCheckMode::kRequired) {
2610             BuildThrowIfHole(variable);
2611           }
2612           break;
2613         }
2614         case VariableMode::kDynamicGlobal: {
2615           int depth =
2616               current_scope()->ContextChainLengthUntilOutermostSloppyEval();
2617           FeedbackSlot slot = GetCachedLoadGlobalICSlot(typeof_mode, variable);
2618           builder()->LoadLookupGlobalSlot(variable->raw_name(), typeof_mode,
2619                                           feedback_index(slot), depth);
2620           break;
2621         }
2622         default:
2623           builder()->LoadLookupSlot(variable->raw_name(), typeof_mode);
2624       }
2625       break;
2626     }
2627     case VariableLocation::MODULE: {
2628       int depth = execution_context()->ContextChainDepth(variable->scope());
2629       builder()->LoadModuleVariable(variable->index(), depth);
2630       if (hole_check_mode == HoleCheckMode::kRequired) {
2631         BuildThrowIfHole(variable);
2632       }
2633       break;
2634     }
2635   }
2636 }
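
// For illustration, the CONTEXT case above covers closed-over variables, e.g.
//
//   function outer() { let x = 1; return () => x; }
//
// Loading 'x' inside the arrow function emits LoadContextSlot with the number
// of context hops between the current context and the context holding 'x'
// (or depth 0 through the tracked ContextScope's register when that context
// is still part of the current function's chain).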
2637 
BuildVariableLoadForAccumulatorValue(Variable * variable,HoleCheckMode hole_check_mode,TypeofMode typeof_mode)2638 void BytecodeGenerator::BuildVariableLoadForAccumulatorValue(
2639     Variable* variable, HoleCheckMode hole_check_mode, TypeofMode typeof_mode) {
2640   ValueResultScope accumulator_result(this);
2641   BuildVariableLoad(variable, hole_check_mode, typeof_mode);
2642 }
2643 
BuildReturn(int source_position)2644 void BytecodeGenerator::BuildReturn(int source_position) {
2645   if (FLAG_trace) {
2646     RegisterAllocationScope register_scope(this);
2647     Register result = register_allocator()->NewRegister();
2648     // Runtime returns {result} value, preserving accumulator.
2649     builder()->StoreAccumulatorInRegister(result).CallRuntime(
2650         Runtime::kTraceExit, result);
2651   }
2652   if (info()->collect_type_profile()) {
2653     builder()->CollectTypeProfile(info()->literal()->return_position());
2654   }
2655   builder()->SetReturnPosition(source_position, info()->literal());
2656   builder()->Return();
2657 }
2658 
BuildAsyncReturn(int source_position)2659 void BytecodeGenerator::BuildAsyncReturn(int source_position) {
2660   RegisterAllocationScope register_scope(this);
2661 
2662   if (IsAsyncGeneratorFunction(info()->literal()->kind())) {
2663     RegisterList args = register_allocator()->NewRegisterList(3);
2664     builder()
2665         ->MoveRegister(generator_object(), args[0])  // generator
2666         .StoreAccumulatorInRegister(args[1])         // value
2667         .LoadTrue()
2668         .StoreAccumulatorInRegister(args[2])  // done
2669         .CallRuntime(Runtime::kInlineAsyncGeneratorResolve, args);
2670   } else {
2671     DCHECK(IsAsyncFunction(info()->literal()->kind()));
2672     RegisterList args = register_allocator()->NewRegisterList(2);
2673     Register promise = args[0];
2674     Register return_value = args[1];
2675     builder()->StoreAccumulatorInRegister(return_value);
2676 
2677     Variable* var_promise = closure_scope()->promise_var();
2678     DCHECK_NOT_NULL(var_promise);
2679     BuildVariableLoad(var_promise, HoleCheckMode::kElided);
2680     builder()
2681         ->StoreAccumulatorInRegister(promise)
2682         .CallRuntime(Runtime::kInlineResolvePromise, args)
2683         .LoadAccumulatorWithRegister(promise);
2684   }
2685 
2686   BuildReturn(source_position);
2687 }
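
// For illustration, a 'return v' in an async function reaches the
// non-generator branch above and resolves the implicit promise with v via
// Runtime::kInlineResolvePromise before performing the ordinary return; in an
// async generator it instead resolves the current request with
// { value: v, done: true } via Runtime::kInlineAsyncGeneratorResolve.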
2688 
BuildReThrow()2689 void BytecodeGenerator::BuildReThrow() { builder()->ReThrow(); }
2690 
BuildThrowIfHole(Variable * variable)2691 void BytecodeGenerator::BuildThrowIfHole(Variable* variable) {
2692   if (variable->is_this()) {
2693     DCHECK(variable->mode() == VariableMode::kConst);
2694     builder()->ThrowSuperNotCalledIfHole();
2695   } else {
2696     builder()->ThrowReferenceErrorIfHole(variable->raw_name());
2697   }
2698 }
2699 
BuildHoleCheckForVariableAssignment(Variable * variable,Token::Value op)2700 void BytecodeGenerator::BuildHoleCheckForVariableAssignment(Variable* variable,
2701                                                             Token::Value op) {
2702   if (variable->is_this() && variable->mode() == VariableMode::kConst &&
2703       op == Token::INIT) {
2704     // Perform an initialization check for 'this'. 'this' is the only
2705     // variable able to trigger bind operations outside the TDZ, via
2706     // 'super' calls.
2707     builder()->ThrowSuperAlreadyCalledIfNotHole();
2708   } else {
2709     // Perform an initialization check for let/const declared variables.
2710     // E.g. let x = (x = 20); is not allowed.
2711     DCHECK(IsLexicalVariableMode(variable->mode()));
2712     BuildThrowIfHole(variable);
2713   }
2714 }
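
// For illustration, the 'this' branch above corresponds to calling super()
// twice in a derived constructor, e.g.
//
//   class B extends A { constructor() { super(); super(); } }  // throws
//
// while the lexical branch corresponds to self-referencing initializers such
// as the 'let x = (x = 20)' case mentioned above.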
2715 
BuildVariableAssignment(Variable * variable,Token::Value op,HoleCheckMode hole_check_mode,LookupHoistingMode lookup_hoisting_mode)2716 void BytecodeGenerator::BuildVariableAssignment(
2717     Variable* variable, Token::Value op, HoleCheckMode hole_check_mode,
2718     LookupHoistingMode lookup_hoisting_mode) {
2719   VariableMode mode = variable->mode();
2720   RegisterAllocationScope assignment_register_scope(this);
2721   BytecodeLabel end_label;
2722   switch (variable->location()) {
2723     case VariableLocation::PARAMETER:
2724     case VariableLocation::LOCAL: {
2725       Register destination;
2726       if (VariableLocation::PARAMETER == variable->location()) {
2727         if (variable->IsReceiver()) {
2728           destination = builder()->Receiver();
2729         } else {
2730           destination = builder()->Parameter(variable->index());
2731         }
2732       } else {
2733         destination = builder()->Local(variable->index());
2734       }
2735 
2736       if (hole_check_mode == HoleCheckMode::kRequired) {
2737         // Load destination to check for hole.
2738         Register value_temp = register_allocator()->NewRegister();
2739         builder()
2740             ->StoreAccumulatorInRegister(value_temp)
2741             .LoadAccumulatorWithRegister(destination);
2742 
2743         BuildHoleCheckForVariableAssignment(variable, op);
2744         builder()->LoadAccumulatorWithRegister(value_temp);
2745       }
2746 
2747       if (mode != VariableMode::kConst || op == Token::INIT) {
2748         builder()->StoreAccumulatorInRegister(destination);
2749       } else if (variable->throw_on_const_assignment(language_mode())) {
2750         builder()->CallRuntime(Runtime::kThrowConstAssignError);
2751       }
2752       break;
2753     }
2754     case VariableLocation::UNALLOCATED: {
2755       FeedbackSlot slot = GetCachedStoreGlobalICSlot(language_mode(), variable);
2756       builder()->StoreGlobal(variable->raw_name(), feedback_index(slot));
2757       break;
2758     }
2759     case VariableLocation::CONTEXT: {
2760       int depth = execution_context()->ContextChainDepth(variable->scope());
2761       ContextScope* context = execution_context()->Previous(depth);
2762       Register context_reg;
2763 
2764       if (context) {
2765         context_reg = context->reg();
2766         depth = 0;
2767       } else {
2768         context_reg = execution_context()->reg();
2769       }
2770 
2771       if (hole_check_mode == HoleCheckMode::kRequired) {
2772         // Load destination to check for hole.
2773         Register value_temp = register_allocator()->NewRegister();
2774         builder()
2775             ->StoreAccumulatorInRegister(value_temp)
2776             .LoadContextSlot(context_reg, variable->index(), depth,
2777                              BytecodeArrayBuilder::kMutableSlot);
2778 
2779         BuildHoleCheckForVariableAssignment(variable, op);
2780         builder()->LoadAccumulatorWithRegister(value_temp);
2781       }
2782 
2783       if (mode != VariableMode::kConst || op == Token::INIT) {
2784         builder()->StoreContextSlot(context_reg, variable->index(), depth);
2785       } else if (variable->throw_on_const_assignment(language_mode())) {
2786         builder()->CallRuntime(Runtime::kThrowConstAssignError);
2787       }
2788       break;
2789     }
2790     case VariableLocation::LOOKUP: {
2791       builder()->StoreLookupSlot(variable->raw_name(), language_mode(),
2792                                  lookup_hoisting_mode);
2793       break;
2794     }
2795     case VariableLocation::MODULE: {
2796       DCHECK(IsDeclaredVariableMode(mode));
2797 
2798       if (mode == VariableMode::kConst && op != Token::INIT) {
2799         builder()->CallRuntime(Runtime::kThrowConstAssignError);
2800         break;
2801       }
2802 
2803       // If we don't throw above, we know that we're dealing with an
2804       // export because imports are const and we do not generate initializing
2805       // assignments for them.
2806       DCHECK(variable->IsExport());
2807 
2808       int depth = execution_context()->ContextChainDepth(variable->scope());
2809       if (hole_check_mode == HoleCheckMode::kRequired) {
2810         Register value_temp = register_allocator()->NewRegister();
2811         builder()
2812             ->StoreAccumulatorInRegister(value_temp)
2813             .LoadModuleVariable(variable->index(), depth);
2814         BuildHoleCheckForVariableAssignment(variable, op);
2815         builder()->LoadAccumulatorWithRegister(value_temp);
2816       }
2817       builder()->StoreModuleVariable(variable->index(), depth);
2818       break;
2819     }
2820   }
2821 }
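
// For illustration, assigning to an already-initialized const binding, e.g.
//
//   const c = 1; c = 2;
//
// takes the non-INIT kConst paths above and calls
// Runtime::kThrowConstAssignError instead of storing the value (when the
// language mode requires the assignment to throw).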
2822 
BuildLoadNamedProperty(Property * property,Register object,const AstRawString * name)2823 void BytecodeGenerator::BuildLoadNamedProperty(Property* property,
2824                                                Register object,
2825                                                const AstRawString* name) {
2826   if (ShouldOptimizeAsOneShot()) {
2827     RegisterList args = register_allocator()->NewRegisterList(2);
2828     size_t name_index = builder()->GetConstantPoolEntry(name);
2829     builder()
2830         ->MoveRegister(object, args[0])
2831         .LoadConstantPoolEntry(name_index)
2832         .StoreAccumulatorInRegister(args[1])
2833         .CallRuntime(Runtime::kInlineGetProperty, args);
2834   } else {
2835     FeedbackSlot slot = GetCachedLoadICSlot(property->obj(), name);
2836     builder()->LoadNamedProperty(object, name, feedback_index(slot));
2837   }
2838 }
2839 
BuildStoreNamedProperty(Property * property,Register object,const AstRawString * name)2840 void BytecodeGenerator::BuildStoreNamedProperty(Property* property,
2841                                                 Register object,
2842                                                 const AstRawString* name) {
2843   Register value;
2844   if (!execution_result()->IsEffect()) {
2845     value = register_allocator()->NewRegister();
2846     builder()->StoreAccumulatorInRegister(value);
2847   }
2848 
2849   if (ShouldOptimizeAsOneShot()) {
2850     RegisterList args = register_allocator()->NewRegisterList(4);
2851     size_t name_index = builder()->GetConstantPoolEntry(name);
2852     builder()
2853         ->MoveRegister(object, args[0])
2854         .StoreAccumulatorInRegister(args[2])
2855         .LoadConstantPoolEntry(name_index)
2856         .StoreAccumulatorInRegister(args[1])
2857         .LoadLiteral(Smi::FromEnum(language_mode()))
2858         .StoreAccumulatorInRegister(args[3])
2859         .CallRuntime(Runtime::kSetProperty, args);
2860   } else {
2861     FeedbackSlot slot = GetCachedStoreICSlot(property->obj(), name);
2862     builder()->StoreNamedProperty(object, name, feedback_index(slot),
2863                                   language_mode());
2864   }
2865 
2866   if (!execution_result()->IsEffect()) {
2867     builder()->LoadAccumulatorWithRegister(value);
2868   }
2869 }
2870 
VisitAssignment(Assignment * expr)2871 void BytecodeGenerator::VisitAssignment(Assignment* expr) {
2872   DCHECK(expr->target()->IsValidReferenceExpression() ||
2873          (expr->op() == Token::INIT && expr->target()->IsVariableProxy() &&
2874           expr->target()->AsVariableProxy()->is_this()));
2875   Register object, key;
2876   RegisterList super_property_args;
2877   const AstRawString* name;
2878 
2879   // Left-hand side can only be a property, a global or a variable slot.
2880   Property* property = expr->target()->AsProperty();
2881   LhsKind assign_type = Property::GetAssignType(property);
2882 
2883   // Evaluate LHS expression.
2884   switch (assign_type) {
2885     case VARIABLE:
2886       // Nothing to do to evaluate variable assignment LHS.
2887       break;
2888     case NAMED_PROPERTY: {
2889       object = VisitForRegisterValue(property->obj());
2890       name = property->key()->AsLiteral()->AsRawPropertyName();
2891       break;
2892     }
2893     case KEYED_PROPERTY: {
2894       object = VisitForRegisterValue(property->obj());
2895       key = VisitForRegisterValue(property->key());
2896       break;
2897     }
2898     case NAMED_SUPER_PROPERTY: {
2899       super_property_args = register_allocator()->NewRegisterList(4);
2900       SuperPropertyReference* super_property =
2901           property->obj()->AsSuperPropertyReference();
2902       VisitForRegisterValue(super_property->this_var(), super_property_args[0]);
2903       VisitForRegisterValue(super_property->home_object(),
2904                             super_property_args[1]);
2905       builder()
2906           ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
2907           .StoreAccumulatorInRegister(super_property_args[2]);
2908       break;
2909     }
2910     case KEYED_SUPER_PROPERTY: {
2911       super_property_args = register_allocator()->NewRegisterList(4);
2912       SuperPropertyReference* super_property =
2913           property->obj()->AsSuperPropertyReference();
2914       VisitForRegisterValue(super_property->this_var(), super_property_args[0]);
2915       VisitForRegisterValue(super_property->home_object(),
2916                             super_property_args[1]);
2917       VisitForRegisterValue(property->key(), super_property_args[2]);
2918       break;
2919     }
2920   }
2921 
2922   // Evaluate the value and potentially handle compound assignments by loading
2923   // the left-hand side value and performing a binary operation.
2924   if (expr->IsCompoundAssignment()) {
2925     switch (assign_type) {
2926       case VARIABLE: {
2927         VariableProxy* proxy = expr->target()->AsVariableProxy();
2928         BuildVariableLoad(proxy->var(), proxy->hole_check_mode());
2929         break;
2930       }
2931       case NAMED_PROPERTY: {
2932         BuildLoadNamedProperty(property, object, name);
2933         break;
2934       }
2935       case KEYED_PROPERTY: {
2936         // Key is already in accumulator at this point due to evaluating the
2937         // LHS above.
2938         FeedbackSlot slot = feedback_spec()->AddKeyedLoadICSlot();
2939         builder()->LoadKeyedProperty(object, feedback_index(slot));
2940         break;
2941       }
2942       case NAMED_SUPER_PROPERTY: {
2943         builder()->CallRuntime(Runtime::kLoadFromSuper,
2944                                super_property_args.Truncate(3));
2945         break;
2946       }
2947       case KEYED_SUPER_PROPERTY: {
2948         builder()->CallRuntime(Runtime::kLoadKeyedFromSuper,
2949                                super_property_args.Truncate(3));
2950         break;
2951       }
2952     }
2953     BinaryOperation* binop = expr->AsCompoundAssignment()->binary_operation();
2954     FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
2955     if (expr->value()->IsSmiLiteral()) {
2956       builder()->BinaryOperationSmiLiteral(
2957           binop->op(), expr->value()->AsLiteral()->AsSmiLiteral(),
2958           feedback_index(slot));
2959     } else {
2960       Register old_value = register_allocator()->NewRegister();
2961       builder()->StoreAccumulatorInRegister(old_value);
2962       VisitForAccumulatorValue(expr->value());
2963       builder()->BinaryOperation(binop->op(), old_value, feedback_index(slot));
2964     }
2965   } else {
2966     VisitForAccumulatorValue(expr->value());
2967   }
2968 
2969   // Store the value.
2970   builder()->SetExpressionPosition(expr);
2971   switch (assign_type) {
2972     case VARIABLE: {
2973       // TODO(oth): The BuildVariableAssignment() call is hard to reason about.
2974       // Is the value in the accumulator safe? Yes, but scary.
2975       VariableProxy* proxy = expr->target()->AsVariableProxy();
2976       BuildVariableAssignment(proxy->var(), expr->op(),
2977                               proxy->hole_check_mode(),
2978                               expr->lookup_hoisting_mode());
2979       break;
2980     }
2981     case NAMED_PROPERTY: {
2982       BuildStoreNamedProperty(property, object, name);
2983       break;
2984     }
2985     case KEYED_PROPERTY: {
2986       FeedbackSlot slot = feedback_spec()->AddKeyedStoreICSlot(language_mode());
2987       Register value;
2988       if (!execution_result()->IsEffect()) {
2989         value = register_allocator()->NewRegister();
2990         builder()->StoreAccumulatorInRegister(value);
2991       }
2992       builder()->StoreKeyedProperty(object, key, feedback_index(slot),
2993                                     language_mode());
2994       if (!execution_result()->IsEffect()) {
2995         builder()->LoadAccumulatorWithRegister(value);
2996       }
2997       break;
2998     }
2999     case NAMED_SUPER_PROPERTY: {
3000       builder()
3001           ->StoreAccumulatorInRegister(super_property_args[3])
3002           .CallRuntime(StoreToSuperRuntimeId(), super_property_args);
3003       break;
3004     }
3005     case KEYED_SUPER_PROPERTY: {
3006       builder()
3007           ->StoreAccumulatorInRegister(super_property_args[3])
3008           .CallRuntime(StoreKeyedToSuperRuntimeId(), super_property_args);
3009       break;
3010     }
3011   }
3012 }
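
// For illustration, a compound assignment such as
//
//   o.x += y
//
// follows the NAMED_PROPERTY paths above: 'o' is evaluated into a register,
// 'o.x' is loaded, the addition is performed against a binary-op feedback
// slot, and the result is stored back via BuildStoreNamedProperty.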
3013 
VisitCompoundAssignment(CompoundAssignment * expr)3014 void BytecodeGenerator::VisitCompoundAssignment(CompoundAssignment* expr) {
3015   VisitAssignment(expr);
3016 }
3017 
3018 // Suspends the generator to resume at the next suspend_id, with output stored
3019 // in the accumulator. When the generator is resumed, the sent value is loaded
3020 // in the accumulator.
BuildSuspendPoint(Expression * suspend_expr)3021 void BytecodeGenerator::BuildSuspendPoint(Expression* suspend_expr) {
3022   const int suspend_id = suspend_count_++;
3023 
3024   RegisterList registers = register_allocator()->AllLiveRegisters();
3025 
3026   // Save context, registers, and state. This bytecode then returns the value
3027   // in the accumulator.
3028   builder()->SetExpressionPosition(suspend_expr);
3029   builder()->SuspendGenerator(generator_object(), registers, suspend_id);
3030 
3031   // Upon resume, we continue here.
3032   builder()->Bind(generator_jump_table_, suspend_id);
3033 
3034   // Clobbers all registers and sets the accumulator to the
3035   // [[input_or_debug_pos]] slot of the generator object.
3036   builder()->ResumeGenerator(generator_object(), registers);
3037 }
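
// For illustration, in
//
//   async function f() { await a; await b; }
//
// each await reaches this suspend point with its own consecutive suspend_id,
// and the jump table bound above dispatches to the matching resume point when
// the underlying generator is resumed.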
3038 
VisitYield(Yield * expr)3039 void BytecodeGenerator::VisitYield(Yield* expr) {
3040   builder()->SetExpressionPosition(expr);
3041   VisitForAccumulatorValue(expr->expression());
3042 
3043   // If this is not the first yield, wrap the value before suspending.
3044   if (suspend_count_ > 0) {
3045     if (IsAsyncGeneratorFunction(function_kind())) {
3046       // AsyncGenerator yields (with the exception of the initial yield)
3047       // delegate work to the AsyncGeneratorYield stub, which Awaits the operand
3048       // and, on success, wraps the value in an IteratorResult.
3049       RegisterAllocationScope register_scope(this);
3050       RegisterList args = register_allocator()->NewRegisterList(3);
3051       builder()
3052           ->MoveRegister(generator_object(), args[0])  // generator
3053           .StoreAccumulatorInRegister(args[1])         // value
3054           .LoadBoolean(catch_prediction() != HandlerTable::ASYNC_AWAIT)
3055           .StoreAccumulatorInRegister(args[2])  // is_caught
3056           .CallRuntime(Runtime::kInlineAsyncGeneratorYield, args);
3057     } else {
3058       // Generator yields (with the exception of the initial yield) wrap the
3059       // value in an IteratorResult.
3060       RegisterAllocationScope register_scope(this);
3061       RegisterList args = register_allocator()->NewRegisterList(2);
3062       builder()
3063           ->StoreAccumulatorInRegister(args[0])  // value
3064           .LoadFalse()
3065           .StoreAccumulatorInRegister(args[1])   // done
3066           .CallRuntime(Runtime::kInlineCreateIterResultObject, args);
3067     }
3068   }
3069 
3070   BuildSuspendPoint(expr);
3071   // At this point, the generator has been resumed, with the received value in
3072   // the accumulator.
3073 
3074   // TODO(caitp): remove once yield* desugaring for async generators is handled
3075   // in BytecodeGenerator.
3076   if (expr->on_abrupt_resume() == Yield::kNoControl) {
3077     DCHECK(IsAsyncGeneratorFunction(function_kind()));
3078     return;
3079   }
3080 
3081   Register input = register_allocator()->NewRegister();
3082   builder()->StoreAccumulatorInRegister(input).CallRuntime(
3083       Runtime::kInlineGeneratorGetResumeMode, generator_object());
3084 
3085   // Now dispatch on resume mode.
3086   STATIC_ASSERT(JSGeneratorObject::kNext + 1 == JSGeneratorObject::kReturn);
3087   BytecodeJumpTable* jump_table =
3088       builder()->AllocateJumpTable(2, JSGeneratorObject::kNext);
3089 
3090   builder()->SwitchOnSmiNoFeedback(jump_table);
3091 
3092   {
3093     // Resume with throw (switch fallthrough).
3094     // TODO(leszeks): Add a debug-only check that the accumulator is
3095     // JSGeneratorObject::kThrow.
3096     builder()->SetExpressionPosition(expr);
3097     builder()->LoadAccumulatorWithRegister(input);
3098     builder()->Throw();
3099   }
3100 
3101   {
3102     // Resume with return.
3103     builder()->Bind(jump_table, JSGeneratorObject::kReturn);
3104     builder()->LoadAccumulatorWithRegister(input);
3105     if (IsAsyncGeneratorFunction(function_kind())) {
3106       execution_control()->AsyncReturnAccumulator();
3107     } else {
3108       execution_control()->ReturnAccumulator();
3109     }
3110   }
3111 
3112   {
3113     // Resume with next.
3114     builder()->Bind(jump_table, JSGeneratorObject::kNext);
3115     BuildIncrementBlockCoverageCounterIfEnabled(expr,
3116                                                 SourceRangeKind::kContinuation);
3117     builder()->LoadAccumulatorWithRegister(input);
3118   }
3119 }
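
// For illustration, the resume-mode dispatch above distinguishes how the
// suspended generator was resumed:
//
//   g.next(v)    // yield evaluates to v (kNext)
//   g.return(v)  // takes the return path, running enclosing finally blocks
//   g.throw(e)   // rethrows e at the yield site (the switch fallthrough)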
3120 
3121 // Desugaring of (yield* iterable)
3122 //
3123 //   do {
3124 //     const kNext = 0;
3125 //     const kReturn = 1;
3126 //     const kThrow = 2;
3127 //
3128 //     let output; // uninitialized
3129 //
3130 //     let iteratorRecord = GetIterator(iterable);
3131 //     let iterator = iteratorRecord.[[Iterator]];
3132 //     let next = iteratorRecord.[[NextMethod]];
3133 //     let input = undefined;
3134 //     let resumeMode = kNext;
3135 //
3136 //     while (true) {
3137 //       // From the generator to the iterator:
3138 //       // Forward input according to resumeMode and obtain output.
3139 //       switch (resumeMode) {
3140 //         case kNext:
3141 //           output = next.[[Call]](iterator, « »);
3142 //           break;
3143 //         case kReturn:
3144 //           let iteratorReturn = iterator.return;
3145 //           if (IS_NULL_OR_UNDEFINED(iteratorReturn)) return input;
3146 //           output = iteratorReturn.[[Call]](iterator, «input»);
3147 //           break;
3148 //         case kThrow:
3149 //           let iteratorThrow = iterator.throw;
3150 //           if (IS_NULL_OR_UNDEFINED(iteratorThrow)) {
3151 //             let iteratorReturn = iterator.return;
3152 //             if (!IS_NULL_OR_UNDEFINED(iteratorReturn)) {
3153 //               output = iteratorReturn.[[Call]](iterator, « »);
3154 //               if (IS_ASYNC_GENERATOR) output = await output;
3155 //               if (!IS_RECEIVER(output)) %ThrowIterResultNotAnObject(output);
3156 //             }
3157 //             throw MakeTypeError(kThrowMethodMissing);
3158 //           }
3159 //           output = iteratorThrow.[[Call]](iterator, «input»);
3160 //           break;
3161 //       }
3162 //
3163 //       if (IS_ASYNC_GENERATOR) output = await output;
3164 //       if (!IS_RECEIVER(output)) %ThrowIterResultNotAnObject(output);
3165 //       if (output.done) break;
3166 //
3167 //       // From the generator to its user:
3168 //       // Forward output, receive new input, and determine resume mode.
3169 //       if (IS_ASYNC_GENERATOR) {
3170 //         // AsyncGeneratorYield abstract operation awaits the operand before
3171 //         // resolving the promise for the current AsyncGeneratorRequest.
3172 //         %_AsyncGeneratorYield(output.value)
3173 //       }
3174 //       input = Suspend(output);
3175 //       resumeMode = %GeneratorGetResumeMode();
3176 //     }
3177 //
3178 //     if (resumeMode === kReturn) {
3179 //       return output.value;
3180 //     }
3181 //     output.value
3182 //   }
VisitYieldStar(YieldStar * expr)3183 void BytecodeGenerator::VisitYieldStar(YieldStar* expr) {
3184   Register output = register_allocator()->NewRegister();
3185   Register resume_mode = register_allocator()->NewRegister();
3186   IteratorType iterator_type = IsAsyncGeneratorFunction(function_kind())
3187                                    ? IteratorType::kAsync
3188                                    : IteratorType::kNormal;
3189 
3190   {
3191     RegisterAllocationScope register_scope(this);
3192     RegisterList iterator_and_input = register_allocator()->NewRegisterList(2);
3193     IteratorRecord iterator = BuildGetIteratorRecord(
3194         expr->expression(),
3195         register_allocator()->NewRegister() /* next method */,
3196         iterator_and_input[0], iterator_type);
3197 
3198     Register input = iterator_and_input[1];
3199     builder()->LoadUndefined().StoreAccumulatorInRegister(input);
3200     builder()
3201         ->LoadLiteral(Smi::FromInt(JSGeneratorObject::kNext))
3202         .StoreAccumulatorInRegister(resume_mode);
3203 
3204     {
3205       // This loop builder does not construct counters as the loop is not
3206       // visible to the user, and we therefore neither pass the block coverage
3207       // builder nor the expression.
3208       //
3209       // In addition to the normal suspend for yield*, a yield* in an async
3210       // generator has 2 additional suspends:
3211       //   - One for awaiting the iterator result of closing the generator when
3212       //     resumed with a "throw" completion, and a throw method is not
3213       //     present on the delegated iterator
3214       //   - One for awaiting the iterator result yielded by the delegated
3215       //     iterator
3216 
3217       LoopBuilder loop(builder(), nullptr, nullptr);
3218       loop.LoopHeader();
3219 
3220       {
3221         BytecodeLabels after_switch(zone());
3222         BytecodeJumpTable* switch_jump_table =
3223             builder()->AllocateJumpTable(2, 1);
3224 
3225         builder()
3226             ->LoadAccumulatorWithRegister(resume_mode)
3227             .SwitchOnSmiNoFeedback(switch_jump_table);
3228 
3229         // Fallthrough to default case.
3230         // TODO(tebbi): Add debug code to check that {resume_mode} really is
3231         // {JSGeneratorObject::kNext} in this case.
3232         STATIC_ASSERT(JSGeneratorObject::kNext == 0);
3233         {
3234           FeedbackSlot slot = feedback_spec()->AddCallICSlot();
3235           builder()->CallProperty(iterator.next(), iterator_and_input,
3236                                   feedback_index(slot));
3237           builder()->Jump(after_switch.New());
3238         }
3239 
3240         STATIC_ASSERT(JSGeneratorObject::kReturn == 1);
3241         builder()->Bind(switch_jump_table, JSGeneratorObject::kReturn);
3242         {
3243           const AstRawString* return_string =
3244               ast_string_constants()->return_string();
3245           BytecodeLabels no_return_method(zone());
3246 
3247           BuildCallIteratorMethod(iterator.object(), return_string,
3248                                   iterator_and_input, after_switch.New(),
3249                                   &no_return_method);
3250           no_return_method.Bind(builder());
3251           builder()->LoadAccumulatorWithRegister(input);
3252           if (iterator_type == IteratorType::kAsync) {
3253             execution_control()->AsyncReturnAccumulator();
3254           } else {
3255             execution_control()->ReturnAccumulator();
3256           }
3257         }
3258 
3259         STATIC_ASSERT(JSGeneratorObject::kThrow == 2);
3260         builder()->Bind(switch_jump_table, JSGeneratorObject::kThrow);
3261         {
3262           const AstRawString* throw_string =
3263               ast_string_constants()->throw_string();
3264           BytecodeLabels no_throw_method(zone());
3265           BuildCallIteratorMethod(iterator.object(), throw_string,
3266                                   iterator_and_input, after_switch.New(),
3267                                   &no_throw_method);
3268 
3269           // If there is no "throw" method, perform IteratorClose, and finally
3270           // throw a TypeError.
3271           no_throw_method.Bind(builder());
3272           BuildIteratorClose(iterator, expr);
3273           builder()->CallRuntime(Runtime::kThrowThrowMethodMissing);
3274         }
3275 
3276         after_switch.Bind(builder());
3277       }
3278 
3279       if (iterator_type == IteratorType::kAsync) {
3280         // Await the result of the method invocation.
3281         BuildAwait(expr);
3282       }
3283 
3284       // Check that output is an object.
3285       BytecodeLabel check_if_done;
3286       builder()
3287           ->StoreAccumulatorInRegister(output)
3288           .JumpIfJSReceiver(&check_if_done)
3289           .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, output);
3290 
3291       builder()->Bind(&check_if_done);
3292       // Break once output.done is true.
3293       builder()->LoadNamedProperty(
3294           output, ast_string_constants()->done_string(),
3295           feedback_index(feedback_spec()->AddLoadICSlot()));
3296 
3297       loop.BreakIfTrue(ToBooleanMode::kConvertToBoolean);
3298 
3299       // Suspend the current generator.
3300       if (iterator_type == IteratorType::kNormal) {
3301         builder()->LoadAccumulatorWithRegister(output);
3302       } else {
3303         RegisterAllocationScope register_scope(this);
3304         DCHECK_EQ(iterator_type, IteratorType::kAsync);
3305         // If generatorKind is async, perform AsyncGeneratorYield(output.value),
3306         // which will await `output.value` before resolving the current
3307         // AsyncGeneratorRequest's promise.
3308         builder()->LoadNamedProperty(
3309             output, ast_string_constants()->value_string(),
3310             feedback_index(feedback_spec()->AddLoadICSlot()));
3311 
3312         RegisterList args = register_allocator()->NewRegisterList(3);
3313         builder()
3314             ->MoveRegister(generator_object(), args[0])  // generator
3315             .StoreAccumulatorInRegister(args[1])         // value
3316             .LoadBoolean(catch_prediction() != HandlerTable::ASYNC_AWAIT)
3317             .StoreAccumulatorInRegister(args[2])  // is_caught
3318             .CallRuntime(Runtime::kInlineAsyncGeneratorYield, args);
3319       }
3320 
3321       BuildSuspendPoint(expr);
3322       builder()->StoreAccumulatorInRegister(input);
3323       builder()
3324           ->CallRuntime(Runtime::kInlineGeneratorGetResumeMode,
3325                         generator_object())
3326           .StoreAccumulatorInRegister(resume_mode);
3327 
3328       loop.BindContinueTarget();
3329       loop.JumpToHeader(loop_depth_);
3330     }
3331   }
3332 
3333   // Decide if we trigger a return or if the yield* expression should just
3334   // produce a value.
3335   BytecodeLabel completion_is_output_value;
3336   Register output_value = register_allocator()->NewRegister();
3337   builder()
3338       ->LoadNamedProperty(output, ast_string_constants()->value_string(),
3339                           feedback_index(feedback_spec()->AddLoadICSlot()))
3340       .StoreAccumulatorInRegister(output_value)
3341       .LoadLiteral(Smi::FromInt(JSGeneratorObject::kReturn))
3342       .CompareReference(resume_mode)
3343       .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &completion_is_output_value)
3344       .LoadAccumulatorWithRegister(output_value);
3345   if (iterator_type == IteratorType::kAsync) {
3346     execution_control()->AsyncReturnAccumulator();
3347   } else {
3348     execution_control()->ReturnAccumulator();
3349   }
3350 
3351   builder()->Bind(&completion_is_output_value);
3352   BuildIncrementBlockCoverageCounterIfEnabled(expr,
3353                                               SourceRangeKind::kContinuation);
3354   builder()->LoadAccumulatorWithRegister(output_value);
3355 }
3356 
BuildAwait(Expression * await_expr)3357 void BytecodeGenerator::BuildAwait(Expression* await_expr) {
3358   // Rather than HandlerTable::UNCAUGHT, async functions use
3359   // HandlerTable::ASYNC_AWAIT to communicate that top-level exceptions are
3360   // transformed into promise rejections. This is necessary to prevent emitting
3361   // multiple debug events for the same uncaught exception. There is no point
3362   // in the body of an async function where catch prediction is
3363   // HandlerTable::UNCAUGHT.
3364   DCHECK(catch_prediction() != HandlerTable::UNCAUGHT);
3365 
3366   {
3367     // Await(operand) and suspend.
3368     RegisterAllocationScope register_scope(this);
3369 
3370     int await_builtin_context_index;
3371     RegisterList args;
3372     if (IsAsyncGeneratorFunction(function_kind())) {
3373       await_builtin_context_index =
3374           catch_prediction() == HandlerTable::ASYNC_AWAIT
3375               ? Context::ASYNC_GENERATOR_AWAIT_UNCAUGHT
3376               : Context::ASYNC_GENERATOR_AWAIT_CAUGHT;
3377       args = register_allocator()->NewRegisterList(2);
3378       builder()
3379           ->MoveRegister(generator_object(), args[0])
3380           .StoreAccumulatorInRegister(args[1]);
3381     } else {
3382       await_builtin_context_index =
3383           catch_prediction() == HandlerTable::ASYNC_AWAIT
3384               ? Context::ASYNC_FUNCTION_AWAIT_UNCAUGHT_INDEX
3385               : Context::ASYNC_FUNCTION_AWAIT_CAUGHT_INDEX;
3386       args = register_allocator()->NewRegisterList(3);
3387       builder()
3388           ->MoveRegister(generator_object(), args[0])
3389           .StoreAccumulatorInRegister(args[1]);
3390 
3391       // AsyncFunction Await builtins require a 3rd parameter to hold the outer
3392       // promise.
3393       Variable* var_promise = closure_scope()->promise_var();
3394       BuildVariableLoadForAccumulatorValue(var_promise, HoleCheckMode::kElided);
3395       builder()->StoreAccumulatorInRegister(args[2]);
3396     }
3397 
3398     builder()->CallJSRuntime(await_builtin_context_index, args);
3399   }
3400 
3401   BuildSuspendPoint(await_expr);
3402 
3403   Register input = register_allocator()->NewRegister();
3404   Register resume_mode = register_allocator()->NewRegister();
3405 
3406   // Now dispatch on resume mode.
3407   BytecodeLabel resume_next;
3408   builder()
3409       ->StoreAccumulatorInRegister(input)
3410       .CallRuntime(Runtime::kInlineGeneratorGetResumeMode, generator_object())
3411       .StoreAccumulatorInRegister(resume_mode)
3412       .LoadLiteral(Smi::FromInt(JSGeneratorObject::kNext))
3413       .CompareReference(resume_mode)
3414       .JumpIfTrue(ToBooleanMode::kAlreadyBoolean, &resume_next);
3415 
3416   // Resume with "throw" completion (rethrow the received value).
3417   // TODO(leszeks): Add a debug-only check that the accumulator is
3418   // JSGeneratorObject::kThrow.
3419   builder()->LoadAccumulatorWithRegister(input).ReThrow();
3420 
3421   // Resume with next.
3422   builder()->Bind(&resume_next);
3423   builder()->LoadAccumulatorWithRegister(input);
3424 }
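
// For illustration, in
//
//   async function f() { return await p; }
//
// the Await builtin selected above arranges for this generator to be resumed
// once p settles; the dispatch just emitted then either continues with the
// fulfilled value (kNext) or rethrows the rejection value.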
3425 
VisitAwait(Await * expr)3426 void BytecodeGenerator::VisitAwait(Await* expr) {
3427   builder()->SetExpressionPosition(expr);
3428   VisitForAccumulatorValue(expr->expression());
3429   BuildAwait(expr);
3430   BuildIncrementBlockCoverageCounterIfEnabled(expr,
3431                                               SourceRangeKind::kContinuation);
3432 }
3433 
VisitThrow(Throw * expr)3434 void BytecodeGenerator::VisitThrow(Throw* expr) {
3435   AllocateBlockCoverageSlotIfEnabled(expr, SourceRangeKind::kContinuation);
3436   VisitForAccumulatorValue(expr->exception());
3437   builder()->SetExpressionPosition(expr);
3438   builder()->Throw();
3439 }
3440 
VisitPropertyLoad(Register obj,Property * property)3441 void BytecodeGenerator::VisitPropertyLoad(Register obj, Property* property) {
3442   LhsKind property_kind = Property::GetAssignType(property);
3443   switch (property_kind) {
3444     case VARIABLE:
3445       UNREACHABLE();
3446     case NAMED_PROPERTY: {
3447       builder()->SetExpressionPosition(property);
3448       const AstRawString* name =
3449           property->key()->AsLiteral()->AsRawPropertyName();
3450       BuildLoadNamedProperty(property, obj, name);
3451       break;
3452     }
3453     case KEYED_PROPERTY: {
3454       VisitForAccumulatorValue(property->key());
3455       builder()->SetExpressionPosition(property);
3456       builder()->LoadKeyedProperty(
3457           obj, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
3458       break;
3459     }
3460     case NAMED_SUPER_PROPERTY:
3461       VisitNamedSuperPropertyLoad(property, Register::invalid_value());
3462       break;
3463     case KEYED_SUPER_PROPERTY:
3464       VisitKeyedSuperPropertyLoad(property, Register::invalid_value());
3465       break;
3466   }
3467 }
3468 
VisitPropertyLoadForRegister(Register obj,Property * expr,Register destination)3469 void BytecodeGenerator::VisitPropertyLoadForRegister(Register obj,
3470                                                      Property* expr,
3471                                                      Register destination) {
3472   ValueResultScope result_scope(this);
3473   VisitPropertyLoad(obj, expr);
3474   builder()->StoreAccumulatorInRegister(destination);
3475 }
3476 
VisitNamedSuperPropertyLoad(Property * property,Register opt_receiver_out)3477 void BytecodeGenerator::VisitNamedSuperPropertyLoad(Property* property,
3478                                                     Register opt_receiver_out) {
3479   RegisterAllocationScope register_scope(this);
3480   SuperPropertyReference* super_property =
3481       property->obj()->AsSuperPropertyReference();
3482   RegisterList args = register_allocator()->NewRegisterList(3);
3483   VisitForRegisterValue(super_property->this_var(), args[0]);
3484   VisitForRegisterValue(super_property->home_object(), args[1]);
3485 
3486   builder()->SetExpressionPosition(property);
3487   builder()
3488       ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
3489       .StoreAccumulatorInRegister(args[2])
3490       .CallRuntime(Runtime::kLoadFromSuper, args);
3491 
3492   if (opt_receiver_out.is_valid()) {
3493     builder()->MoveRegister(args[0], opt_receiver_out);
3494   }
3495 }
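
// For illustration, a named super property load such as
//
//   class B extends A { m() { return super.x; } }
//
// passes (this, the home object, "x") to Runtime::kLoadFromSuper above.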
3496 
VisitKeyedSuperPropertyLoad(Property * property,Register opt_receiver_out)3497 void BytecodeGenerator::VisitKeyedSuperPropertyLoad(Property* property,
3498                                                     Register opt_receiver_out) {
3499   RegisterAllocationScope register_scope(this);
3500   SuperPropertyReference* super_property =
3501       property->obj()->AsSuperPropertyReference();
3502   RegisterList args = register_allocator()->NewRegisterList(3);
3503   VisitForRegisterValue(super_property->this_var(), args[0]);
3504   VisitForRegisterValue(super_property->home_object(), args[1]);
3505   VisitForRegisterValue(property->key(), args[2]);
3506 
3507   builder()->SetExpressionPosition(property);
3508   builder()->CallRuntime(Runtime::kLoadKeyedFromSuper, args);
3509 
3510   if (opt_receiver_out.is_valid()) {
3511     builder()->MoveRegister(args[0], opt_receiver_out);
3512   }
3513 }
3514 
VisitProperty(Property * expr)3515 void BytecodeGenerator::VisitProperty(Property* expr) {
3516   LhsKind property_kind = Property::GetAssignType(expr);
3517   if (property_kind != NAMED_SUPER_PROPERTY &&
3518       property_kind != KEYED_SUPER_PROPERTY) {
3519     Register obj = VisitForRegisterValue(expr->obj());
3520     VisitPropertyLoad(obj, expr);
3521   } else {
3522     VisitPropertyLoad(Register::invalid_value(), expr);
3523   }
3524 }
3525 
VisitResolvedProperty(ResolvedProperty * expr)3526 void BytecodeGenerator::VisitResolvedProperty(ResolvedProperty* expr) {
3527   // Handled by VisitCall().
3528   UNREACHABLE();
3529 }
3530 
VisitArguments(ZonePtrList<Expression> * args,RegisterList * arg_regs)3531 void BytecodeGenerator::VisitArguments(ZonePtrList<Expression>* args,
3532                                        RegisterList* arg_regs) {
3533   // Visit arguments.
3534   for (int i = 0; i < static_cast<int>(args->length()); i++) {
3535     VisitAndPushIntoRegisterList(args->at(i), arg_regs);
3536   }
3537 }
3538 
VisitCall(Call * expr)3539 void BytecodeGenerator::VisitCall(Call* expr) {
3540   Expression* callee_expr = expr->expression();
3541   Call::CallType call_type = expr->GetCallType();
3542 
3543   if (call_type == Call::SUPER_CALL) {
3544     return VisitCallSuper(expr);
3545   }
3546 
3547   // Grow the args list as we visit receiver / arguments to avoid allocating all
3548   // the registers up-front. Otherwise these registers are unavailable during
3549   // receiver / argument visiting and we can end up with memory leaks due to
3550   // registers keeping objects alive.
3551   Register callee = register_allocator()->NewRegister();
3552   RegisterList args = register_allocator()->NewGrowableRegisterList();
3553 
3554   bool implicit_undefined_receiver = false;
3555   // When a call contains a spread, a Call AST node is only created if there is
3556   // exactly one spread, and it is the last argument.
3557   bool is_spread_call = expr->only_last_arg_is_spread();
3558 
3559   // TODO(petermarshall): We have a lot of call bytecodes that are very similar,
3560   // see if we can reduce the number by adding a separate argument which
3561   // specifies the call type (e.g., property, spread, tailcall, etc.).
3562 
3563   // Prepare the callee and the receiver for the function call. This depends on
3564   // the semantics of the underlying call type.
3565   switch (call_type) {
3566     case Call::NAMED_PROPERTY_CALL:
3567     case Call::KEYED_PROPERTY_CALL: {
3568       Property* property = callee_expr->AsProperty();
3569       VisitAndPushIntoRegisterList(property->obj(), &args);
3570       VisitPropertyLoadForRegister(args.last_register(), property, callee);
3571       break;
3572     }
3573     case Call::RESOLVED_PROPERTY_CALL: {
3574       ResolvedProperty* resolved = callee_expr->AsResolvedProperty();
3575       VisitAndPushIntoRegisterList(resolved->object(), &args);
3576       VisitForAccumulatorValue(resolved->property());
3577       builder()->StoreAccumulatorInRegister(callee);
3578       break;
3579     }
3580     case Call::GLOBAL_CALL: {
3581       // Receiver is undefined for global calls.
3582       if (!is_spread_call) {
3583         implicit_undefined_receiver = true;
3584       } else {
3585         // TODO(leszeks): There's no special bytecode for tail calls or spread
3586         // calls with an undefined receiver, so just push undefined ourselves.
3587         BuildPushUndefinedIntoRegisterList(&args);
3588       }
3589       // Load callee as a global variable.
3590       VariableProxy* proxy = callee_expr->AsVariableProxy();
3591       BuildVariableLoadForAccumulatorValue(proxy->var(),
3592                                            proxy->hole_check_mode());
3593       builder()->StoreAccumulatorInRegister(callee);
3594       break;
3595     }
3596     case Call::WITH_CALL: {
3597       Register receiver = register_allocator()->GrowRegisterList(&args);
3598       DCHECK(callee_expr->AsVariableProxy()->var()->IsLookupSlot());
3599       {
3600         RegisterAllocationScope inner_register_scope(this);
3601         Register name = register_allocator()->NewRegister();
3602 
3603         // Call %LoadLookupSlotForCall to get the callee and receiver.
3604         RegisterList result_pair = register_allocator()->NewRegisterList(2);
3605         Variable* variable = callee_expr->AsVariableProxy()->var();
3606         builder()
3607             ->LoadLiteral(variable->raw_name())
3608             .StoreAccumulatorInRegister(name)
3609             .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, name,
3610                                 result_pair)
3611             .MoveRegister(result_pair[0], callee)
3612             .MoveRegister(result_pair[1], receiver);
3613       }
3614       break;
3615     }
3616     case Call::OTHER_CALL: {
3617       // Receiver is undefined for other calls.
3618       if (!is_spread_call) {
3619         implicit_undefined_receiver = true;
3620       } else {
3621         // TODO(leszeks): There's no special bytecode for tail calls or spread
3622         // calls with an undefined receiver, so just push undefined ourselves.
3623         BuildPushUndefinedIntoRegisterList(&args);
3624       }
3625       VisitForRegisterValue(callee_expr, callee);
3626       break;
3627     }
3628     case Call::NAMED_SUPER_PROPERTY_CALL: {
3629       Register receiver = register_allocator()->GrowRegisterList(&args);
3630       Property* property = callee_expr->AsProperty();
3631       VisitNamedSuperPropertyLoad(property, receiver);
3632       builder()->StoreAccumulatorInRegister(callee);
3633       break;
3634     }
3635     case Call::KEYED_SUPER_PROPERTY_CALL: {
3636       Register receiver = register_allocator()->GrowRegisterList(&args);
3637       Property* property = callee_expr->AsProperty();
3638       VisitKeyedSuperPropertyLoad(property, receiver);
3639       builder()->StoreAccumulatorInRegister(callee);
3640       break;
3641     }
3642     case Call::SUPER_CALL:
3643       UNREACHABLE();
3644       break;
3645   }
3646 
3647   // Evaluate all arguments to the function call and store in sequential args
3648   // registers.
3649   VisitArguments(expr->arguments(), &args);
3650   int receiver_arg_count = implicit_undefined_receiver ? 0 : 1;
3651   CHECK_EQ(receiver_arg_count + expr->arguments()->length(),
3652            args.register_count());
3653 
3654   // Resolve callee for a potential direct eval call. This block will mutate the
3655   // callee value.
3656   if (expr->is_possibly_eval() && expr->arguments()->length() > 0) {
3657     RegisterAllocationScope inner_register_scope(this);
3658     // Set up arguments for ResolvePossiblyDirectEval by copying callee, source
3659     // strings and function closure, and loading language and
3660     // position.
3661     Register first_arg = args[receiver_arg_count];
3662     RegisterList runtime_call_args = register_allocator()->NewRegisterList(6);
3663     builder()
3664         ->MoveRegister(callee, runtime_call_args[0])
3665         .MoveRegister(first_arg, runtime_call_args[1])
3666         .MoveRegister(Register::function_closure(), runtime_call_args[2])
3667         .LoadLiteral(Smi::FromEnum(language_mode()))
3668         .StoreAccumulatorInRegister(runtime_call_args[3])
3669         .LoadLiteral(Smi::FromInt(current_scope()->start_position()))
3670         .StoreAccumulatorInRegister(runtime_call_args[4])
3671         .LoadLiteral(Smi::FromInt(expr->position()))
3672         .StoreAccumulatorInRegister(runtime_call_args[5]);
3673 
3674     // Call ResolvePossiblyDirectEval and modify the callee.
3675     builder()
3676         ->CallRuntime(Runtime::kResolvePossiblyDirectEval, runtime_call_args)
3677         .StoreAccumulatorInRegister(callee);
3678   }
3679 
3680   builder()->SetExpressionPosition(expr);
3681 
3682   int feedback_slot_index = feedback_index(feedback_spec()->AddCallICSlot());
3683 
3684   if (is_spread_call) {
3685     DCHECK(!implicit_undefined_receiver);
3686     builder()->CallWithSpread(callee, args, feedback_slot_index);
3687   } else if (call_type == Call::NAMED_PROPERTY_CALL ||
3688              call_type == Call::KEYED_PROPERTY_CALL ||
3689              call_type == Call::RESOLVED_PROPERTY_CALL) {
3690     DCHECK(!implicit_undefined_receiver);
3691     builder()->CallProperty(callee, args, feedback_slot_index);
3692   } else if (implicit_undefined_receiver) {
3693     builder()->CallUndefinedReceiver(callee, args, feedback_slot_index);
3694   } else {
3695     builder()->CallAnyReceiver(callee, args, feedback_slot_index);
3696   }
3697 }
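
// For illustration, the call bytecode chosen above depends on the call shape:
//
//   o.f(x)    // CallProperty (receiver occupies the first args register)
//   f(x)      // CallUndefinedReceiver
//   f(...xs)  // CallWithSpread (undefined receiver pushed explicitly)
//   eval(s)   // callee rewritten via Runtime::kResolvePossiblyDirectEval first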
3698 
VisitCallSuper(Call * expr)3699 void BytecodeGenerator::VisitCallSuper(Call* expr) {
3700   RegisterAllocationScope register_scope(this);
3701   SuperCallReference* super = expr->expression()->AsSuperCallReference();
3702   ZonePtrList<Expression>* args = expr->arguments();
3703 
3704   int first_spread_index = 0;
3705   for (; first_spread_index < args->length(); first_spread_index++) {
3706     if (args->at(first_spread_index)->IsSpread()) break;
3707   }
3708 
3709   // Prepare the constructor for the super call.
3710   Register this_function = VisitForRegisterValue(super->this_function_var());
3711   Register constructor = register_allocator()->NewRegister();
3712   builder()
3713       ->LoadAccumulatorWithRegister(this_function)
3714       .GetSuperConstructor(constructor);
3715 
3716   if (first_spread_index < expr->arguments()->length() - 1) {
3717     // We rewrite something like
3718     //    super(1, ...x, 2)
3719     // to
3720     //    %reflect_construct(constructor, [1, ...x, 2], new_target)
3721     // That is, we implement (non-last-arg) spreads in super calls via our
3722     // mechanism for spreads in array literals.
3723 
3724     // First generate the array containing all arguments.
3725     Register array = register_allocator()->NewRegister();
3726     int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
3727     builder()
3728         ->CreateEmptyArrayLiteral(literal_index)
3729         .StoreAccumulatorInRegister(array);
3730     BuildArrayLiteralElementsInsertion(array, first_spread_index, args, false);
3731 
3732     // Now pass that array to %reflect_construct.
3733     RegisterList construct_args = register_allocator()->NewRegisterList(3);
3734     builder()->MoveRegister(constructor, construct_args[0]);
3735     builder()->MoveRegister(array, construct_args[1]);
3736     VisitForRegisterValue(super->new_target_var(), construct_args[2]);
3737     builder()->CallJSRuntime(Context::REFLECT_CONSTRUCT_INDEX, construct_args);
3738   } else {
3739     RegisterList args_regs = register_allocator()->NewGrowableRegisterList();
3740     VisitArguments(args, &args_regs);
3741     // The new target is loaded into the accumulator from the
3742     // {new.target} variable.
3743     VisitForAccumulatorValue(super->new_target_var());
3744     builder()->SetExpressionPosition(expr);
3745 
3746     int feedback_slot_index = feedback_index(feedback_spec()->AddCallICSlot());
3747 
3748     if (first_spread_index == expr->arguments()->length() - 1) {
3749       builder()->ConstructWithSpread(constructor, args_regs,
3750                                      feedback_slot_index);
3751     } else {
3752       DCHECK_EQ(first_spread_index, expr->arguments()->length());
3753       // Call construct.
3754       // TODO(turbofan): For now we do gather feedback on super constructor
3755       // calls, utilizing the existing machinery to inline the actual call
3756       // target and the JSCreate for the implicit receiver allocation. This
3757       // is not an ideal solution for super constructor calls, but it gets
3758       // the job done for now. In the long run we might want to revisit this
3759       // and come up with a better way.
3760       builder()->Construct(constructor, args_regs, feedback_slot_index);
3761     }
3762   }
3763 
3764   // Explicit calls to the super constructor using super() perform an
3765   // implicit binding assignment to the 'this' variable.
3766   //
3767   // Default constructors don't have to do the assignment because
3768   // 'this' isn't accessed in default constructors.
3769   if (!IsDefaultConstructor(info()->literal()->kind())) {
3770     BuildVariableAssignment(super->this_var()->var(), Token::INIT,
3771                             HoleCheckMode::kRequired);
3772   }
3773 
3774   // The derived constructor always has the correct bit set, so we
3775   // don't emit code to load and call the initializer if it is not
3776   // required.
3777   //
3778   // For the arrow function or eval case, we always emit code to load
3779   // and call the initializer.
3780   //
3781   // TODO(gsathya): In the future, we could tag nested arrow functions
3782   // or eval with the correct bit so that we do the load conditionally
3783   // if required.
3784   if (info()->literal()->requires_instance_fields_initializer() ||
3785       !IsDerivedConstructor(info()->literal()->kind())) {
3786     Register instance = register_allocator()->NewRegister();
3787     builder()->StoreAccumulatorInRegister(instance);
3788     BuildInstanceFieldInitialization(this_function, instance);
3789     builder()->LoadAccumulatorWithRegister(instance);
3790   }
3791 }
3792 
3793 void BytecodeGenerator::VisitCallNew(CallNew* expr) {
3794   Register constructor = VisitForRegisterValue(expr->expression());
3795   RegisterList args = register_allocator()->NewGrowableRegisterList();
3796   VisitArguments(expr->arguments(), &args);
3797 
3798   // The accumulator holds the new target, which is the same as the
3799   // constructor for CallNew.
3800   builder()->SetExpressionPosition(expr);
3801   builder()->LoadAccumulatorWithRegister(constructor);
3802 
3803   int feedback_slot_index = feedback_index(feedback_spec()->AddCallICSlot());
3804   if (expr->only_last_arg_is_spread()) {
3805     builder()->ConstructWithSpread(constructor, args, feedback_slot_index);
3806   } else {
3807     builder()->Construct(constructor, args, feedback_slot_index);
3808   }
3809 }
3810 
3811 void BytecodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3812   if (expr->is_jsruntime()) {
3813     RegisterList args = register_allocator()->NewGrowableRegisterList();
3814     VisitArguments(expr->arguments(), &args);
3815     builder()->CallJSRuntime(expr->context_index(), args);
3816   } else {
3817     // Evaluate all arguments to the runtime call.
3818     RegisterList args = register_allocator()->NewGrowableRegisterList();
3819     VisitArguments(expr->arguments(), &args);
3820     Runtime::FunctionId function_id = expr->function()->function_id;
3821     builder()->CallRuntime(function_id, args);
3822   }
3823 }
3824 
3825 void BytecodeGenerator::VisitVoid(UnaryOperation* expr) {
3826   VisitForEffect(expr->expression());
3827   builder()->LoadUndefined();
3828 }
3829 
3830 void BytecodeGenerator::VisitForTypeOfValue(Expression* expr) {
3831   if (expr->IsVariableProxy()) {
3832     // Typeof does not throw a reference error on global variables, hence we
3833     // perform a non-contextual load in case the operand is a variable proxy.
3834     VariableProxy* proxy = expr->AsVariableProxy();
3835     BuildVariableLoadForAccumulatorValue(proxy->var(), proxy->hole_check_mode(),
3836                                          INSIDE_TYPEOF);
3837   } else {
3838     VisitForAccumulatorValue(expr);
3839   }
3840 }
3841 
3842 void BytecodeGenerator::VisitTypeOf(UnaryOperation* expr) {
3843   VisitForTypeOfValue(expr->expression());
3844   builder()->TypeOf();
3845 }
3846 
3847 void BytecodeGenerator::VisitNot(UnaryOperation* expr) {
3848   if (execution_result()->IsEffect()) {
3849     VisitForEffect(expr->expression());
3850   } else if (execution_result()->IsTest()) {
3851     // No actual logical negation happens; we just invert the control flow by
3852     // swapping the target labels and the fallthrough branch, and visit in the
3853     // same test result context.
3854     TestResultScope* test_result = execution_result()->AsTest();
3855     test_result->InvertControlFlow();
3856     VisitInSameTestExecutionScope(expr->expression());
3857   } else {
3858     TypeHint type_hint = VisitForAccumulatorValue(expr->expression());
3859     builder()->LogicalNot(ToBooleanModeFromTypeHint(type_hint));
3860     // Always returns a boolean value.
3861     execution_result()->SetResultIsBoolean();
3862   }
3863 }
3864 
3865 void BytecodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3866   switch (expr->op()) {
3867     case Token::Value::NOT:
3868       VisitNot(expr);
3869       break;
3870     case Token::Value::TYPEOF:
3871       VisitTypeOf(expr);
3872       break;
3873     case Token::Value::VOID:
3874       VisitVoid(expr);
3875       break;
3876     case Token::Value::DELETE:
3877       VisitDelete(expr);
3878       break;
3879     case Token::Value::ADD:
3880     case Token::Value::SUB:
3881     case Token::Value::BIT_NOT:
3882       VisitForAccumulatorValue(expr->expression());
3883       builder()->SetExpressionPosition(expr);
3884       builder()->UnaryOperation(
3885           expr->op(), feedback_index(feedback_spec()->AddBinaryOpICSlot()));
3886       break;
3887     default:
3888       UNREACHABLE();
3889   }
3890 }
3891 
3892 void BytecodeGenerator::VisitDelete(UnaryOperation* expr) {
3893   if (expr->expression()->IsProperty()) {
3894     // Delete of an object property is allowed both in sloppy
3895     // and strict modes.
3896     Property* property = expr->expression()->AsProperty();
3897     Register object = VisitForRegisterValue(property->obj());
3898     VisitForAccumulatorValue(property->key());
3899     builder()->Delete(object, language_mode());
3900   } else if (expr->expression()->IsVariableProxy()) {
3901     // Delete of an unqualified identifier is allowed in sloppy mode but is
3902     // not allowed in strict mode. Deleting 'this' and 'new.target' is allowed
3903     // in both modes.
3904     VariableProxy* proxy = expr->expression()->AsVariableProxy();
3905     DCHECK(is_sloppy(language_mode()) || proxy->is_this() ||
3906            proxy->is_new_target());
3907     if (proxy->is_this() || proxy->is_new_target()) {
3908       builder()->LoadTrue();
3909     } else {
3910       Variable* variable = proxy->var();
3911       switch (variable->location()) {
3912         case VariableLocation::PARAMETER:
3913         case VariableLocation::LOCAL:
3914         case VariableLocation::CONTEXT: {
3915           // Deleting local var/let/const, context variables, and arguments
3916           // does not have any effect.
3917           builder()->LoadFalse();
3918           break;
3919         }
3920         case VariableLocation::UNALLOCATED:
3921         // TODO(adamk): Falling through to the runtime results in correct
3922         // behavior, but does unnecessary context-walking (since scope
3923         // analysis has already proven that the variable doesn't exist in
3924         // any non-global scope). Consider adding a DeleteGlobal bytecode
3925         // that knows how to deal with ScriptContexts as well as global
3926         // object properties.
3927         case VariableLocation::LOOKUP: {
3928           Register name_reg = register_allocator()->NewRegister();
3929           builder()
3930               ->LoadLiteral(variable->raw_name())
3931               .StoreAccumulatorInRegister(name_reg)
3932               .CallRuntime(Runtime::kDeleteLookupSlot, name_reg);
3933           break;
3934         }
3935         default:
3936           UNREACHABLE();
3937       }
3938     }
3939   } else {
3940     // Delete of an unresolvable reference returns true.
3941     VisitForEffect(expr->expression());
3942     builder()->LoadTrue();
3943   }
3944 }
3945 
3946 void BytecodeGenerator::VisitCountOperation(CountOperation* expr) {
3947   DCHECK(expr->expression()->IsValidReferenceExpression());
3948 
3949   // Left-hand side can only be a property, a global or a variable slot.
3950   Property* property = expr->expression()->AsProperty();
3951   LhsKind assign_type = Property::GetAssignType(property);
3952 
3953   bool is_postfix = expr->is_postfix() && !execution_result()->IsEffect();
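  // Note that a postfix operation only needs to materialize the old value when
  // that value is actually used; e.g. "x++;" as a statement skips the old-value
  // register and the explicit ToNumeric below.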
3954 
3955   // Evaluate LHS expression and get old value.
3956   Register object, key, old_value;
3957   RegisterList super_property_args;
3958   const AstRawString* name;
3959   switch (assign_type) {
3960     case VARIABLE: {
3961       VariableProxy* proxy = expr->expression()->AsVariableProxy();
3962       BuildVariableLoadForAccumulatorValue(proxy->var(),
3963                                            proxy->hole_check_mode());
3964       break;
3965     }
3966     case NAMED_PROPERTY: {
3967       object = VisitForRegisterValue(property->obj());
3968       name = property->key()->AsLiteral()->AsRawPropertyName();
3969       builder()->LoadNamedProperty(
3970           object, name,
3971           feedback_index(GetCachedLoadICSlot(property->obj(), name)));
3972       break;
3973     }
3974     case KEYED_PROPERTY: {
3975       object = VisitForRegisterValue(property->obj());
3976       // Use visit for accumulator here since we need the key in the accumulator
3977       // for the LoadKeyedProperty.
3978       key = register_allocator()->NewRegister();
3979       VisitForAccumulatorValue(property->key());
3980       builder()->StoreAccumulatorInRegister(key).LoadKeyedProperty(
3981           object, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
3982       break;
3983     }
3984     case NAMED_SUPER_PROPERTY: {
3985       super_property_args = register_allocator()->NewRegisterList(4);
3986       RegisterList load_super_args = super_property_args.Truncate(3);
3987       SuperPropertyReference* super_property =
3988           property->obj()->AsSuperPropertyReference();
3989       VisitForRegisterValue(super_property->this_var(), load_super_args[0]);
3990       VisitForRegisterValue(super_property->home_object(), load_super_args[1]);
3991       builder()
3992           ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
3993           .StoreAccumulatorInRegister(load_super_args[2])
3994           .CallRuntime(Runtime::kLoadFromSuper, load_super_args);
3995       break;
3996     }
3997     case KEYED_SUPER_PROPERTY: {
3998       super_property_args = register_allocator()->NewRegisterList(4);
3999       RegisterList load_super_args = super_property_args.Truncate(3);
4000       SuperPropertyReference* super_property =
4001           property->obj()->AsSuperPropertyReference();
4002       VisitForRegisterValue(super_property->this_var(), load_super_args[0]);
4003       VisitForRegisterValue(super_property->home_object(), load_super_args[1]);
4004       VisitForRegisterValue(property->key(), load_super_args[2]);
4005       builder()->CallRuntime(Runtime::kLoadKeyedFromSuper, load_super_args);
4006       break;
4007     }
4008   }
4009 
4010   // Save result for postfix expressions.
4011   FeedbackSlot count_slot = feedback_spec()->AddBinaryOpICSlot();
4012   if (is_postfix) {
4013     old_value = register_allocator()->NewRegister();
4014     // Convert old value into a number before saving it.
4015     // TODO(ignition): Think about adding proper PostInc/PostDec bytecodes
4016     // instead of this ToNumeric + Inc/Dec dance.
4017     builder()
4018         ->ToNumeric(feedback_index(count_slot))
4019         .StoreAccumulatorInRegister(old_value);
4020   }
4021 
4022   // Perform +1/-1 operation.
4023   builder()->UnaryOperation(expr->op(), feedback_index(count_slot));
4024 
4025   // Store the value.
4026   builder()->SetExpressionPosition(expr);
4027   switch (assign_type) {
4028     case VARIABLE: {
4029       VariableProxy* proxy = expr->expression()->AsVariableProxy();
4030       BuildVariableAssignment(proxy->var(), expr->op(),
4031                               proxy->hole_check_mode());
4032       break;
4033     }
4034     case NAMED_PROPERTY: {
4035       FeedbackSlot slot = GetCachedStoreICSlot(property->obj(), name);
4036       Register value;
4037       if (!execution_result()->IsEffect()) {
4038         value = register_allocator()->NewRegister();
4039         builder()->StoreAccumulatorInRegister(value);
4040       }
4041       builder()->StoreNamedProperty(object, name, feedback_index(slot),
4042                                     language_mode());
4043       if (!execution_result()->IsEffect()) {
4044         builder()->LoadAccumulatorWithRegister(value);
4045       }
4046       break;
4047     }
4048     case KEYED_PROPERTY: {
4049       FeedbackSlot slot = feedback_spec()->AddKeyedStoreICSlot(language_mode());
4050       Register value;
4051       if (!execution_result()->IsEffect()) {
4052         value = register_allocator()->NewRegister();
4053         builder()->StoreAccumulatorInRegister(value);
4054       }
4055       builder()->StoreKeyedProperty(object, key, feedback_index(slot),
4056                                     language_mode());
4057       if (!execution_result()->IsEffect()) {
4058         builder()->LoadAccumulatorWithRegister(value);
4059       }
4060       break;
4061     }
4062     case NAMED_SUPER_PROPERTY: {
4063       builder()
4064           ->StoreAccumulatorInRegister(super_property_args[3])
4065           .CallRuntime(StoreToSuperRuntimeId(), super_property_args);
4066       break;
4067     }
4068     case KEYED_SUPER_PROPERTY: {
4069       builder()
4070           ->StoreAccumulatorInRegister(super_property_args[3])
4071           .CallRuntime(StoreKeyedToSuperRuntimeId(), super_property_args);
4072       break;
4073     }
4074   }
4075 
4076   // Restore old value for postfix expressions.
4077   if (is_postfix) {
4078     builder()->LoadAccumulatorWithRegister(old_value);
4079   }
4080 }
4081 
4082 void BytecodeGenerator::VisitBinaryOperation(BinaryOperation* binop) {
4083   switch (binop->op()) {
4084     case Token::COMMA:
4085       VisitCommaExpression(binop);
4086       break;
4087     case Token::OR:
4088       VisitLogicalOrExpression(binop);
4089       break;
4090     case Token::AND:
4091       VisitLogicalAndExpression(binop);
4092       break;
4093     default:
4094       VisitArithmeticExpression(binop);
4095       break;
4096   }
4097 }
4098 
4099 void BytecodeGenerator::VisitNaryOperation(NaryOperation* expr) {
4100   switch (expr->op()) {
4101     case Token::COMMA:
4102       VisitNaryCommaExpression(expr);
4103       break;
4104     case Token::OR:
4105       VisitNaryLogicalOrExpression(expr);
4106       break;
4107     case Token::AND:
4108       VisitNaryLogicalAndExpression(expr);
4109       break;
4110     default:
4111       VisitNaryArithmeticExpression(expr);
4112       break;
4113   }
4114 }
4115 
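// Emits a comparison of the accumulator against null or undefined (for
// expressions like "x == null" or "x === undefined"). In a test context the
// comparison is fused directly into the branch; otherwise a boolean value is
// left in the accumulator.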
4116 void BytecodeGenerator::BuildLiteralCompareNil(
4117     Token::Value op, BytecodeArrayBuilder::NilValue nil) {
4118   if (execution_result()->IsTest()) {
4119     TestResultScope* test_result = execution_result()->AsTest();
4120     switch (test_result->fallthrough()) {
4121       case TestFallthrough::kThen:
4122         builder()->JumpIfNotNil(test_result->NewElseLabel(), op, nil);
4123         break;
4124       case TestFallthrough::kElse:
4125         builder()->JumpIfNil(test_result->NewThenLabel(), op, nil);
4126         break;
4127       case TestFallthrough::kNone:
4128         builder()
4129             ->JumpIfNil(test_result->NewThenLabel(), op, nil)
4130             .Jump(test_result->NewElseLabel());
4131     }
4132     test_result->SetResultConsumedByTest();
4133   } else {
4134     builder()->CompareNil(op, nil);
4135   }
4136 }
4137 
4138 void BytecodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4139   Expression* sub_expr;
4140   Literal* literal;
4141   if (expr->IsLiteralCompareTypeof(&sub_expr, &literal)) {
4142     // Emit a fast literal comparison for expressions of the form:
4143     // typeof(x) === 'string'.
4144     VisitForTypeOfValue(sub_expr);
4145     builder()->SetExpressionPosition(expr);
4146     TestTypeOfFlags::LiteralFlag literal_flag =
4147         TestTypeOfFlags::GetFlagForLiteral(ast_string_constants(), literal);
4148     if (literal_flag == TestTypeOfFlags::LiteralFlag::kOther) {
4149       builder()->LoadFalse();
4150     } else {
4151       builder()->CompareTypeOf(literal_flag);
4152     }
4153   } else if (expr->IsLiteralCompareUndefined(&sub_expr)) {
4154     VisitForAccumulatorValue(sub_expr);
4155     builder()->SetExpressionPosition(expr);
4156     BuildLiteralCompareNil(expr->op(), BytecodeArrayBuilder::kUndefinedValue);
4157   } else if (expr->IsLiteralCompareNull(&sub_expr)) {
4158     VisitForAccumulatorValue(sub_expr);
4159     builder()->SetExpressionPosition(expr);
4160     BuildLiteralCompareNil(expr->op(), BytecodeArrayBuilder::kNullValue);
4161   } else {
4162     Register lhs = VisitForRegisterValue(expr->left());
4163     VisitForAccumulatorValue(expr->right());
4164     builder()->SetExpressionPosition(expr);
4165     if (expr->op() == Token::IN) {
4166       builder()->CompareOperation(expr->op(), lhs);
4167     } else if (expr->op() == Token::INSTANCEOF) {
4168       FeedbackSlot slot = feedback_spec()->AddInstanceOfSlot();
4169       builder()->CompareOperation(expr->op(), lhs, feedback_index(slot));
4170     } else {
4171       FeedbackSlot slot = feedback_spec()->AddCompareICSlot();
4172       builder()->CompareOperation(expr->op(), lhs, feedback_index(slot));
4173     }
4174   }
4175   // Always returns a boolean value.
4176   execution_result()->SetResultIsBoolean();
4177 }
4178 
4179 void BytecodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
4180   FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
4181   Expression* subexpr;
4182   Smi* literal;
4183   if (expr->IsSmiLiteralOperation(&subexpr, &literal)) {
4184     TypeHint type_hint = VisitForAccumulatorValue(subexpr);
4185     builder()->SetExpressionPosition(expr);
4186     builder()->BinaryOperationSmiLiteral(expr->op(), literal,
4187                                          feedback_index(slot));
4188     if (expr->op() == Token::ADD && type_hint == TypeHint::kString) {
4189       execution_result()->SetResultIsString();
4190     }
4191   } else {
4192     TypeHint lhs_type = VisitForAccumulatorValue(expr->left());
4193     Register lhs = register_allocator()->NewRegister();
4194     builder()->StoreAccumulatorInRegister(lhs);
4195     TypeHint rhs_type = VisitForAccumulatorValue(expr->right());
4196     if (expr->op() == Token::ADD &&
4197         (lhs_type == TypeHint::kString || rhs_type == TypeHint::kString)) {
4198       execution_result()->SetResultIsString();
4199     }
4200 
4201     builder()->SetExpressionPosition(expr);
4202     builder()->BinaryOperation(expr->op(), lhs, feedback_index(slot));
4203   }
4204 }
4205 
4206 void BytecodeGenerator::VisitNaryArithmeticExpression(NaryOperation* expr) {
4207   // TODO(leszeks): Add support for lhs smi in commutative ops.
4208   TypeHint type_hint = VisitForAccumulatorValue(expr->first());
4209 
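  // Fold the remaining operands into the accumulator from left to right. The
  // running value only needs to be spilled into a temporary register when the
  // next operand is not a Smi literal.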
4210   for (size_t i = 0; i < expr->subsequent_length(); ++i) {
4211     RegisterAllocationScope register_scope(this);
4212     if (expr->subsequent(i)->IsSmiLiteral()) {
4213       builder()->SetExpressionPosition(expr->subsequent_op_position(i));
4214       builder()->BinaryOperationSmiLiteral(
4215           expr->op(), expr->subsequent(i)->AsLiteral()->AsSmiLiteral(),
4216           feedback_index(feedback_spec()->AddBinaryOpICSlot()));
4217     } else {
4218       Register lhs = register_allocator()->NewRegister();
4219       builder()->StoreAccumulatorInRegister(lhs);
4220       TypeHint rhs_hint = VisitForAccumulatorValue(expr->subsequent(i));
4221       if (rhs_hint == TypeHint::kString) type_hint = TypeHint::kString;
4222       builder()->SetExpressionPosition(expr->subsequent_op_position(i));
4223       builder()->BinaryOperation(
4224           expr->op(), lhs,
4225           feedback_index(feedback_spec()->AddBinaryOpICSlot()));
4226     }
4227   }
4228 
4229   if (type_hint == TypeHint::kString && expr->op() == Token::ADD) {
4230     // If any operand of an ADD is a String, a String is produced.
4231     execution_result()->SetResultIsString();
4232   }
4233 }
4234 
4235 // Note: the actual spreading is performed by the surrounding expression's
4236 // visitor.
4237 void BytecodeGenerator::VisitSpread(Spread* expr) { Visit(expr->expression()); }
4238 
4239 void BytecodeGenerator::VisitEmptyParentheses(EmptyParentheses* expr) {
4240   UNREACHABLE();
4241 }
4242 
4243 void BytecodeGenerator::VisitImportCallExpression(ImportCallExpression* expr) {
4244   RegisterList args = register_allocator()->NewRegisterList(2);
4245   VisitForRegisterValue(expr->argument(), args[1]);
4246   builder()
4247       ->MoveRegister(Register::function_closure(), args[0])
4248       .CallRuntime(Runtime::kDynamicImportCall, args);
4249 }
4250 
4251 void BytecodeGenerator::BuildGetIterator(Expression* iterable,
4252                                          IteratorType hint) {
4253   RegisterList args = register_allocator()->NewRegisterList(1);
4254   Register method = register_allocator()->NewRegister();
4255   Register obj = args[0];
4256 
4257   VisitForAccumulatorValue(iterable);
4258 
4259   if (hint == IteratorType::kAsync) {
4260     // Set method to GetMethod(obj, @@asyncIterator)
4261     builder()->StoreAccumulatorInRegister(obj).LoadAsyncIteratorProperty(
4262         obj, feedback_index(feedback_spec()->AddLoadICSlot()));
4263 
4264     BytecodeLabel async_iterator_undefined, async_iterator_null, done;
4265     // TODO(ignition): Add a single opcode for JumpIfNullOrUndefined
4266     builder()->JumpIfUndefined(&async_iterator_undefined);
4267     builder()->JumpIfNull(&async_iterator_null);
4268 
4269     // Let iterator be Call(method, obj)
4270     builder()->StoreAccumulatorInRegister(method).CallProperty(
4271         method, args, feedback_index(feedback_spec()->AddCallICSlot()));
4272 
4273     // If Type(iterator) is not Object, throw a TypeError exception.
4274     builder()->JumpIfJSReceiver(&done);
4275     builder()->CallRuntime(Runtime::kThrowSymbolAsyncIteratorInvalid);
4276 
4277     builder()->Bind(&async_iterator_undefined);
4278     builder()->Bind(&async_iterator_null);
4279     // If method is undefined,
4280     //     Let syncMethod be GetMethod(obj, @@iterator)
4281     builder()
4282         ->LoadIteratorProperty(obj,
4283                                feedback_index(feedback_spec()->AddLoadICSlot()))
4284         .StoreAccumulatorInRegister(method);
4285 
4286     //     Let syncIterator be Call(syncMethod, obj)
4287     builder()->CallProperty(method, args,
4288                             feedback_index(feedback_spec()->AddCallICSlot()));
4289 
4290     // Return CreateAsyncFromSyncIterator(syncIterator)
4291     // Alias the `method` register, as it's no longer used.
4292     Register sync_iter = method;
4293     builder()->StoreAccumulatorInRegister(sync_iter).CallRuntime(
4294         Runtime::kInlineCreateAsyncFromSyncIterator, sync_iter);
4295 
4296     builder()->Bind(&done);
4297   } else {
4298     // Let method be GetMethod(obj, @@iterator).
4299     builder()
4300         ->StoreAccumulatorInRegister(obj)
4301         .LoadIteratorProperty(obj,
4302                               feedback_index(feedback_spec()->AddLoadICSlot()))
4303         .StoreAccumulatorInRegister(method);
4304 
4305     // Let iterator be Call(method, obj).
4306     builder()->CallProperty(method, args,
4307                             feedback_index(feedback_spec()->AddCallICSlot()));
4308 
4309     // If Type(iterator) is not Object, throw a TypeError exception.
4310     BytecodeLabel no_type_error;
4311     builder()->JumpIfJSReceiver(&no_type_error);
4312     builder()->CallRuntime(Runtime::kThrowSymbolIteratorInvalid);
4313     builder()->Bind(&no_type_error);
4314   }
4315 }
4316 
4317 // Returns an IteratorRecord which is valid for the lifetime of the current
4318 // register_allocation_scope.
4319 BytecodeGenerator::IteratorRecord BytecodeGenerator::BuildGetIteratorRecord(
4320     Expression* iterable, Register next, Register object, IteratorType hint) {
4321   DCHECK(next.is_valid() && object.is_valid());
4322   BuildGetIterator(iterable, hint);
4323 
4324   builder()
4325       ->StoreAccumulatorInRegister(object)
4326       .LoadNamedProperty(object, ast_string_constants()->next_string(),
4327                          feedback_index(feedback_spec()->AddLoadICSlot()))
4328       .StoreAccumulatorInRegister(next);
4329   return IteratorRecord(object, next, hint);
4330 }
4331 
4332 BytecodeGenerator::IteratorRecord BytecodeGenerator::BuildGetIteratorRecord(
4333     Expression* iterable, IteratorType hint) {
4334   Register next = register_allocator()->NewRegister();
4335   Register object = register_allocator()->NewRegister();
4336   return BuildGetIteratorRecord(iterable, next, object, hint);
4337 }
4338 
4339 void BytecodeGenerator::BuildIteratorNext(const IteratorRecord& iterator,
4340                                           Register next_result) {
4341   DCHECK(next_result.is_valid());
4342   builder()->CallProperty(iterator.next(), RegisterList(iterator.object()),
4343                           feedback_index(feedback_spec()->AddCallICSlot()));
4344 
4345   // TODO(caitp): support async IteratorNext here.
4346 
4347   BytecodeLabel is_object;
4348   builder()
4349       ->StoreAccumulatorInRegister(next_result)
4350       .JumpIfJSReceiver(&is_object)
4351       .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, next_result)
4352       .Bind(&is_object);
4353 }
4354 
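// Loads |method_name| from |iterator| and, unless the method is undefined or
// null, calls it with |receiver_and_args| and jumps to |if_called|; otherwise
// control continues at |if_notcalled|.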
4355 void BytecodeGenerator::BuildCallIteratorMethod(Register iterator,
4356                                                 const AstRawString* method_name,
4357                                                 RegisterList receiver_and_args,
4358                                                 BytecodeLabel* if_called,
4359                                                 BytecodeLabels* if_notcalled) {
4360   RegisterAllocationScope register_scope(this);
4361 
4362   Register method = register_allocator()->NewRegister();
4363   FeedbackSlot slot = feedback_spec()->AddLoadICSlot();
4364   builder()
4365       ->LoadNamedProperty(iterator, method_name, feedback_index(slot))
4366       .JumpIfUndefined(if_notcalled->New())
4367       .JumpIfNull(if_notcalled->New())
4368       .StoreAccumulatorInRegister(method)
4369       .CallProperty(method, receiver_and_args,
4370                     feedback_index(feedback_spec()->AddCallICSlot()))
4371       .Jump(if_called);
4372 }
4373 
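// Performs the IteratorClose operation on |iterator|: the iterator's "return"
// method is loaded and, if present, called (and awaited for async iterators);
// a TypeError is thrown if the result of that call is not an object.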
4374 void BytecodeGenerator::BuildIteratorClose(const IteratorRecord& iterator,
4375                                            Expression* expr) {
4376   RegisterAllocationScope register_scope(this);
4377   BytecodeLabels done(zone());
4378   BytecodeLabel if_called;
4379   RegisterList args = RegisterList(iterator.object());
4380   BuildCallIteratorMethod(iterator.object(),
4381                           ast_string_constants()->return_string(), args,
4382                           &if_called, &done);
4383   builder()->Bind(&if_called);
4384 
4385   if (iterator.type() == IteratorType::kAsync) {
4386     DCHECK_NOT_NULL(expr);
4387     BuildAwait(expr);
4388   }
4389 
4390   builder()->JumpIfJSReceiver(done.New());
4391   {
4392     RegisterAllocationScope register_scope(this);
4393     Register return_result = register_allocator()->NewRegister();
4394     builder()
4395         ->StoreAccumulatorInRegister(return_result)
4396         .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, return_result);
4397   }
4398 
4399   done.Bind(builder());
4400 }
4401 
4402 void BytecodeGenerator::VisitGetIterator(GetIterator* expr) {
4403   builder()->SetExpressionPosition(expr);
4404   BuildGetIterator(expr->iterable(), expr->hint());
4405 }
4406 
4407 void BytecodeGenerator::VisitGetTemplateObject(GetTemplateObject* expr) {
4408   builder()->SetExpressionPosition(expr);
4409   size_t entry = builder()->AllocateDeferredConstantPoolEntry();
4410   template_objects_.push_back(std::make_pair(expr, entry));
4411   FeedbackSlot literal_slot = feedback_spec()->AddLiteralSlot();
4412   builder()->GetTemplateObject(entry, feedback_index(literal_slot));
4413 }
4414 
4415 void BytecodeGenerator::VisitTemplateLiteral(TemplateLiteral* expr) {
4416   const ZonePtrList<const AstRawString>& parts = *expr->string_parts();
4417   const ZonePtrList<Expression>& substitutions = *expr->substitutions();
4418   // Template strings with no substitutions are turned into StringLiterals.
4419   DCHECK_GT(substitutions.length(), 0);
4420   DCHECK_EQ(parts.length(), substitutions.length() + 1);
4421 
4422   // Generate string concatenation
4423   // TODO(caitp): Don't generate feedback slot if it's not used --- introduce
4424   // a simple, concise, reusable mechanism to lazily create reusable slots.
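  // As an illustrative example, `${a}b${c}` is emitted roughly as
  // (ToString(a) + "b") + ToString(c), with |last_part| holding the running
  // left operand of each addition.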
4425   FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
4426   Register last_part = register_allocator()->NewRegister();
4427   bool last_part_valid = false;
4428 
4429   builder()->SetExpressionPosition(expr);
4430   for (int i = 0; i < substitutions.length(); ++i) {
4431     if (i != 0) {
4432       builder()->StoreAccumulatorInRegister(last_part);
4433       last_part_valid = true;
4434     }
4435 
4436     if (!parts[i]->IsEmpty()) {
4437       builder()->LoadLiteral(parts[i]);
4438       if (last_part_valid) {
4439         builder()->BinaryOperation(Token::ADD, last_part, feedback_index(slot));
4440       }
4441       builder()->StoreAccumulatorInRegister(last_part);
4442       last_part_valid = true;
4443     }
4444 
4445     TypeHint type_hint = VisitForAccumulatorValue(substitutions[i]);
4446     if (type_hint != TypeHint::kString) {
4447       builder()->ToString();
4448     }
4449     if (last_part_valid) {
4450       builder()->BinaryOperation(Token::ADD, last_part, feedback_index(slot));
4451     }
4452     last_part_valid = false;
4453   }
4454 
4455   if (!parts.last()->IsEmpty()) {
4456     builder()->StoreAccumulatorInRegister(last_part);
4457     builder()->LoadLiteral(parts.last());
4458     builder()->BinaryOperation(Token::ADD, last_part, feedback_index(slot));
4459   }
4460 }
4461 
4462 void BytecodeGenerator::VisitThisFunction(ThisFunction* expr) {
4463   builder()->LoadAccumulatorWithRegister(Register::function_closure());
4464 }
4465 
4466 void BytecodeGenerator::VisitSuperCallReference(SuperCallReference* expr) {
4467   // Handled by VisitCall().
4468   UNREACHABLE();
4469 }
4470 
4471 void BytecodeGenerator::VisitSuperPropertyReference(
4472     SuperPropertyReference* expr) {
4473   builder()->CallRuntime(Runtime::kThrowUnsupportedSuperError);
4474 }
4475 
4476 void BytecodeGenerator::VisitCommaExpression(BinaryOperation* binop) {
4477   VisitForEffect(binop->left());
4478   Visit(binop->right());
4479 }
4480 
4481 void BytecodeGenerator::VisitNaryCommaExpression(NaryOperation* expr) {
4482   DCHECK_GT(expr->subsequent_length(), 0);
4483 
4484   VisitForEffect(expr->first());
4485   for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
4486     VisitForEffect(expr->subsequent(i));
4487   }
4488   Visit(expr->subsequent(expr->subsequent_length() - 1));
4489 }
4490 
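// Emits the test for one non-final operand of a logical || or && in a test
// context: for || the operand jumps to |then_labels| when truthy and falls
// through to the next test otherwise; for && it jumps to |else_labels| when
// falsy. |coverage_slot| is incremented when control reaches the next operand.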
4491 void BytecodeGenerator::VisitLogicalTestSubExpression(
4492     Token::Value token, Expression* expr, BytecodeLabels* then_labels,
4493     BytecodeLabels* else_labels, int coverage_slot) {
4494   DCHECK(token == Token::OR || token == Token::AND);
4495 
4496   BytecodeLabels test_next(zone());
4497   if (token == Token::OR) {
4498     VisitForTest(expr, then_labels, &test_next, TestFallthrough::kElse);
4499   } else {
4500     DCHECK_EQ(Token::AND, token);
4501     VisitForTest(expr, &test_next, else_labels, TestFallthrough::kThen);
4502   }
4503   test_next.Bind(builder());
4504 
4505   BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
4506 }
4507 
4508 void BytecodeGenerator::VisitLogicalTest(Token::Value token, Expression* left,
4509                                          Expression* right,
4510                                          int right_coverage_slot) {
4511   DCHECK(token == Token::OR || token == Token::AND);
4512   TestResultScope* test_result = execution_result()->AsTest();
4513   BytecodeLabels* then_labels = test_result->then_labels();
4514   BytecodeLabels* else_labels = test_result->else_labels();
4515   TestFallthrough fallthrough = test_result->fallthrough();
4516 
4517   VisitLogicalTestSubExpression(token, left, then_labels, else_labels,
4518                                 right_coverage_slot);
4519   // The last test has the same then, else and fallthrough as the parent test.
4520   VisitForTest(right, then_labels, else_labels, fallthrough);
4521 }
4522 
4523 void BytecodeGenerator::VisitNaryLogicalTest(
4524     Token::Value token, NaryOperation* expr,
4525     const NaryCodeCoverageSlots* coverage_slots) {
4526   DCHECK(token == Token::OR || token == Token::AND);
4527   DCHECK_GT(expr->subsequent_length(), 0);
4528 
4529   TestResultScope* test_result = execution_result()->AsTest();
4530   BytecodeLabels* then_labels = test_result->then_labels();
4531   BytecodeLabels* else_labels = test_result->else_labels();
4532   TestFallthrough fallthrough = test_result->fallthrough();
4533 
4534   VisitLogicalTestSubExpression(token, expr->first(), then_labels, else_labels,
4535                                 coverage_slots->GetSlotFor(0));
4536   for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
4537     VisitLogicalTestSubExpression(token, expr->subsequent(i), then_labels,
4538                                   else_labels,
4539                                   coverage_slots->GetSlotFor(i + 1));
4540   }
4541   // The last test has the same then, else and fallthrough as the parent test.
4542   VisitForTest(expr->subsequent(expr->subsequent_length() - 1), then_labels,
4543                else_labels, fallthrough);
4544 }
4545 
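// The two helpers below visit a single non-final operand of a logical || or &&
// chain in a value context. They return true when the operand statically
// short-circuits the whole chain (a known-true operand for ||, a known-false
// operand for &&), in which case the caller emits no code for the remaining
// operands.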
4546 bool BytecodeGenerator::VisitLogicalOrSubExpression(Expression* expr,
4547                                                     BytecodeLabels* end_labels,
4548                                                     int coverage_slot) {
4549   if (expr->ToBooleanIsTrue()) {
4550     VisitForAccumulatorValue(expr);
4551     end_labels->Bind(builder());
4552     return true;
4553   } else if (!expr->ToBooleanIsFalse()) {
4554     TypeHint type_hint = VisitForAccumulatorValue(expr);
4555     builder()->JumpIfTrue(ToBooleanModeFromTypeHint(type_hint),
4556                           end_labels->New());
4557   }
4558 
4559   BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
4560 
4561   return false;
4562 }
4563 
4564 bool BytecodeGenerator::VisitLogicalAndSubExpression(Expression* expr,
4565                                                      BytecodeLabels* end_labels,
4566                                                      int coverage_slot) {
4567   if (expr->ToBooleanIsFalse()) {
4568     VisitForAccumulatorValue(expr);
4569     end_labels->Bind(builder());
4570     return true;
4571   } else if (!expr->ToBooleanIsTrue()) {
4572     TypeHint type_hint = VisitForAccumulatorValue(expr);
4573     builder()->JumpIfFalse(ToBooleanModeFromTypeHint(type_hint),
4574                            end_labels->New());
4575   }
4576 
4577   BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
4578 
4579   return false;
4580 }
4581 
4582 void BytecodeGenerator::VisitLogicalOrExpression(BinaryOperation* binop) {
4583   Expression* left = binop->left();
4584   Expression* right = binop->right();
4585 
4586   int right_coverage_slot =
4587       AllocateBlockCoverageSlotIfEnabled(binop, SourceRangeKind::kRight);
4588 
4589   if (execution_result()->IsTest()) {
4590     TestResultScope* test_result = execution_result()->AsTest();
4591     if (left->ToBooleanIsTrue()) {
4592       builder()->Jump(test_result->NewThenLabel());
4593     } else if (left->ToBooleanIsFalse() && right->ToBooleanIsFalse()) {
4594       BuildIncrementBlockCoverageCounterIfEnabled(right_coverage_slot);
4595       builder()->Jump(test_result->NewElseLabel());
4596     } else {
4597       VisitLogicalTest(Token::OR, left, right, right_coverage_slot);
4598     }
4599     test_result->SetResultConsumedByTest();
4600   } else {
4601     BytecodeLabels end_labels(zone());
4602     if (VisitLogicalOrSubExpression(left, &end_labels, right_coverage_slot)) {
4603       return;
4604     }
4605     VisitForAccumulatorValue(right);
4606     end_labels.Bind(builder());
4607   }
4608 }
4609 
4610 void BytecodeGenerator::VisitNaryLogicalOrExpression(NaryOperation* expr) {
4611   Expression* first = expr->first();
4612   DCHECK_GT(expr->subsequent_length(), 0);
4613 
4614   NaryCodeCoverageSlots coverage_slots(this, expr);
4615 
4616   if (execution_result()->IsTest()) {
4617     TestResultScope* test_result = execution_result()->AsTest();
4618     if (first->ToBooleanIsTrue()) {
4619       builder()->Jump(test_result->NewThenLabel());
4620     } else {
4621       VisitNaryLogicalTest(Token::OR, expr, &coverage_slots);
4622     }
4623     test_result->SetResultConsumedByTest();
4624   } else {
4625     BytecodeLabels end_labels(zone());
4626     if (VisitLogicalOrSubExpression(first, &end_labels,
4627                                     coverage_slots.GetSlotFor(0))) {
4628       return;
4629     }
4630     for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
4631       if (VisitLogicalOrSubExpression(expr->subsequent(i), &end_labels,
4632                                       coverage_slots.GetSlotFor(i + 1))) {
4633         return;
4634       }
4635     }
4636     // We have to visit the last value even if it's true, because we need its
4637     // actual value.
4638     VisitForAccumulatorValue(expr->subsequent(expr->subsequent_length() - 1));
4639     end_labels.Bind(builder());
4640   }
4641 }
4642 
4643 void BytecodeGenerator::VisitLogicalAndExpression(BinaryOperation* binop) {
4644   Expression* left = binop->left();
4645   Expression* right = binop->right();
4646 
4647   int right_coverage_slot =
4648       AllocateBlockCoverageSlotIfEnabled(binop, SourceRangeKind::kRight);
4649 
4650   if (execution_result()->IsTest()) {
4651     TestResultScope* test_result = execution_result()->AsTest();
4652     if (left->ToBooleanIsFalse()) {
4653       builder()->Jump(test_result->NewElseLabel());
4654     } else if (left->ToBooleanIsTrue() && right->ToBooleanIsTrue()) {
4655       BuildIncrementBlockCoverageCounterIfEnabled(right_coverage_slot);
4656       builder()->Jump(test_result->NewThenLabel());
4657     } else {
4658       VisitLogicalTest(Token::AND, left, right, right_coverage_slot);
4659     }
4660     test_result->SetResultConsumedByTest();
4661   } else {
4662     BytecodeLabels end_labels(zone());
4663     if (VisitLogicalAndSubExpression(left, &end_labels, right_coverage_slot)) {
4664       return;
4665     }
4666     VisitForAccumulatorValue(right);
4667     end_labels.Bind(builder());
4668   }
4669 }
4670 
4671 void BytecodeGenerator::VisitNaryLogicalAndExpression(NaryOperation* expr) {
4672   Expression* first = expr->first();
4673   DCHECK_GT(expr->subsequent_length(), 0);
4674 
4675   NaryCodeCoverageSlots coverage_slots(this, expr);
4676 
4677   if (execution_result()->IsTest()) {
4678     TestResultScope* test_result = execution_result()->AsTest();
4679     if (first->ToBooleanIsFalse()) {
4680       builder()->Jump(test_result->NewElseLabel());
4681     } else {
4682       VisitNaryLogicalTest(Token::AND, expr, &coverage_slots);
4683     }
4684     test_result->SetResultConsumedByTest();
4685   } else {
4686     BytecodeLabels end_labels(zone());
4687     if (VisitLogicalAndSubExpression(first, &end_labels,
4688                                      coverage_slots.GetSlotFor(0))) {
4689       return;
4690     }
4691     for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
4692       if (VisitLogicalAndSubExpression(expr->subsequent(i), &end_labels,
4693                                        coverage_slots.GetSlotFor(i + 1))) {
4694         return;
4695       }
4696     }
4697     // We have to visit the last value even if it's false, because we need its
4698     // actual value.
4699     VisitForAccumulatorValue(expr->subsequent(expr->subsequent_length() - 1));
4700     end_labels.Bind(builder());
4701   }
4702 }
4703 
4704 void BytecodeGenerator::VisitRewritableExpression(RewritableExpression* expr) {
4705   Visit(expr->expression());
4706 }
4707 
4708 void BytecodeGenerator::BuildNewLocalActivationContext() {
4709   ValueResultScope value_execution_result(this);
4710   Scope* scope = closure_scope();
4711   DCHECK_EQ(current_scope(), closure_scope());
4712 
4713   // Create the appropriate context.
4714   if (scope->is_script_scope()) {
4715     Register scope_reg = register_allocator()->NewRegister();
4716     builder()
4717         ->LoadLiteral(scope)
4718         .StoreAccumulatorInRegister(scope_reg)
4719         .CallRuntime(Runtime::kNewScriptContext, scope_reg);
4720   } else if (scope->is_module_scope()) {
4721     // We don't need to do anything for the outer script scope.
4722     DCHECK(scope->outer_scope()->is_script_scope());
4723 
4724     // A JSFunction representing a module is called with the module object as
4725     // its sole argument.
4726     RegisterList args = register_allocator()->NewRegisterList(2);
4727     builder()
4728         ->MoveRegister(builder()->Parameter(0), args[0])
4729         .LoadLiteral(scope)
4730         .StoreAccumulatorInRegister(args[1])
4731         .CallRuntime(Runtime::kPushModuleContext, args);
4732   } else {
4733     DCHECK(scope->is_function_scope() || scope->is_eval_scope());
4734     int slot_count = scope->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
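    // Use the CreateFunctionContext/CreateEvalContext bytecodes for contexts
    // that are small enough for the stub; larger contexts fall back to the
    // Runtime::kNewFunctionContext call below.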
4735     if (slot_count <= ConstructorBuiltins::MaximumFunctionContextSlots()) {
4736       switch (scope->scope_type()) {
4737         case EVAL_SCOPE:
4738           builder()->CreateEvalContext(scope, slot_count);
4739           break;
4740         case FUNCTION_SCOPE:
4741           builder()->CreateFunctionContext(scope, slot_count);
4742           break;
4743         default:
4744           UNREACHABLE();
4745       }
4746     } else {
4747       Register arg = register_allocator()->NewRegister();
4748       builder()->LoadLiteral(scope).StoreAccumulatorInRegister(arg).CallRuntime(
4749           Runtime::kNewFunctionContext, arg);
4750     }
4751   }
4752 }
4753 
4754 void BytecodeGenerator::BuildLocalActivationContextInitialization() {
4755   DeclarationScope* scope = closure_scope();
4756 
4757   if (scope->has_this_declaration() && scope->receiver()->IsContextSlot()) {
4758     Variable* variable = scope->receiver();
4759     Register receiver(builder()->Receiver());
4760     // Context variable (at bottom of the context chain).
4761     DCHECK_EQ(0, scope->ContextChainLength(variable->scope()));
4762     builder()->LoadAccumulatorWithRegister(receiver).StoreContextSlot(
4763         execution_context()->reg(), variable->index(), 0);
4764   }
4765 
4766   // Copy parameters into context if necessary.
4767   int num_parameters = scope->num_parameters();
4768   for (int i = 0; i < num_parameters; i++) {
4769     Variable* variable = scope->parameter(i);
4770     if (!variable->IsContextSlot()) continue;
4771 
4772     Register parameter(builder()->Parameter(i));
4773     // Context variable (at bottom of the context chain).
4774     DCHECK_EQ(0, scope->ContextChainLength(variable->scope()));
4775     builder()->LoadAccumulatorWithRegister(parameter).StoreContextSlot(
4776         execution_context()->reg(), variable->index(), 0);
4777   }
4778 }
4779 
4780 void BytecodeGenerator::BuildNewLocalBlockContext(Scope* scope) {
4781   ValueResultScope value_execution_result(this);
4782   DCHECK(scope->is_block_scope());
4783 
4784   builder()->CreateBlockContext(scope);
4785 }
4786 
4787 void BytecodeGenerator::BuildNewLocalWithContext(Scope* scope) {
4788   ValueResultScope value_execution_result(this);
4789 
4790   Register extension_object = register_allocator()->NewRegister();
4791 
4792   builder()->ToObject(extension_object);
4793   builder()->CreateWithContext(extension_object, scope);
4794 }
4795 
4796 void BytecodeGenerator::BuildNewLocalCatchContext(Scope* scope) {
4797   ValueResultScope value_execution_result(this);
4798   DCHECK(scope->catch_variable()->IsContextSlot());
4799 
4800   Register exception = register_allocator()->NewRegister();
4801   builder()->StoreAccumulatorInRegister(exception);
4802   builder()->CreateCatchContext(exception, scope);
4803 }
4804 
4805 void BytecodeGenerator::VisitObjectLiteralAccessor(
4806     Register home_object, ObjectLiteralProperty* property, Register value_out) {
4807   if (property == nullptr) {
4808     builder()->LoadNull().StoreAccumulatorInRegister(value_out);
4809   } else {
4810     VisitForRegisterValue(property->value(), value_out);
4811     VisitSetHomeObject(value_out, home_object, property);
4812   }
4813 }
4814 
4815 void BytecodeGenerator::VisitSetHomeObject(Register value, Register home_object,
4816                                            LiteralProperty* property) {
4817   Expression* expr = property->value();
4818   if (FunctionLiteral::NeedsHomeObject(expr)) {
4819     FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
4820     builder()
4821         ->LoadAccumulatorWithRegister(home_object)
4822         .StoreHomeObjectProperty(value, feedback_index(slot), language_mode());
4823   }
4824 }
4825 
4826 void BytecodeGenerator::VisitArgumentsObject(Variable* variable) {
4827   if (variable == nullptr) return;
4828 
4829   DCHECK(variable->IsContextSlot() || variable->IsStackAllocated());
4830 
4831   // Allocate and initialize a new arguments object and assign to the
4832   // {arguments} variable.
4833   builder()->CreateArguments(closure_scope()->GetArgumentsType());
4834   BuildVariableAssignment(variable, Token::ASSIGN, HoleCheckMode::kElided);
4835 }
4836 
4837 void BytecodeGenerator::VisitRestArgumentsArray(Variable* rest) {
4838   if (rest == nullptr) return;
4839 
4840   // Allocate and initialize a new rest parameter and assign to the {rest}
4841   // variable.
4842   builder()->CreateArguments(CreateArgumentsType::kRestParameter);
4843   DCHECK(rest->IsContextSlot() || rest->IsStackAllocated());
4844   BuildVariableAssignment(rest, Token::ASSIGN, HoleCheckMode::kElided);
4845 }
4846 
4847 void BytecodeGenerator::VisitThisFunctionVariable(Variable* variable) {
4848   if (variable == nullptr) return;
4849 
4850   // Store the closure we were called with in the given variable.
4851   builder()->LoadAccumulatorWithRegister(Register::function_closure());
4852   BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
4853 }
4854 
4855 void BytecodeGenerator::VisitNewTargetVariable(Variable* variable) {
4856   if (variable == nullptr) return;
4857 
4858   // The generator resume trampoline abuses the new.target register
4859   // to pass in the generator object.  In ordinary calls, new.target is always
4860   // undefined because generator functions are non-constructible, so don't
4861   // assign anything to the new.target variable.
4862   if (info()->literal()->CanSuspend()) return;
4863 
4864   if (variable->location() == VariableLocation::LOCAL) {
4865     // The new.target register was already assigned by the entry trampoline.
4866     DCHECK_EQ(incoming_new_target_or_generator_.index(),
4867               GetRegisterForLocalVariable(variable).index());
4868     return;
4869   }
4870 
4871   // Store the new target we were called with in the given variable.
4872   builder()->LoadAccumulatorWithRegister(incoming_new_target_or_generator_);
4873   BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
4874 }
4875 
4876 void BytecodeGenerator::BuildGeneratorObjectVariableInitialization() {
4877   DCHECK(IsResumableFunction(info()->literal()->kind()));
4878 
4879   Variable* generator_object_var = closure_scope()->generator_object_var();
4880   RegisterAllocationScope register_scope(this);
4881   RegisterList args = register_allocator()->NewRegisterList(2);
4882   builder()
4883       ->MoveRegister(Register::function_closure(), args[0])
4884       .MoveRegister(builder()->Receiver(), args[1])
4885       .CallRuntime(Runtime::kInlineCreateJSGeneratorObject, args)
4886       .StoreAccumulatorInRegister(generator_object());
4887 
4888   if (generator_object_var->location() == VariableLocation::LOCAL) {
4889     // The generator object register is already set to the variable's local
4890     // register.
4891     DCHECK_EQ(generator_object().index(),
4892               GetRegisterForLocalVariable(generator_object_var).index());
4893   } else {
4894     BuildVariableAssignment(generator_object_var, Token::INIT,
4895                             HoleCheckMode::kElided);
4896   }
4897 }
4898 
4899 void BytecodeGenerator::BuildPushUndefinedIntoRegisterList(
4900     RegisterList* reg_list) {
4901   Register reg = register_allocator()->GrowRegisterList(reg_list);
4902   builder()->LoadUndefined().StoreAccumulatorInRegister(reg);
4903 }
4904 
4905 void BytecodeGenerator::BuildLoadPropertyKey(LiteralProperty* property,
4906                                              Register out_reg) {
4907   if (property->key()->IsStringLiteral()) {
4908     VisitForRegisterValue(property->key(), out_reg);
4909   } else {
4910     VisitForAccumulatorValue(property->key());
4911     builder()->ToName(out_reg);
4912   }
4913 }
4914 
4915 int BytecodeGenerator::AllocateBlockCoverageSlotIfEnabled(
4916     AstNode* node, SourceRangeKind kind) {
4917   return (block_coverage_builder_ == nullptr)
4918              ? BlockCoverageBuilder::kNoCoverageArraySlot
4919              : block_coverage_builder_->AllocateBlockCoverageSlot(node, kind);
4920 }
4921 
4922 int BytecodeGenerator::AllocateNaryBlockCoverageSlotIfEnabled(
4923     NaryOperation* node, size_t index) {
4924   return (block_coverage_builder_ == nullptr)
4925              ? BlockCoverageBuilder::kNoCoverageArraySlot
4926              : block_coverage_builder_->AllocateNaryBlockCoverageSlot(node,
4927                                                                       index);
4928 }
4929 
4930 void BytecodeGenerator::BuildIncrementBlockCoverageCounterIfEnabled(
4931     AstNode* node, SourceRangeKind kind) {
4932   if (block_coverage_builder_ == nullptr) return;
4933   block_coverage_builder_->IncrementBlockCounter(node, kind);
4934 }
4935 
4936 void BytecodeGenerator::BuildIncrementBlockCoverageCounterIfEnabled(
4937     int coverage_array_slot) {
4938   if (block_coverage_builder_ != nullptr) {
4939     block_coverage_builder_->IncrementBlockCounter(coverage_array_slot);
4940   }
4941 }
4942 
4943 // Visits the expression |expr| and places the result in the accumulator.
4944 BytecodeGenerator::TypeHint BytecodeGenerator::VisitForAccumulatorValue(
4945     Expression* expr) {
4946   ValueResultScope accumulator_scope(this);
4947   Visit(expr);
4948   return accumulator_scope.type_hint();
4949 }
4950 
4951 void BytecodeGenerator::VisitForAccumulatorValueOrTheHole(Expression* expr) {
4952   if (expr == nullptr) {
4953     builder()->LoadTheHole();
4954   } else {
4955     VisitForAccumulatorValue(expr);
4956   }
4957 }
4958 
4959 // Visits the expression |expr| and discards the result.
4960 void BytecodeGenerator::VisitForEffect(Expression* expr) {
4961   EffectResultScope effect_scope(this);
4962   Visit(expr);
4963 }
4964 
4965 // Visits the expression |expr| and returns the register containing
4966 // the expression result.
4967 Register BytecodeGenerator::VisitForRegisterValue(Expression* expr) {
4968   VisitForAccumulatorValue(expr);
4969   Register result = register_allocator()->NewRegister();
4970   builder()->StoreAccumulatorInRegister(result);
4971   return result;
4972 }
4973 
4974 // Visits the expression |expr| and stores the expression result in
4975 // |destination|.
4976 void BytecodeGenerator::VisitForRegisterValue(Expression* expr,
4977                                               Register destination) {
4978   ValueResultScope register_scope(this);
4979   Visit(expr);
4980   builder()->StoreAccumulatorInRegister(destination);
4981 }
4982 
4983 // Visits the expression |expr| and pushes the result into a new register
4984 // added to the end of |reg_list|.
4985 void BytecodeGenerator::VisitAndPushIntoRegisterList(Expression* expr,
4986                                                      RegisterList* reg_list) {
4987   {
4988     ValueResultScope register_scope(this);
4989     Visit(expr);
4990   }
4991   // Grow the register list after visiting the expression to avoid reserving
4992   // the register across the expression evaluation, which could cause memory
4993   // leaks for deep expressions due to dead objects being kept alive by pointers
4994   // in registers.
4995   Register destination = register_allocator()->GrowRegisterList(reg_list);
4996   builder()->StoreAccumulatorInRegister(destination);
4997 }
4998 
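// Emits the boolean test for a value already in the accumulator, given which
// branch (if any) immediately follows the test. Illustrative summary of the
// cases handled below:
//   kThen: JumpIfFalse -> else_labels  (execution falls through to "then")
//   kElse: JumpIfTrue  -> then_labels  (execution falls through to "else")
//   kNone: JumpIfTrue  -> then_labels, then an unconditional Jump -> else_labels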
4999 void BytecodeGenerator::BuildTest(ToBooleanMode mode,
5000                                   BytecodeLabels* then_labels,
5001                                   BytecodeLabels* else_labels,
5002                                   TestFallthrough fallthrough) {
5003   switch (fallthrough) {
5004     case TestFallthrough::kThen:
5005       builder()->JumpIfFalse(mode, else_labels->New());
5006       break;
5007     case TestFallthrough::kElse:
5008       builder()->JumpIfTrue(mode, then_labels->New());
5009       break;
5010     case TestFallthrough::kNone:
5011       builder()->JumpIfTrue(mode, then_labels->New());
5012       builder()->Jump(else_labels->New());
5013       break;
5014   }
5015 }
5016 
5017 // Visits the expression |expr| for testing its boolean value, jumping to the
5018 // then or else labels depending on its value and on short-circuit semantics.
5019 void BytecodeGenerator::VisitForTest(Expression* expr,
5020                                      BytecodeLabels* then_labels,
5021                                      BytecodeLabels* else_labels,
5022                                      TestFallthrough fallthrough) {
5023   bool result_consumed;
5024   TypeHint type_hint;
5025   {
5026     // To make sure that all temporary registers are returned before the
5027     // jumps below are generated, the result scope is destroyed first;
5028     // otherwise dead registers might be materialized.
5029     TestResultScope test_result(this, then_labels, else_labels, fallthrough);
5030     Visit(expr);
5031     result_consumed = test_result.result_consumed_by_test();
5032     type_hint = test_result.type_hint();
5033     // Labels and fallthrough might have been mutated, so update based on
5034     // TestResultScope.
5035     then_labels = test_result.then_labels();
5036     else_labels = test_result.else_labels();
5037     fallthrough = test_result.fallthrough();
5038   }
5039   if (!result_consumed) {
5040     BuildTest(ToBooleanModeFromTypeHint(type_hint), then_labels, else_labels,
5041               fallthrough);
5042   }
5043 }
5044 
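// Visits |expr| while staying in the enclosing TestResultScope. If visiting
// the expression did not itself consume the test result, the boolean test is
// emitted here against the enclosing scope's labels and marked as consumed.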
5045 void BytecodeGenerator::VisitInSameTestExecutionScope(Expression* expr) {
5046   DCHECK(execution_result()->IsTest());
5047   {
5048     RegisterAllocationScope reg_scope(this);
5049     Visit(expr);
5050   }
5051   if (!execution_result()->AsTest()->result_consumed_by_test()) {
5052     TestResultScope* result_scope = execution_result()->AsTest();
5053     BuildTest(ToBooleanModeFromTypeHint(result_scope->type_hint()),
5054               result_scope->then_labels(), result_scope->else_labels(),
5055               result_scope->fallthrough());
5056     result_scope->SetResultConsumedByTest();
5057   }
5058 }
5059 
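// Visits |stmt| in |scope|, which must contain no declarations, entering it
// both as the current scope and as the current execution context.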
5060 void BytecodeGenerator::VisitInScope(Statement* stmt, Scope* scope) {
5061   DCHECK(scope->declarations()->is_empty());
5062   CurrentScope current_scope(this, scope);
5063   ContextScope context_scope(this, scope);
5064   Visit(stmt);
5065 }
5066 
5067 Register BytecodeGenerator::GetRegisterForLocalVariable(Variable* variable) {
5068   DCHECK_EQ(VariableLocation::LOCAL, variable->location());
5069   return builder()->Local(variable->index());
5070 }
5071 
5072 FunctionKind BytecodeGenerator::function_kind() const {
5073   return info()->literal()->kind();
5074 }
5075 
5076 LanguageMode BytecodeGenerator::language_mode() const {
5077   return current_scope()->language_mode();
5078 }
5079 
5080 Register BytecodeGenerator::generator_object() const {
5081   DCHECK(info()->literal()->CanSuspend());
5082   return incoming_new_target_or_generator_;
5083 }
5084 
5085 FeedbackVectorSpec* BytecodeGenerator::feedback_spec() {
5086   return info()->feedback_vector_spec();
5087 }
5088 
5089 int BytecodeGenerator::feedback_index(FeedbackSlot slot) const {
5090   DCHECK(!slot.IsInvalid());
5091   return FeedbackVector::GetIndex(slot);
5092 }
5093 
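// The GetCached*ICSlot helpers below consult feedback_slot_cache() so that
// repeated accesses to the same target can share a feedback slot instead of
// allocating a new one each time. This one caches LoadGlobalIC slots per
// variable and typeof-mode.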
5094 FeedbackSlot BytecodeGenerator::GetCachedLoadGlobalICSlot(
5095     TypeofMode typeof_mode, Variable* variable) {
5096   FeedbackSlotKind slot_kind =
5097       typeof_mode == INSIDE_TYPEOF
5098           ? FeedbackSlotKind::kLoadGlobalInsideTypeof
5099           : FeedbackSlotKind::kLoadGlobalNotInsideTypeof;
5100   FeedbackSlot slot = feedback_slot_cache()->Get(slot_kind, variable);
5101   if (!slot.IsInvalid()) {
5102     return slot;
5103   }
5104   slot = feedback_spec()->AddLoadGlobalICSlot(typeof_mode);
5105   feedback_slot_cache()->Put(slot_kind, variable, slot);
5106   return slot;
5107 }
5108 
5109 FeedbackSlot BytecodeGenerator::GetCachedStoreGlobalICSlot(
5110     LanguageMode language_mode, Variable* variable) {
5111   FeedbackSlotKind slot_kind = is_strict(language_mode)
5112                                    ? FeedbackSlotKind::kStoreGlobalStrict
5113                                    : FeedbackSlotKind::kStoreGlobalSloppy;
5114   FeedbackSlot slot = feedback_slot_cache()->Get(slot_kind, variable);
5115   if (!slot.IsInvalid()) {
5116     return slot;
5117   }
5118   slot = feedback_spec()->AddStoreGlobalICSlot(language_mode);
5119   feedback_slot_cache()->Put(slot_kind, variable, slot);
5120   return slot;
5121 }
5122 
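// Shares named-property LoadIC slots across accesses of the same property on
// the same receiver variable; sharing only applies when the receiver is a
// variable proxy and FLAG_ignition_share_named_property_feedback is enabled.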
5123 FeedbackSlot BytecodeGenerator::GetCachedLoadICSlot(const Expression* expr,
5124                                                     const AstRawString* name) {
5125   if (!FLAG_ignition_share_named_property_feedback) {
5126     return feedback_spec()->AddLoadICSlot();
5127   }
5128   FeedbackSlotKind slot_kind = FeedbackSlotKind::kLoadProperty;
5129   if (!expr->IsVariableProxy()) {
5130     return feedback_spec()->AddLoadICSlot();
5131   }
5132   const VariableProxy* proxy = expr->AsVariableProxy();
5133   FeedbackSlot slot =
5134       feedback_slot_cache()->Get(slot_kind, proxy->var()->index(), name);
5135   if (!slot.IsInvalid()) {
5136     return slot;
5137   }
5138   slot = feedback_spec()->AddLoadICSlot();
5139   feedback_slot_cache()->Put(slot_kind, proxy->var()->index(), name, slot);
5140   return slot;
5141 }
5142 
5143 FeedbackSlot BytecodeGenerator::GetCachedStoreICSlot(const Expression* expr,
5144                                                      const AstRawString* name) {
5145   if (!FLAG_ignition_share_named_property_feedback) {
5146     return feedback_spec()->AddStoreICSlot(language_mode());
5147   }
5148   FeedbackSlotKind slot_kind = is_strict(language_mode())
5149                                    ? FeedbackSlotKind::kStoreNamedStrict
5150                                    : FeedbackSlotKind::kStoreNamedSloppy;
5151   if (!expr->IsVariableProxy()) {
5152     return feedback_spec()->AddStoreICSlot(language_mode());
5153   }
5154   const VariableProxy* proxy = expr->AsVariableProxy();
5155   FeedbackSlot slot =
5156       feedback_slot_cache()->Get(slot_kind, proxy->var()->index(), name);
5157   if (!slot.IsInvalid()) {
5158     return slot;
5159   }
5160   slot = feedback_spec()->AddStoreICSlot(language_mode());
5161   feedback_slot_cache()->Put(slot_kind, proxy->var()->index(), name, slot);
5162   return slot;
5163 }
5164 
5165 FeedbackSlot BytecodeGenerator::GetCachedCreateClosureSlot(
5166     FunctionLiteral* literal) {
5167   FeedbackSlotKind slot_kind = FeedbackSlotKind::kCreateClosure;
5168   FeedbackSlot slot = feedback_slot_cache()->Get(slot_kind, literal);
5169   if (!slot.IsInvalid()) {
5170     return slot;
5171   }
5172   slot = feedback_spec()->AddCreateClosureSlot();
5173   feedback_slot_cache()->Put(slot_kind, literal, slot);
5174   return slot;
5175 }
5176 
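// Lazily allocates a single CompareIC feedback slot that is reused by every
// caller; presumably intended for comparisons whose feedback is not worth a
// dedicated slot.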
5177 FeedbackSlot BytecodeGenerator::GetDummyCompareICSlot() {
5178   if (!dummy_feedback_slot_.IsInvalid()) {
5179     return dummy_feedback_slot_;
5180   }
5181   dummy_feedback_slot_ = feedback_spec()->AddCompareICSlot();
5182   return dummy_feedback_slot_;
5183 }
5184 
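// The helpers below pick the strict or sloppy variant of the StoreToSuper
// runtime functions based on the current language mode.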
5185 Runtime::FunctionId BytecodeGenerator::StoreToSuperRuntimeId() {
5186   return is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
5187                                     : Runtime::kStoreToSuper_Sloppy;
5188 }
5189 
5190 Runtime::FunctionId BytecodeGenerator::StoreKeyedToSuperRuntimeId() {
5191   return is_strict(language_mode()) ? Runtime::kStoreKeyedToSuper_Strict
5192                                     : Runtime::kStoreKeyedToSuper_Sloppy;
5193 }
5194 
5195 }  // namespace interpreter
5196 }  // namespace internal
5197 }  // namespace v8
5198