// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/interpreter/bytecode-generator.h"

#include "src/api/api-inl.h"
#include "src/ast/ast-source-ranges.h"
#include "src/ast/scopes.h"
#include "src/builtins/builtins-constructor.h"
#include "src/codegen/compiler.h"
#include "src/codegen/unoptimized-compilation-info.h"
#include "src/interpreter/bytecode-flags.h"
#include "src/interpreter/bytecode-jump-table.h"
#include "src/interpreter/bytecode-label.h"
#include "src/interpreter/bytecode-register-allocator.h"
#include "src/interpreter/bytecode-register.h"
#include "src/interpreter/control-flow-builders.h"
#include "src/logging/local-logger.h"
#include "src/logging/log.h"
#include "src/objects/debug-objects.h"
#include "src/objects/literal-objects-inl.h"
#include "src/objects/objects-inl.h"
#include "src/objects/smi.h"
#include "src/objects/template-objects-inl.h"
#include "src/parsing/parse-info.h"
#include "src/parsing/token.h"
#include "src/utils/ostreams.h"

namespace v8 {
namespace internal {
namespace interpreter {

// Scoped class tracking context objects created by the visitor. Represents
// mutations of the context chain within the function body, allowing pushing and
// popping of the current {context_register} during visitation.
class BytecodeGenerator::ContextScope {
 public:
  ContextScope(BytecodeGenerator* generator, Scope* scope)
      : generator_(generator),
        scope_(scope),
        outer_(generator_->execution_context()),
        register_(Register::current_context()),
        depth_(0) {
    DCHECK(scope->NeedsContext() || outer_ == nullptr);
    if (outer_) {
      depth_ = outer_->depth_ + 1;

      // Push the outer context into a new context register.
      Register outer_context_reg =
          generator_->register_allocator()->NewRegister();
      outer_->set_register(outer_context_reg);
      generator_->builder()->PushContext(outer_context_reg);
    }
    generator_->set_execution_context(this);
  }

  ~ContextScope() {
    if (outer_) {
      DCHECK_EQ(register_.index(), Register::current_context().index());
      generator_->builder()->PopContext(outer_->reg());
      outer_->set_register(register_);
    }
    generator_->set_execution_context(outer_);
  }

  // Returns the depth of the given |scope| for the current execution context.
  int ContextChainDepth(Scope* scope) {
    return scope_->ContextChainLength(scope);
  }

  // Returns the execution context at |depth| in the current context chain if it
  // is a function local execution context, otherwise returns nullptr.
  ContextScope* Previous(int depth) {
    if (depth > depth_) {
      return nullptr;
    }

    ContextScope* previous = this;
    for (int i = depth; i > 0; --i) {
      previous = previous->outer_;
    }
    return previous;
  }

  Register reg() const { return register_; }

 private:
  const BytecodeArrayBuilder* builder() const { return generator_->builder(); }

  void set_register(Register reg) { register_ = reg; }

  BytecodeGenerator* generator_;
  Scope* scope_;
  ContextScope* outer_;
  Register register_;
  int depth_;
};
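
// A minimal usage sketch (illustrative, mirroring the pattern used by the
// block and function visitors further down in this file): after emitting the
// bytecode that creates a new context, a ContextScope is stacked for the
// duration of the visit so that Register::current_context() refers to it.
//
//   BuildNewLocalBlockContext(stmt->scope());   // new context -> accumulator
//   ContextScope scope(this, stmt->scope());    // push it as current context
//   VisitBlockDeclarationsAndStatements(stmt);  // body sees the new context
//   // ~ContextScope() pops back to the outer context register.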

// Scoped class for tracking control statements entered by the
// visitor. The pattern derives from AstGraphBuilder::ControlScope.
class BytecodeGenerator::ControlScope {
 public:
  explicit ControlScope(BytecodeGenerator* generator)
      : generator_(generator),
        outer_(generator->execution_control()),
        context_(generator->execution_context()) {
    generator_->set_execution_control(this);
  }
  virtual ~ControlScope() { generator_->set_execution_control(outer()); }
  ControlScope(const ControlScope&) = delete;
  ControlScope& operator=(const ControlScope&) = delete;

  void Break(Statement* stmt) {
    PerformCommand(CMD_BREAK, stmt, kNoSourcePosition);
  }
  void Continue(Statement* stmt) {
    PerformCommand(CMD_CONTINUE, stmt, kNoSourcePosition);
  }
  void ReturnAccumulator(int source_position = kNoSourcePosition) {
    PerformCommand(CMD_RETURN, nullptr, source_position);
  }
  void AsyncReturnAccumulator(int source_position = kNoSourcePosition) {
    PerformCommand(CMD_ASYNC_RETURN, nullptr, source_position);
  }

  class DeferredCommands;

 protected:
  enum Command {
    CMD_BREAK,
    CMD_CONTINUE,
    CMD_RETURN,
    CMD_ASYNC_RETURN,
    CMD_RETHROW
  };
  static constexpr bool CommandUsesAccumulator(Command command) {
    return command != CMD_BREAK && command != CMD_CONTINUE;
  }

  void PerformCommand(Command command, Statement* statement,
                      int source_position);
  virtual bool Execute(Command command, Statement* statement,
                       int source_position) = 0;

  // Helper to pop the context chain to a depth expected by this control scope.
  // Note that it is the responsibility of each individual {Execute} method to
  // trigger this when commands are handled and control-flow continues locally.
  void PopContextToExpectedDepth();

  BytecodeGenerator* generator() const { return generator_; }
  ControlScope* outer() const { return outer_; }
  ContextScope* context() const { return context_; }

 private:
  BytecodeGenerator* generator_;
  ControlScope* outer_;
  ContextScope* context_;
};
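
// Sketch of how commands travel through this stack: a statement visitor calls
// e.g. execution_control()->Break(stmt); PerformCommand() (defined further
// down) then walks outward through the enclosing ControlScopes until one of
// the Execute() overrides below handles the command.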

// Helper class for a try-finally control scope. It can record intercepted
// control-flow commands that cause entry into a finally-block, and re-apply
// them after again leaving that block. Special tokens are used to identify
// paths going through the finally-block to dispatch after leaving the block.
class BytecodeGenerator::ControlScope::DeferredCommands final {
 public:
  // Fixed value tokens for paths we know we need.
  // Fallthrough is set to -1 to make it the fallthrough case of the jump table,
  // where the remaining cases start at 0.
  static const int kFallthroughToken = -1;
  // TODO(leszeks): Rethrow being 0 makes it use up a valuable LdaZero, which
  // means that other commands (such as break or return) have to use LdaSmi.
  // This can very slightly bloat bytecode, so perhaps token values should all
  // be shifted down by 1.
  static const int kRethrowToken = 0;

  DeferredCommands(BytecodeGenerator* generator, Register token_register,
                   Register result_register)
      : generator_(generator),
        deferred_(generator->zone()),
        token_register_(token_register),
        result_register_(result_register),
        return_token_(-1),
        async_return_token_(-1) {
    // There's always a rethrow path.
    // TODO(leszeks): We could decouple deferred_ index and token to allow us
    // to still push this lazily.
    STATIC_ASSERT(kRethrowToken == 0);
    deferred_.push_back({CMD_RETHROW, nullptr, kRethrowToken});
  }

  // One recorded control-flow command.
  struct Entry {
    Command command;       // The command type being applied on this path.
    Statement* statement;  // The target statement for the command or {nullptr}.
    int token;             // A token identifying this particular path.
  };

  // Records a control-flow command while entering the finally-block. This also
  // generates a new dispatch token that identifies one particular path. This
  // expects the result to be in the accumulator.
  void RecordCommand(Command command, Statement* statement) {
    int token = GetTokenForCommand(command, statement);

    DCHECK_LT(token, deferred_.size());
    DCHECK_EQ(deferred_[token].command, command);
    DCHECK_EQ(deferred_[token].statement, statement);
    DCHECK_EQ(deferred_[token].token, token);

    if (CommandUsesAccumulator(command)) {
      builder()->StoreAccumulatorInRegister(result_register_);
    }
    builder()->LoadLiteral(Smi::FromInt(token));
    builder()->StoreAccumulatorInRegister(token_register_);
    if (!CommandUsesAccumulator(command)) {
      // If we're not saving the accumulator in the result register, shove a
      // harmless value there instead so that it is still considered "killed" in
      // the liveness analysis. Normally we would LdaUndefined first, but the
      // Smi token value is just as good, and by reusing it we save a bytecode.
      builder()->StoreAccumulatorInRegister(result_register_);
    }
  }

  // Records the dispatch token to be used to identify the re-throw path when
  // the finally-block has been entered through the exception handler. This
  // expects the exception to be in the accumulator.
  void RecordHandlerReThrowPath() {
    // The accumulator contains the exception object.
    RecordCommand(CMD_RETHROW, nullptr);
  }

  // Records the dispatch token to be used to identify the implicit fall-through
  // path at the end of a try-block into the corresponding finally-block.
  void RecordFallThroughPath() {
    builder()->LoadLiteral(Smi::FromInt(kFallthroughToken));
    builder()->StoreAccumulatorInRegister(token_register_);
    // Since we're not saving the accumulator in the result register, shove a
    // harmless value there instead so that it is still considered "killed" in
    // the liveness analysis. Normally we would LdaUndefined first, but the Smi
    // token value is just as good, and by reusing it we save a bytecode.
    builder()->StoreAccumulatorInRegister(result_register_);
  }

  // Applies all recorded control-flow commands after the finally-block again.
  // This generates a dynamic dispatch on the token from the entry point.
  void ApplyDeferredCommands() {
    if (deferred_.size() == 0) return;

    BytecodeLabel fall_through;

    if (deferred_.size() == 1) {
      // For a single entry, just jump to the fallthrough if we don't match the
      // entry token.
      const Entry& entry = deferred_[0];

      builder()
          ->LoadLiteral(Smi::FromInt(entry.token))
          .CompareReference(token_register_)
          .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &fall_through);

      if (CommandUsesAccumulator(entry.command)) {
        builder()->LoadAccumulatorWithRegister(result_register_);
      }
      execution_control()->PerformCommand(entry.command, entry.statement,
                                          kNoSourcePosition);
    } else {
      // For multiple entries, build a jump table and switch on the token,
      // jumping to the fallthrough if none of them match.

      BytecodeJumpTable* jump_table =
          builder()->AllocateJumpTable(static_cast<int>(deferred_.size()), 0);
      builder()
          ->LoadAccumulatorWithRegister(token_register_)
          .SwitchOnSmiNoFeedback(jump_table)
          .Jump(&fall_through);
      for (const Entry& entry : deferred_) {
        builder()->Bind(jump_table, entry.token);

        if (CommandUsesAccumulator(entry.command)) {
          builder()->LoadAccumulatorWithRegister(result_register_);
        }
        execution_control()->PerformCommand(entry.command, entry.statement,
                                            kNoSourcePosition);
      }
    }

    builder()->Bind(&fall_through);
  }

  BytecodeArrayBuilder* builder() { return generator_->builder(); }
  ControlScope* execution_control() { return generator_->execution_control(); }

 private:
  int GetTokenForCommand(Command command, Statement* statement) {
    switch (command) {
      case CMD_RETURN:
        return GetReturnToken();
      case CMD_ASYNC_RETURN:
        return GetAsyncReturnToken();
      case CMD_RETHROW:
        return kRethrowToken;
      default:
        // TODO(leszeks): We could also search for entries with the same
        // command and statement.
        return GetNewTokenForCommand(command, statement);
    }
  }

  int GetReturnToken() {
    if (return_token_ == -1) {
      return_token_ = GetNewTokenForCommand(CMD_RETURN, nullptr);
    }
    return return_token_;
  }

  int GetAsyncReturnToken() {
    if (async_return_token_ == -1) {
      async_return_token_ = GetNewTokenForCommand(CMD_ASYNC_RETURN, nullptr);
    }
    return async_return_token_;
  }

  int GetNewTokenForCommand(Command command, Statement* statement) {
    int token = static_cast<int>(deferred_.size());
    deferred_.push_back({command, statement, token});
    return token;
  }

  BytecodeGenerator* generator_;
  ZoneVector<Entry> deferred_;
  Register token_register_;
  Register result_register_;

  // Tokens for commands that don't need a statement.
  int return_token_;
  int async_return_token_;
};
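
// Sketch of how the pieces above cooperate when lowering
// try { ... } finally { ... } (illustrative; the actual sequencing lives in
// VisitTryFinallyStatement):
//
//   DeferredCommands commands(this, token, result);
//   // try-block: break/continue/return/rethrow are intercepted by
//   // ControlScopeForTryFinally, which calls commands.RecordCommand(...).
//   commands.RecordFallThroughPath();      // normal exit from the try-block
//   // exception handler: commands.RecordHandlerReThrowPath();
//   // ... emit the finally-block body ...
//   commands.ApplyDeferredCommands();      // re-dispatch on the saved token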

// Scoped class for dealing with control flow reaching the function level.
class BytecodeGenerator::ControlScopeForTopLevel final
    : public BytecodeGenerator::ControlScope {
 public:
  explicit ControlScopeForTopLevel(BytecodeGenerator* generator)
      : ControlScope(generator) {}

 protected:
  bool Execute(Command command, Statement* statement,
               int source_position) override {
    switch (command) {
      case CMD_BREAK:  // We should never see break/continue in top-level.
      case CMD_CONTINUE:
        UNREACHABLE();
      case CMD_RETURN:
        // No need to pop contexts, execution leaves the method body.
        generator()->BuildReturn(source_position);
        return true;
      case CMD_ASYNC_RETURN:
        // No need to pop contexts, execution leaves the method body.
        generator()->BuildAsyncReturn(source_position);
        return true;
      case CMD_RETHROW:
        // No need to pop contexts, execution leaves the method body.
        generator()->BuildReThrow();
        return true;
    }
    return false;
  }
};

// Scoped class for enabling break inside blocks and switch blocks.
class BytecodeGenerator::ControlScopeForBreakable final
    : public BytecodeGenerator::ControlScope {
 public:
  ControlScopeForBreakable(BytecodeGenerator* generator,
                           BreakableStatement* statement,
                           BreakableControlFlowBuilder* control_builder)
      : ControlScope(generator),
        statement_(statement),
        control_builder_(control_builder) {}

 protected:
  bool Execute(Command command, Statement* statement,
               int source_position) override {
    if (statement != statement_) return false;
    switch (command) {
      case CMD_BREAK:
        PopContextToExpectedDepth();
        control_builder_->Break();
        return true;
      case CMD_CONTINUE:
      case CMD_RETURN:
      case CMD_ASYNC_RETURN:
      case CMD_RETHROW:
        break;
    }
    return false;
  }

 private:
  Statement* statement_;
  BreakableControlFlowBuilder* control_builder_;
};

// Scoped class for enabling 'break' and 'continue' in iteration
// constructs, e.g. do...while, while..., for...
class BytecodeGenerator::ControlScopeForIteration final
    : public BytecodeGenerator::ControlScope {
 public:
  ControlScopeForIteration(BytecodeGenerator* generator,
                           IterationStatement* statement,
                           LoopBuilder* loop_builder)
      : ControlScope(generator),
        statement_(statement),
        loop_builder_(loop_builder) {}

 protected:
  bool Execute(Command command, Statement* statement,
               int source_position) override {
    if (statement != statement_) return false;
    switch (command) {
      case CMD_BREAK:
        PopContextToExpectedDepth();
        loop_builder_->Break();
        return true;
      case CMD_CONTINUE:
        PopContextToExpectedDepth();
        loop_builder_->Continue();
        return true;
      case CMD_RETURN:
      case CMD_ASYNC_RETURN:
      case CMD_RETHROW:
        break;
    }
    return false;
  }

 private:
  Statement* statement_;
  LoopBuilder* loop_builder_;
};

// Scoped class for enabling 'throw' in try-catch constructs.
class BytecodeGenerator::ControlScopeForTryCatch final
    : public BytecodeGenerator::ControlScope {
 public:
  ControlScopeForTryCatch(BytecodeGenerator* generator,
                          TryCatchBuilder* try_catch_builder)
      : ControlScope(generator) {}

 protected:
  bool Execute(Command command, Statement* statement,
               int source_position) override {
    switch (command) {
      case CMD_BREAK:
      case CMD_CONTINUE:
      case CMD_RETURN:
      case CMD_ASYNC_RETURN:
        break;
      case CMD_RETHROW:
        // No need to pop contexts, execution re-enters the method body via the
        // stack unwinding mechanism which itself restores contexts correctly.
        generator()->BuildReThrow();
        return true;
    }
    return false;
  }
};

// Scoped class for enabling control flow through try-finally constructs.
class BytecodeGenerator::ControlScopeForTryFinally final
    : public BytecodeGenerator::ControlScope {
 public:
  ControlScopeForTryFinally(BytecodeGenerator* generator,
                            TryFinallyBuilder* try_finally_builder,
                            DeferredCommands* commands)
      : ControlScope(generator),
        try_finally_builder_(try_finally_builder),
        commands_(commands) {}

 protected:
  bool Execute(Command command, Statement* statement,
               int source_position) override {
    switch (command) {
      case CMD_BREAK:
      case CMD_CONTINUE:
      case CMD_RETURN:
      case CMD_ASYNC_RETURN:
      case CMD_RETHROW:
        PopContextToExpectedDepth();
        // We don't record source_position here since we don't generate return
        // bytecode right here; it will be generated later as part of the
        // finally block. Each return bytecode generated in the finally block
        // will get its own source position from the corresponding return
        // statement, or from the end of the function if no return statement
        // is present.
        commands_->RecordCommand(command, statement);
        try_finally_builder_->LeaveTry();
        return true;
    }
    return false;
  }

 private:
  TryFinallyBuilder* try_finally_builder_;
  DeferredCommands* commands_;
};

// Allocate and fetch the coverage indices tracking n-ary logical expressions.
class BytecodeGenerator::NaryCodeCoverageSlots {
 public:
  NaryCodeCoverageSlots(BytecodeGenerator* generator, NaryOperation* expr)
      : generator_(generator) {
    if (generator_->block_coverage_builder_ == nullptr) return;
    for (size_t i = 0; i < expr->subsequent_length(); i++) {
      coverage_slots_.push_back(
          generator_->AllocateNaryBlockCoverageSlotIfEnabled(expr, i));
    }
  }

  int GetSlotFor(size_t subsequent_expr_index) const {
    if (generator_->block_coverage_builder_ == nullptr) {
      return BlockCoverageBuilder::kNoCoverageArraySlot;
    }
    DCHECK(coverage_slots_.size() > subsequent_expr_index);
    return coverage_slots_[subsequent_expr_index];
  }

 private:
  BytecodeGenerator* generator_;
  std::vector<int> coverage_slots_;
};

void BytecodeGenerator::ControlScope::PerformCommand(Command command,
                                                     Statement* statement,
                                                     int source_position) {
  ControlScope* current = this;
  do {
    if (current->Execute(command, statement, source_position)) {
      return;
    }
    current = current->outer();
  } while (current != nullptr);
  UNREACHABLE();
}

void BytecodeGenerator::ControlScope::PopContextToExpectedDepth() {
  // Pop context to the expected depth. Note that this can in fact pop multiple
  // contexts at once because the {PopContext} bytecode takes a saved register.
  if (generator()->execution_context() != context()) {
    generator()->builder()->PopContext(context()->reg());
  }
}

class BytecodeGenerator::RegisterAllocationScope final {
 public:
  explicit RegisterAllocationScope(BytecodeGenerator* generator)
      : generator_(generator),
        outer_next_register_index_(
            generator->register_allocator()->next_register_index()) {}

  ~RegisterAllocationScope() {
    generator_->register_allocator()->ReleaseRegisters(
        outer_next_register_index_);
  }

  RegisterAllocationScope(const RegisterAllocationScope&) = delete;
  RegisterAllocationScope& operator=(const RegisterAllocationScope&) = delete;

  BytecodeGenerator* generator() const { return generator_; }

 private:
  BytecodeGenerator* generator_;
  int outer_next_register_index_;
};
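
// Illustrative sketch: a visitor that needs scratch registers allocates them
// inside a RegisterAllocationScope so they are released again when the scope
// unwinds.
//
//   RegisterAllocationScope register_scope(this);
//   Register temp = register_allocator()->NewRegister();
//   builder()->StoreAccumulatorInRegister(temp);
//   // ... use temp ...
//   // ~RegisterAllocationScope() releases all registers allocated above.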

class BytecodeGenerator::AccumulatorPreservingScope final {
 public:
  explicit AccumulatorPreservingScope(BytecodeGenerator* generator,
                                      AccumulatorPreservingMode mode)
      : generator_(generator) {
    if (mode == AccumulatorPreservingMode::kPreserve) {
      saved_accumulator_register_ =
          generator_->register_allocator()->NewRegister();
      generator_->builder()->StoreAccumulatorInRegister(
          saved_accumulator_register_);
    }
  }

  ~AccumulatorPreservingScope() {
    if (saved_accumulator_register_.is_valid()) {
      generator_->builder()->LoadAccumulatorWithRegister(
          saved_accumulator_register_);
    }
  }

  AccumulatorPreservingScope(const AccumulatorPreservingScope&) = delete;
  AccumulatorPreservingScope& operator=(const AccumulatorPreservingScope&) =
      delete;

 private:
  BytecodeGenerator* generator_;
  Register saved_accumulator_register_;
};

// Scoped base class for determining how the result of an expression will be
// used.
class BytecodeGenerator::ExpressionResultScope {
 public:
  ExpressionResultScope(BytecodeGenerator* generator, Expression::Context kind)
      : outer_(generator->execution_result()),
        allocator_(generator),
        kind_(kind),
        type_hint_(TypeHint::kAny) {
    generator->set_execution_result(this);
  }

  ~ExpressionResultScope() {
    allocator_.generator()->set_execution_result(outer_);
  }

  ExpressionResultScope(const ExpressionResultScope&) = delete;
  ExpressionResultScope& operator=(const ExpressionResultScope&) = delete;

  bool IsEffect() const { return kind_ == Expression::kEffect; }
  bool IsValue() const { return kind_ == Expression::kValue; }
  bool IsTest() const { return kind_ == Expression::kTest; }

  TestResultScope* AsTest() {
    DCHECK(IsTest());
    return reinterpret_cast<TestResultScope*>(this);
  }

  // Specify that the expression always returns a Boolean result value.
  void SetResultIsBoolean() {
    DCHECK_EQ(type_hint_, TypeHint::kAny);
    type_hint_ = TypeHint::kBoolean;
  }

  void SetResultIsString() {
    DCHECK_EQ(type_hint_, TypeHint::kAny);
    type_hint_ = TypeHint::kString;
  }

  TypeHint type_hint() const { return type_hint_; }

 private:
  ExpressionResultScope* outer_;
  RegisterAllocationScope allocator_;
  Expression::Context kind_;
  TypeHint type_hint_;
};

// Scoped class used when the current expression is not expected to
// produce a result.
class BytecodeGenerator::EffectResultScope final
    : public ExpressionResultScope {
 public:
  explicit EffectResultScope(BytecodeGenerator* generator)
      : ExpressionResultScope(generator, Expression::kEffect) {}
};

// Scoped class used when the result of the current expression to be
// evaluated should go into the interpreter's accumulator.
class BytecodeGenerator::ValueResultScope final : public ExpressionResultScope {
 public:
  explicit ValueResultScope(BytecodeGenerator* generator)
      : ExpressionResultScope(generator, Expression::kValue) {}
};

// Scoped class used when the result of the current expression to be
// evaluated is only tested with jumps to two branches.
class BytecodeGenerator::TestResultScope final : public ExpressionResultScope {
 public:
  TestResultScope(BytecodeGenerator* generator, BytecodeLabels* then_labels,
                  BytecodeLabels* else_labels, TestFallthrough fallthrough)
      : ExpressionResultScope(generator, Expression::kTest),
        result_consumed_by_test_(false),
        fallthrough_(fallthrough),
        then_labels_(then_labels),
        else_labels_(else_labels) {}

  TestResultScope(const TestResultScope&) = delete;
  TestResultScope& operator=(const TestResultScope&) = delete;

  // Used when code special cases for TestResultScope and consumes any
  // possible value by testing and jumping to a then/else label.
  void SetResultConsumedByTest() { result_consumed_by_test_ = true; }
  bool result_consumed_by_test() { return result_consumed_by_test_; }

  // Inverts the control flow of the operation, swapping the then and else
  // labels and the fallthrough.
  void InvertControlFlow() {
    std::swap(then_labels_, else_labels_);
    fallthrough_ = inverted_fallthrough();
  }

  BytecodeLabel* NewThenLabel() { return then_labels_->New(); }
  BytecodeLabel* NewElseLabel() { return else_labels_->New(); }

  BytecodeLabels* then_labels() const { return then_labels_; }
  BytecodeLabels* else_labels() const { return else_labels_; }

  void set_then_labels(BytecodeLabels* then_labels) {
    then_labels_ = then_labels;
  }
  void set_else_labels(BytecodeLabels* else_labels) {
    else_labels_ = else_labels;
  }

  TestFallthrough fallthrough() const { return fallthrough_; }
  TestFallthrough inverted_fallthrough() const {
    switch (fallthrough_) {
      case TestFallthrough::kThen:
        return TestFallthrough::kElse;
      case TestFallthrough::kElse:
        return TestFallthrough::kThen;
      default:
        return TestFallthrough::kNone;
    }
  }
  void set_fallthrough(TestFallthrough fallthrough) {
    fallthrough_ = fallthrough;
  }

 private:
  bool result_consumed_by_test_;
  TestFallthrough fallthrough_;
  BytecodeLabels* then_labels_;
  BytecodeLabels* else_labels_;
};
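
// Illustrative sketch of the three result contexts; the VisitFor* helpers
// used throughout this file enter them:
//
//   VisitForEffect(expr);            // EffectResultScope: value is discarded
//   VisitForAccumulatorValue(expr);  // ValueResultScope: value in accumulator
//   VisitForTest(expr, &then_labels, &else_labels,
//                TestFallthrough::kThen);  // TestResultScope: expr only
//                                          // drives jumps to then/else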

// Used to build a list of toplevel declaration data.
class BytecodeGenerator::TopLevelDeclarationsBuilder final : public ZoneObject {
 public:
  template <typename LocalIsolate>
  Handle<FixedArray> AllocateDeclarations(UnoptimizedCompilationInfo* info,
                                          BytecodeGenerator* generator,
                                          Handle<Script> script,
                                          LocalIsolate* isolate) {
    DCHECK(has_constant_pool_entry_);

    Handle<FixedArray> data =
        isolate->factory()->NewFixedArray(entry_slots_, AllocationType::kOld);

    int array_index = 0;
    if (info->scope()->is_module_scope()) {
      for (Declaration* decl : *info->scope()->declarations()) {
        Variable* var = decl->var();
        if (!var->is_used()) continue;
        if (var->location() != VariableLocation::MODULE) continue;
#ifdef DEBUG
        int start = array_index;
#endif
        if (decl->IsFunctionDeclaration()) {
          FunctionLiteral* f = static_cast<FunctionDeclaration*>(decl)->fun();
          Handle<SharedFunctionInfo> sfi(
              Compiler::GetSharedFunctionInfo(f, script, isolate));
          // Return a null handle if any initial values can't be created. Caller
          // will set stack overflow.
          if (sfi.is_null()) return Handle<FixedArray>();
          data->set(array_index++, *sfi);
          int literal_index = generator->GetCachedCreateClosureSlot(f);
          data->set(array_index++, Smi::FromInt(literal_index));
          DCHECK(var->IsExport());
          data->set(array_index++, Smi::FromInt(var->index()));
          DCHECK_EQ(start + kModuleFunctionDeclarationSize, array_index);
        } else if (var->IsExport() && var->binding_needs_init()) {
          data->set(array_index++, Smi::FromInt(var->index()));
          DCHECK_EQ(start + kModuleVariableDeclarationSize, array_index);
        }
      }
    } else {
      for (Declaration* decl : *info->scope()->declarations()) {
        Variable* var = decl->var();
        if (!var->is_used()) continue;
        if (var->location() != VariableLocation::UNALLOCATED) continue;
#ifdef DEBUG
        int start = array_index;
#endif
        if (decl->IsVariableDeclaration()) {
          data->set(array_index++, *var->raw_name()->string());
          DCHECK_EQ(start + kGlobalVariableDeclarationSize, array_index);
        } else {
          FunctionLiteral* f = static_cast<FunctionDeclaration*>(decl)->fun();
          Handle<SharedFunctionInfo> sfi(
              Compiler::GetSharedFunctionInfo(f, script, isolate));
          // Return a null handle if any initial values can't be created. Caller
          // will set stack overflow.
          if (sfi.is_null()) return Handle<FixedArray>();
          data->set(array_index++, *sfi);
          int literal_index = generator->GetCachedCreateClosureSlot(f);
          data->set(array_index++, Smi::FromInt(literal_index));
          DCHECK_EQ(start + kGlobalFunctionDeclarationSize, array_index);
        }
      }
    }
    DCHECK_EQ(array_index, data->length());
    return data;
  }

  size_t constant_pool_entry() {
    DCHECK(has_constant_pool_entry_);
    return constant_pool_entry_;
  }

  void set_constant_pool_entry(size_t constant_pool_entry) {
    DCHECK(has_top_level_declaration());
    DCHECK(!has_constant_pool_entry_);
    constant_pool_entry_ = constant_pool_entry;
    has_constant_pool_entry_ = true;
  }

  void record_global_variable_declaration() {
    entry_slots_ += kGlobalVariableDeclarationSize;
  }
  void record_global_function_declaration() {
    entry_slots_ += kGlobalFunctionDeclarationSize;
  }
  void record_module_variable_declaration() {
    entry_slots_ += kModuleVariableDeclarationSize;
  }
  void record_module_function_declaration() {
    entry_slots_ += kModuleFunctionDeclarationSize;
  }
  bool has_top_level_declaration() { return entry_slots_ > 0; }
  bool processed() { return processed_; }
  void mark_processed() { processed_ = true; }

 private:
  const int kGlobalVariableDeclarationSize = 1;
  const int kGlobalFunctionDeclarationSize = 2;
  const int kModuleVariableDeclarationSize = 1;
  const int kModuleFunctionDeclarationSize = 3;

  size_t constant_pool_entry_ = 0;
  int entry_slots_ = 0;
  bool has_constant_pool_entry_ = false;
  bool processed_ = false;
};
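
// Sketch of the flat array layout produced above (slot counts match the
// k*DeclarationSize constants):
//
//   global variable:  [ name ]
//   global function:  [ SharedFunctionInfo, create-closure slot ]
//   module variable:  [ export cell index ]   (only if exported + needs init)
//   module function:  [ SharedFunctionInfo, create-closure slot,
//                       export cell index ]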

class BytecodeGenerator::CurrentScope final {
 public:
  CurrentScope(BytecodeGenerator* generator, Scope* scope)
      : generator_(generator), outer_scope_(generator->current_scope()) {
    if (scope != nullptr) {
      DCHECK_EQ(outer_scope_, scope->outer_scope());
      generator_->set_current_scope(scope);
    }
  }
  ~CurrentScope() {
    if (outer_scope_ != generator_->current_scope()) {
      generator_->set_current_scope(outer_scope_);
    }
  }

 private:
  BytecodeGenerator* generator_;
  Scope* outer_scope_;
};

class BytecodeGenerator::FeedbackSlotCache : public ZoneObject {
 public:
  enum class SlotKind {
    kStoreGlobalSloppy,
    kStoreGlobalStrict,
    kStoreNamedStrict,
    kStoreNamedSloppy,
    kLoadProperty,
    kLoadSuperProperty,
    kLoadGlobalNotInsideTypeof,
    kLoadGlobalInsideTypeof,
    kClosureFeedbackCell
  };

  explicit FeedbackSlotCache(Zone* zone) : map_(zone) {}

  void Put(SlotKind slot_kind, Variable* variable, int slot_index) {
    PutImpl(slot_kind, 0, variable, slot_index);
  }
  void Put(SlotKind slot_kind, AstNode* node, int slot_index) {
    PutImpl(slot_kind, 0, node, slot_index);
  }
  void Put(SlotKind slot_kind, int variable_index, const AstRawString* name,
           int slot_index) {
    PutImpl(slot_kind, variable_index, name, slot_index);
  }
  void Put(SlotKind slot_kind, const AstRawString* name, int slot_index) {
    PutImpl(slot_kind, 0, name, slot_index);
  }

  int Get(SlotKind slot_kind, Variable* variable) const {
    return GetImpl(slot_kind, 0, variable);
  }
  int Get(SlotKind slot_kind, AstNode* node) const {
    return GetImpl(slot_kind, 0, node);
  }
  int Get(SlotKind slot_kind, int variable_index,
          const AstRawString* name) const {
    return GetImpl(slot_kind, variable_index, name);
  }
  int Get(SlotKind slot_kind, const AstRawString* name) const {
    return GetImpl(slot_kind, 0, name);
  }

 private:
  using Key = std::tuple<SlotKind, int, const void*>;

  void PutImpl(SlotKind slot_kind, int index, const void* node,
               int slot_index) {
    Key key = std::make_tuple(slot_kind, index, node);
    auto entry = std::make_pair(key, slot_index);
    map_.insert(entry);
  }

  int GetImpl(SlotKind slot_kind, int index, const void* node) const {
    Key key = std::make_tuple(slot_kind, index, node);
    auto iter = map_.find(key);
    if (iter != map_.end()) {
      return iter->second;
    }
    return -1;
  }

  ZoneMap<Key, int> map_;
};
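
// Usage sketch (the GetCached*Slot helpers later in this file follow this
// shape; a load-global slot is shown):
//
//   FeedbackSlot slot(feedback_slot_cache()->Get(slot_kind, variable));
//   if (!slot.IsInvalid()) return slot;  // cache hit: reuse the same slot
//   slot = feedback_spec()->AddLoadGlobalICSlot(typeof_mode);
//   feedback_slot_cache()->Put(slot_kind, variable, feedback_index(slot));
//   return slot;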

class BytecodeGenerator::IteratorRecord final {
 public:
  IteratorRecord(Register object_register, Register next_register,
                 IteratorType type = IteratorType::kNormal)
      : type_(type), object_(object_register), next_(next_register) {
    DCHECK(object_.is_valid() && next_.is_valid());
  }

  inline IteratorType type() const { return type_; }
  inline Register object() const { return object_; }
  inline Register next() const { return next_; }

 private:
  IteratorType type_;
  Register object_;
  Register next_;
};
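
// Sketch: BuildGetIteratorRecord() below fills one of these by loading the
// iterator object into |object| and its "next" method into |next|, so that
// for-of and yield* lowering can call next repeatedly without re-fetching it.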

class BytecodeGenerator::OptionalChainNullLabelScope final {
 public:
  explicit OptionalChainNullLabelScope(BytecodeGenerator* bytecode_generator)
      : bytecode_generator_(bytecode_generator),
        labels_(bytecode_generator->zone()) {
    prev_ = bytecode_generator_->optional_chaining_null_labels_;
    bytecode_generator_->optional_chaining_null_labels_ = &labels_;
  }

  ~OptionalChainNullLabelScope() {
    bytecode_generator_->optional_chaining_null_labels_ = prev_;
  }

  BytecodeLabels* labels() { return &labels_; }

 private:
  BytecodeGenerator* bytecode_generator_;
  BytecodeLabels labels_;
  BytecodeLabels* prev_;
};

// LoopScope delimits the scope of {loop}, from its header to its final jump.
// It should be constructed iff a (conceptual) back edge should be produced. In
// the case of creating a LoopBuilder but never emitting the loop, it is valid
// to skip the creation of LoopScope.
class BytecodeGenerator::LoopScope final {
 public:
  explicit LoopScope(BytecodeGenerator* bytecode_generator, LoopBuilder* loop)
      : bytecode_generator_(bytecode_generator),
        parent_loop_scope_(bytecode_generator_->current_loop_scope()),
        loop_builder_(loop) {
    loop_builder_->LoopHeader();
    bytecode_generator_->set_current_loop_scope(this);
    bytecode_generator_->loop_depth_++;
  }

  ~LoopScope() {
    bytecode_generator_->loop_depth_--;
    bytecode_generator_->set_current_loop_scope(parent_loop_scope_);
    DCHECK_GE(bytecode_generator_->loop_depth_, 0);
    loop_builder_->JumpToHeader(
        bytecode_generator_->loop_depth_,
        parent_loop_scope_ ? parent_loop_scope_->loop_builder_ : nullptr);
  }

 private:
  BytecodeGenerator* const bytecode_generator_;
  LoopScope* const parent_loop_scope_;
  LoopBuilder* const loop_builder_;
};
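
// Illustrative sketch of how a loop visitor ties these together (see
// VisitIterationBody and the individual loop visitors below):
//
//   LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
//   LoopScope loop_scope(this, &loop_builder);  // emits the loop header
//   // ... emit condition and body; break/continue are routed through
//   // ControlScopeForIteration to loop_builder ...
//   // ~LoopScope() emits the JumpToHeader back edge.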

namespace {

template <typename PropertyT>
struct Accessors : public ZoneObject {
  Accessors() : getter(nullptr), setter(nullptr) {}
  PropertyT* getter;
  PropertyT* setter;
};

// A map from property names to getter/setter pairs allocated in the zone that
// also provides a way of accessing the pairs in the order they were first
// added so that the generated bytecode is always the same.
template <typename PropertyT>
class AccessorTable
    : public base::TemplateHashMap<Literal, Accessors<PropertyT>,
                                   bool (*)(void*, void*),
                                   ZoneAllocationPolicy> {
 public:
  explicit AccessorTable(Zone* zone)
      : base::TemplateHashMap<Literal, Accessors<PropertyT>,
                              bool (*)(void*, void*), ZoneAllocationPolicy>(
            Literal::Match, ZoneAllocationPolicy(zone)),
        zone_(zone) {}

  Accessors<PropertyT>* LookupOrInsert(Literal* key) {
    auto it = this->find(key, true);
    if (it->second == nullptr) {
      it->second = zone_->New<Accessors<PropertyT>>();
      ordered_accessors_.push_back({key, it->second});
    }
    return it->second;
  }

  const std::vector<std::pair<Literal*, Accessors<PropertyT>*>>&
  ordered_accessors() {
    return ordered_accessors_;
  }

 private:
  std::vector<std::pair<Literal*, Accessors<PropertyT>*>> ordered_accessors_;

  Zone* zone_;
};
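
// Usage sketch (the object- and class-literal visitors use this pattern to
// pair getters and setters so that each property triggers a single
// accessor-definition runtime call):
//
//   AccessorTable<ObjectLiteral::Property> accessor_table(zone());
//   accessor_table.LookupOrInsert(key)->getter = property;  // GETTER case
//   accessor_table.LookupOrInsert(key)->setter = property;  // SETTER case
//   for (auto accessors : accessor_table.ordered_accessors()) { ... }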
1038 
1039 }  // namespace
1040 
1041 #ifdef DEBUG
1042 
IsInEagerLiterals(FunctionLiteral * literal,const std::vector<FunctionLiteral * > & eager_literals)1043 static bool IsInEagerLiterals(
1044     FunctionLiteral* literal,
1045     const std::vector<FunctionLiteral*>& eager_literals) {
1046   for (FunctionLiteral* eager_literal : eager_literals) {
1047     if (literal == eager_literal) return true;
1048   }
1049   return false;
1050 }
1051 
1052 #endif  // DEBUG
1053 
BytecodeGenerator(Zone * compile_zone,UnoptimizedCompilationInfo * info,const AstStringConstants * ast_string_constants,std::vector<FunctionLiteral * > * eager_inner_literals)1054 BytecodeGenerator::BytecodeGenerator(
1055     Zone* compile_zone, UnoptimizedCompilationInfo* info,
1056     const AstStringConstants* ast_string_constants,
1057     std::vector<FunctionLiteral*>* eager_inner_literals)
1058     : zone_(compile_zone),
1059       builder_(zone(), info->num_parameters_including_this(),
1060                info->scope()->num_stack_slots(), info->feedback_vector_spec(),
1061                info->SourcePositionRecordingMode()),
1062       info_(info),
1063       ast_string_constants_(ast_string_constants),
1064       closure_scope_(info->scope()),
1065       current_scope_(info->scope()),
1066       eager_inner_literals_(eager_inner_literals),
1067       feedback_slot_cache_(zone()->New<FeedbackSlotCache>(zone())),
1068       top_level_builder_(zone()->New<TopLevelDeclarationsBuilder>()),
1069       block_coverage_builder_(nullptr),
1070       function_literals_(0, zone()),
1071       native_function_literals_(0, zone()),
1072       object_literals_(0, zone()),
1073       array_literals_(0, zone()),
1074       class_literals_(0, zone()),
1075       template_objects_(0, zone()),
1076       execution_control_(nullptr),
1077       execution_context_(nullptr),
1078       execution_result_(nullptr),
1079       incoming_new_target_or_generator_(),
1080       optional_chaining_null_labels_(nullptr),
1081       dummy_feedback_slot_(feedback_spec(), FeedbackSlotKind::kCompareOp),
1082       generator_jump_table_(nullptr),
1083       suspend_count_(0),
1084       loop_depth_(0),
1085       current_loop_scope_(nullptr),
1086       catch_prediction_(HandlerTable::UNCAUGHT) {
1087   DCHECK_EQ(closure_scope(), closure_scope()->GetClosureScope());
1088   if (info->has_source_range_map()) {
1089     block_coverage_builder_ = zone()->New<BlockCoverageBuilder>(
1090         zone(), builder(), info->source_range_map());
1091   }
1092 }
1093 
1094 namespace {
1095 
1096 template <typename Isolate>
1097 struct NullContextScopeHelper;
1098 
1099 template <>
1100 struct NullContextScopeHelper<Isolate> {
1101   using Type = NullContextScope;
1102 };
1103 
1104 template <>
1105 struct NullContextScopeHelper<LocalIsolate> {
1106   class DummyNullContextScope {
1107    public:
DummyNullContextScope(LocalIsolate *)1108     explicit DummyNullContextScope(LocalIsolate*) {}
1109   };
1110   using Type = DummyNullContextScope;
1111 };
1112 
1113 template <typename Isolate>
1114 using NullContextScopeFor = typename NullContextScopeHelper<Isolate>::Type;
1115 
1116 }  // namespace
1117 
1118 template <typename LocalIsolate>
FinalizeBytecode(LocalIsolate * isolate,Handle<Script> script)1119 Handle<BytecodeArray> BytecodeGenerator::FinalizeBytecode(
1120     LocalIsolate* isolate, Handle<Script> script) {
1121   DCHECK_EQ(ThreadId::Current(), isolate->thread_id());
1122 #ifdef DEBUG
1123   // Unoptimized compilation should be context-independent. Verify that we don't
1124   // access the native context by nulling it out during finalization.
1125   NullContextScopeFor<LocalIsolate> null_context_scope(isolate);
1126 #endif
1127 
1128   AllocateDeferredConstants(isolate, script);
1129 
1130   if (block_coverage_builder_) {
1131     Handle<CoverageInfo> coverage_info =
1132         isolate->factory()->NewCoverageInfo(block_coverage_builder_->slots());
1133     info()->set_coverage_info(coverage_info);
1134     if (FLAG_trace_block_coverage) {
1135       StdoutStream os;
1136       coverage_info->CoverageInfoPrint(os, info()->literal()->GetDebugName());
1137     }
1138   }
1139 
1140   if (HasStackOverflow()) return Handle<BytecodeArray>();
1141   Handle<BytecodeArray> bytecode_array = builder()->ToBytecodeArray(isolate);
1142 
1143   if (incoming_new_target_or_generator_.is_valid()) {
1144     bytecode_array->set_incoming_new_target_or_generator_register(
1145         incoming_new_target_or_generator_);
1146   }
1147 
1148   return bytecode_array;
1149 }
1150 
1151 template Handle<BytecodeArray> BytecodeGenerator::FinalizeBytecode(
1152     Isolate* isolate, Handle<Script> script);
1153 template Handle<BytecodeArray> BytecodeGenerator::FinalizeBytecode(
1154     LocalIsolate* isolate, Handle<Script> script);
1155 
1156 template <typename LocalIsolate>
FinalizeSourcePositionTable(LocalIsolate * isolate)1157 Handle<ByteArray> BytecodeGenerator::FinalizeSourcePositionTable(
1158     LocalIsolate* isolate) {
1159   DCHECK_EQ(ThreadId::Current(), isolate->thread_id());
1160 #ifdef DEBUG
1161   // Unoptimized compilation should be context-independent. Verify that we don't
1162   // access the native context by nulling it out during finalization.
1163   NullContextScopeFor<LocalIsolate> null_context_scope(isolate);
1164 #endif
1165 
1166   Handle<ByteArray> source_position_table =
1167       builder()->ToSourcePositionTable(isolate);
1168 
1169   LOG_CODE_EVENT(isolate,
1170                  CodeLinePosInfoRecordEvent(
1171                      info_->bytecode_array()->GetFirstBytecodeAddress(),
1172                      *source_position_table));
1173 
1174   return source_position_table;
1175 }
1176 
1177 template Handle<ByteArray> BytecodeGenerator::FinalizeSourcePositionTable(
1178     Isolate* isolate);
1179 template Handle<ByteArray> BytecodeGenerator::FinalizeSourcePositionTable(
1180     LocalIsolate* isolate);
1181 
1182 #ifdef DEBUG
CheckBytecodeMatches(BytecodeArray bytecode)1183 int BytecodeGenerator::CheckBytecodeMatches(BytecodeArray bytecode) {
1184   return builder()->CheckBytecodeMatches(bytecode);
1185 }
1186 #endif
1187 
1188 template <typename LocalIsolate>
AllocateDeferredConstants(LocalIsolate * isolate,Handle<Script> script)1189 void BytecodeGenerator::AllocateDeferredConstants(LocalIsolate* isolate,
1190                                                   Handle<Script> script) {
1191   if (top_level_builder()->has_top_level_declaration()) {
1192     // Build global declaration pair array.
1193     Handle<FixedArray> declarations = top_level_builder()->AllocateDeclarations(
1194         info(), this, script, isolate);
1195     if (declarations.is_null()) return SetStackOverflow();
1196     builder()->SetDeferredConstantPoolEntry(
1197         top_level_builder()->constant_pool_entry(), declarations);
1198   }
1199 
1200   // Find or build shared function infos.
1201   for (std::pair<FunctionLiteral*, size_t> literal : function_literals_) {
1202     FunctionLiteral* expr = literal.first;
1203     Handle<SharedFunctionInfo> shared_info =
1204         Compiler::GetSharedFunctionInfo(expr, script, isolate);
1205     if (shared_info.is_null()) return SetStackOverflow();
1206     builder()->SetDeferredConstantPoolEntry(literal.second, shared_info);
1207   }
1208 
1209   // Find or build shared function infos for the native function templates.
1210   for (std::pair<NativeFunctionLiteral*, size_t> literal :
1211        native_function_literals_) {
1212     // This should only happen for main-thread compilations.
1213     DCHECK((std::is_same<Isolate, v8::internal::Isolate>::value));
1214 
1215     NativeFunctionLiteral* expr = literal.first;
1216     v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);
1217 
1218     // Compute the function template for the native function.
1219     v8::Local<v8::FunctionTemplate> info =
1220         expr->extension()->GetNativeFunctionTemplate(
1221             v8_isolate, Utils::ToLocal(expr->name()));
1222     DCHECK(!info.IsEmpty());
1223 
1224     Handle<SharedFunctionInfo> shared_info =
1225         FunctionTemplateInfo::GetOrCreateSharedFunctionInfo(
1226             isolate, Utils::OpenHandle(*info), expr->name());
1227     DCHECK(!shared_info.is_null());
1228     builder()->SetDeferredConstantPoolEntry(literal.second, shared_info);
1229   }
1230 
1231   // Build object literal constant properties
1232   for (std::pair<ObjectLiteral*, size_t> literal : object_literals_) {
1233     ObjectLiteral* object_literal = literal.first;
1234     if (object_literal->properties_count() > 0) {
1235       // If constant properties is an empty fixed array, we've already added it
1236       // to the constant pool when visiting the object literal.
1237       Handle<ObjectBoilerplateDescription> constant_properties =
1238           object_literal->GetOrBuildBoilerplateDescription(isolate);
1239 
1240       builder()->SetDeferredConstantPoolEntry(literal.second,
1241                                               constant_properties);
1242     }
1243   }
1244 
1245   // Build array literal constant elements
1246   for (std::pair<ArrayLiteral*, size_t> literal : array_literals_) {
1247     ArrayLiteral* array_literal = literal.first;
1248     Handle<ArrayBoilerplateDescription> constant_elements =
1249         array_literal->GetOrBuildBoilerplateDescription(isolate);
1250     builder()->SetDeferredConstantPoolEntry(literal.second, constant_elements);
1251   }
1252 
1253   // Build class literal boilerplates.
1254   for (std::pair<ClassLiteral*, size_t> literal : class_literals_) {
1255     ClassLiteral* class_literal = literal.first;
1256     Handle<ClassBoilerplate> class_boilerplate =
1257         ClassBoilerplate::BuildClassBoilerplate(isolate, class_literal);
1258     builder()->SetDeferredConstantPoolEntry(literal.second, class_boilerplate);
1259   }
1260 
1261   // Build template literals.
1262   for (std::pair<GetTemplateObject*, size_t> literal : template_objects_) {
1263     GetTemplateObject* get_template_object = literal.first;
1264     Handle<TemplateObjectDescription> description =
1265         get_template_object->GetOrBuildDescription(isolate);
1266     builder()->SetDeferredConstantPoolEntry(literal.second, description);
1267   }
1268 }
1269 
1270 template void BytecodeGenerator::AllocateDeferredConstants(
1271     Isolate* isolate, Handle<Script> script);
1272 template void BytecodeGenerator::AllocateDeferredConstants(
1273     LocalIsolate* isolate, Handle<Script> script);
1274 
1275 namespace {
NeedsContextInitialization(DeclarationScope * scope)1276 bool NeedsContextInitialization(DeclarationScope* scope) {
1277   return scope->NeedsContext() && !scope->is_script_scope() &&
1278          !scope->is_module_scope();
1279 }
1280 }  // namespace
1281 
GenerateBytecode(uintptr_t stack_limit)1282 void BytecodeGenerator::GenerateBytecode(uintptr_t stack_limit) {
1283   DisallowHeapAllocation no_allocation;
1284   DisallowHandleAllocation no_handles;
1285   DisallowHandleDereference no_deref;
1286 
1287   InitializeAstVisitor(stack_limit);
1288 
1289   // Initialize the incoming context.
1290   ContextScope incoming_context(this, closure_scope());
1291 
1292   // Initialize control scope.
1293   ControlScopeForTopLevel control(this);
1294 
1295   RegisterAllocationScope register_scope(this);
1296 
1297   AllocateTopLevelRegisters();
1298 
1299   builder()->EmitFunctionStartSourcePosition(
1300       info()->literal()->start_position());
1301 
1302   if (info()->literal()->CanSuspend()) {
1303     BuildGeneratorPrologue();
1304   }
1305 
1306   if (NeedsContextInitialization(closure_scope())) {
1307     // Push a new inner context scope for the function.
1308     BuildNewLocalActivationContext();
1309     ContextScope local_function_context(this, closure_scope());
1310     BuildLocalActivationContextInitialization();
1311     GenerateBytecodeBody();
1312   } else {
1313     GenerateBytecodeBody();
1314   }
1315 
1316   // Check that we are not falling off the end.
1317   DCHECK(builder()->RemainderOfBlockIsDead());
1318 }
1319 
GenerateBytecodeBody()1320 void BytecodeGenerator::GenerateBytecodeBody() {
1321   // Build the arguments object if it is used.
1322   VisitArgumentsObject(closure_scope()->arguments());
1323 
1324   // Build rest arguments array if it is used.
1325   Variable* rest_parameter = closure_scope()->rest_parameter();
1326   VisitRestArgumentsArray(rest_parameter);
1327 
1328   // Build assignment to the function name or {.this_function}
1329   // variables if used.
1330   VisitThisFunctionVariable(closure_scope()->function_var());
1331   VisitThisFunctionVariable(closure_scope()->this_function_var());
1332 
1333   // Build assignment to {new.target} variable if it is used.
1334   VisitNewTargetVariable(closure_scope()->new_target_var());
1335 
1336   // Create a generator object if necessary and initialize the
1337   // {.generator_object} variable.
1338   FunctionLiteral* literal = info()->literal();
1339   if (IsResumableFunction(literal->kind())) {
1340     BuildGeneratorObjectVariableInitialization();
1341   }
1342 
1343   // Emit tracing call if requested to do so.
1344   if (FLAG_trace) builder()->CallRuntime(Runtime::kTraceEnter);
1345 
1346   // Emit type profile call.
1347   if (info()->flags().collect_type_profile()) {
1348     feedback_spec()->AddTypeProfileSlot();
1349     int num_parameters = closure_scope()->num_parameters();
1350     for (int i = 0; i < num_parameters; i++) {
1351       Register parameter(builder()->Parameter(i));
1352       builder()->LoadAccumulatorWithRegister(parameter).CollectTypeProfile(
1353           closure_scope()->parameter(i)->initializer_position());
1354     }
1355   }
1356 
1357   // Increment the function-scope block coverage counter.
1358   BuildIncrementBlockCoverageCounterIfEnabled(literal, SourceRangeKind::kBody);
1359 
1360   // Visit declarations within the function scope.
1361   if (closure_scope()->is_script_scope()) {
1362     VisitGlobalDeclarations(closure_scope()->declarations());
1363   } else if (closure_scope()->is_module_scope()) {
1364     VisitModuleDeclarations(closure_scope()->declarations());
1365   } else {
1366     VisitDeclarations(closure_scope()->declarations());
1367   }
1368 
1369   // Emit initializing assignments for module namespace imports (if any).
1370   VisitModuleNamespaceImports();
1371 
1372   // The derived constructor case is handled in VisitCallSuper.
1373   if (IsBaseConstructor(function_kind())) {
1374     if (literal->class_scope_has_private_brand()) {
1375       BuildPrivateBrandInitialization(builder()->Receiver());
1376     }
1377 
1378     if (literal->requires_instance_members_initializer()) {
1379       BuildInstanceMemberInitialization(Register::function_closure(),
1380                                         builder()->Receiver());
1381     }
1382   }
1383 
1384   // Visit statements in the function body.
1385   VisitStatements(literal->body());
1386 
1387   // Emit an implicit return instruction in case control flow can fall off the
1388   // end of the function without an explicit return being present on all paths.
1389   if (!builder()->RemainderOfBlockIsDead()) {
1390     builder()->LoadUndefined();
1391     BuildReturn();
1392   }
1393 }
1394 
1395 void BytecodeGenerator::AllocateTopLevelRegisters() {
1396   if (IsResumableFunction(info()->literal()->kind())) {
1397     // Either directly use generator_object_var or allocate a new register for
1398     // the incoming generator object.
1399     Variable* generator_object_var = closure_scope()->generator_object_var();
1400     if (generator_object_var->location() == VariableLocation::LOCAL) {
1401       incoming_new_target_or_generator_ =
1402           GetRegisterForLocalVariable(generator_object_var);
1403     } else {
1404       incoming_new_target_or_generator_ = register_allocator()->NewRegister();
1405     }
1406   } else if (closure_scope()->new_target_var()) {
1407     // Either directly use new_target_var or allocate a new register for
1408     // the incoming new target object.
1409     Variable* new_target_var = closure_scope()->new_target_var();
1410     if (new_target_var->location() == VariableLocation::LOCAL) {
1411       incoming_new_target_or_generator_ =
1412           GetRegisterForLocalVariable(new_target_var);
1413     } else {
1414       incoming_new_target_or_generator_ = register_allocator()->NewRegister();
1415     }
1416   }
1417 }
1418 
1419 void BytecodeGenerator::BuildGeneratorPrologue() {
1420   DCHECK_GT(info()->literal()->suspend_count(), 0);
1421   DCHECK(generator_object().is_valid());
1422   generator_jump_table_ =
1423       builder()->AllocateJumpTable(info()->literal()->suspend_count(), 0);
1424 
1425   // If the generator is not undefined, this is a resume, so perform state
1426   // dispatch.
1427   builder()->SwitchOnGeneratorState(generator_object(), generator_jump_table_);
1428 
1429   // Otherwise, fall through to the ordinary function prologue, after which we
1430   // will run into the generator object creation and other extra code inserted
1431   // by the parser.
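  // For example (illustrative): given
  //
  //   function* g() { yield 1; yield 2; }
  //
  // the initial call to g() finds an undefined generator object and falls
  // through to the ordinary prologue, while each later next() call re-enters
  // this bytecode and the SwitchOnGeneratorState dispatch above jumps straight
  // to the recorded suspend point.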
1432 }
1433 
1434 void BytecodeGenerator::VisitBlock(Block* stmt) {
1435   // Visit declarations and statements.
1436   CurrentScope current_scope(this, stmt->scope());
1437   if (stmt->scope() != nullptr && stmt->scope()->NeedsContext()) {
1438     BuildNewLocalBlockContext(stmt->scope());
1439     ContextScope scope(this, stmt->scope());
1440     VisitBlockDeclarationsAndStatements(stmt);
1441   } else {
1442     VisitBlockDeclarationsAndStatements(stmt);
1443   }
1444 }
1445 
1446 void BytecodeGenerator::VisitBlockDeclarationsAndStatements(Block* stmt) {
1447   BlockBuilder block_builder(builder(), block_coverage_builder_, stmt);
1448   ControlScopeForBreakable execution_control(this, stmt, &block_builder);
1449   if (stmt->scope() != nullptr) {
1450     VisitDeclarations(stmt->scope()->declarations());
1451   }
1452   VisitStatements(stmt->statements());
1453 }
1454 
1455 void BytecodeGenerator::VisitVariableDeclaration(VariableDeclaration* decl) {
1456   Variable* variable = decl->var();
1457   // Unused variables don't need to be visited.
1458   if (!variable->is_used()) return;
1459 
1460   switch (variable->location()) {
1461     case VariableLocation::UNALLOCATED:
1462     case VariableLocation::MODULE:
1463       UNREACHABLE();
1464     case VariableLocation::LOCAL:
1465       if (variable->binding_needs_init()) {
1466         Register destination(builder()->Local(variable->index()));
1467         builder()->LoadTheHole().StoreAccumulatorInRegister(destination);
1468       }
1469       break;
1470     case VariableLocation::PARAMETER:
1471       if (variable->binding_needs_init()) {
1472         Register destination(builder()->Parameter(variable->index()));
1473         builder()->LoadTheHole().StoreAccumulatorInRegister(destination);
1474       }
1475       break;
1476     case VariableLocation::REPL_GLOBAL:
1477       // REPL 'let' declarations are stored in script contexts. They get
1478       // initialized with the hole just like normal context-allocated variables.
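      // For example (illustrative), hole initialization is what makes reads in
      // the temporal dead zone throw:
      //
      //   { f(); let x = 1; function f() { return x; } }
      //
      // fails with a ReferenceError because 'x' still holds the hole when f
      // reads it.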
1479     case VariableLocation::CONTEXT:
1480       if (variable->binding_needs_init()) {
1481         DCHECK_EQ(0, execution_context()->ContextChainDepth(variable->scope()));
1482         builder()->LoadTheHole().StoreContextSlot(execution_context()->reg(),
1483                                                   variable->index(), 0);
1484       }
1485       break;
1486     case VariableLocation::LOOKUP: {
1487       DCHECK_EQ(VariableMode::kDynamic, variable->mode());
1488       DCHECK(!variable->binding_needs_init());
1489 
1490       Register name = register_allocator()->NewRegister();
1491 
1492       builder()
1493           ->LoadLiteral(variable->raw_name())
1494           .StoreAccumulatorInRegister(name)
1495           .CallRuntime(Runtime::kDeclareEvalVar, name);
1496       break;
1497     }
1498   }
1499 }
1500 
1501 void BytecodeGenerator::VisitFunctionDeclaration(FunctionDeclaration* decl) {
1502   Variable* variable = decl->var();
1503   DCHECK(variable->mode() == VariableMode::kLet ||
1504          variable->mode() == VariableMode::kVar ||
1505          variable->mode() == VariableMode::kDynamic);
1506   // Unused variables don't need to be visited.
1507   if (!variable->is_used()) return;
1508 
1509   switch (variable->location()) {
1510     case VariableLocation::UNALLOCATED:
1511     case VariableLocation::MODULE:
1512       UNREACHABLE();
1513     case VariableLocation::PARAMETER:
1514     case VariableLocation::LOCAL: {
1515       VisitFunctionLiteral(decl->fun());
1516       BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
1517       break;
1518     }
1519     case VariableLocation::REPL_GLOBAL:
1520     case VariableLocation::CONTEXT: {
1521       DCHECK_EQ(0, execution_context()->ContextChainDepth(variable->scope()));
1522       VisitFunctionLiteral(decl->fun());
1523       builder()->StoreContextSlot(execution_context()->reg(), variable->index(),
1524                                   0);
1525       break;
1526     }
1527     case VariableLocation::LOOKUP: {
1528       RegisterList args = register_allocator()->NewRegisterList(2);
1529       builder()
1530           ->LoadLiteral(variable->raw_name())
1531           .StoreAccumulatorInRegister(args[0]);
1532       VisitFunctionLiteral(decl->fun());
1533       builder()->StoreAccumulatorInRegister(args[1]).CallRuntime(
1534           Runtime::kDeclareEvalFunction, args);
1535       break;
1536     }
1537   }
1538   DCHECK_IMPLIES(
1539       eager_inner_literals_ != nullptr && decl->fun()->ShouldEagerCompile(),
1540       IsInEagerLiterals(decl->fun(), *eager_inner_literals_));
1541 }
1542 
1543 void BytecodeGenerator::VisitModuleNamespaceImports() {
1544   if (!closure_scope()->is_module_scope()) return;
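  // For example (illustrative), a namespace import such as
  //
  //   import * as ns from './m.js';
  //
  // is lowered by the loop below into a Runtime::kGetModuleNamespace call for
  // the module request followed by an INIT assignment to the local 'ns'
  // binding.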
1545 
1546   RegisterAllocationScope register_scope(this);
1547   Register module_request = register_allocator()->NewRegister();
1548 
1549   SourceTextModuleDescriptor* descriptor =
1550       closure_scope()->AsModuleScope()->module();
1551   for (auto entry : descriptor->namespace_imports()) {
1552     builder()
1553         ->LoadLiteral(Smi::FromInt(entry->module_request))
1554         .StoreAccumulatorInRegister(module_request)
1555         .CallRuntime(Runtime::kGetModuleNamespace, module_request);
1556     Variable* var = closure_scope()->LookupInModule(entry->local_name);
1557     BuildVariableAssignment(var, Token::INIT, HoleCheckMode::kElided);
1558   }
1559 }
1560 
1561 void BytecodeGenerator::BuildDeclareCall(Runtime::FunctionId id) {
1562   if (!top_level_builder()->has_top_level_declaration()) return;
1563   DCHECK(!top_level_builder()->processed());
1564 
1565   top_level_builder()->set_constant_pool_entry(
1566       builder()->AllocateDeferredConstantPoolEntry());
1567 
1568   // Emit code to declare globals.
1569   RegisterList args = register_allocator()->NewRegisterList(2);
1570   builder()
1571       ->LoadConstantPoolEntry(top_level_builder()->constant_pool_entry())
1572       .StoreAccumulatorInRegister(args[0])
1573       .MoveRegister(Register::function_closure(), args[1])
1574       .CallRuntime(id, args);
1575 
1576   top_level_builder()->mark_processed();
1577 }
1578 
1579 void BytecodeGenerator::VisitModuleDeclarations(Declaration::List* decls) {
1580   RegisterAllocationScope register_scope(this);
1581   for (Declaration* decl : *decls) {
1582     Variable* var = decl->var();
1583     if (!var->is_used()) continue;
1584     if (var->location() == VariableLocation::MODULE) {
1585       if (decl->IsFunctionDeclaration()) {
1586         DCHECK(var->IsExport());
1587         FunctionDeclaration* f = static_cast<FunctionDeclaration*>(decl);
1588         AddToEagerLiteralsIfEager(f->fun());
1589         top_level_builder()->record_module_function_declaration();
1590       } else if (var->IsExport() && var->binding_needs_init()) {
1591         DCHECK(decl->IsVariableDeclaration());
1592         top_level_builder()->record_module_variable_declaration();
1593       }
1594     } else {
1595       RegisterAllocationScope register_scope(this);
1596       Visit(decl);
1597     }
1598   }
1599   BuildDeclareCall(Runtime::kDeclareModuleExports);
1600 }
1601 
1602 void BytecodeGenerator::VisitGlobalDeclarations(Declaration::List* decls) {
1603   RegisterAllocationScope register_scope(this);
1604   for (Declaration* decl : *decls) {
1605     Variable* var = decl->var();
1606     DCHECK(var->is_used());
1607     if (var->location() == VariableLocation::UNALLOCATED) {
1608       // var or function.
1609       if (decl->IsFunctionDeclaration()) {
1610         top_level_builder()->record_global_function_declaration();
1611         FunctionDeclaration* f = static_cast<FunctionDeclaration*>(decl);
1612         AddToEagerLiteralsIfEager(f->fun());
1613       } else {
1614         top_level_builder()->record_global_variable_declaration();
1615       }
1616     } else {
1617       // let or const. Handled in NewScriptContext.
1618       DCHECK(decl->IsVariableDeclaration());
1619       DCHECK(IsLexicalVariableMode(var->mode()));
1620     }
1621   }
1622 
1623   BuildDeclareCall(Runtime::kDeclareGlobals);
1624 }
1625 
1626 void BytecodeGenerator::VisitDeclarations(Declaration::List* declarations) {
1627   for (Declaration* decl : *declarations) {
1628     RegisterAllocationScope register_scope(this);
1629     Visit(decl);
1630   }
1631 }
1632 
1633 void BytecodeGenerator::VisitStatements(
1634     const ZonePtrList<Statement>* statements) {
1635   for (int i = 0; i < statements->length(); i++) {
1636     // Allocate an outer register allocation scope for the statement.
1637     RegisterAllocationScope allocation_scope(this);
1638     Statement* stmt = statements->at(i);
1639     Visit(stmt);
1640     if (builder()->RemainderOfBlockIsDead()) break;
1641   }
1642 }
1643 
1644 void BytecodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
1645   builder()->SetStatementPosition(stmt);
1646   VisitForEffect(stmt->expression());
1647 }
1648 
1649 void BytecodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {}
1650 
1651 void BytecodeGenerator::VisitIfStatement(IfStatement* stmt) {
1652   ConditionalControlFlowBuilder conditional_builder(
1653       builder(), block_coverage_builder_, stmt);
1654   builder()->SetStatementPosition(stmt);
1655 
1656   if (stmt->condition()->ToBooleanIsTrue()) {
1657     // Generate the then-block unconditionally since the condition is always true.
1658     conditional_builder.Then();
1659     Visit(stmt->then_statement());
1660   } else if (stmt->condition()->ToBooleanIsFalse()) {
1661     // Generate else block unconditionally if it exists.
1662     if (stmt->HasElseStatement()) {
1663       conditional_builder.Else();
1664       Visit(stmt->else_statement());
1665     }
1666   } else {
1667     // TODO(oth): If then statement is BreakStatement or
1668     // ContinueStatement we can reduce number of generated
1669     // jump/jump_ifs here. See BasicLoops test.
1670     VisitForTest(stmt->condition(), conditional_builder.then_labels(),
1671                  conditional_builder.else_labels(), TestFallthrough::kThen);
1672 
1673     conditional_builder.Then();
1674     Visit(stmt->then_statement());
1675 
1676     if (stmt->HasElseStatement()) {
1677       conditional_builder.JumpToEnd();
1678       conditional_builder.Else();
1679       Visit(stmt->else_statement());
1680     }
1681   }
1682 }
1683 
1684 void BytecodeGenerator::VisitSloppyBlockFunctionStatement(
1685     SloppyBlockFunctionStatement* stmt) {
1686   Visit(stmt->statement());
1687 }
1688 
1689 void BytecodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
1690   AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
1691   builder()->SetStatementPosition(stmt);
1692   execution_control()->Continue(stmt->target());
1693 }
1694 
1695 void BytecodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
1696   AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
1697   builder()->SetStatementPosition(stmt);
1698   execution_control()->Break(stmt->target());
1699 }
1700 
1701 void BytecodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
1702   AllocateBlockCoverageSlotIfEnabled(stmt, SourceRangeKind::kContinuation);
1703   builder()->SetStatementPosition(stmt);
1704   VisitForAccumulatorValue(stmt->expression());
1705   if (stmt->is_async_return()) {
1706     execution_control()->AsyncReturnAccumulator(stmt->end_position());
1707   } else {
1708     execution_control()->ReturnAccumulator(stmt->end_position());
1709   }
1710 }
1711 
1712 void BytecodeGenerator::VisitWithStatement(WithStatement* stmt) {
1713   builder()->SetStatementPosition(stmt);
1714   VisitForAccumulatorValue(stmt->expression());
1715   BuildNewLocalWithContext(stmt->scope());
1716   VisitInScope(stmt->statement(), stmt->scope());
1717 }
1718 
1719 void BytecodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
1720   // We need this scope because we visit for register values. We have to
1721   // maintain an execution result scope where registers can be allocated.
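  // For example (illustrative), a statement like
  //
  //   switch (x) { case a: s1; case b: s2; default: s3; }
  //
  // evaluates x into the 'tag' register, emits a strict-equality comparison
  // and conditional jump for each non-default clause label, then a jump to the
  // default body (or out of the switch if there is none), and finally emits
  // the clause bodies, which fall through in source order.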
1722   ZonePtrList<CaseClause>* clauses = stmt->cases();
1723   SwitchBuilder switch_builder(builder(), block_coverage_builder_, stmt,
1724                                clauses->length());
1725   ControlScopeForBreakable scope(this, stmt, &switch_builder);
1726   int default_index = -1;
1727 
1728   builder()->SetStatementPosition(stmt);
1729 
1730   // Keep the switch value in a register until a case matches.
1731   Register tag = VisitForRegisterValue(stmt->tag());
1732   FeedbackSlot slot = clauses->length() > 0
1733                           ? feedback_spec()->AddCompareICSlot()
1734                           : FeedbackSlot::Invalid();
1735 
1736   // Iterate over all cases and create nodes for label comparison.
1737   for (int i = 0; i < clauses->length(); i++) {
1738     CaseClause* clause = clauses->at(i);
1739 
1740     // The default clause is not a test; just remember its index.
1741     if (clause->is_default()) {
1742       default_index = i;
1743       continue;
1744     }
1745 
1746     // Perform label comparison as if via '===' with tag.
1747     VisitForAccumulatorValue(clause->label());
1748     builder()->CompareOperation(Token::Value::EQ_STRICT, tag,
1749                                 feedback_index(slot));
1750     switch_builder.Case(ToBooleanMode::kAlreadyBoolean, i);
1751   }
1752 
1753   if (default_index >= 0) {
1754     // Emit default jump if there is a default case.
1755     switch_builder.DefaultAt(default_index);
1756   } else {
1757     // Otherwise, if control reaches this point none of the cases matched, so
1758     // jump to the end.
1759     switch_builder.Break();
1760   }
1761 
1762   // Iterate over all cases and create the case bodies.
1763   for (int i = 0; i < clauses->length(); i++) {
1764     CaseClause* clause = clauses->at(i);
1765     switch_builder.SetCaseTarget(i, clause);
1766     VisitStatements(clause->statements());
1767   }
1768 }
1769 
1770 template <typename TryBodyFunc, typename CatchBodyFunc>
1771 void BytecodeGenerator::BuildTryCatch(
1772     TryBodyFunc try_body_func, CatchBodyFunc catch_body_func,
1773     HandlerTable::CatchPrediction catch_prediction,
1774     TryCatchStatement* stmt_for_coverage) {
1775   TryCatchBuilder try_control_builder(
1776       builder(),
1777       stmt_for_coverage == nullptr ? nullptr : block_coverage_builder_,
1778       stmt_for_coverage, catch_prediction);
1779 
1780   // Preserve the context in a dedicated register, so that it can be restored
1781   // when the handler is entered by the stack-unwinding machinery.
1782   // TODO(ignition): Be smarter about register allocation.
1783   Register context = register_allocator()->NewRegister();
1784   builder()->MoveRegister(Register::current_context(), context);
1785 
1786   // Evaluate the try-block inside a control scope. This simulates a handler
1787   // that is intercepting 'throw' control commands.
1788   try_control_builder.BeginTry(context);
1789   {
1790     ControlScopeForTryCatch scope(this, &try_control_builder);
1791     try_body_func();
1792   }
1793   try_control_builder.EndTry();
1794 
1795   catch_body_func(context);
1796 
1797   try_control_builder.EndCatch();
1798 }
1799 
1800 template <typename TryBodyFunc, typename FinallyBodyFunc>
1801 void BytecodeGenerator::BuildTryFinally(
1802     TryBodyFunc try_body_func, FinallyBodyFunc finally_body_func,
1803     HandlerTable::CatchPrediction catch_prediction,
1804     TryFinallyStatement* stmt_for_coverage) {
1805   // We can't know whether the finally block will override ("catch") an
1806   // exception thrown in the try block, so we just adopt the outer prediction.
1807   TryFinallyBuilder try_control_builder(
1808       builder(),
1809       stmt_for_coverage == nullptr ? nullptr : block_coverage_builder_,
1810       stmt_for_coverage, catch_prediction);
1811 
1812   // We keep a record of all paths that enter the finally-block to be able to
1813   // dispatch to the correct continuation point after the statements in the
1814   // finally-block have been evaluated.
1815   //
1816   // The try-finally construct can enter the finally-block in three ways:
1817   // 1. By exiting the try-block normally, falling through at the end.
1818   // 2. By exiting the try-block with a function-local control flow transfer
1819   //    (i.e. through break/continue/return statements).
1820   // 3. By exiting the try-block with a thrown exception.
1821   //
1822   // The result register semantics depend on how the block was entered:
1823   //  - ReturnStatement: It represents the return value being returned.
1824   //  - ThrowStatement: It represents the exception being thrown.
1825   //  - BreakStatement/ContinueStatement: Undefined and not used.
1826   //  - Falling through into finally-block: Undefined and not used.
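  // For example (illustrative), in
  //
  //   try { return f(); } finally { cleanup(); }
  //
  // the return inside the try-block records a return token in 'token' and the
  // return value in 'result', the finally-block runs, and then
  // ApplyDeferredCommands below re-issues the return with the saved value
  // (path 2 above).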
1827   Register token = register_allocator()->NewRegister();
1828   Register result = register_allocator()->NewRegister();
1829   ControlScope::DeferredCommands commands(this, token, result);
1830 
1831   // Preserve the context in a dedicated register, so that it can be restored
1832   // when the handler is entered by the stack-unwinding machinery.
1833   // TODO(ignition): Be smarter about register allocation.
1834   Register context = register_allocator()->NewRegister();
1835   builder()->MoveRegister(Register::current_context(), context);
1836 
1837   // Evaluate the try-block inside a control scope. This simulates a handler
1838   // that is intercepting all control commands.
1839   try_control_builder.BeginTry(context);
1840   {
1841     ControlScopeForTryFinally scope(this, &try_control_builder, &commands);
1842     try_body_func();
1843   }
1844   try_control_builder.EndTry();
1845 
1846   // Record fall-through and exception cases.
1847   commands.RecordFallThroughPath();
1848   try_control_builder.LeaveTry();
1849   try_control_builder.BeginHandler();
1850   commands.RecordHandlerReThrowPath();
1851 
1852   // Pending message object is saved on entry.
1853   try_control_builder.BeginFinally();
1854   Register message = context;  // Reuse register.
1855 
1856   // Clear message object as we enter the finally block.
1857   builder()->LoadTheHole().SetPendingMessage().StoreAccumulatorInRegister(
1858       message);
1859 
1860   // Evaluate the finally-block.
1861   finally_body_func(token);
1862   try_control_builder.EndFinally();
1863 
1864   // Pending message object is restored on exit.
1865   builder()->LoadAccumulatorWithRegister(message).SetPendingMessage();
1866 
1867   // Dynamic dispatch after the finally-block.
1868   commands.ApplyDeferredCommands();
1869 }
1870 
1871 void BytecodeGenerator::VisitIterationBody(IterationStatement* stmt,
1872                                            LoopBuilder* loop_builder) {
1873   loop_builder->LoopBody();
1874   ControlScopeForIteration execution_control(this, stmt, loop_builder);
1875   Visit(stmt->body());
1876   loop_builder->BindContinueTarget();
1877 }
1878 
1879 void BytecodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
1880   LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
1881   if (stmt->cond()->ToBooleanIsFalse()) {
1882     // Since we know that the condition is false, we don't create a loop.
1883     // Therefore, we don't create a LoopScope (and thus we don't create a header
1884     // and a JumpToHeader). However, we still need to iterate once through the
1885     // body.
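    // For example (illustrative):
    //
    //   do { body(); } while (false);
    //
    // emits the body exactly once, with no loop header and no back-branch.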
1886     VisitIterationBody(stmt, &loop_builder);
1887   } else if (stmt->cond()->ToBooleanIsTrue()) {
1888     LoopScope loop_scope(this, &loop_builder);
1889     VisitIterationBody(stmt, &loop_builder);
1890   } else {
1891     LoopScope loop_scope(this, &loop_builder);
1892     VisitIterationBody(stmt, &loop_builder);
1893     builder()->SetExpressionAsStatementPosition(stmt->cond());
1894     BytecodeLabels loop_backbranch(zone());
1895     VisitForTest(stmt->cond(), &loop_backbranch, loop_builder.break_labels(),
1896                  TestFallthrough::kThen);
1897     loop_backbranch.Bind(builder());
1898   }
1899 }
1900 
1901 void BytecodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
1902   LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
1903 
1904   if (stmt->cond()->ToBooleanIsFalse()) {
1905     // If the condition is false there is no need to generate the loop.
1906     return;
1907   }
1908 
1909   LoopScope loop_scope(this, &loop_builder);
1910   if (!stmt->cond()->ToBooleanIsTrue()) {
1911     builder()->SetExpressionAsStatementPosition(stmt->cond());
1912     BytecodeLabels loop_body(zone());
1913     VisitForTest(stmt->cond(), &loop_body, loop_builder.break_labels(),
1914                  TestFallthrough::kThen);
1915     loop_body.Bind(builder());
1916   }
1917   VisitIterationBody(stmt, &loop_builder);
1918 }
1919 
1920 void BytecodeGenerator::VisitForStatement(ForStatement* stmt) {
1921   if (stmt->init() != nullptr) {
1922     Visit(stmt->init());
1923   }
1924 
1925   LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
1926   if (stmt->cond() && stmt->cond()->ToBooleanIsFalse()) {
1927     // If the condition is known to be false there is no need to generate the
1928     // body, next or condition blocks. The init block has already been generated.
1929     return;
1930   }
1931 
1932   LoopScope loop_scope(this, &loop_builder);
1933   if (stmt->cond() && !stmt->cond()->ToBooleanIsTrue()) {
1934     builder()->SetExpressionAsStatementPosition(stmt->cond());
1935     BytecodeLabels loop_body(zone());
1936     VisitForTest(stmt->cond(), &loop_body, loop_builder.break_labels(),
1937                  TestFallthrough::kThen);
1938     loop_body.Bind(builder());
1939   }
1940   VisitIterationBody(stmt, &loop_builder);
1941   if (stmt->next() != nullptr) {
1942     builder()->SetStatementPosition(stmt->next());
1943     Visit(stmt->next());
1944   }
1945 }
1946 
1947 void BytecodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1948   if (stmt->subject()->IsNullLiteral() ||
1949       stmt->subject()->IsUndefinedLiteral()) {
1950     // ForIn generates lots of code, skip if it wouldn't produce any effects.
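    // For example (illustrative), 'for (const k in null) { use(k); }' emits no
    // bytecode at all: enumerating null or undefined visits nothing and the
    // subject has no side effects worth preserving.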
1951     return;
1952   }
1953 
1954   BytecodeLabel subject_undefined_label;
1955   FeedbackSlot slot = feedback_spec()->AddForInSlot();
1956 
1957   // Prepare the state for executing ForIn.
1958   builder()->SetExpressionAsStatementPosition(stmt->subject());
1959   VisitForAccumulatorValue(stmt->subject());
1960   builder()->JumpIfUndefinedOrNull(&subject_undefined_label);
1961   Register receiver = register_allocator()->NewRegister();
1962   builder()->ToObject(receiver);
1963 
1964   // Used as kRegTriple and kRegPair in ForInPrepare and ForInNext.
1965   RegisterList triple = register_allocator()->NewRegisterList(3);
1966   Register cache_length = triple[2];
1967   builder()->ForInEnumerate(receiver);
1968   builder()->ForInPrepare(triple, feedback_index(slot));
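  // Note (illustrative): ForInEnumerate collects the receiver's enumerable
  // keys, ForInPrepare unpacks them into the cache-type/cache-array/
  // cache-length triple, and ForInNext below loads the key at 'index',
  // producing undefined (and hence a 'continue') when the property no longer
  // exists on the receiver, e.g. because it was deleted during iteration.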
1969 
1970   // Set up loop counter
1971   Register index = register_allocator()->NewRegister();
1972   builder()->LoadLiteral(Smi::zero());
1973   builder()->StoreAccumulatorInRegister(index);
1974 
1975   // The loop
1976   {
1977     LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
1978     LoopScope loop_scope(this, &loop_builder);
1979     builder()->SetExpressionAsStatementPosition(stmt->each());
1980     builder()->ForInContinue(index, cache_length);
1981     loop_builder.BreakIfFalse(ToBooleanMode::kAlreadyBoolean);
1982     builder()->ForInNext(receiver, index, triple.Truncate(2),
1983                          feedback_index(slot));
1984     loop_builder.ContinueIfUndefined();
1985 
1986     // Assign accumulator value to the 'each' target.
1987     {
1988       EffectResultScope scope(this);
1989       // Make sure to preserve the accumulator across the PrepareAssignmentLhs
1990       // call.
1991       AssignmentLhsData lhs_data = PrepareAssignmentLhs(
1992           stmt->each(), AccumulatorPreservingMode::kPreserve);
1993       builder()->SetExpressionPosition(stmt->each());
1994       BuildAssignment(lhs_data, Token::ASSIGN, LookupHoistingMode::kNormal);
1995     }
1996 
1997     VisitIterationBody(stmt, &loop_builder);
1998     builder()->ForInStep(index);
1999     builder()->StoreAccumulatorInRegister(index);
2000   }
2001   builder()->Bind(&subject_undefined_label);
2002 }
2003 
2004 // Desugar a for-of statement into an application of the iteration protocol.
2005 //
2006 // for (EACH of SUBJECT) BODY
2007 //
2008 //   becomes
2009 //
2010 // iterator = %GetIterator(SUBJECT)
2011 // try {
2012 //
2013 //   loop {
2014 //     // Make sure we are considered 'done' if .next(), .done or .value fail.
2015 //     done = true
2016 //     value = iterator.next()
2017 //     if (value.done) break;
2018 //     value = value.value
2019 //     done = false
2020 //
2021 //     EACH = value
2022 //     BODY
2023 //   }
2024 //   done = true
2025 //
2026 // } catch(e) {
2027 //   iteration_continuation = RETHROW
2028 // } finally {
2029 //   %FinalizeIteration(iterator, done, iteration_continuation)
2030 // }
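//
// For example (illustrative), for
//
//   for (const x of [1, 2]) use(x);
//
// the loop below calls the array iterator's .next() on every turn, breaks once
// .done is truthy, assigns .value to x, and the finally-block closes the
// iterator via %FinalizeIteration if the body or the assignment throws.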
2031 void BytecodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
2032   EffectResultScope effect_scope(this);
2033 
2034   builder()->SetExpressionAsStatementPosition(stmt->subject());
2035   VisitForAccumulatorValue(stmt->subject());
2036 
2037   // Store the iterator in a dedicated register so that it can be closed on
2038   // exit, and the 'done' value in a dedicated register so that it can be
2039   // changed and accessed independently of the iteration result.
2040   IteratorRecord iterator = BuildGetIteratorRecord(stmt->type());
2041   Register done = register_allocator()->NewRegister();
2042   builder()->LoadFalse();
2043   builder()->StoreAccumulatorInRegister(done);
2044 
2045   BuildTryFinally(
2046       // Try block.
2047       [&]() {
2048         Register next_result = register_allocator()->NewRegister();
2049 
2050         LoopBuilder loop_builder(builder(), block_coverage_builder_, stmt);
2051         LoopScope loop_scope(this, &loop_builder);
2052 
2053         builder()->LoadTrue().StoreAccumulatorInRegister(done);
2054 
2055         // Call the iterator's .next() method. Break from the loop if the `done`
2056         // property is truthy, otherwise load the value from the iterator result
2057         // so it can be assigned to the 'each' target below.
2058         builder()->SetExpressionAsStatementPosition(stmt->each());
2059         BuildIteratorNext(iterator, next_result);
2060         builder()->LoadNamedProperty(
2061             next_result, ast_string_constants()->done_string(),
2062             feedback_index(feedback_spec()->AddLoadICSlot()));
2063         loop_builder.BreakIfTrue(ToBooleanMode::kConvertToBoolean);
2064 
2065         builder()
2066             // value = value.value
2067             ->LoadNamedProperty(
2068                 next_result, ast_string_constants()->value_string(),
2069                 feedback_index(feedback_spec()->AddLoadICSlot()));
2070         // done = false, before the assignment to each happens, so that done is
2071         // false if the assignment throws.
2072         builder()
2073             ->StoreAccumulatorInRegister(next_result)
2074             .LoadFalse()
2075             .StoreAccumulatorInRegister(done);
2076 
2077         // Assign to the 'each' target.
2078         AssignmentLhsData lhs_data = PrepareAssignmentLhs(stmt->each());
2079         builder()->LoadAccumulatorWithRegister(next_result);
2080         BuildAssignment(lhs_data, Token::ASSIGN, LookupHoistingMode::kNormal);
2081 
2082         VisitIterationBody(stmt, &loop_builder);
2083       },
2084       // Finally block.
2085       [&](Register iteration_continuation_token) {
2086         // Finish the iteration in the finally block.
2087         BuildFinalizeIteration(iterator, done, iteration_continuation_token);
2088       },
2089       HandlerTable::UNCAUGHT);
2090 }
2091 
2092 void BytecodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
2093   // Update catch prediction tracking. The updated catch_prediction value lasts
2094   // until the end of the try_block in the AST node, and does not apply to the
2095   // catch_block.
2096   HandlerTable::CatchPrediction outer_catch_prediction = catch_prediction();
2097   set_catch_prediction(stmt->GetCatchPrediction(outer_catch_prediction));
2098 
2099   BuildTryCatch(
2100       // Try body.
2101       [&]() {
2102         Visit(stmt->try_block());
2103         set_catch_prediction(outer_catch_prediction);
2104       },
2105       // Catch body.
2106       [&](Register context) {
2107         if (stmt->scope()) {
2108           // Create a catch scope that binds the exception.
2109           BuildNewLocalCatchContext(stmt->scope());
2110           builder()->StoreAccumulatorInRegister(context);
2111         }
2112 
2113         // If requested, clear message object as we enter the catch block.
2114         if (stmt->ShouldClearPendingException(outer_catch_prediction)) {
2115           builder()->LoadTheHole().SetPendingMessage();
2116         }
2117 
2118         // Load the catch context into the accumulator.
2119         builder()->LoadAccumulatorWithRegister(context);
2120 
2121         // Evaluate the catch-block.
2122         if (stmt->scope()) {
2123           VisitInScope(stmt->catch_block(), stmt->scope());
2124         } else {
2125           VisitBlock(stmt->catch_block());
2126         }
2127       },
2128       catch_prediction(), stmt);
2129 }
2130 
2131 void BytecodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
2132   BuildTryFinally(
2133       // Try block.
2134       [&]() { Visit(stmt->try_block()); },
2135       // Finally block.
2136       [&](Register body_continuation_token) { Visit(stmt->finally_block()); },
2137       catch_prediction(), stmt);
2138 }
2139 
2140 void BytecodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
2141   builder()->SetStatementPosition(stmt);
2142   builder()->Debugger();
2143 }
2144 
2145 void BytecodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
2146   DCHECK(expr->scope()->outer_scope() == current_scope());
2147   uint8_t flags = CreateClosureFlags::Encode(
2148       expr->pretenure(), closure_scope()->is_function_scope(),
2149       info()->flags().might_always_opt());
2150   size_t entry = builder()->AllocateDeferredConstantPoolEntry();
2151   builder()->CreateClosure(entry, GetCachedCreateClosureSlot(expr), flags);
2152   function_literals_.push_back(std::make_pair(expr, entry));
2153   AddToEagerLiteralsIfEager(expr);
2154 }
2155 
2156 void BytecodeGenerator::AddToEagerLiteralsIfEager(FunctionLiteral* literal) {
2157   if (eager_inner_literals_ && literal->ShouldEagerCompile()) {
2158     DCHECK(!IsInEagerLiterals(literal, *eager_inner_literals_));
2159     eager_inner_literals_->push_back(literal);
2160   }
2161 }
2162 
2163 bool BytecodeGenerator::ShouldOptimizeAsOneShot() const {
2164   if (!FLAG_enable_one_shot_optimization) return false;
2165 
2166   if (loop_depth_ > 0) return false;
2167 
2168   return info()->literal()->is_toplevel() ||
2169          info()->literal()->is_oneshot_iife();
2170 }
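// Note (illustrative): top-level script code and immediately-invoked function
// expressions such as
//
//   (function() { var o = {a: 1}; })();
//
// are expected to run only once, so when ShouldOptimizeAsOneShot() holds,
// BuildCreateObjectLiteral (further down) prefers the runtime call that skips
// creating an allocation site for the literal.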
2171 
2172 void BytecodeGenerator::BuildClassLiteral(ClassLiteral* expr, Register name) {
2173   size_t class_boilerplate_entry =
2174       builder()->AllocateDeferredConstantPoolEntry();
2175   class_literals_.push_back(std::make_pair(expr, class_boilerplate_entry));
2176 
2177   VisitDeclarations(expr->scope()->declarations());
2178   Register class_constructor = register_allocator()->NewRegister();
2179 
2180   // Create the class brand symbol and store it on the context during class
2181   // evaluation. This will be stored in the instance later in the constructor.
2182   // We do this early so that invalid access to private methods or accessors
2183   // in computed property keys throw.
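  // For example (illustrative), in
  //
  //   class C { #m() {} static test(o) { return o.#m(); } }
  //
  // the brand symbol created below is installed on each instance by the
  // constructor, and 'o.#m()' performs a brand check that throws a TypeError
  // when 'o' is not a C instance.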
2184   if (expr->scope()->brand() != nullptr) {
2185     Register brand = register_allocator()->NewRegister();
2186     const AstRawString* class_name =
2187         expr->scope()->class_variable() != nullptr
2188             ? expr->scope()->class_variable()->raw_name()
2189             : ast_string_constants()->empty_string();
2190     builder()
2191         ->LoadLiteral(class_name)
2192         .StoreAccumulatorInRegister(brand)
2193         .CallRuntime(Runtime::kCreatePrivateBrandSymbol, brand);
2194     BuildVariableAssignment(expr->scope()->brand(), Token::INIT,
2195                             HoleCheckMode::kElided);
2196   }
2197 
2198   AccessorTable<ClassLiteral::Property> private_accessors(zone());
2199   for (int i = 0; i < expr->private_members()->length(); i++) {
2200     ClassLiteral::Property* property = expr->private_members()->at(i);
2201     DCHECK(property->is_private());
2202     switch (property->kind()) {
2203       case ClassLiteral::Property::FIELD: {
2204         // Initialize the private field variables early.
2205         // Create the private name symbols for fields during class
2206         // evaluation and store them on the context. These will be
2207         // used as keys later during instance or static initialization.
2208         RegisterAllocationScope private_name_register_scope(this);
2209         Register private_name = register_allocator()->NewRegister();
2210         VisitForRegisterValue(property->key(), private_name);
2211         builder()
2212             ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
2213             .StoreAccumulatorInRegister(private_name)
2214             .CallRuntime(Runtime::kCreatePrivateNameSymbol, private_name);
2215         DCHECK_NOT_NULL(property->private_name_var());
2216         BuildVariableAssignment(property->private_name_var(), Token::INIT,
2217                                 HoleCheckMode::kElided);
2218         break;
2219       }
2220       case ClassLiteral::Property::METHOD: {
2221         // We can initialize the private methods and accessors later so that the
2222         // home objects can be assigned right after the creation of the
2223         // closures, and those are guarded by the brand checks.
2224         break;
2225       }
2226       // Collect private accessors into a table to merge the creation of
2227       // those closures later.
2228       case ClassLiteral::Property::GETTER: {
2229         Literal* key = property->key()->AsLiteral();
2230         DCHECK_NULL(private_accessors.LookupOrInsert(key)->getter);
2231         private_accessors.LookupOrInsert(key)->getter = property;
2232         break;
2233       }
2234       case ClassLiteral::Property::SETTER: {
2235         Literal* key = property->key()->AsLiteral();
2236         DCHECK_NULL(private_accessors.LookupOrInsert(key)->setter);
2237         private_accessors.LookupOrInsert(key)->setter = property;
2238         break;
2239       }
2240       default:
2241         UNREACHABLE();
2242     }
2243   }
2244 
2245   {
2246     RegisterAllocationScope register_scope(this);
2247     RegisterList args = register_allocator()->NewGrowableRegisterList();
2248 
2249     Register class_boilerplate = register_allocator()->GrowRegisterList(&args);
2250     Register class_constructor_in_args =
2251         register_allocator()->GrowRegisterList(&args);
2252     Register super_class = register_allocator()->GrowRegisterList(&args);
2253     DCHECK_EQ(ClassBoilerplate::kFirstDynamicArgumentIndex,
2254               args.register_count());
2255 
2256     VisitForAccumulatorValueOrTheHole(expr->extends());
2257     builder()->StoreAccumulatorInRegister(super_class);
2258 
2259     VisitFunctionLiteral(expr->constructor());
2260     builder()
2261         ->StoreAccumulatorInRegister(class_constructor)
2262         .MoveRegister(class_constructor, class_constructor_in_args)
2263         .LoadConstantPoolEntry(class_boilerplate_entry)
2264         .StoreAccumulatorInRegister(class_boilerplate);
2265 
2266     // Create computed property names and method values to store into the literal.
2267     for (int i = 0; i < expr->public_members()->length(); i++) {
2268       ClassLiteral::Property* property = expr->public_members()->at(i);
2269       if (property->is_computed_name()) {
2270         Register key = register_allocator()->GrowRegisterList(&args);
2271 
2272         builder()->SetExpressionAsStatementPosition(property->key());
2273         BuildLoadPropertyKey(property, key);
2274         if (property->is_static()) {
2275           // The static prototype property is read-only. We handle the
2276           // non-computed property name case in the parser. Since this is the only
2277           // case where we need to check for an own read-only property we
2278           // special-case it so we do not need to do this for every property.
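          // For example (illustrative):
          //
          //   class C { static ['prototype']() {} }
          //
          // reaches this check at class-definition time and throws via
          // Runtime::kThrowStaticPrototypeError.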
2279 
2280           FeedbackSlot slot = GetDummyCompareICSlot();
2281           BytecodeLabel done;
2282           builder()
2283               ->LoadLiteral(ast_string_constants()->prototype_string())
2284               .CompareOperation(Token::Value::EQ_STRICT, key,
2285                                 feedback_index(slot))
2286               .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &done)
2287               .CallRuntime(Runtime::kThrowStaticPrototypeError)
2288               .Bind(&done);
2289         }
2290 
2291         if (property->kind() == ClassLiteral::Property::FIELD) {
2292           DCHECK(!property->is_private());
2293           // Initialize field's name variable with the computed name.
2294           DCHECK_NOT_NULL(property->computed_name_var());
2295           builder()->LoadAccumulatorWithRegister(key);
2296           BuildVariableAssignment(property->computed_name_var(), Token::INIT,
2297                                   HoleCheckMode::kElided);
2298         }
2299       }
2300 
2301       DCHECK(!property->is_private());
2302 
2303       if (property->kind() == ClassLiteral::Property::FIELD) {
2304         // We don't compute the field's value here, but instead do it in the
2305         // initializer function.
2306         continue;
2307       }
2308 
2309       Register value = register_allocator()->GrowRegisterList(&args);
2310       VisitForRegisterValue(property->value(), value);
2311     }
2312 
2313     builder()->CallRuntime(Runtime::kDefineClass, args);
2314   }
2315   Register prototype = register_allocator()->NewRegister();
2316   builder()->StoreAccumulatorInRegister(prototype);
2317 
2318   // Assign to class variable.
2319   Variable* class_variable = expr->scope()->class_variable();
2320   if (class_variable != nullptr && class_variable->is_used()) {
2321     DCHECK(class_variable->IsStackLocal() || class_variable->IsContextSlot());
2322     builder()->LoadAccumulatorWithRegister(class_constructor);
2323     BuildVariableAssignment(class_variable, Token::INIT,
2324                             HoleCheckMode::kElided);
2325   }
2326 
2327   // Create the closures of private methods, and store the home object for
2328   // any private methods that need them.
2329   if (expr->has_private_methods()) {
2330     for (int i = 0; i < expr->private_members()->length(); i++) {
2331       ClassLiteral::Property* property = expr->private_members()->at(i);
2332       if (property->kind() != ClassLiteral::Property::METHOD) {
2333         continue;
2334       }
2335       RegisterAllocationScope register_scope(this);
2336       VisitForAccumulatorValue(property->value());
2337       BuildVariableAssignment(property->private_name_var(), Token::INIT,
2338                               HoleCheckMode::kElided);
2339       Register home_object = property->private_name_var()->is_static()
2340                                  ? class_constructor
2341                                  : prototype;
2342       if (property->NeedsHomeObjectOnClassPrototype()) {
2343         Register func = register_allocator()->NewRegister();
2344         builder()->StoreAccumulatorInRegister(func);
2345         VisitSetHomeObject(func, home_object, property);
2346       }
2347     }
2348   }
2349 
2350   // Define private accessors, using only a single call to the runtime for
2351   // each pair of corresponding getters and setters, in the order the first
2352   // component is declared. Store the home objects if necessary.
2353   for (auto accessors : private_accessors.ordered_accessors()) {
2354     RegisterAllocationScope inner_register_scope(this);
2355     RegisterList accessors_reg = register_allocator()->NewRegisterList(2);
2356     ClassLiteral::Property* getter = accessors.second->getter;
2357     ClassLiteral::Property* setter = accessors.second->setter;
2358     bool is_static =
2359         getter != nullptr ? getter->is_static() : setter->is_static();
2360     Register home_object = is_static ? class_constructor : prototype;
2361     VisitLiteralAccessor(home_object, getter, accessors_reg[0]);
2362     VisitLiteralAccessor(home_object, setter, accessors_reg[1]);
2363     builder()->CallRuntime(Runtime::kCreatePrivateAccessors, accessors_reg);
2364     Variable* var = getter != nullptr ? getter->private_name_var()
2365                                       : setter->private_name_var();
2366     DCHECK_NOT_NULL(var);
2367     BuildVariableAssignment(var, Token::INIT, HoleCheckMode::kElided);
2368   }
2369 
2370   if (expr->instance_members_initializer_function() != nullptr) {
2371     Register initializer =
2372         VisitForRegisterValue(expr->instance_members_initializer_function());
2373 
2374     if (FunctionLiteral::NeedsHomeObject(
2375             expr->instance_members_initializer_function())) {
2376       FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
2377       builder()->LoadAccumulatorWithRegister(prototype).StoreHomeObjectProperty(
2378           initializer, feedback_index(slot), language_mode());
2379     }
2380 
2381     FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
2382     builder()
2383         ->LoadAccumulatorWithRegister(initializer)
2384         .StoreClassFieldsInitializer(class_constructor, feedback_index(slot))
2385         .LoadAccumulatorWithRegister(class_constructor);
2386   }
2387 
2388   if (expr->static_fields_initializer() != nullptr) {
2389     // TODO(gsathya): This can be optimized away to be a part of the
2390     // class boilerplate in the future. The name argument can be
2391     // passed to the DefineClass runtime function and have it set
2392     // there.
2393     if (name.is_valid()) {
2394       Register key = register_allocator()->NewRegister();
2395       builder()
2396           ->LoadLiteral(ast_string_constants()->name_string())
2397           .StoreAccumulatorInRegister(key);
2398 
2399       DataPropertyInLiteralFlags data_property_flags =
2400           DataPropertyInLiteralFlag::kNoFlags;
2401       FeedbackSlot slot =
2402           feedback_spec()->AddStoreDataPropertyInLiteralICSlot();
2403       builder()->LoadAccumulatorWithRegister(name).StoreDataPropertyInLiteral(
2404           class_constructor, key, data_property_flags, feedback_index(slot));
2405     }
2406 
2407     RegisterList args = register_allocator()->NewRegisterList(1);
2408     Register initializer =
2409         VisitForRegisterValue(expr->static_fields_initializer());
2410 
2411     if (FunctionLiteral::NeedsHomeObject(expr->static_fields_initializer())) {
2412       FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
2413       builder()
2414           ->LoadAccumulatorWithRegister(class_constructor)
2415           .StoreHomeObjectProperty(initializer, feedback_index(slot),
2416                                    language_mode());
2417     }
2418 
2419     builder()
2420         ->MoveRegister(class_constructor, args[0])
2421         .CallProperty(initializer, args,
2422                       feedback_index(feedback_spec()->AddCallICSlot()));
2423   }
2424   builder()->LoadAccumulatorWithRegister(class_constructor);
2425 }
2426 
2427 void BytecodeGenerator::VisitClassLiteral(ClassLiteral* expr) {
2428   VisitClassLiteral(expr, Register::invalid_value());
2429 }
2430 
2431 void BytecodeGenerator::VisitClassLiteral(ClassLiteral* expr, Register name) {
2432   CurrentScope current_scope(this, expr->scope());
2433   DCHECK_NOT_NULL(expr->scope());
2434   if (expr->scope()->NeedsContext()) {
2435     BuildNewLocalBlockContext(expr->scope());
2436     ContextScope scope(this, expr->scope());
2437     BuildClassLiteral(expr, name);
2438   } else {
2439     BuildClassLiteral(expr, name);
2440   }
2441 }
2442 
2443 void BytecodeGenerator::VisitInitializeClassMembersStatement(
2444     InitializeClassMembersStatement* stmt) {
2445   RegisterList args = register_allocator()->NewRegisterList(3);
2446   Register constructor = args[0], key = args[1], value = args[2];
2447   builder()->MoveRegister(builder()->Receiver(), constructor);
2448 
2449   for (int i = 0; i < stmt->fields()->length(); i++) {
2450     ClassLiteral::Property* property = stmt->fields()->at(i);
2451     // Private methods are not initialized in the
2452     // InitializeClassMembersStatement.
2453     DCHECK_IMPLIES(property->is_private(),
2454                    property->kind() == ClassLiteral::Property::FIELD);
2455 
2456     if (property->is_computed_name()) {
2457       DCHECK_EQ(property->kind(), ClassLiteral::Property::FIELD);
2458       DCHECK(!property->is_private());
2459       Variable* var = property->computed_name_var();
2460       DCHECK_NOT_NULL(var);
2461       // The computed name is already evaluated and stored in a
2462       // variable at class definition time.
2463       BuildVariableLoad(var, HoleCheckMode::kElided);
2464       builder()->StoreAccumulatorInRegister(key);
2465     } else if (property->is_private()) {
2466       Variable* private_name_var = property->private_name_var();
2467       DCHECK_NOT_NULL(private_name_var);
2468       BuildVariableLoad(private_name_var, HoleCheckMode::kElided);
2469       builder()->StoreAccumulatorInRegister(key);
2470     } else {
2471       BuildLoadPropertyKey(property, key);
2472     }
2473 
2474     builder()->SetExpressionAsStatementPosition(property->value());
2475     VisitForRegisterValue(property->value(), value);
2476     VisitSetHomeObject(value, constructor, property);
2477 
2478     Runtime::FunctionId function_id =
2479         property->kind() == ClassLiteral::Property::FIELD &&
2480                 !property->is_private()
2481             ? Runtime::kCreateDataProperty
2482             : Runtime::kAddPrivateField;
2483     builder()->CallRuntime(function_id, args);
2484   }
2485 }
2486 
2487 void BytecodeGenerator::BuildInvalidPropertyAccess(MessageTemplate tmpl,
2488                                                    Property* property) {
2489   RegisterAllocationScope register_scope(this);
2490   const AstRawString* name = property->key()->AsVariableProxy()->raw_name();
2491   RegisterList args = register_allocator()->NewRegisterList(2);
2492   builder()
2493       ->LoadLiteral(Smi::FromEnum(tmpl))
2494       .StoreAccumulatorInRegister(args[0])
2495       .LoadLiteral(name)
2496       .StoreAccumulatorInRegister(args[1])
2497       .CallRuntime(Runtime::kNewTypeError, args)
2498       .Throw();
2499 }
2500 
2501 void BytecodeGenerator::BuildPrivateBrandInitialization(Register receiver) {
2502   RegisterList brand_args = register_allocator()->NewRegisterList(3);
2503   Variable* brand = info()->scope()->outer_scope()->AsClassScope()->brand();
2504   int depth = execution_context()->ContextChainDepth(brand->scope());
2505   ContextScope* class_context = execution_context()->Previous(depth);
2506 
2507   BuildVariableLoad(brand, HoleCheckMode::kElided);
2508   builder()
2509       ->StoreAccumulatorInRegister(brand_args[1])
2510       .MoveRegister(receiver, brand_args[0])
2511       .MoveRegister(class_context->reg(), brand_args[2])
2512       .CallRuntime(Runtime::kAddPrivateBrand, brand_args);
2513 }
2514 
2515 void BytecodeGenerator::BuildInstanceMemberInitialization(Register constructor,
2516                                                           Register instance) {
2517   RegisterList args = register_allocator()->NewRegisterList(1);
2518   Register initializer = register_allocator()->NewRegister();
2519 
2520   FeedbackSlot slot = feedback_spec()->AddLoadICSlot();
2521   BytecodeLabel done;
2522 
2523   builder()
2524       ->LoadClassFieldsInitializer(constructor, feedback_index(slot))
2525       // TODO(gsathya): This jump can be elided for the base
2526       // constructor and derived constructor. This is only required
2527       // when called from an arrow function.
2528       .JumpIfUndefined(&done)
2529       .StoreAccumulatorInRegister(initializer)
2530       .MoveRegister(instance, args[0])
2531       .CallProperty(initializer, args,
2532                     feedback_index(feedback_spec()->AddCallICSlot()))
2533       .Bind(&done);
2534 }
2535 
2536 void BytecodeGenerator::VisitNativeFunctionLiteral(
2537     NativeFunctionLiteral* expr) {
2538   size_t entry = builder()->AllocateDeferredConstantPoolEntry();
2539   int index = feedback_spec()->AddCreateClosureSlot();
2540   uint8_t flags = CreateClosureFlags::Encode(false, false, false);
2541   builder()->CreateClosure(entry, index, flags);
2542   native_function_literals_.push_back(std::make_pair(expr, entry));
2543 }
2544 
2545 void BytecodeGenerator::VisitConditional(Conditional* expr) {
2546   ConditionalControlFlowBuilder conditional_builder(
2547       builder(), block_coverage_builder_, expr);
2548 
2549   if (expr->condition()->ToBooleanIsTrue()) {
2550     // Generate the then-block unconditionally since the condition is always true.
2551     conditional_builder.Then();
2552     VisitForAccumulatorValue(expr->then_expression());
2553   } else if (expr->condition()->ToBooleanIsFalse()) {
2554     // The condition is statically false, so generate only the else block.
2555     conditional_builder.Else();
2556     VisitForAccumulatorValue(expr->else_expression());
2557   } else {
2558     VisitForTest(expr->condition(), conditional_builder.then_labels(),
2559                  conditional_builder.else_labels(), TestFallthrough::kThen);
2560 
2561     conditional_builder.Then();
2562     VisitForAccumulatorValue(expr->then_expression());
2563     conditional_builder.JumpToEnd();
2564 
2565     conditional_builder.Else();
2566     VisitForAccumulatorValue(expr->else_expression());
2567   }
2568 }
2569 
2570 void BytecodeGenerator::VisitLiteral(Literal* expr) {
2571   if (execution_result()->IsEffect()) return;
2572   switch (expr->type()) {
2573     case Literal::kSmi:
2574       builder()->LoadLiteral(expr->AsSmiLiteral());
2575       break;
2576     case Literal::kHeapNumber:
2577       builder()->LoadLiteral(expr->AsNumber());
2578       break;
2579     case Literal::kUndefined:
2580       builder()->LoadUndefined();
2581       break;
2582     case Literal::kBoolean:
2583       builder()->LoadBoolean(expr->ToBooleanIsTrue());
2584       execution_result()->SetResultIsBoolean();
2585       break;
2586     case Literal::kNull:
2587       builder()->LoadNull();
2588       break;
2589     case Literal::kTheHole:
2590       builder()->LoadTheHole();
2591       break;
2592     case Literal::kString:
2593       builder()->LoadLiteral(expr->AsRawString());
2594       execution_result()->SetResultIsString();
2595       break;
2596     case Literal::kSymbol:
2597       builder()->LoadLiteral(expr->AsSymbol());
2598       break;
2599     case Literal::kBigInt:
2600       builder()->LoadLiteral(expr->AsBigInt());
2601       break;
2602   }
2603 }
2604 
2605 void BytecodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
2606   // Materialize a regular expression literal.
2607   builder()->CreateRegExpLiteral(
2608       expr->raw_pattern(), feedback_index(feedback_spec()->AddLiteralSlot()),
2609       expr->flags());
2610 }
2611 
2612 void BytecodeGenerator::BuildCreateObjectLiteral(Register literal,
2613                                                  uint8_t flags, size_t entry) {
2614   if (ShouldOptimizeAsOneShot()) {
2615     RegisterList args = register_allocator()->NewRegisterList(2);
2616     builder()
2617         ->LoadConstantPoolEntry(entry)
2618         .StoreAccumulatorInRegister(args[0])
2619         .LoadLiteral(Smi::FromInt(flags))
2620         .StoreAccumulatorInRegister(args[1])
2621         .CallRuntime(Runtime::kCreateObjectLiteralWithoutAllocationSite, args)
2622         .StoreAccumulatorInRegister(literal);
2623 
2624   } else {
2625     // TODO(cbruni): Directly generate runtime call for literals we cannot
2626     // optimize once the CreateShallowObjectLiteral stub is in sync with the TF
2627     // optimizations.
2628     int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
2629     builder()
2630         ->CreateObjectLiteral(entry, literal_index, flags)
2631         .StoreAccumulatorInRegister(literal);
2632   }
2633 }
2634 
2635 void BytecodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
2636   expr->InitDepthAndFlags();
2637 
2638   // Fast path for the empty object literal which doesn't need an
2639   // AllocationSite.
2640   if (expr->IsEmptyObjectLiteral()) {
2641     DCHECK(expr->IsFastCloningSupported());
2642     builder()->CreateEmptyObjectLiteral();
2643     return;
2644   }
2645 
2646   // Deep-copy the literal boilerplate.
2647   uint8_t flags = CreateObjectLiteralFlags::Encode(
2648       expr->ComputeFlags(), expr->IsFastCloningSupported());
2649 
2650   Register literal = register_allocator()->NewRegister();
2651 
2652   // Create literal object.
2653   int property_index = 0;
2654   bool clone_object_spread =
2655       expr->properties()->first()->kind() == ObjectLiteral::Property::SPREAD;
2656   if (clone_object_spread) {
2657     // Avoid the slow path for spreads in the following common cases:
2658     //   1) `let obj = { ...source }`
2659     //   2) `let obj = { ...source, override: 1 }`
2660     //   3) `let obj = { ...source, ...overrides }`
2661     RegisterAllocationScope register_scope(this);
2662     Expression* property = expr->properties()->first()->value();
2663     Register from_value = VisitForRegisterValue(property);
2664     int clone_index = feedback_index(feedback_spec()->AddCloneObjectSlot());
2665     builder()->CloneObject(from_value, flags, clone_index);
2666     builder()->StoreAccumulatorInRegister(literal);
2667     property_index++;
2668   } else {
2669     size_t entry;
2670     // If the constant properties form an empty fixed array, use a cached
2671     // empty fixed array to ensure it's only added to the constant pool once.
2672     if (expr->properties_count() == 0) {
2673       entry = builder()->EmptyObjectBoilerplateDescriptionConstantPoolEntry();
2674     } else {
2675       entry = builder()->AllocateDeferredConstantPoolEntry();
2676       object_literals_.push_back(std::make_pair(expr, entry));
2677     }
2678     BuildCreateObjectLiteral(literal, flags, entry);
2679   }
2680 
2681   // Store computed values into the literal.
2682   AccessorTable<ObjectLiteral::Property> accessor_table(zone());
2683   for (; property_index < expr->properties()->length(); property_index++) {
2684     ObjectLiteral::Property* property = expr->properties()->at(property_index);
2685     if (property->is_computed_name()) break;
2686     if (!clone_object_spread && property->IsCompileTimeValue()) continue;
2687 
2688     RegisterAllocationScope inner_register_scope(this);
2689     Literal* key = property->key()->AsLiteral();
2690     switch (property->kind()) {
2691       case ObjectLiteral::Property::SPREAD:
2692         UNREACHABLE();
2693       case ObjectLiteral::Property::CONSTANT:
2694       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2695         DCHECK(clone_object_spread || !property->value()->IsCompileTimeValue());
2696         V8_FALLTHROUGH;
2697       case ObjectLiteral::Property::COMPUTED: {
2698         // It is safe to use [[Put]] here because the boilerplate already
2699         // contains computed properties with an uninitialized value.
2700         if (key->IsStringLiteral()) {
2701           DCHECK(key->IsPropertyName());
2702           if (property->emit_store()) {
2703             builder()->SetExpressionPosition(property->value());
2704             VisitForAccumulatorValue(property->value());
2705             FeedbackSlot slot = feedback_spec()->AddStoreOwnICSlot();
2706             if (FunctionLiteral::NeedsHomeObject(property->value())) {
2707               RegisterAllocationScope register_scope(this);
2708               Register value = register_allocator()->NewRegister();
2709               builder()->StoreAccumulatorInRegister(value);
2710               builder()->StoreNamedOwnProperty(
2711                   literal, key->AsRawPropertyName(), feedback_index(slot));
2712               VisitSetHomeObject(value, literal, property);
2713             } else {
2714               builder()->StoreNamedOwnProperty(
2715                   literal, key->AsRawPropertyName(), feedback_index(slot));
2716             }
2717           } else {
2718             builder()->SetExpressionPosition(property->value());
2719             VisitForEffect(property->value());
2720           }
2721         } else {
2722           RegisterList args = register_allocator()->NewRegisterList(3);
2723 
2724           builder()->MoveRegister(literal, args[0]);
2725           builder()->SetExpressionPosition(property->key());
2726           VisitForRegisterValue(property->key(), args[1]);
2727           builder()->SetExpressionPosition(property->value());
2728           VisitForRegisterValue(property->value(), args[2]);
2729           if (property->emit_store()) {
2730             builder()->CallRuntime(Runtime::kSetKeyedProperty, args);
2731             Register value = args[2];
2732             VisitSetHomeObject(value, literal, property);
2733           }
2734         }
2735         break;
2736       }
2737       case ObjectLiteral::Property::PROTOTYPE: {
2738         // __proto__:null is handled by CreateObjectLiteral.
2739         if (property->IsNullPrototype()) break;
2740         DCHECK(property->emit_store());
2741         DCHECK(!property->NeedsSetFunctionName());
2742         RegisterList args = register_allocator()->NewRegisterList(2);
2743         builder()->MoveRegister(literal, args[0]);
2744         builder()->SetExpressionPosition(property->value());
2745         VisitForRegisterValue(property->value(), args[1]);
2746         builder()->CallRuntime(Runtime::kInternalSetPrototype, args);
2747         break;
2748       }
2749       case ObjectLiteral::Property::GETTER:
2750         if (property->emit_store()) {
2751           accessor_table.LookupOrInsert(key)->getter = property;
2752         }
2753         break;
2754       case ObjectLiteral::Property::SETTER:
2755         if (property->emit_store()) {
2756           accessor_table.LookupOrInsert(key)->setter = property;
2757         }
2758         break;
2759     }
2760   }
2761 
2762   // Define accessors, using only a single call to the runtime for each pair of
2763   // corresponding getters and setters.
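       // e.g. for `{ get x() {}, set x(v) {} }` both accessors are installed by
       // one %DefineAccessorPropertyUnchecked call.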
2764   for (auto accessors : accessor_table.ordered_accessors()) {
2765     RegisterAllocationScope inner_register_scope(this);
2766     RegisterList args = register_allocator()->NewRegisterList(5);
2767     builder()->MoveRegister(literal, args[0]);
2768     VisitForRegisterValue(accessors.first, args[1]);
2769     VisitLiteralAccessor(literal, accessors.second->getter, args[2]);
2770     VisitLiteralAccessor(literal, accessors.second->setter, args[3]);
2771     builder()
2772         ->LoadLiteral(Smi::FromInt(NONE))
2773         .StoreAccumulatorInRegister(args[4])
2774         .CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, args);
2775   }
2776 
2777   // Object literals have two parts. The "static" part on the left contains no
2778   // computed property names, and so we can compute its map ahead of time; see
2779   // Runtime_CreateObjectLiteralBoilerplate. The second "dynamic" part starts
2780   // with the first computed property name and continues with all properties to
2781   // its right. All the code from above initializes the static component of the
2782   // object literal, and arranges for the map of the result to reflect the
2783   // static order in which the keys appear. For the dynamic properties, we
2784   // compile them into a series of "SetOwnProperty" runtime calls. This will
2785   // preserve insertion order.
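       // e.g. in `{ a: 1, [foo()]: 2, b: 3 }`, `a: 1` belongs to the static part,
       // while `[foo()]: 2` and `b: 3` are compiled as dynamic properties below.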
2786   for (; property_index < expr->properties()->length(); property_index++) {
2787     ObjectLiteral::Property* property = expr->properties()->at(property_index);
2788     RegisterAllocationScope inner_register_scope(this);
2789 
2790     if (property->IsPrototype()) {
2791       // __proto__:null is handled by CreateObjectLiteral.
2792       if (property->IsNullPrototype()) continue;
2793       DCHECK(property->emit_store());
2794       DCHECK(!property->NeedsSetFunctionName());
2795       RegisterList args = register_allocator()->NewRegisterList(2);
2796       builder()->MoveRegister(literal, args[0]);
2797       builder()->SetExpressionPosition(property->value());
2798       VisitForRegisterValue(property->value(), args[1]);
2799       builder()->CallRuntime(Runtime::kInternalSetPrototype, args);
2800       continue;
2801     }
2802 
2803     switch (property->kind()) {
2804       case ObjectLiteral::Property::CONSTANT:
2805       case ObjectLiteral::Property::COMPUTED:
2806       case ObjectLiteral::Property::MATERIALIZED_LITERAL: {
2807         Register key = register_allocator()->NewRegister();
2808         BuildLoadPropertyKey(property, key);
2809         builder()->SetExpressionPosition(property->value());
2810         Register value;
2811 
2812         // Static class fields require the name property to be set on
2813         // the class, meaning we can't wait until the
2814         // StoreDataPropertyInLiteral call later to set the name.
2815         if (property->value()->IsClassLiteral() &&
2816             property->value()->AsClassLiteral()->static_fields_initializer() !=
2817                 nullptr) {
2818           value = register_allocator()->NewRegister();
2819           VisitClassLiteral(property->value()->AsClassLiteral(), key);
2820           builder()->StoreAccumulatorInRegister(value);
2821         } else {
2822           value = VisitForRegisterValue(property->value());
2823         }
2824         VisitSetHomeObject(value, literal, property);
2825 
2826         DataPropertyInLiteralFlags data_property_flags =
2827             DataPropertyInLiteralFlag::kNoFlags;
2828         if (property->NeedsSetFunctionName()) {
2829           data_property_flags |= DataPropertyInLiteralFlag::kSetFunctionName;
2830         }
2831 
2832         FeedbackSlot slot =
2833             feedback_spec()->AddStoreDataPropertyInLiteralICSlot();
2834         builder()
2835             ->LoadAccumulatorWithRegister(value)
2836             .StoreDataPropertyInLiteral(literal, key, data_property_flags,
2837                                         feedback_index(slot));
2838         break;
2839       }
2840       case ObjectLiteral::Property::GETTER:
2841       case ObjectLiteral::Property::SETTER: {
2842         RegisterList args = register_allocator()->NewRegisterList(4);
2843         builder()->MoveRegister(literal, args[0]);
2844         BuildLoadPropertyKey(property, args[1]);
2845         builder()->SetExpressionPosition(property->value());
2846         VisitForRegisterValue(property->value(), args[2]);
2847         VisitSetHomeObject(args[2], literal, property);
2848         builder()
2849             ->LoadLiteral(Smi::FromInt(NONE))
2850             .StoreAccumulatorInRegister(args[3]);
2851         Runtime::FunctionId function_id =
2852             property->kind() == ObjectLiteral::Property::GETTER
2853                 ? Runtime::kDefineGetterPropertyUnchecked
2854                 : Runtime::kDefineSetterPropertyUnchecked;
2855         builder()->CallRuntime(function_id, args);
2856         break;
2857       }
2858       case ObjectLiteral::Property::SPREAD: {
2859         RegisterList args = register_allocator()->NewRegisterList(2);
2860         builder()->MoveRegister(literal, args[0]);
2861         builder()->SetExpressionPosition(property->value());
2862         VisitForRegisterValue(property->value(), args[1]);
2863         builder()->CallRuntime(Runtime::kInlineCopyDataProperties, args);
2864         break;
2865       }
2866       case ObjectLiteral::Property::PROTOTYPE:
2867         UNREACHABLE();  // Handled specially above.
2868         break;
2869     }
2870   }
2871 
2872   builder()->LoadAccumulatorWithRegister(literal);
2873 }
2874 
2875 // Fill an array with values from an iterator, starting at a given index. It is
2876 // guaranteed that the loop will only terminate if the iterator is exhausted, or
2877 // if one of iterator.next(), value.done, or value.value fails.
2878 //
2879 // In pseudocode:
2880 //
2881 // loop {
2882 //   value = iterator.next()
2883 //   if (value.done) break;
2884 //   value = value.value
2885 //   array[index++] = value
2886 // }
2887 void BytecodeGenerator::BuildFillArrayWithIterator(
2888     IteratorRecord iterator, Register array, Register index, Register value,
2889     FeedbackSlot next_value_slot, FeedbackSlot next_done_slot,
2890     FeedbackSlot index_slot, FeedbackSlot element_slot) {
2891   DCHECK(array.is_valid());
2892   DCHECK(index.is_valid());
2893   DCHECK(value.is_valid());
2894 
2895   LoopBuilder loop_builder(builder(), nullptr, nullptr);
2896   LoopScope loop_scope(this, &loop_builder);
2897 
2898   // Call the iterator's .next() method. Break from the loop if the `done`
2899   // property is truthy, otherwise load the value from the iterator result and
2900   // store it into the array at the current index.
2901   BuildIteratorNext(iterator, value);
2902   builder()->LoadNamedProperty(
2903       value, ast_string_constants()->done_string(),
2904       feedback_index(feedback_spec()->AddLoadICSlot()));
2905   loop_builder.BreakIfTrue(ToBooleanMode::kConvertToBoolean);
2906 
2907   loop_builder.LoopBody();
2908   builder()
2909       // value = value.value
2910       ->LoadNamedProperty(value, ast_string_constants()->value_string(),
2911                           feedback_index(next_value_slot))
2912       // array[index] = value
2913       .StoreInArrayLiteral(array, index, feedback_index(element_slot))
2914       // index++
2915       .LoadAccumulatorWithRegister(index)
2916       .UnaryOperation(Token::INC, feedback_index(index_slot))
2917       .StoreAccumulatorInRegister(index);
2918   loop_builder.BindContinueTarget();
2919 }
2920 
2921 void BytecodeGenerator::BuildCreateArrayLiteral(
2922     const ZonePtrList<Expression>* elements, ArrayLiteral* expr) {
2923   RegisterAllocationScope register_scope(this);
2924   Register index = register_allocator()->NewRegister();
2925   Register array = register_allocator()->NewRegister();
2926   SharedFeedbackSlot element_slot(feedback_spec(),
2927                                   FeedbackSlotKind::kStoreInArrayLiteral);
2928   ZonePtrList<Expression>::const_iterator current = elements->begin();
2929   ZonePtrList<Expression>::const_iterator end = elements->end();
2930   bool is_empty = elements->is_empty();
2931 
2932   if (!is_empty && (*current)->IsSpread()) {
2933     // If we have a leading spread, use CreateArrayFromIterable to create
2934     // an array from it and then add the remaining components to that array.
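         // e.g. for `[...xs, y]`, the array is built from `xs` here and `y` is
         // appended below, starting at index `array.length`.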
2935     VisitForAccumulatorValue(*current);
2936     builder()->SetExpressionPosition((*current)->AsSpread()->expression());
2937     builder()->CreateArrayFromIterable().StoreAccumulatorInRegister(array);
2938 
2939     if (++current != end) {
2940       // If there are remaining elements, prepare the index register that is
2941       // used for adding those elements. The next index is the length of the
2942       // newly created array.
2943       auto length = ast_string_constants()->length_string();
2944       int length_load_slot = feedback_index(feedback_spec()->AddLoadICSlot());
2945       builder()
2946           ->LoadNamedProperty(array, length, length_load_slot)
2947           .StoreAccumulatorInRegister(index);
2948     }
2949   } else if (expr != nullptr) {
2950     // There are some elements before the first (if any) spread, and we can
2951     // use a boilerplate when creating the initial array from those elements.
2952 
2953     // First, allocate a constant pool entry for the boilerplate that will
2954     // be created during finalization, and will contain all the constant
2955     // elements before the first spread. This also handles the empty array case
2956     // and one-shot optimization.
2957     uint8_t flags = CreateArrayLiteralFlags::Encode(
2958         expr->IsFastCloningSupported(), expr->ComputeFlags());
2959     bool optimize_as_one_shot = ShouldOptimizeAsOneShot();
2960     size_t entry;
2961     if (is_empty && optimize_as_one_shot) {
2962       entry = builder()->EmptyArrayBoilerplateDescriptionConstantPoolEntry();
2963     } else if (!is_empty) {
2964       entry = builder()->AllocateDeferredConstantPoolEntry();
2965       array_literals_.push_back(std::make_pair(expr, entry));
2966     }
2967 
2968     if (optimize_as_one_shot) {
2969       RegisterList args = register_allocator()->NewRegisterList(2);
2970       builder()
2971           ->LoadConstantPoolEntry(entry)
2972           .StoreAccumulatorInRegister(args[0])
2973           .LoadLiteral(Smi::FromInt(flags))
2974           .StoreAccumulatorInRegister(args[1])
2975           .CallRuntime(Runtime::kCreateArrayLiteralWithoutAllocationSite, args);
2976     } else if (is_empty) {
2977       // Empty array literal fast-path.
2978       int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
2979       DCHECK(expr->IsFastCloningSupported());
2980       builder()->CreateEmptyArrayLiteral(literal_index);
2981     } else {
2982       // Create array literal from boilerplate.
2983       int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
2984       builder()->CreateArrayLiteral(entry, literal_index, flags);
2985     }
2986     builder()->StoreAccumulatorInRegister(array);
2987 
2988     // Insert the missing non-constant elements, up until the first spread
2989     // index, into the initial array (the remaining elements will be inserted
2990     // below).
2991     DCHECK_EQ(current, elements->begin());
2992     ZonePtrList<Expression>::const_iterator first_spread_or_end =
2993         expr->first_spread_index() >= 0 ? current + expr->first_spread_index()
2994                                         : end;
2995     int array_index = 0;
2996     for (; current != first_spread_or_end; ++current, array_index++) {
2997       Expression* subexpr = *current;
2998       DCHECK(!subexpr->IsSpread());
2999       // Skip the constants.
3000       if (subexpr->IsCompileTimeValue()) continue;
3001 
3002       builder()
3003           ->LoadLiteral(Smi::FromInt(array_index))
3004           .StoreAccumulatorInRegister(index);
3005       VisitForAccumulatorValue(subexpr);
3006       builder()->StoreInArrayLiteral(array, index,
3007                                      feedback_index(element_slot.Get()));
3008     }
3009 
3010     if (current != end) {
3011       // If there are remaining elements, prepare the index register
3012       // to store the next element, which comes from the first spread.
3013       builder()->LoadLiteral(array_index).StoreAccumulatorInRegister(index);
3014     }
3015   } else {
3016     // In other cases, we prepare an empty array to be filled in below.
3017     DCHECK(!elements->is_empty());
3018     int literal_index = feedback_index(feedback_spec()->AddLiteralSlot());
3019     builder()
3020         ->CreateEmptyArrayLiteral(literal_index)
3021         .StoreAccumulatorInRegister(array);
3022     // Prepare the index for the first element.
3023     builder()->LoadLiteral(Smi::FromInt(0)).StoreAccumulatorInRegister(index);
3024   }
3025 
3026   // Now build insertions for the remaining elements from current to end.
3027   SharedFeedbackSlot index_slot(feedback_spec(), FeedbackSlotKind::kBinaryOp);
3028   SharedFeedbackSlot length_slot(
3029       feedback_spec(), feedback_spec()->GetStoreICSlot(LanguageMode::kStrict));
3030   for (; current != end; ++current) {
3031     Expression* subexpr = *current;
3032     if (subexpr->IsSpread()) {
3033       RegisterAllocationScope scope(this);
3034       builder()->SetExpressionAsStatementPosition(
3035           subexpr->AsSpread()->expression());
3036       VisitForAccumulatorValue(subexpr->AsSpread()->expression());
3037       builder()->SetExpressionPosition(subexpr->AsSpread()->expression());
3038       IteratorRecord iterator = BuildGetIteratorRecord(IteratorType::kNormal);
3039 
3040       Register value = register_allocator()->NewRegister();
3041       FeedbackSlot next_value_load_slot = feedback_spec()->AddLoadICSlot();
3042       FeedbackSlot next_done_load_slot = feedback_spec()->AddLoadICSlot();
3043       FeedbackSlot real_index_slot = index_slot.Get();
3044       FeedbackSlot real_element_slot = element_slot.Get();
3045       BuildFillArrayWithIterator(iterator, array, index, value,
3046                                  next_value_load_slot, next_done_load_slot,
3047                                  real_index_slot, real_element_slot);
3048     } else if (!subexpr->IsTheHoleLiteral()) {
3049       // literal[index++] = subexpr
3050       VisitForAccumulatorValue(subexpr);
3051       builder()
3052           ->StoreInArrayLiteral(array, index,
3053                                 feedback_index(element_slot.Get()))
3054           .LoadAccumulatorWithRegister(index);
3055       // Only increase the index if this is not the last element.
3056       if (current + 1 != end) {
3057         builder()
3058             ->UnaryOperation(Token::INC, feedback_index(index_slot.Get()))
3059             .StoreAccumulatorInRegister(index);
3060       }
3061     } else {
3062       // literal.length = ++index
3063       // length_slot is only used when there are holes.
3064       auto length = ast_string_constants()->length_string();
3065       builder()
3066           ->LoadAccumulatorWithRegister(index)
3067           .UnaryOperation(Token::INC, feedback_index(index_slot.Get()))
3068           .StoreAccumulatorInRegister(index)
3069           .StoreNamedProperty(array, length, feedback_index(length_slot.Get()),
3070                               LanguageMode::kStrict);
3071     }
3072   }
3073 
3074   builder()->LoadAccumulatorWithRegister(array);
3075 }
3076 
3077 void BytecodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
3078   expr->InitDepthAndFlags();
3079   BuildCreateArrayLiteral(expr->values(), expr);
3080 }
3081 
3082 void BytecodeGenerator::VisitVariableProxy(VariableProxy* proxy) {
3083   builder()->SetExpressionPosition(proxy);
3084   BuildVariableLoad(proxy->var(), proxy->hole_check_mode());
3085 }
3086 
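     // Loads |variable| into the accumulator, dispatching on where it lives:
     // a local or parameter register, a global, a (possibly outer) context
     // slot, a dynamic lookup slot, or a module cell. When |hole_check_mode|
     // requires it, a TDZ hole check is emitted after the load.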
3087 void BytecodeGenerator::BuildVariableLoad(Variable* variable,
3088                                           HoleCheckMode hole_check_mode,
3089                                           TypeofMode typeof_mode) {
3090   switch (variable->location()) {
3091     case VariableLocation::LOCAL: {
3092       Register source(builder()->Local(variable->index()));
3093       // We need to load the variable into the accumulator, even when in a
3094       // VisitForRegisterScope, in order to avoid register aliasing if
3095       // subsequent expressions assign to the same variable.
3096       builder()->LoadAccumulatorWithRegister(source);
3097       if (hole_check_mode == HoleCheckMode::kRequired) {
3098         BuildThrowIfHole(variable);
3099       }
3100       break;
3101     }
3102     case VariableLocation::PARAMETER: {
3103       Register source;
3104       if (variable->IsReceiver()) {
3105         source = builder()->Receiver();
3106       } else {
3107         source = builder()->Parameter(variable->index());
3108       }
3109       // We need to load the variable into the accumulator, even when in a
3110       // VisitForRegisterScope, in order to avoid register aliasing if
3111       // subsequent expressions assign to the same variable.
3112       builder()->LoadAccumulatorWithRegister(source);
3113       if (hole_check_mode == HoleCheckMode::kRequired) {
3114         BuildThrowIfHole(variable);
3115       }
3116       break;
3117     }
3118     case VariableLocation::UNALLOCATED: {
3119       // The global identifier "undefined" is immutable. Everything
3120       // else could be reassigned. For performance, we do a pointer comparison
3121       // rather than checking if the raw_name is really "undefined".
3122       if (variable->raw_name() == ast_string_constants()->undefined_string()) {
3123         builder()->LoadUndefined();
3124       } else {
3125         FeedbackSlot slot = GetCachedLoadGlobalICSlot(typeof_mode, variable);
3126         builder()->LoadGlobal(variable->raw_name(), feedback_index(slot),
3127                               typeof_mode);
3128       }
3129       break;
3130     }
3131     case VariableLocation::CONTEXT: {
3132       int depth = execution_context()->ContextChainDepth(variable->scope());
3133       ContextScope* context = execution_context()->Previous(depth);
3134       Register context_reg;
3135       if (context) {
3136         context_reg = context->reg();
3137         depth = 0;
3138       } else {
3139         context_reg = execution_context()->reg();
3140       }
3141 
3142       BytecodeArrayBuilder::ContextSlotMutability immutable =
3143           (variable->maybe_assigned() == kNotAssigned)
3144               ? BytecodeArrayBuilder::kImmutableSlot
3145               : BytecodeArrayBuilder::kMutableSlot;
3146 
3147       builder()->LoadContextSlot(context_reg, variable->index(), depth,
3148                                  immutable);
3149       if (hole_check_mode == HoleCheckMode::kRequired) {
3150         BuildThrowIfHole(variable);
3151       }
3152       break;
3153     }
3154     case VariableLocation::LOOKUP: {
3155       switch (variable->mode()) {
3156         case VariableMode::kDynamicLocal: {
3157           Variable* local_variable = variable->local_if_not_shadowed();
3158           int depth =
3159               execution_context()->ContextChainDepth(local_variable->scope());
3160           builder()->LoadLookupContextSlot(variable->raw_name(), typeof_mode,
3161                                            local_variable->index(), depth);
3162           if (hole_check_mode == HoleCheckMode::kRequired) {
3163             BuildThrowIfHole(variable);
3164           }
3165           break;
3166         }
3167         case VariableMode::kDynamicGlobal: {
3168           int depth =
3169               current_scope()->ContextChainLengthUntilOutermostSloppyEval();
3170           // TODO(1008414): Add back caching here when bug is fixed properly.
3171           FeedbackSlot slot = feedback_spec()->AddLoadGlobalICSlot(typeof_mode);
3172 
3173           builder()->LoadLookupGlobalSlot(variable->raw_name(), typeof_mode,
3174                                           feedback_index(slot), depth);
3175           break;
3176         }
3177         default:
3178           builder()->LoadLookupSlot(variable->raw_name(), typeof_mode);
3179       }
3180       break;
3181     }
3182     case VariableLocation::MODULE: {
3183       int depth = execution_context()->ContextChainDepth(variable->scope());
3184       builder()->LoadModuleVariable(variable->index(), depth);
3185       if (hole_check_mode == HoleCheckMode::kRequired) {
3186         BuildThrowIfHole(variable);
3187       }
3188       break;
3189     }
3190     case VariableLocation::REPL_GLOBAL: {
3191       DCHECK(variable->IsReplGlobalLet());
3192       FeedbackSlot slot = GetCachedLoadGlobalICSlot(typeof_mode, variable);
3193       builder()->LoadGlobal(variable->raw_name(), feedback_index(slot),
3194                             typeof_mode);
3195       break;
3196     }
3197   }
3198 }
3199 
3200 void BytecodeGenerator::BuildVariableLoadForAccumulatorValue(
3201     Variable* variable, HoleCheckMode hole_check_mode, TypeofMode typeof_mode) {
3202   ValueResultScope accumulator_result(this);
3203   BuildVariableLoad(variable, hole_check_mode, typeof_mode);
3204 }
3205 
3206 void BytecodeGenerator::BuildReturn(int source_position) {
3207   if (FLAG_trace) {
3208     RegisterAllocationScope register_scope(this);
3209     Register result = register_allocator()->NewRegister();
3210     // Runtime returns {result} value, preserving accumulator.
3211     builder()->StoreAccumulatorInRegister(result).CallRuntime(
3212         Runtime::kTraceExit, result);
3213   }
3214   if (info()->flags().collect_type_profile()) {
3215     builder()->CollectTypeProfile(info()->literal()->return_position());
3216   }
3217   builder()->SetReturnPosition(source_position, info()->literal());
3218   builder()->Return();
3219 }
3220 
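     // Resolves the async function or async generator before returning.
     // Roughly:
     //   async generator: %AsyncGeneratorResolve(generator, value, /*done*/ true)
     //   async function:  %AsyncFunctionResolve(generator, value, can_suspend)
     // followed by the regular return sequence.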
3221 void BytecodeGenerator::BuildAsyncReturn(int source_position) {
3222   RegisterAllocationScope register_scope(this);
3223 
3224   if (IsAsyncGeneratorFunction(info()->literal()->kind())) {
3225     RegisterList args = register_allocator()->NewRegisterList(3);
3226     builder()
3227         ->MoveRegister(generator_object(), args[0])  // generator
3228         .StoreAccumulatorInRegister(args[1])         // value
3229         .LoadTrue()
3230         .StoreAccumulatorInRegister(args[2])  // done
3231         .CallRuntime(Runtime::kInlineAsyncGeneratorResolve, args);
3232   } else {
3233     DCHECK(IsAsyncFunction(info()->literal()->kind()) ||
3234            IsAsyncModule(info()->literal()->kind()));
3235     RegisterList args = register_allocator()->NewRegisterList(3);
3236     builder()
3237         ->MoveRegister(generator_object(), args[0])  // generator
3238         .StoreAccumulatorInRegister(args[1])         // value
3239         .LoadBoolean(info()->literal()->CanSuspend())
3240         .StoreAccumulatorInRegister(args[2])  // can_suspend
3241         .CallRuntime(Runtime::kInlineAsyncFunctionResolve, args);
3242   }
3243 
3244   BuildReturn(source_position);
3245 }
3246 
3247 void BytecodeGenerator::BuildReThrow() { builder()->ReThrow(); }
3248 
3249 void BytecodeGenerator::BuildThrowIfHole(Variable* variable) {
3250   if (variable->is_this()) {
3251     DCHECK(variable->mode() == VariableMode::kConst);
3252     builder()->ThrowSuperNotCalledIfHole();
3253   } else {
3254     builder()->ThrowReferenceErrorIfHole(variable->raw_name());
3255   }
3256 }
3257 
3258 void BytecodeGenerator::BuildHoleCheckForVariableAssignment(Variable* variable,
3259                                                             Token::Value op) {
3260   DCHECK(!IsPrivateMethodOrAccessorVariableMode(variable->mode()));
3261   if (variable->is_this() && variable->mode() == VariableMode::kConst &&
3262       op == Token::INIT) {
3263     // Perform an initialization check for 'this'. 'this' is the only
3264     // variable able to trigger bind operations outside the TDZ
3265     // via 'super' calls.
3266     builder()->ThrowSuperAlreadyCalledIfNotHole();
3267   } else {
3268     // Perform an initialization check for let/const declared variables.
3269     // E.g. let x = (x = 20); is not allowed.
3270     DCHECK(IsLexicalVariableMode(variable->mode()));
3271     BuildThrowIfHole(variable);
3272   }
3273 }
3274 
3275 void BytecodeGenerator::BuildVariableAssignment(
3276     Variable* variable, Token::Value op, HoleCheckMode hole_check_mode,
3277     LookupHoistingMode lookup_hoisting_mode) {
3278   VariableMode mode = variable->mode();
3279   RegisterAllocationScope assignment_register_scope(this);
3280   BytecodeLabel end_label;
3281   switch (variable->location()) {
3282     case VariableLocation::PARAMETER:
3283     case VariableLocation::LOCAL: {
3284       Register destination;
3285       if (VariableLocation::PARAMETER == variable->location()) {
3286         if (variable->IsReceiver()) {
3287           destination = builder()->Receiver();
3288         } else {
3289           destination = builder()->Parameter(variable->index());
3290         }
3291       } else {
3292         destination = builder()->Local(variable->index());
3293       }
3294 
3295       if (hole_check_mode == HoleCheckMode::kRequired) {
3296         // Load destination to check for hole.
3297         Register value_temp = register_allocator()->NewRegister();
3298         builder()
3299             ->StoreAccumulatorInRegister(value_temp)
3300             .LoadAccumulatorWithRegister(destination);
3301 
3302         BuildHoleCheckForVariableAssignment(variable, op);
3303         builder()->LoadAccumulatorWithRegister(value_temp);
3304       }
3305 
3306       if (mode != VariableMode::kConst || op == Token::INIT) {
3307         builder()->StoreAccumulatorInRegister(destination);
3308       } else if (variable->throw_on_const_assignment(language_mode())) {
3309         builder()->CallRuntime(Runtime::kThrowConstAssignError);
3310       }
3311       break;
3312     }
3313     case VariableLocation::UNALLOCATED: {
3314       FeedbackSlot slot = GetCachedStoreGlobalICSlot(language_mode(), variable);
3315       builder()->StoreGlobal(variable->raw_name(), feedback_index(slot));
3316       break;
3317     }
3318     case VariableLocation::CONTEXT: {
3319       int depth = execution_context()->ContextChainDepth(variable->scope());
3320       ContextScope* context = execution_context()->Previous(depth);
3321       Register context_reg;
3322 
3323       if (context) {
3324         context_reg = context->reg();
3325         depth = 0;
3326       } else {
3327         context_reg = execution_context()->reg();
3328       }
3329 
3330       if (hole_check_mode == HoleCheckMode::kRequired) {
3331         // Load destination to check for hole.
3332         Register value_temp = register_allocator()->NewRegister();
3333         builder()
3334             ->StoreAccumulatorInRegister(value_temp)
3335             .LoadContextSlot(context_reg, variable->index(), depth,
3336                              BytecodeArrayBuilder::kMutableSlot);
3337 
3338         BuildHoleCheckForVariableAssignment(variable, op);
3339         builder()->LoadAccumulatorWithRegister(value_temp);
3340       }
3341 
3342       if (mode != VariableMode::kConst || op == Token::INIT) {
3343         builder()->StoreContextSlot(context_reg, variable->index(), depth);
3344       } else if (variable->throw_on_const_assignment(language_mode())) {
3345         builder()->CallRuntime(Runtime::kThrowConstAssignError);
3346       }
3347       break;
3348     }
3349     case VariableLocation::LOOKUP: {
3350       builder()->StoreLookupSlot(variable->raw_name(), language_mode(),
3351                                  lookup_hoisting_mode);
3352       break;
3353     }
3354     case VariableLocation::MODULE: {
3355       DCHECK(IsDeclaredVariableMode(mode));
3356 
3357       if (mode == VariableMode::kConst && op != Token::INIT) {
3358         builder()->CallRuntime(Runtime::kThrowConstAssignError);
3359         break;
3360       }
3361 
3362       // If we don't throw above, we know that we're dealing with an
3363       // export because imports are const and we do not generate initializing
3364       // assignments for them.
3365       DCHECK(variable->IsExport());
3366 
3367       int depth = execution_context()->ContextChainDepth(variable->scope());
3368       if (hole_check_mode == HoleCheckMode::kRequired) {
3369         Register value_temp = register_allocator()->NewRegister();
3370         builder()
3371             ->StoreAccumulatorInRegister(value_temp)
3372             .LoadModuleVariable(variable->index(), depth);
3373         BuildHoleCheckForVariableAssignment(variable, op);
3374         builder()->LoadAccumulatorWithRegister(value_temp);
3375       }
3376       builder()->StoreModuleVariable(variable->index(), depth);
3377       break;
3378     }
3379     case VariableLocation::REPL_GLOBAL: {
3380       // A let declaration like 'let x = 7' is effectively translated to:
3381       //   <top of the script>:
3382       //     ScriptContext.x = TheHole;
3383       //   ...
3384       //   <where the actual 'let' is>:
3385       //     ScriptContextTable.x = 7; // no hole check
3386       //
3387       // The ScriptContext slot for 'x' that we store to here is not
3388       // necessarily the ScriptContext of this script, but rather the
3389       // first ScriptContext that has a slot for name 'x'.
3390       DCHECK(variable->IsReplGlobalLet());
3391       if (op == Token::INIT) {
3392         RegisterList store_args = register_allocator()->NewRegisterList(2);
3393         builder()
3394             ->StoreAccumulatorInRegister(store_args[1])
3395             .LoadLiteral(variable->raw_name())
3396             .StoreAccumulatorInRegister(store_args[0]);
3397         builder()->CallRuntime(Runtime::kStoreGlobalNoHoleCheckForReplLet,
3398                                store_args);
3399       } else {
3400         FeedbackSlot slot =
3401             GetCachedStoreGlobalICSlot(language_mode(), variable);
3402         builder()->StoreGlobal(variable->raw_name(), feedback_index(slot));
3403       }
3404       break;
3405     }
3406   }
3407 }
3408 
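     // For code that is expected to run only once (ShouldOptimizeAsOneShot),
     // use the NoFeedback bytecode so no load IC feedback slot is allocated;
     // otherwise load through a (cached) load IC slot.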
3409 void BytecodeGenerator::BuildLoadNamedProperty(const Expression* object_expr,
3410                                                Register object,
3411                                                const AstRawString* name) {
3412   if (ShouldOptimizeAsOneShot()) {
3413     builder()->LoadNamedPropertyNoFeedback(object, name);
3414   } else {
3415     FeedbackSlot slot = GetCachedLoadICSlot(object_expr, name);
3416     builder()->LoadNamedProperty(object, name, feedback_index(slot));
3417   }
3418 }
3419 
3420 void BytecodeGenerator::BuildStoreNamedProperty(const Expression* object_expr,
3421                                                 Register object,
3422                                                 const AstRawString* name) {
3423   Register value;
3424   if (!execution_result()->IsEffect()) {
3425     value = register_allocator()->NewRegister();
3426     builder()->StoreAccumulatorInRegister(value);
3427   }
3428 
3429   if (ShouldOptimizeAsOneShot()) {
3430     builder()->StoreNamedPropertyNoFeedback(object, name, language_mode());
3431   } else {
3432     FeedbackSlot slot = GetCachedStoreICSlot(object_expr, name);
3433     builder()->StoreNamedProperty(object, name, feedback_index(slot),
3434                                   language_mode());
3435   }
3436 
3437   if (!execution_result()->IsEffect()) {
3438     builder()->LoadAccumulatorWithRegister(value);
3439   }
3440 }
3441 
3442 // static
3443 BytecodeGenerator::AssignmentLhsData
3444 BytecodeGenerator::AssignmentLhsData::NonProperty(Expression* expr) {
3445   return AssignmentLhsData(NON_PROPERTY, expr, RegisterList(), Register(),
3446                            Register(), nullptr, nullptr);
3447 }
3448 // static
3449 BytecodeGenerator::AssignmentLhsData
3450 BytecodeGenerator::AssignmentLhsData::NamedProperty(Expression* object_expr,
3451                                                     Register object,
3452                                                     const AstRawString* name) {
3453   return AssignmentLhsData(NAMED_PROPERTY, nullptr, RegisterList(), object,
3454                            Register(), object_expr, name);
3455 }
3456 // static
3457 BytecodeGenerator::AssignmentLhsData
3458 BytecodeGenerator::AssignmentLhsData::KeyedProperty(Register object,
3459                                                     Register key) {
3460   return AssignmentLhsData(KEYED_PROPERTY, nullptr, RegisterList(), object, key,
3461                            nullptr, nullptr);
3462 }
3463 // static
3464 BytecodeGenerator::AssignmentLhsData
3465 BytecodeGenerator::AssignmentLhsData::NamedSuperProperty(
3466     RegisterList super_property_args) {
3467   return AssignmentLhsData(NAMED_SUPER_PROPERTY, nullptr, super_property_args,
3468                            Register(), Register(), nullptr, nullptr);
3469 }
3470 // static
3471 BytecodeGenerator::AssignmentLhsData
3472 BytecodeGenerator::AssignmentLhsData::PrivateMethodOrAccessor(
3473     AssignType type, Property* property) {
3474   return AssignmentLhsData(type, property, RegisterList(), Register(),
3475                            Register(), nullptr, nullptr);
3476 }
3477 // static
3478 BytecodeGenerator::AssignmentLhsData
3479 BytecodeGenerator::AssignmentLhsData::KeyedSuperProperty(
3480     RegisterList super_property_args) {
3481   return AssignmentLhsData(KEYED_SUPER_PROPERTY, nullptr, super_property_args,
3482                            Register(), Register(), nullptr, nullptr);
3483 }
3484 
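     // Classifies the assignment target and pre-evaluates its subexpressions,
     // e.g.:
     //   x = ...        -> NON_PROPERTY (nothing evaluated here)
     //   o.x = ...      -> NAMED_PROPERTY (evaluates `o`)
     //   o[k] = ...     -> KEYED_PROPERTY (evaluates `o` and `k`)
     //   o.#m = ...     -> PRIVATE_* (handled later, nothing evaluated here)
     //   super.x = ...  -> NAMED_SUPER_PROPERTY (this, home object, name)
     //   super[k] = ... -> KEYED_SUPER_PROPERTY (this, home object, key)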
3485 BytecodeGenerator::AssignmentLhsData BytecodeGenerator::PrepareAssignmentLhs(
3486     Expression* lhs, AccumulatorPreservingMode accumulator_preserving_mode) {
3487   // Left-hand side can only be a property, a global or a variable slot.
3488   Property* property = lhs->AsProperty();
3489   AssignType assign_type = Property::GetAssignType(property);
3490 
3491   // Evaluate LHS expression.
3492   switch (assign_type) {
3493     case NON_PROPERTY:
3494       return AssignmentLhsData::NonProperty(lhs);
3495     case NAMED_PROPERTY: {
3496       AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
3497       Register object = VisitForRegisterValue(property->obj());
3498       const AstRawString* name =
3499           property->key()->AsLiteral()->AsRawPropertyName();
3500       return AssignmentLhsData::NamedProperty(property->obj(), object, name);
3501     }
3502     case KEYED_PROPERTY: {
3503       AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
3504       Register object = VisitForRegisterValue(property->obj());
3505       Register key = VisitForRegisterValue(property->key());
3506       return AssignmentLhsData::KeyedProperty(object, key);
3507     }
3508     case PRIVATE_METHOD:
3509     case PRIVATE_GETTER_ONLY:
3510     case PRIVATE_SETTER_ONLY:
3511     case PRIVATE_GETTER_AND_SETTER: {
3512       DCHECK(!property->IsSuperAccess());
3513       return AssignmentLhsData::PrivateMethodOrAccessor(assign_type, property);
3514     }
3515     case NAMED_SUPER_PROPERTY: {
3516       AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
3517       RegisterList super_property_args =
3518           register_allocator()->NewRegisterList(4);
3519       SuperPropertyReference* super_property =
3520           property->obj()->AsSuperPropertyReference();
3521       BuildThisVariableLoad();
3522       builder()->StoreAccumulatorInRegister(super_property_args[0]);
3523       VisitForRegisterValue(super_property->home_object(),
3524                             super_property_args[1]);
3525       builder()
3526           ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
3527           .StoreAccumulatorInRegister(super_property_args[2]);
3528       return AssignmentLhsData::NamedSuperProperty(super_property_args);
3529     }
3530     case KEYED_SUPER_PROPERTY: {
3531       AccumulatorPreservingScope scope(this, accumulator_preserving_mode);
3532       RegisterList super_property_args =
3533           register_allocator()->NewRegisterList(4);
3534       SuperPropertyReference* super_property =
3535           property->obj()->AsSuperPropertyReference();
3536       BuildThisVariableLoad();
3537       builder()->StoreAccumulatorInRegister(super_property_args[0]);
3538       VisitForRegisterValue(super_property->home_object(),
3539                             super_property_args[1]);
3540       VisitForRegisterValue(property->key(), super_property_args[2]);
3541       return AssignmentLhsData::KeyedSuperProperty(super_property_args);
3542     }
3543   }
3544   UNREACHABLE();
3545 }
3546 
3547 // Build the iteration finalizer called in the finally block of an iteration
3548 // protocol execution. This closes the iterator if needed, and suppresses any
3549 // exception it throws if necessary, including the exception when the return
3550 // method is not callable.
3551 //
3552 // In pseudo-code, this builds:
3553 //
3554 // if (!done) {
3555 //   try {
3556 //     let method = iterator.return
3557 //     if (method !== null && method !== undefined) {
3558 //       let return_val = method.call(iterator)
3559 //       if (!%IsObject(return_val)) throw TypeError
3560 //     }
3561 //   } catch (e) {
3562 //     if (iteration_continuation != RETHROW)
3563 //       rethrow e
3564 //   }
3565 // }
3566 //
3567 // For async iterators, iterator.close() becomes await iterator.close().
3568 void BytecodeGenerator::BuildFinalizeIteration(
3569     IteratorRecord iterator, Register done,
3570     Register iteration_continuation_token) {
3571   RegisterAllocationScope register_scope(this);
3572   BytecodeLabels iterator_is_done(zone());
3573 
3574   // if (!done) {
3575   builder()->LoadAccumulatorWithRegister(done).JumpIfTrue(
3576       ToBooleanMode::kConvertToBoolean, iterator_is_done.New());
3577 
3578   {
3579     RegisterAllocationScope register_scope(this);
3580     BuildTryCatch(
3581         // try {
3582         //   let method = iterator.return
3583         //   if (method !== null && method !== undefined) {
3584         //     let return_val = method.call(iterator)
3585         //     if (!%IsObject(return_val)) throw TypeError
3586         //   }
3587         // }
3588         [&]() {
3589           Register method = register_allocator()->NewRegister();
3590           builder()
3591               ->LoadNamedProperty(
3592                   iterator.object(), ast_string_constants()->return_string(),
3593                   feedback_index(feedback_spec()->AddLoadICSlot()))
3594               .JumpIfUndefinedOrNull(iterator_is_done.New())
3595               .StoreAccumulatorInRegister(method);
3596 
3597           RegisterList args(iterator.object());
3598           builder()->CallProperty(
3599               method, args, feedback_index(feedback_spec()->AddCallICSlot()));
3600           if (iterator.type() == IteratorType::kAsync) {
3601             BuildAwait();
3602           }
3603           builder()->JumpIfJSReceiver(iterator_is_done.New());
3604           {
3605             // Throw this exception inside the try block so that it is
3606             // suppressed by the iteration continuation if necessary.
3607             RegisterAllocationScope register_scope(this);
3608             Register return_result = register_allocator()->NewRegister();
3609             builder()
3610                 ->StoreAccumulatorInRegister(return_result)
3611                 .CallRuntime(Runtime::kThrowIteratorResultNotAnObject,
3612                              return_result);
3613           }
3614         },
3615 
3616         // catch (e) {
3617         //   if (iteration_continuation != RETHROW)
3618         //     rethrow e
3619         // }
3620         [&](Register context) {
3621           // Reuse context register to store the exception.
3622           Register close_exception = context;
3623           builder()->StoreAccumulatorInRegister(close_exception);
3624 
3625           BytecodeLabel suppress_close_exception;
3626           builder()
3627               ->LoadLiteral(
3628                   Smi::FromInt(ControlScope::DeferredCommands::kRethrowToken))
3629               .CompareReference(iteration_continuation_token)
3630               .JumpIfTrue(ToBooleanMode::kAlreadyBoolean,
3631                           &suppress_close_exception)
3632               .LoadAccumulatorWithRegister(close_exception)
3633               .ReThrow()
3634               .Bind(&suppress_close_exception);
3635         },
3636         HandlerTable::UNCAUGHT);
3637   }
3638 
3639   iterator_is_done.Bind(builder());
3640 }
3641 
3642 // Get the default value of a destructuring target. Will mutate the
3643 // destructuring target expression if there is a default value.
3644 //
3645 // For
3646 //   a = b
3647 // in
3648 //   let {a = b} = c
3649 // returns b and mutates the input into a.
3650 Expression* BytecodeGenerator::GetDestructuringDefaultValue(
3651     Expression** target) {
3652   Expression* default_value = nullptr;
3653   if ((*target)->IsAssignment()) {
3654     Assignment* default_init = (*target)->AsAssignment();
3655     DCHECK_EQ(default_init->op(), Token::ASSIGN);
3656     default_value = default_init->value();
3657     *target = default_init->target();
3658     DCHECK((*target)->IsValidReferenceExpression() || (*target)->IsPattern());
3659   }
3660   return default_value;
3661 }
3662 
3663 // Convert a destructuring assignment to an array literal into a sequence of
3664 // iterator accesses into the value being assigned (in the accumulator).
3665 //
3666 // [a().x, ...b] = accumulator
3667 //
3668 //   becomes
3669 //
3670 // iterator = %GetIterator(accumulator)
3671 // try {
3672 //
3673 //   // Individual assignments read off the value from iterator.next(). This gets
3674 //   // repeated per destructuring element.
3675 //   if (!done) {
3676 //     // Make sure we are considered 'done' if .next(), .done or .value fail.
3677 //     done = true
3678 //     var next_result = iterator.next()
3679 //     var tmp_done = next_result.done
3680 //     if (!tmp_done) {
3681 //       value = next_result.value
3682 //       done = false
3683 //     }
3684 //   }
3685 //   if (done)
3686 //     value = undefined
3687 //   a().x = value
3688 //
3689 //   // A spread receives the remaining items in the iterator.
3690 //   var array = []
3691 //   var index = 0
3692 //   %FillArrayWithIterator(iterator, array, index, done)
3693 //   done = true
3694 //   b = array
3695 //
3696 // } catch(e) {
3697 //   iteration_continuation = RETHROW
3698 // } finally {
3699 //   %FinalizeIteration(iterator, done, iteration_continuation)
3700 // }
3701 void BytecodeGenerator::BuildDestructuringArrayAssignment(
3702     ArrayLiteral* pattern, Token::Value op,
3703     LookupHoistingMode lookup_hoisting_mode) {
3704   RegisterAllocationScope scope(this);
3705 
3706   Register value = register_allocator()->NewRegister();
3707   builder()->StoreAccumulatorInRegister(value);
3708 
3709   // Store the iterator in a dedicated register so that it can be closed on
3710   // exit, and the 'done' value in a dedicated register so that it can be
3711   // changed and accessed independently of the iteration result.
3712   IteratorRecord iterator = BuildGetIteratorRecord(IteratorType::kNormal);
3713   Register done = register_allocator()->NewRegister();
3714   builder()->LoadFalse();
3715   builder()->StoreAccumulatorInRegister(done);
3716 
3717   BuildTryFinally(
3718       // Try block.
3719       [&]() {
3720         Register next_result = register_allocator()->NewRegister();
3721         FeedbackSlot next_value_load_slot = feedback_spec()->AddLoadICSlot();
3722         FeedbackSlot next_done_load_slot = feedback_spec()->AddLoadICSlot();
3723 
3724         Spread* spread = nullptr;
3725         for (Expression* target : *pattern->values()) {
3726           if (target->IsSpread()) {
3727             spread = target->AsSpread();
3728             break;
3729           }
3730 
3731           Expression* default_value = GetDestructuringDefaultValue(&target);
3732           if (!target->IsPattern()) {
3733             builder()->SetExpressionAsStatementPosition(target);
3734           }
3735 
3736           AssignmentLhsData lhs_data = PrepareAssignmentLhs(target);
3737 
3738           // if (!done) {
3739           //   // Make sure we are considered done if .next(), .done or .value
3740           //   // fail.
3741           //   done = true
3742           //   var next_result = iterator.next()
3743           //   var tmp_done = next_result.done
3744           //   if (!tmp_done) {
3745           //     value = next_result.value
3746           //     done = false
3747           //   }
3748           // }
3749           // if (done)
3750           //   value = undefined
3751           BytecodeLabels is_done(zone());
3752 
3753           builder()->LoadAccumulatorWithRegister(done);
3754           builder()->JumpIfTrue(ToBooleanMode::kConvertToBoolean,
3755                                 is_done.New());
3756 
3757           builder()->LoadTrue().StoreAccumulatorInRegister(done);
3758           BuildIteratorNext(iterator, next_result);
3759           builder()
3760               ->LoadNamedProperty(next_result,
3761                                   ast_string_constants()->done_string(),
3762                                   feedback_index(next_done_load_slot))
3763               .JumpIfTrue(ToBooleanMode::kConvertToBoolean, is_done.New())
3764               .LoadNamedProperty(next_result,
3765                                  ast_string_constants()->value_string(),
3766                                  feedback_index(next_value_load_slot))
3767               .StoreAccumulatorInRegister(next_result)
3768               .LoadFalse()
3769               .StoreAccumulatorInRegister(done)
3770               .LoadAccumulatorWithRegister(next_result);
3771 
3772           // Only do the assignment if this is not a hole (i.e. 'elided').
3773           if (!target->IsTheHoleLiteral()) {
3774             // [<pattern> = <init>] = <value>
3775             //   becomes (roughly)
3776             // temp = <value>.next();
3777             // <pattern> = temp === undefined ? <init> : temp;
3778             BytecodeLabel do_assignment;
3779             if (default_value) {
3780               builder()->JumpIfNotUndefined(&do_assignment);
3781               // Since done == true => temp == undefined, jump directly to using
3782               // the default value for that case.
3783               is_done.Bind(builder());
3784               VisitForAccumulatorValue(default_value);
3785             } else {
3786               builder()->Jump(&do_assignment);
3787               is_done.Bind(builder());
3788               builder()->LoadUndefined();
3789             }
3790             builder()->Bind(&do_assignment);
3791 
3792             BuildAssignment(lhs_data, op, lookup_hoisting_mode);
3793           } else {
3794             DCHECK_EQ(lhs_data.assign_type(), NON_PROPERTY);
3795             is_done.Bind(builder());
3796           }
3797         }
3798 
3799         if (spread) {
3800           RegisterAllocationScope scope(this);
3801           BytecodeLabel is_done;
3802 
3803           // A spread is turned into a loop over the remainder of the iterator.
3804           Expression* target = spread->expression();
3805 
3806           if (!target->IsPattern()) {
3807             builder()->SetExpressionAsStatementPosition(spread);
3808           }
3809 
3810           AssignmentLhsData lhs_data = PrepareAssignmentLhs(target);
3811 
3812           // var array = [];
3813           Register array = register_allocator()->NewRegister();
3814           builder()->CreateEmptyArrayLiteral(
3815               feedback_index(feedback_spec()->AddLiteralSlot()));
3816           builder()->StoreAccumulatorInRegister(array);
3817 
3818           // If done, jump to assigning empty array
3819           builder()->LoadAccumulatorWithRegister(done);
3820           builder()->JumpIfTrue(ToBooleanMode::kConvertToBoolean, &is_done);
3821 
3822           // var index = 0;
3823           Register index = register_allocator()->NewRegister();
3824           builder()->LoadLiteral(Smi::zero());
3825           builder()->StoreAccumulatorInRegister(index);
3826 
3827           // Set done to true, since it's guaranteed to be true by the time the
3828           // array fill completes.
3829           builder()->LoadTrue().StoreAccumulatorInRegister(done);
3830 
3831           // Fill the array with the iterator.
3832           FeedbackSlot element_slot =
3833               feedback_spec()->AddStoreInArrayLiteralICSlot();
3834           FeedbackSlot index_slot = feedback_spec()->AddBinaryOpICSlot();
3835           BuildFillArrayWithIterator(iterator, array, index, next_result,
3836                                      next_value_load_slot, next_done_load_slot,
3837                                      index_slot, element_slot);
3838 
3839           builder()->Bind(&is_done);
3840           // Assign the array to the LHS.
3841           builder()->LoadAccumulatorWithRegister(array);
3842           BuildAssignment(lhs_data, op, lookup_hoisting_mode);
3843         }
3844       },
3845       // Finally block.
3846       [&](Register iteration_continuation_token) {
3847         // Finish the iteration in the finally block.
3848         BuildFinalizeIteration(iterator, done, iteration_continuation_token);
3849       },
3850       HandlerTable::UNCAUGHT);
3851 
3852   if (!execution_result()->IsEffect()) {
3853     builder()->LoadAccumulatorWithRegister(value);
3854   }
3855 }
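
// Illustrative sketch (not part of the original source): holes, defaults and
// the rest element all flow through the machinery above. For example,
//
//   [, a = 1, ...rest] = iterable
//
// advances the iterator for the elided first element without assigning,
// assigns the default 1 to 'a' when the second result value is undefined (or
// the iterator is already done), and collects the remaining results into a
// fresh array for 'rest' before the finally block finalizes the iterator.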
3856 
3857 // Convert a destructuring assignment to an object literal into a sequence of
3858 // property accesses into the value being assigned (in the accumulator).
3859 //
3860 // { y, [x++]: a(), ...b.c } = value
3861 //
3862 //   becomes
3863 //
3864 // var rest_runtime_callargs = new Array(3);
3865 // rest_runtime_callargs[0] = value;
3866 //
3867 // rest_runtime_callargs[1] = value;
3868 // y = value.y;
3869 //
3870 // var temp1 = %ToName(x++);
3871 // rest_runtime_callargs[2] = temp1;
3872 // a() = value[temp1];
3873 //
3874 // b.c = %CopyDataPropertiesWithExcludedProperties.call(rest_runtime_callargs);
3875 void BytecodeGenerator::BuildDestructuringObjectAssignment(
3876     ObjectLiteral* pattern, Token::Value op,
3877     LookupHoistingMode lookup_hoisting_mode) {
3878   RegisterAllocationScope scope(this);
3879 
3880   // Store the assignment value in a register.
3881   Register value;
3882   RegisterList rest_runtime_callargs;
3883   if (pattern->has_rest_property()) {
3884     rest_runtime_callargs =
3885         register_allocator()->NewRegisterList(pattern->properties()->length());
3886     value = rest_runtime_callargs[0];
3887   } else {
3888     value = register_allocator()->NewRegister();
3889   }
3890   builder()->StoreAccumulatorInRegister(value);
3891 
3892   // if (value === null || value === undefined)
3893   //   throw new TypeError(kNonCoercible);
3894   //
3895   // Since the first property access on null/undefined will also trigger a
3896   // TypeError, we can elide this check. The exception is when there are no
3897   // properties and no rest property (this is an empty literal), or when the
3898   // first property is a computed name and accessing it can have side effects.
3899   //
3900   // TODO(leszeks): Also eliminate this check if the value is known to be
3901   // non-null (e.g. an object literal).
3902   if (pattern->properties()->is_empty() ||
3903       (pattern->properties()->at(0)->is_computed_name() &&
3904        pattern->properties()->at(0)->kind() != ObjectLiteralProperty::SPREAD)) {
3905     BytecodeLabel is_null_or_undefined, not_null_or_undefined;
3906     builder()
3907         ->JumpIfUndefinedOrNull(&is_null_or_undefined)
3908         .Jump(&not_null_or_undefined);
3909 
3910     {
3911       builder()->Bind(&is_null_or_undefined);
3912       builder()->SetExpressionPosition(pattern);
3913       builder()->CallRuntime(Runtime::kThrowPatternAssignmentNonCoercible,
3914                              value);
3915     }
3916     builder()->Bind(&not_null_or_undefined);
3917   }
3918 
3919   int i = 0;
3920   for (ObjectLiteralProperty* pattern_property : *pattern->properties()) {
3921     RegisterAllocationScope scope(this);
3922 
3923     // The key of the pattern becomes the key into the RHS value, and the value
3924     // of the pattern becomes the target of the assignment.
3925     //
3926     // e.g. { a: b } = o becomes b = o.a
3927     Expression* pattern_key = pattern_property->key();
3928     Expression* target = pattern_property->value();
3929     Expression* default_value = GetDestructuringDefaultValue(&target);
3930 
3931     if (!target->IsPattern()) {
3932       builder()->SetExpressionAsStatementPosition(target);
3933     }
3934 
3935     // Compute the key used to read this property from the assignment RHS
3936     // value, additionally storing the key in rest_runtime_callargs if needed.
3937     //
3938     // The RHS is accessed using the key either by LoadNamedProperty (if
3939     // value_name is valid) or by LoadKeyedProperty (otherwise).
3940     const AstRawString* value_name = nullptr;
3941     Register value_key;
3942 
3943     if (pattern_property->kind() != ObjectLiteralProperty::Kind::SPREAD) {
3944       if (pattern_key->IsPropertyName()) {
3945         value_name = pattern_key->AsLiteral()->AsRawPropertyName();
3946       }
3947       if (pattern->has_rest_property() || !value_name) {
3948         if (pattern->has_rest_property()) {
3949           value_key = rest_runtime_callargs[i + 1];
3950         } else {
3951           value_key = register_allocator()->NewRegister();
3952         }
3953         if (pattern_property->is_computed_name()) {
3954           // { [a()]: b().x } = c
3955           // becomes
3956           // var tmp = a()
3957           // b().x = c[tmp]
3958           DCHECK(!pattern_key->IsPropertyName() ||
3959                  !pattern_key->IsNumberLiteral());
3960           VisitForAccumulatorValue(pattern_key);
3961           builder()->ToName(value_key);
3962         } else {
3963           // We only need the key for non-computed properties when it is numeric
3964           // or is being saved for the rest_runtime_callargs.
3965           DCHECK(
3966               pattern_key->IsNumberLiteral() ||
3967               (pattern->has_rest_property() && pattern_key->IsPropertyName()));
3968           VisitForRegisterValue(pattern_key, value_key);
3969         }
3970       }
3971     }
3972 
3973     AssignmentLhsData lhs_data = PrepareAssignmentLhs(target);
3974 
3975     // Get the value from the RHS.
3976     if (pattern_property->kind() == ObjectLiteralProperty::Kind::SPREAD) {
3977       DCHECK_EQ(i, pattern->properties()->length() - 1);
3978       DCHECK(!value_key.is_valid());
3979       DCHECK_NULL(value_name);
3980       builder()->CallRuntime(Runtime::kCopyDataPropertiesWithExcludedProperties,
3981                              rest_runtime_callargs);
3982     } else if (value_name) {
3983       builder()->LoadNamedProperty(
3984           value, value_name, feedback_index(feedback_spec()->AddLoadICSlot()));
3985     } else {
3986       DCHECK(value_key.is_valid());
3987       builder()->LoadAccumulatorWithRegister(value_key).LoadKeyedProperty(
3988           value, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
3989     }
3990 
3991     // {<pattern> = <init>} = <value>
3992     //   becomes
3993     // temp = <value>;
3994     // <pattern> = temp === undefined ? <init> : temp;
3995     if (default_value) {
3996       BytecodeLabel value_not_undefined;
3997       builder()->JumpIfNotUndefined(&value_not_undefined);
3998       VisitForAccumulatorValue(default_value);
3999       builder()->Bind(&value_not_undefined);
4000     }
4001 
4002     BuildAssignment(lhs_data, op, lookup_hoisting_mode);
4003 
4004     i++;
4005   }
4006 
4007   if (!execution_result()->IsEffect()) {
4008     builder()->LoadAccumulatorWithRegister(value);
4009   }
4010 }
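
// Illustrative sketch (not part of the original source): a default in an
// object pattern only applies when the loaded property is undefined, per the
// {<pattern> = <init>} expansion above. For example,
//
//   ({ x = 1, y = 2 } = { x: 0 })
//
// loads value.x (0, so 'x' becomes 0 and the default is skipped) and value.y
// (undefined, so the default expression 2 is evaluated and assigned to 'y').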
4011 
4012 void BytecodeGenerator::BuildAssignment(
4013     const AssignmentLhsData& lhs_data, Token::Value op,
4014     LookupHoistingMode lookup_hoisting_mode) {
4015   // Assign the value to the LHS.
4016   switch (lhs_data.assign_type()) {
4017     case NON_PROPERTY: {
4018       if (ObjectLiteral* pattern = lhs_data.expr()->AsObjectLiteral()) {
4019         // Split object literals into destructuring.
4020         BuildDestructuringObjectAssignment(pattern, op, lookup_hoisting_mode);
4021       } else if (ArrayLiteral* pattern = lhs_data.expr()->AsArrayLiteral()) {
4022         // Split array literals into destructuring.
4023         BuildDestructuringArrayAssignment(pattern, op, lookup_hoisting_mode);
4024       } else {
4025         DCHECK(lhs_data.expr()->IsVariableProxy());
4026         VariableProxy* proxy = lhs_data.expr()->AsVariableProxy();
4027         BuildVariableAssignment(proxy->var(), op, proxy->hole_check_mode(),
4028                                 lookup_hoisting_mode);
4029       }
4030       break;
4031     }
4032     case NAMED_PROPERTY: {
4033       BuildStoreNamedProperty(lhs_data.object_expr(), lhs_data.object(),
4034                               lhs_data.name());
4035       break;
4036     }
4037     case KEYED_PROPERTY: {
4038       FeedbackSlot slot = feedback_spec()->AddKeyedStoreICSlot(language_mode());
4039       Register value;
4040       if (!execution_result()->IsEffect()) {
4041         value = register_allocator()->NewRegister();
4042         builder()->StoreAccumulatorInRegister(value);
4043       }
4044       builder()->StoreKeyedProperty(lhs_data.object(), lhs_data.key(),
4045                                     feedback_index(slot), language_mode());
4046       if (!execution_result()->IsEffect()) {
4047         builder()->LoadAccumulatorWithRegister(value);
4048       }
4049       break;
4050     }
4051     case NAMED_SUPER_PROPERTY: {
4052       builder()
4053           ->StoreAccumulatorInRegister(lhs_data.super_property_args()[3])
4054           .CallRuntime(Runtime::kStoreToSuper, lhs_data.super_property_args());
4055       break;
4056     }
4057     case KEYED_SUPER_PROPERTY: {
4058       builder()
4059           ->StoreAccumulatorInRegister(lhs_data.super_property_args()[3])
4060           .CallRuntime(Runtime::kStoreKeyedToSuper,
4061                        lhs_data.super_property_args());
4062       break;
4063     }
4064     case PRIVATE_METHOD: {
4065       BuildInvalidPropertyAccess(MessageTemplate::kInvalidPrivateMethodWrite,
4066                                  lhs_data.expr()->AsProperty());
4067       break;
4068     }
4069     case PRIVATE_GETTER_ONLY: {
4070       BuildInvalidPropertyAccess(MessageTemplate::kInvalidPrivateSetterAccess,
4071                                  lhs_data.expr()->AsProperty());
4072       break;
4073     }
4074     case PRIVATE_SETTER_ONLY:
4075     case PRIVATE_GETTER_AND_SETTER: {
4076       Register value = register_allocator()->NewRegister();
4077       builder()->StoreAccumulatorInRegister(value);
4078       Property* property = lhs_data.expr()->AsProperty();
4079       Register object = VisitForRegisterValue(property->obj());
4080       Register key = VisitForRegisterValue(property->key());
4081       BuildPrivateBrandCheck(property, object,
4082                              MessageTemplate::kInvalidPrivateMemberWrite);
4083       BuildPrivateSetterAccess(object, key, value);
4084       if (!execution_result()->IsEffect()) {
4085         builder()->LoadAccumulatorWithRegister(value);
4086       }
4087       break;
4088     }
4089   }
4090 }
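
// Illustrative mapping (not part of the original source) of LHS forms to the
// assign types dispatched on above:
//
//   x = v             NON_PROPERTY (variable)
//   [a, b] = v        NON_PROPERTY (array destructuring)
//   ({ a } = v)       NON_PROPERTY (object destructuring)
//   o.x = v           NAMED_PROPERTY
//   o[k] = v          KEYED_PROPERTY
//   super.x = v       NAMED_SUPER_PROPERTY
//   super[k] = v      KEYED_SUPER_PROPERTY
//   this.#method = v  PRIVATE_METHOD (emits code that throws a TypeError)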
4091 
4092 void BytecodeGenerator::VisitAssignment(Assignment* expr) {
4093   AssignmentLhsData lhs_data = PrepareAssignmentLhs(expr->target());
4094 
4095   VisitForAccumulatorValue(expr->value());
4096 
4097   builder()->SetExpressionPosition(expr);
4098   BuildAssignment(lhs_data, expr->op(), expr->lookup_hoisting_mode());
4099 }
4100 
4101 void BytecodeGenerator::VisitCompoundAssignment(CompoundAssignment* expr) {
4102   AssignmentLhsData lhs_data = PrepareAssignmentLhs(expr->target());
4103 
4104   // Evaluate the value and potentially handle compound assignments by loading
4105   // the left-hand side value and performing a binary operation.
4106   switch (lhs_data.assign_type()) {
4107     case NON_PROPERTY: {
4108       VariableProxy* proxy = expr->target()->AsVariableProxy();
4109       BuildVariableLoad(proxy->var(), proxy->hole_check_mode());
4110       break;
4111     }
4112     case NAMED_PROPERTY: {
4113       BuildLoadNamedProperty(lhs_data.object_expr(), lhs_data.object(),
4114                              lhs_data.name());
4115       break;
4116     }
4117     case KEYED_PROPERTY: {
4118       FeedbackSlot slot = feedback_spec()->AddKeyedLoadICSlot();
4119       builder()
4120           ->LoadAccumulatorWithRegister(lhs_data.key())
4121           .LoadKeyedProperty(lhs_data.object(), feedback_index(slot));
4122       break;
4123     }
4124     case NAMED_SUPER_PROPERTY: {
4125       builder()->CallRuntime(Runtime::kLoadFromSuper,
4126                              lhs_data.super_property_args().Truncate(3));
4127       break;
4128     }
4129     case KEYED_SUPER_PROPERTY: {
4130       builder()->CallRuntime(Runtime::kLoadKeyedFromSuper,
4131                              lhs_data.super_property_args().Truncate(3));
4132       break;
4133     }
4134     case PRIVATE_METHOD:
4135     case PRIVATE_GETTER_ONLY:
4136     case PRIVATE_SETTER_ONLY:
4137     case PRIVATE_GETTER_AND_SETTER: {
4138       // ({ #foo: name } = obj) is currently syntactically invalid.
4139       UNREACHABLE();
4140       break;
4141     }
4142   }
4143 
4144   BinaryOperation* binop = expr->binary_operation();
4145   FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
4146   BytecodeLabel short_circuit;
4147   if (binop->op() == Token::NULLISH) {
4148     BytecodeLabel nullish;
4149     builder()
4150         ->JumpIfUndefinedOrNull(&nullish)
4151         .Jump(&short_circuit)
4152         .Bind(&nullish);
4153     VisitForAccumulatorValue(expr->value());
4154   } else if (binop->op() == Token::OR) {
4155     builder()->JumpIfTrue(ToBooleanMode::kConvertToBoolean, &short_circuit);
4156     VisitForAccumulatorValue(expr->value());
4157   } else if (binop->op() == Token::AND) {
4158     builder()->JumpIfFalse(ToBooleanMode::kConvertToBoolean, &short_circuit);
4159     VisitForAccumulatorValue(expr->value());
4160   } else if (expr->value()->IsSmiLiteral()) {
4161     builder()->BinaryOperationSmiLiteral(
4162         binop->op(), expr->value()->AsLiteral()->AsSmiLiteral(),
4163         feedback_index(slot));
4164   } else {
4165     Register old_value = register_allocator()->NewRegister();
4166     builder()->StoreAccumulatorInRegister(old_value);
4167     VisitForAccumulatorValue(expr->value());
4168     builder()->BinaryOperation(binop->op(), old_value, feedback_index(slot));
4169   }
4170   builder()->SetExpressionPosition(expr);
4171   BuildAssignment(lhs_data, expr->op(), expr->lookup_hoisting_mode());
4172   builder()->Bind(&short_circuit);
4173 }
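
// Illustrative sketch (not part of the original source) of the short-circuit
// handling above: for logical assignment operators the RHS is evaluated (and
// the store performed) only when the operator does not short-circuit, e.g.
//
//   a ??= b   // b evaluated only if a is null or undefined
//   a ||= b   // b evaluated only if ToBoolean(a) is false
//   a &&= b   // b evaluated only if ToBoolean(a) is true
//
// whereas a += 1 loads the old value, applies the binary operation (using the
// Smi-literal fast path for small integer literals), and then assigns.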
4174 
4175 // Suspends the generator to resume at the next suspend_id, with output stored
4176 // in the accumulator. When the generator is resumed, the sent value is loaded
4177 // in the accumulator.
4178 void BytecodeGenerator::BuildSuspendPoint(int position) {
4179   // Because we eliminate jump targets in dead code, we also eliminate resumes
4180   // when the suspend is not emitted because otherwise the below call to Bind
4181   // would start a new basic block and the code would be considered alive.
4182   if (builder()->RemainderOfBlockIsDead()) {
4183     return;
4184   }
4185   const int suspend_id = suspend_count_++;
4186 
4187   RegisterList registers = register_allocator()->AllLiveRegisters();
4188 
4189   // Save context, registers, and state. This bytecode then returns the value
4190   // in the accumulator.
4191   builder()->SetExpressionPosition(position);
4192   builder()->SuspendGenerator(generator_object(), registers, suspend_id);
4193 
4194   // Upon resume, we continue here.
4195   builder()->Bind(generator_jump_table_, suspend_id);
4196 
4197   // Clobbers all registers and sets the accumulator to the
4198   // [[input_or_debug_pos]] slot of the generator object.
4199   builder()->ResumeGenerator(generator_object(), registers);
4200 }
4201 
4202 void BytecodeGenerator::VisitYield(Yield* expr) {
4203   builder()->SetExpressionPosition(expr);
4204   VisitForAccumulatorValue(expr->expression());
4205 
4206   // If this is not the first yield
4207   if (suspend_count_ > 0) {
4208     if (IsAsyncGeneratorFunction(function_kind())) {
4209       // AsyncGenerator yields (with the exception of the initial yield)
4210       // delegate work to the AsyncGeneratorYield stub, which Awaits the operand
4211       // and on success, wraps the value in an IteratorResult.
4212       RegisterAllocationScope register_scope(this);
4213       RegisterList args = register_allocator()->NewRegisterList(3);
4214       builder()
4215           ->MoveRegister(generator_object(), args[0])  // generator
4216           .StoreAccumulatorInRegister(args[1])         // value
4217           .LoadBoolean(catch_prediction() != HandlerTable::ASYNC_AWAIT)
4218           .StoreAccumulatorInRegister(args[2])  // is_caught
4219           .CallRuntime(Runtime::kInlineAsyncGeneratorYield, args);
4220     } else {
4221       // Generator yields (with the exception of the initial yield) wrap the
4222       // value into IteratorResult.
4223       RegisterAllocationScope register_scope(this);
4224       RegisterList args = register_allocator()->NewRegisterList(2);
4225       builder()
4226           ->StoreAccumulatorInRegister(args[0])  // value
4227           .LoadFalse()
4228           .StoreAccumulatorInRegister(args[1])  // done
4229           .CallRuntime(Runtime::kInlineCreateIterResultObject, args);
4230     }
4231   }
4232 
4233   BuildSuspendPoint(expr->position());
4234   // At this point, the generator has been resumed, with the received value in
4235   // the accumulator.
4236 
4237   // TODO(caitp): remove once yield* desugaring for async generators is handled
4238   // in BytecodeGenerator.
4239   if (expr->on_abrupt_resume() == Yield::kNoControl) {
4240     DCHECK(IsAsyncGeneratorFunction(function_kind()));
4241     return;
4242   }
4243 
4244   Register input = register_allocator()->NewRegister();
4245   builder()->StoreAccumulatorInRegister(input).CallRuntime(
4246       Runtime::kInlineGeneratorGetResumeMode, generator_object());
4247 
4248   // Now dispatch on resume mode.
4249   STATIC_ASSERT(JSGeneratorObject::kNext + 1 == JSGeneratorObject::kReturn);
4250   BytecodeJumpTable* jump_table =
4251       builder()->AllocateJumpTable(2, JSGeneratorObject::kNext);
4252 
4253   builder()->SwitchOnSmiNoFeedback(jump_table);
4254 
4255   {
4256     // Resume with throw (switch fallthrough).
4257     // TODO(leszeks): Add a debug-only check that the accumulator is
4258     // JSGeneratorObject::kThrow.
4259     builder()->SetExpressionPosition(expr);
4260     builder()->LoadAccumulatorWithRegister(input);
4261     builder()->Throw();
4262   }
4263 
4264   {
4265     // Resume with return.
4266     builder()->Bind(jump_table, JSGeneratorObject::kReturn);
4267     builder()->LoadAccumulatorWithRegister(input);
4268     if (IsAsyncGeneratorFunction(function_kind())) {
4269       execution_control()->AsyncReturnAccumulator();
4270     } else {
4271       execution_control()->ReturnAccumulator();
4272     }
4273   }
4274 
4275   {
4276     // Resume with next.
4277     builder()->Bind(jump_table, JSGeneratorObject::kNext);
4278     BuildIncrementBlockCoverageCounterIfEnabled(expr,
4279                                                 SourceRangeKind::kContinuation);
4280     builder()->LoadAccumulatorWithRegister(input);
4281   }
4282 }
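
// Illustrative sketch (not part of the original source) of the resume-mode
// dispatch above: given
//
//   function* g() { const x = yield 1; }
//
// g().next() suspends after producing { value: 1, done: false }; a later
// .next(42) resumes through the kNext case with 42 in the accumulator (so x
// becomes 42), .return(v) takes the kReturn case and returns v, and .throw(e)
// falls through to the throw case, which throws e at the yield.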
4283 
4284 // Desugaring of (yield* iterable)
4285 //
4286 //   do {
4287 //     const kNext = 0;
4288 //     const kReturn = 1;
4289 //     const kThrow = 2;
4290 //
4291 //     let output; // uninitialized
4292 //
4293 //     let iteratorRecord = GetIterator(iterable);
4294 //     let iterator = iteratorRecord.[[Iterator]];
4295 //     let next = iteratorRecord.[[NextMethod]];
4296 //     let input = undefined;
4297 //     let resumeMode = kNext;
4298 //
4299 //     while (true) {
4300 //       // From the generator to the iterator:
4301 //       // Forward input according to resumeMode and obtain output.
4302 //       switch (resumeMode) {
4303 //         case kNext:
4304 //           output = next.[[Call]](iterator, « »);
4305 //           break;
4306 //         case kReturn:
4307 //           let iteratorReturn = iterator.return;
4308 //           if (IS_NULL_OR_UNDEFINED(iteratorReturn)) {
4309 //             if (IS_ASYNC_GENERATOR) input = await input;
4310 //             return input;
4311 //           }
4312 //           output = iteratorReturn.[[Call]](iterator, «input»);
4313 //           break;
4314 //         case kThrow:
4315 //           let iteratorThrow = iterator.throw;
4316 //           if (IS_NULL_OR_UNDEFINED(iteratorThrow)) {
4317 //             let iteratorReturn = iterator.return;
4318 //             if (!IS_NULL_OR_UNDEFINED(iteratorReturn)) {
4319 //               output = iteratorReturn.[[Call]](iterator, « »);
4320 //               if (IS_ASYNC_GENERATOR) output = await output;
4321 //               if (!IS_RECEIVER(output)) %ThrowIterResultNotAnObject(output);
4322 //             }
4323 //             throw MakeTypeError(kThrowMethodMissing);
4324 //           }
4325 //           output = iteratorThrow.[[Call]](iterator, «input»);
4326 //           break;
4327 //       }
4328 //
4329 //       if (IS_ASYNC_GENERATOR) output = await output;
4330 //       if (!IS_RECEIVER(output)) %ThrowIterResultNotAnObject(output);
4331 //       if (output.done) break;
4332 //
4333 //       // From the generator to its user:
4334 //       // Forward output, receive new input, and determine resume mode.
4335 //       if (IS_ASYNC_GENERATOR) {
4336 //         // AsyncGeneratorYield abstract operation awaits the operand before
4337 //         // resolving the promise for the current AsyncGeneratorRequest.
4338 //         %_AsyncGeneratorYield(output.value)
4339 //       }
4340 //       input = Suspend(output);
4341 //       resumeMode = %GeneratorGetResumeMode();
4342 //     }
4343 //
4344 //     if (resumeMode === kReturn) {
4345 //       return output.value;
4346 //     }
4347 //     output.value
4348 //   }
4349 void BytecodeGenerator::VisitYieldStar(YieldStar* expr) {
4350   Register output = register_allocator()->NewRegister();
4351   Register resume_mode = register_allocator()->NewRegister();
4352   IteratorType iterator_type = IsAsyncGeneratorFunction(function_kind())
4353                                    ? IteratorType::kAsync
4354                                    : IteratorType::kNormal;
4355 
4356   {
4357     RegisterAllocationScope register_scope(this);
4358     RegisterList iterator_and_input = register_allocator()->NewRegisterList(2);
4359     VisitForAccumulatorValue(expr->expression());
4360     IteratorRecord iterator = BuildGetIteratorRecord(
4361         register_allocator()->NewRegister() /* next method */,
4362         iterator_and_input[0], iterator_type);
4363 
4364     Register input = iterator_and_input[1];
4365     builder()->LoadUndefined().StoreAccumulatorInRegister(input);
4366     builder()
4367         ->LoadLiteral(Smi::FromInt(JSGeneratorObject::kNext))
4368         .StoreAccumulatorInRegister(resume_mode);
4369 
4370     {
4371       // This loop builder does not construct counters as the loop is not
4372       // visible to the user, and we therefore neither pass the block coverage
4373       // builder nor the expression.
4374       //
4375       // In addition to the normal suspend for yield*, a yield* in an async
4376       // generator has 2 additional suspends:
4377       //   - One for awaiting the iterator result of closing the generator when
4378       //     resumed with a "throw" completion, and a throw method is not
4379       //     present on the delegated iterator
4380       //   - One for awaiting the iterator result yielded by the delegated
4381       //     iterator
4382 
4383       LoopBuilder loop_builder(builder(), nullptr, nullptr);
4384       LoopScope loop_scope(this, &loop_builder);
4385 
4386       {
4387         BytecodeLabels after_switch(zone());
4388         BytecodeJumpTable* switch_jump_table =
4389             builder()->AllocateJumpTable(2, 1);
4390 
4391         builder()
4392             ->LoadAccumulatorWithRegister(resume_mode)
4393             .SwitchOnSmiNoFeedback(switch_jump_table);
4394 
4395         // Fallthrough to default case.
4396         // TODO(tebbi): Add debug code to check that {resume_mode} really is
4397         // {JSGeneratorObject::kNext} in this case.
4398         STATIC_ASSERT(JSGeneratorObject::kNext == 0);
4399         {
4400           FeedbackSlot slot = feedback_spec()->AddCallICSlot();
4401           builder()->CallProperty(iterator.next(), iterator_and_input,
4402                                   feedback_index(slot));
4403           builder()->Jump(after_switch.New());
4404         }
4405 
4406         STATIC_ASSERT(JSGeneratorObject::kReturn == 1);
4407         builder()->Bind(switch_jump_table, JSGeneratorObject::kReturn);
4408         {
4409           const AstRawString* return_string =
4410               ast_string_constants()->return_string();
4411           BytecodeLabels no_return_method(zone());
4412 
4413           BuildCallIteratorMethod(iterator.object(), return_string,
4414                                   iterator_and_input, after_switch.New(),
4415                                   &no_return_method);
4416           no_return_method.Bind(builder());
4417           builder()->LoadAccumulatorWithRegister(input);
4418           if (iterator_type == IteratorType::kAsync) {
4419             // Await input.
4420             BuildAwait(expr->position());
4421             execution_control()->AsyncReturnAccumulator();
4422           } else {
4423             execution_control()->ReturnAccumulator();
4424           }
4425         }
4426 
4427         STATIC_ASSERT(JSGeneratorObject::kThrow == 2);
4428         builder()->Bind(switch_jump_table, JSGeneratorObject::kThrow);
4429         {
4430           const AstRawString* throw_string =
4431               ast_string_constants()->throw_string();
4432           BytecodeLabels no_throw_method(zone());
4433           BuildCallIteratorMethod(iterator.object(), throw_string,
4434                                   iterator_and_input, after_switch.New(),
4435                                   &no_throw_method);
4436 
4437           // If there is no "throw" method, perform IteratorClose, and finally
4438           // throw a TypeError.
4439           no_throw_method.Bind(builder());
4440           BuildIteratorClose(iterator, expr);
4441           builder()->CallRuntime(Runtime::kThrowThrowMethodMissing);
4442         }
4443 
4444         after_switch.Bind(builder());
4445       }
4446 
4447       if (iterator_type == IteratorType::kAsync) {
4448         // Await the result of the method invocation.
4449         BuildAwait(expr->position());
4450       }
4451 
4452       // Check that output is an object.
4453       BytecodeLabel check_if_done;
4454       builder()
4455           ->StoreAccumulatorInRegister(output)
4456           .JumpIfJSReceiver(&check_if_done)
4457           .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, output);
4458 
4459       builder()->Bind(&check_if_done);
4460       // Break once output.done is true.
4461       builder()->LoadNamedProperty(
4462           output, ast_string_constants()->done_string(),
4463           feedback_index(feedback_spec()->AddLoadICSlot()));
4464 
4465       loop_builder.BreakIfTrue(ToBooleanMode::kConvertToBoolean);
4466 
4467       // Suspend the current generator.
4468       if (iterator_type == IteratorType::kNormal) {
4469         builder()->LoadAccumulatorWithRegister(output);
4470       } else {
4471         RegisterAllocationScope register_scope(this);
4472         DCHECK_EQ(iterator_type, IteratorType::kAsync);
4473         // If generatorKind is async, perform AsyncGeneratorYield(output.value),
4474         // which will await `output.value` before resolving the current
4475         // AsyncGeneratorRequest's promise.
4476         builder()->LoadNamedProperty(
4477             output, ast_string_constants()->value_string(),
4478             feedback_index(feedback_spec()->AddLoadICSlot()));
4479 
4480         RegisterList args = register_allocator()->NewRegisterList(3);
4481         builder()
4482             ->MoveRegister(generator_object(), args[0])  // generator
4483             .StoreAccumulatorInRegister(args[1])         // value
4484             .LoadBoolean(catch_prediction() != HandlerTable::ASYNC_AWAIT)
4485             .StoreAccumulatorInRegister(args[2])  // is_caught
4486             .CallRuntime(Runtime::kInlineAsyncGeneratorYield, args);
4487       }
4488 
4489       BuildSuspendPoint(expr->position());
4490       builder()->StoreAccumulatorInRegister(input);
4491       builder()
4492           ->CallRuntime(Runtime::kInlineGeneratorGetResumeMode,
4493                         generator_object())
4494           .StoreAccumulatorInRegister(resume_mode);
4495 
4496       loop_builder.BindContinueTarget();
4497     }
4498   }
4499 
4500   // Decide if we trigger a return or if the yield* expression should just
4501   // produce a value.
4502   BytecodeLabel completion_is_output_value;
4503   Register output_value = register_allocator()->NewRegister();
4504   builder()
4505       ->LoadNamedProperty(output, ast_string_constants()->value_string(),
4506                           feedback_index(feedback_spec()->AddLoadICSlot()))
4507       .StoreAccumulatorInRegister(output_value)
4508       .LoadLiteral(Smi::FromInt(JSGeneratorObject::kReturn))
4509       .CompareReference(resume_mode)
4510       .JumpIfFalse(ToBooleanMode::kAlreadyBoolean, &completion_is_output_value)
4511       .LoadAccumulatorWithRegister(output_value);
4512   if (iterator_type == IteratorType::kAsync) {
4513     execution_control()->AsyncReturnAccumulator();
4514   } else {
4515     execution_control()->ReturnAccumulator();
4516   }
4517 
4518   builder()->Bind(&completion_is_output_value);
4519   BuildIncrementBlockCoverageCounterIfEnabled(expr,
4520                                               SourceRangeKind::kContinuation);
4521   builder()->LoadAccumulatorWithRegister(output_value);
4522 }
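
// Illustrative sketch (not part of the original source) of the delegation
// above: for
//
//   function* outer() { yield* inner(); }
//
// next(), return() and throw() on the outer generator are forwarded to the
// delegated iterator's next, return and throw methods. If the delegate has no
// throw method, the iterator is closed and a TypeError is thrown; if it has
// no return method, the outer generator simply returns the sent value.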
4523 
4524 void BytecodeGenerator::BuildAwait(int position) {
4525   // Rather than HandlerTable::UNCAUGHT, async functions use
4526   // HandlerTable::ASYNC_AWAIT to communicate that top-level exceptions are
4527   // transformed into promise rejections. This is necessary to prevent emitting
4528   // multiple debug events for the same uncaught exception. There is no point
4529   // in the body of an async function where catch prediction is
4530   // HandlerTable::UNCAUGHT.
4531   DCHECK(catch_prediction() != HandlerTable::UNCAUGHT ||
4532          info()->scope()->is_repl_mode_scope());
4533 
4534   {
4535     // Await(operand) and suspend.
4536     RegisterAllocationScope register_scope(this);
4537 
4538     Runtime::FunctionId await_intrinsic_id;
4539     if (IsAsyncGeneratorFunction(function_kind())) {
4540       await_intrinsic_id = catch_prediction() == HandlerTable::ASYNC_AWAIT
4541                                ? Runtime::kInlineAsyncGeneratorAwaitUncaught
4542                                : Runtime::kInlineAsyncGeneratorAwaitCaught;
4543     } else {
4544       await_intrinsic_id = catch_prediction() == HandlerTable::ASYNC_AWAIT
4545                                ? Runtime::kInlineAsyncFunctionAwaitUncaught
4546                                : Runtime::kInlineAsyncFunctionAwaitCaught;
4547     }
4548     RegisterList args = register_allocator()->NewRegisterList(2);
4549     builder()
4550         ->MoveRegister(generator_object(), args[0])
4551         .StoreAccumulatorInRegister(args[1])
4552         .CallRuntime(await_intrinsic_id, args);
4553   }
4554 
4555   BuildSuspendPoint(position);
4556 
4557   Register input = register_allocator()->NewRegister();
4558   Register resume_mode = register_allocator()->NewRegister();
4559 
4560   // Now dispatch on resume mode.
4561   BytecodeLabel resume_next;
4562   builder()
4563       ->StoreAccumulatorInRegister(input)
4564       .CallRuntime(Runtime::kInlineGeneratorGetResumeMode, generator_object())
4565       .StoreAccumulatorInRegister(resume_mode)
4566       .LoadLiteral(Smi::FromInt(JSGeneratorObject::kNext))
4567       .CompareReference(resume_mode)
4568       .JumpIfTrue(ToBooleanMode::kAlreadyBoolean, &resume_next);
4569 
4570   // Resume with "throw" completion (rethrow the received value).
4571   // TODO(leszeks): Add a debug-only check that the accumulator is
4572   // JSGeneratorObject::kThrow.
4573   builder()->LoadAccumulatorWithRegister(input).ReThrow();
4574 
4575   // Resume with next.
4576   builder()->Bind(&resume_next);
4577   builder()->LoadAccumulatorWithRegister(input);
4578 }
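
// Illustrative sketch (not part of the original source) of the await resume
// handling above: in
//
//   async function f() { const x = await p; }
//
// the function suspends after handing p to the await intrinsic; on resume, a
// kNext resume mode delivers the fulfilled value into x, while any other
// resume mode (i.e. a rejection) rethrows the delivered value at the await.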
4579 
4580 void BytecodeGenerator::VisitAwait(Await* expr) {
4581   builder()->SetExpressionPosition(expr);
4582   VisitForAccumulatorValue(expr->expression());
4583   BuildAwait(expr->position());
4584   BuildIncrementBlockCoverageCounterIfEnabled(expr,
4585                                               SourceRangeKind::kContinuation);
4586 }
4587 
4588 void BytecodeGenerator::VisitThrow(Throw* expr) {
4589   AllocateBlockCoverageSlotIfEnabled(expr, SourceRangeKind::kContinuation);
4590   VisitForAccumulatorValue(expr->exception());
4591   builder()->SetExpressionPosition(expr);
4592   builder()->Throw();
4593 }
4594 
4595 void BytecodeGenerator::VisitPropertyLoad(Register obj, Property* property) {
4596   if (property->is_optional_chain_link()) {
4597     DCHECK_NOT_NULL(optional_chaining_null_labels_);
4598     builder()->LoadAccumulatorWithRegister(obj).JumpIfUndefinedOrNull(
4599         optional_chaining_null_labels_->New());
4600   }
4601 
4602   AssignType property_kind = Property::GetAssignType(property);
4603 
4604   switch (property_kind) {
4605     case NON_PROPERTY:
4606       UNREACHABLE();
4607     case NAMED_PROPERTY: {
4608       builder()->SetExpressionPosition(property);
4609       const AstRawString* name =
4610           property->key()->AsLiteral()->AsRawPropertyName();
4611       BuildLoadNamedProperty(property->obj(), obj, name);
4612       break;
4613     }
4614     case KEYED_PROPERTY: {
4615       VisitForAccumulatorValue(property->key());
4616       builder()->SetExpressionPosition(property);
4617       builder()->LoadKeyedProperty(
4618           obj, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
4619       break;
4620     }
4621     case NAMED_SUPER_PROPERTY:
4622       VisitNamedSuperPropertyLoad(property, Register::invalid_value());
4623       break;
4624     case KEYED_SUPER_PROPERTY:
4625       VisitKeyedSuperPropertyLoad(property, Register::invalid_value());
4626       break;
4627     case PRIVATE_SETTER_ONLY: {
4628       BuildInvalidPropertyAccess(MessageTemplate::kInvalidPrivateGetterAccess,
4629                                  property);
4630       break;
4631     }
4632     case PRIVATE_GETTER_ONLY:
4633     case PRIVATE_GETTER_AND_SETTER: {
4634       Register key = VisitForRegisterValue(property->key());
4635       BuildPrivateBrandCheck(property, obj,
4636                              MessageTemplate::kInvalidPrivateMemberRead);
4637       BuildPrivateGetterAccess(obj, key);
4638       break;
4639     }
4640     case PRIVATE_METHOD: {
4641       BuildPrivateBrandCheck(property, obj,
4642                              MessageTemplate::kInvalidPrivateMemberRead);
4643       // In the case of private methods, property->key() is the function to be
4644       // loaded (stored in a context slot), so load this directly.
4645       VisitForAccumulatorValue(property->key());
4646       break;
4647     }
4648   }
4649 }
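
// Illustrative mapping (not part of the original source) of the property
// kinds dispatched on above:
//
//   o.x             named load IC        super.x     named super load
//   o[k]            keyed load IC        super[k]    keyed super load
//   this.#accessor  brand check, then a call to the private getter
//   this.#method    brand check, then a load of the method's context slot
//
// An optional-chain link (o?.x) additionally jumps to the chain's null labels
// when the receiver is undefined or null.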
4650 
4651 void BytecodeGenerator::BuildPrivateGetterAccess(Register object,
4652                                                  Register accessor_pair) {
4653   RegisterAllocationScope scope(this);
4654   Register accessor = register_allocator()->NewRegister();
4655   RegisterList args = register_allocator()->NewRegisterList(1);
4656 
4657   builder()
4658       ->CallRuntime(Runtime::kLoadPrivateGetter, accessor_pair)
4659       .StoreAccumulatorInRegister(accessor)
4660       .MoveRegister(object, args[0])
4661       .CallProperty(accessor, args,
4662                     feedback_index(feedback_spec()->AddCallICSlot()));
4663 }
4664 
4665 void BytecodeGenerator::BuildPrivateSetterAccess(Register object,
4666                                                  Register accessor_pair,
4667                                                  Register value) {
4668   RegisterAllocationScope scope(this);
4669   Register accessor = register_allocator()->NewRegister();
4670   RegisterList args = register_allocator()->NewRegisterList(2);
4671 
4672   builder()
4673       ->CallRuntime(Runtime::kLoadPrivateSetter, accessor_pair)
4674       .StoreAccumulatorInRegister(accessor)
4675       .MoveRegister(object, args[0])
4676       .MoveRegister(value, args[1])
4677       .CallProperty(accessor, args,
4678                     feedback_index(feedback_spec()->AddCallICSlot()));
4679 }
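
// Illustrative sketch (not part of the original source): for private
// accessors the private name variable holds an accessor pair, so for
//
//   class C { get #x() { return 1; } set #x(v) {} m() { this.#x = this.#x; } }
//
// reading #x extracts the getter from the pair (%LoadPrivateGetter) and calls
// it on the receiver, while writing #x extracts the setter
// (%LoadPrivateSetter) and calls it with the receiver and the value.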
4680 
4681 void BytecodeGenerator::BuildPrivateBrandCheck(Property* property,
4682                                                Register object,
4683                                                MessageTemplate tmpl) {
4684   Variable* private_name = property->key()->AsVariableProxy()->var();
4685   DCHECK(IsPrivateMethodOrAccessorVariableMode(private_name->mode()));
4686   ClassScope* scope = private_name->scope()->AsClassScope();
4687   if (private_name->is_static()) {
4688     // For static private methods, the only valid receiver is the class.
4689     // Load the class constructor.
4690     if (scope->class_variable() == nullptr) {
4691       // If the static private method has not been used in source code
4692       // (either explicitly or through the presence of eval), but is
4693       // accessed by the debugger at runtime, the reference to the class
4694       // variable is not available because it was not context-allocated.
4695       // Therefore we can't build a brand check, and instead throw a
4696       // ReferenceError as if the method had been optimized away.
4697       // TODO(joyee): get a reference to the class constructor through
4698       // something other than scope->class_variable() in this scenario.
4699       RegisterAllocationScope register_scope(this);
4700       RegisterList args = register_allocator()->NewRegisterList(2);
4701       builder()
4702           ->LoadLiteral(Smi::FromEnum(
4703               MessageTemplate::
4704                   kInvalidUnusedPrivateStaticMethodAccessedByDebugger))
4705           .StoreAccumulatorInRegister(args[0])
4706           .LoadLiteral(private_name->raw_name())
4707           .StoreAccumulatorInRegister(args[1])
4708           .CallRuntime(Runtime::kNewError, args)
4709           .Throw();
4710     } else {
4711       BuildVariableLoadForAccumulatorValue(scope->class_variable(),
4712                                            HoleCheckMode::kElided);
4713       BytecodeLabel return_check;
4714       builder()->CompareReference(object).JumpIfTrue(
4715           ToBooleanMode::kAlreadyBoolean, &return_check);
4716       BuildInvalidPropertyAccess(tmpl, property);
4717       builder()->Bind(&return_check);
4718     }
4719   } else {
4720     BuildVariableLoadForAccumulatorValue(scope->brand(),
4721                                          HoleCheckMode::kElided);
4722     builder()->SetExpressionPosition(property);
4723     builder()->LoadKeyedProperty(
4724         object, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
4725   }
4726 }
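
// Illustrative sketch (not part of the original source) of the brand check
// above: for an instance private method or accessor, e.g.
//
//   class C { #m() {} use(o) { return o.#m(); } }
//
// the class brand is looked up on the receiver with a keyed load, so calling
// use() with a receiver that is not a C instance results in a TypeError. For
// static private members the receiver is instead compared against the class
// constructor itself.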
4727 
4728 void BytecodeGenerator::VisitPropertyLoadForRegister(Register obj,
4729                                                      Property* expr,
4730                                                      Register destination) {
4731   ValueResultScope result_scope(this);
4732   VisitPropertyLoad(obj, expr);
4733   builder()->StoreAccumulatorInRegister(destination);
4734 }
4735 
4736 void BytecodeGenerator::VisitNamedSuperPropertyLoad(Property* property,
4737                                                     Register opt_receiver_out) {
4738   RegisterAllocationScope register_scope(this);
4739   SuperPropertyReference* super_property =
4740       property->obj()->AsSuperPropertyReference();
4741   if (FLAG_super_ic) {
4742     Register receiver = register_allocator()->NewRegister();
4743     BuildThisVariableLoad();
4744     builder()->StoreAccumulatorInRegister(receiver);
4745     VisitForAccumulatorValue(super_property->home_object());
4746     builder()->SetExpressionPosition(property);
4747     auto name = property->key()->AsLiteral()->AsRawPropertyName();
4748     FeedbackSlot slot = GetCachedLoadSuperICSlot(name);
4749     builder()->LoadNamedPropertyFromSuper(receiver, name, feedback_index(slot));
4750     if (opt_receiver_out.is_valid()) {
4751       builder()->MoveRegister(receiver, opt_receiver_out);
4752     }
4753   } else {
4754     RegisterList args = register_allocator()->NewRegisterList(3);
4755     BuildThisVariableLoad();
4756     builder()->StoreAccumulatorInRegister(args[0]);
4757     VisitForRegisterValue(super_property->home_object(), args[1]);
4758 
4759     builder()->SetExpressionPosition(property);
4760     builder()
4761         ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
4762         .StoreAccumulatorInRegister(args[2])
4763         .CallRuntime(Runtime::kLoadFromSuper, args);
4764 
4765     if (opt_receiver_out.is_valid()) {
4766       builder()->MoveRegister(args[0], opt_receiver_out);
4767     }
4768   }
4769 }
4770 
4771 void BytecodeGenerator::VisitKeyedSuperPropertyLoad(Property* property,
4772                                                     Register opt_receiver_out) {
4773   RegisterAllocationScope register_scope(this);
4774   SuperPropertyReference* super_property =
4775       property->obj()->AsSuperPropertyReference();
4776   RegisterList args = register_allocator()->NewRegisterList(3);
4777   BuildThisVariableLoad();
4778   builder()->StoreAccumulatorInRegister(args[0]);
4779   VisitForRegisterValue(super_property->home_object(), args[1]);
4780   VisitForRegisterValue(property->key(), args[2]);
4781 
4782   builder()->SetExpressionPosition(property);
4783   builder()->CallRuntime(Runtime::kLoadKeyedFromSuper, args);
4784 
4785   if (opt_receiver_out.is_valid()) {
4786     builder()->MoveRegister(args[0], opt_receiver_out);
4787   }
4788 }
4789 
4790 template <typename ExpressionFunc>
4791 void BytecodeGenerator::BuildOptionalChain(ExpressionFunc expression_func) {
4792   BytecodeLabel done;
4793   OptionalChainNullLabelScope label_scope(this);
4794   expression_func();
4795   builder()->Jump(&done);
4796   label_scope.labels()->Bind(builder());
4797   builder()->LoadUndefined();
4798   builder()->Bind(&done);
4799 }
4800 
4801 void BytecodeGenerator::VisitOptionalChain(OptionalChain* expr) {
4802   BuildOptionalChain([&]() { VisitForAccumulatorValue(expr->expression()); });
4803 }
4804 
4805 void BytecodeGenerator::VisitProperty(Property* expr) {
4806   AssignType property_kind = Property::GetAssignType(expr);
4807   if (property_kind != NAMED_SUPER_PROPERTY &&
4808       property_kind != KEYED_SUPER_PROPERTY) {
4809     Register obj = VisitForRegisterValue(expr->obj());
4810     VisitPropertyLoad(obj, expr);
4811   } else {
4812     VisitPropertyLoad(Register::invalid_value(), expr);
4813   }
4814 }
4815 
4816 void BytecodeGenerator::VisitArguments(const ZonePtrList<Expression>* args,
4817                                        RegisterList* arg_regs) {
4818   // Visit arguments.
4819   for (int i = 0; i < static_cast<int>(args->length()); i++) {
4820     VisitAndPushIntoRegisterList(args->at(i), arg_regs);
4821   }
4822 }
4823 
4824 void BytecodeGenerator::VisitCall(Call* expr) {
4825   Expression* callee_expr = expr->expression();
4826   Call::CallType call_type = expr->GetCallType();
4827 
4828   if (call_type == Call::SUPER_CALL) {
4829     return VisitCallSuper(expr);
4830   }
4831 
4832   // Grow the args list as we visit receiver / arguments to avoid allocating all
4833   // the registers up-front. Otherwise these registers are unavailable during
4834   // receiver / argument visiting and we can end up with memory leaks due to
4835   // registers keeping objects alive.
4836   Register callee = register_allocator()->NewRegister();
4837   RegisterList args = register_allocator()->NewGrowableRegisterList();
4838 
4839   bool implicit_undefined_receiver = false;
4840   // When a call contains a spread, a Call AST node is only created if there is
4841   // exactly one spread, and it is the last argument.
4842   bool is_spread_call = expr->only_last_arg_is_spread();
4843   bool optimize_as_one_shot = ShouldOptimizeAsOneShot();
4844 
4845   // TODO(petermarshall): We have a lot of call bytecodes that are very similar,
4846   // see if we can reduce the number by adding a separate argument which
4847   // specifies the call type (e.g., property, spread, tailcall, etc.).
4848 
4849   // Prepare the callee and the receiver for the function call. This depends
4850   // on the semantics of the underlying call type.
4851   switch (call_type) {
4852     case Call::NAMED_PROPERTY_CALL:
4853     case Call::KEYED_PROPERTY_CALL:
4854     case Call::PRIVATE_CALL: {
4855       Property* property = callee_expr->AsProperty();
4856       VisitAndPushIntoRegisterList(property->obj(), &args);
4857       VisitPropertyLoadForRegister(args.last_register(), property, callee);
4858       break;
4859     }
4860     case Call::GLOBAL_CALL: {
4861       // Receiver is undefined for global calls.
4862       if (!is_spread_call && !optimize_as_one_shot) {
4863         implicit_undefined_receiver = true;
4864       } else {
4865         // TODO(leszeks): There's no special bytecode for tail calls or spread
4866         // calls with an undefined receiver, so just push undefined ourselves.
4867         BuildPushUndefinedIntoRegisterList(&args);
4868       }
4869       // Load callee as a global variable.
4870       VariableProxy* proxy = callee_expr->AsVariableProxy();
4871       BuildVariableLoadForAccumulatorValue(proxy->var(),
4872                                            proxy->hole_check_mode());
4873       builder()->StoreAccumulatorInRegister(callee);
4874       break;
4875     }
4876     case Call::WITH_CALL: {
4877       Register receiver = register_allocator()->GrowRegisterList(&args);
4878       DCHECK(callee_expr->AsVariableProxy()->var()->IsLookupSlot());
4879       {
4880         RegisterAllocationScope inner_register_scope(this);
4881         Register name = register_allocator()->NewRegister();
4882 
4883         // Call %LoadLookupSlotForCall to get the callee and receiver.
4884         RegisterList result_pair = register_allocator()->NewRegisterList(2);
4885         Variable* variable = callee_expr->AsVariableProxy()->var();
4886         builder()
4887             ->LoadLiteral(variable->raw_name())
4888             .StoreAccumulatorInRegister(name)
4889             .CallRuntimeForPair(Runtime::kLoadLookupSlotForCall, name,
4890                                 result_pair)
4891             .MoveRegister(result_pair[0], callee)
4892             .MoveRegister(result_pair[1], receiver);
4893       }
4894       break;
4895     }
4896     case Call::OTHER_CALL: {
4897       // Receiver is undefined for other calls.
4898       if (!is_spread_call && !optimize_as_one_shot) {
4899         implicit_undefined_receiver = true;
4900       } else {
4901         // TODO(leszeks): There's no special bytecode for tail calls or spread
4902         // calls with an undefined receiver, so just push undefined ourselves.
4903         BuildPushUndefinedIntoRegisterList(&args);
4904       }
4905       VisitForRegisterValue(callee_expr, callee);
4906       break;
4907     }
4908     case Call::NAMED_SUPER_PROPERTY_CALL: {
4909       Register receiver = register_allocator()->GrowRegisterList(&args);
4910       Property* property = callee_expr->AsProperty();
4911       VisitNamedSuperPropertyLoad(property, receiver);
4912       builder()->StoreAccumulatorInRegister(callee);
4913       break;
4914     }
4915     case Call::KEYED_SUPER_PROPERTY_CALL: {
4916       Register receiver = register_allocator()->GrowRegisterList(&args);
4917       Property* property = callee_expr->AsProperty();
4918       VisitKeyedSuperPropertyLoad(property, receiver);
4919       builder()->StoreAccumulatorInRegister(callee);
4920       break;
4921     }
4922     case Call::NAMED_OPTIONAL_CHAIN_PROPERTY_CALL:
4923     case Call::KEYED_OPTIONAL_CHAIN_PROPERTY_CALL:
4924     case Call::PRIVATE_OPTIONAL_CHAIN_CALL: {
4925       OptionalChain* chain = callee_expr->AsOptionalChain();
4926       Property* property = chain->expression()->AsProperty();
4927       BuildOptionalChain([&]() {
4928         VisitAndPushIntoRegisterList(property->obj(), &args);
4929         VisitPropertyLoadForRegister(args.last_register(), property, callee);
4930       });
4931       break;
4932     }
4933     case Call::SUPER_CALL:
4934       UNREACHABLE();
4935   }
4936 
4937   if (expr->is_optional_chain_link()) {
4938     DCHECK_NOT_NULL(optional_chaining_null_labels_);
4939     builder()->LoadAccumulatorWithRegister(callee).JumpIfUndefinedOrNull(
4940         optional_chaining_null_labels_->New());
4941   }
4942 
4943   // Evaluate all arguments to the function call and store in sequential args
4944   // registers.
4945   VisitArguments(expr->arguments(), &args);
4946   int receiver_arg_count = implicit_undefined_receiver ? 0 : 1;
4947   CHECK_EQ(receiver_arg_count + expr->arguments()->length(),
4948            args.register_count());
4949 
4950   // Resolve callee for a potential direct eval call. This block will mutate the
4951   // callee value.
4952   if (expr->is_possibly_eval() && expr->arguments()->length() > 0) {
4953     RegisterAllocationScope inner_register_scope(this);
4954     // Set up arguments for ResolvePossiblyDirectEval by copying the callee,
4955     // the first argument (the eval source) and the function closure, and by
4956     // loading the language mode and source positions.
4957     Register first_arg = args[receiver_arg_count];
4958     RegisterList runtime_call_args = register_allocator()->NewRegisterList(6);
4959     builder()
4960         ->MoveRegister(callee, runtime_call_args[0])
4961         .MoveRegister(first_arg, runtime_call_args[1])
4962         .MoveRegister(Register::function_closure(), runtime_call_args[2])
4963         .LoadLiteral(Smi::FromEnum(language_mode()))
4964         .StoreAccumulatorInRegister(runtime_call_args[3])
4965         .LoadLiteral(Smi::FromInt(current_scope()->start_position()))
4966         .StoreAccumulatorInRegister(runtime_call_args[4])
4967         .LoadLiteral(Smi::FromInt(expr->position()))
4968         .StoreAccumulatorInRegister(runtime_call_args[5]);
4969 
4970     // Call ResolvePossiblyDirectEval and modify the callee.
4971     builder()
4972         ->CallRuntime(Runtime::kResolvePossiblyDirectEval, runtime_call_args)
4973         .StoreAccumulatorInRegister(callee);
4974   }
4975 
4976   builder()->SetExpressionPosition(expr);
4977 
4978   if (is_spread_call) {
4979     DCHECK(!implicit_undefined_receiver);
4980     builder()->CallWithSpread(callee, args,
4981                               feedback_index(feedback_spec()->AddCallICSlot()));
4982   } else if (optimize_as_one_shot) {
4983     DCHECK(!implicit_undefined_receiver);
4984     builder()->CallNoFeedback(callee, args);
4985   } else if (call_type == Call::NAMED_PROPERTY_CALL ||
4986              call_type == Call::KEYED_PROPERTY_CALL) {
4987     DCHECK(!implicit_undefined_receiver);
4988     builder()->CallProperty(callee, args,
4989                             feedback_index(feedback_spec()->AddCallICSlot()));
4990   } else if (implicit_undefined_receiver) {
4991     builder()->CallUndefinedReceiver(
4992         callee, args, feedback_index(feedback_spec()->AddCallICSlot()));
4993   } else {
4994     builder()->CallAnyReceiver(
4995         callee, args, feedback_index(feedback_spec()->AddCallICSlot()));
4996   }
4997 }
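
// Illustrative mapping (not part of the original source) of the call kinds
// handled above:
//
//   o.f(x), o[k](x)    property call with the receiver pushed first
//   f(x)               global/other call, usually with an implicit undefined
//                      receiver
//   f(...xs)           CallWithSpread (the spread must be the last argument)
//   eval(src)          callee first rewritten via %ResolvePossiblyDirectEval
//   with (o) { f() }   callee and receiver fetched via %LoadLookupSlotForCall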
4998 
4999 void BytecodeGenerator::VisitCallSuper(Call* expr) {
5000   RegisterAllocationScope register_scope(this);
5001   SuperCallReference* super = expr->expression()->AsSuperCallReference();
5002   const ZonePtrList<Expression>* args = expr->arguments();
5003 
5004   int first_spread_index = 0;
5005   for (; first_spread_index < args->length(); first_spread_index++) {
5006     if (args->at(first_spread_index)->IsSpread()) break;
5007   }
5008 
5009   // Prepare the constructor for the super call.
5010   Register this_function = VisitForRegisterValue(super->this_function_var());
5011   Register constructor = register_allocator()->NewRegister();
5012   builder()
5013       ->LoadAccumulatorWithRegister(this_function)
5014       .GetSuperConstructor(constructor);
5015 
5016   if (first_spread_index < expr->arguments()->length() - 1) {
5017     // We rewrite something like
5018     //    super(1, ...x, 2)
5019     // to
5020     //    %reflect_construct(constructor, [1, ...x, 2], new_target)
5021     // That is, we implement (non-last-arg) spreads in super calls via our
5022     // mechanism for spreads in array literals.
5023 
5024     // First generate the array containing all arguments.
5025     BuildCreateArrayLiteral(args, nullptr);
5026 
5027     // Check if the constructor is in fact a constructor.
5028     builder()->ThrowIfNotSuperConstructor(constructor);
5029 
5030     // Now pass that array to %reflect_construct.
5031     RegisterList construct_args = register_allocator()->NewRegisterList(3);
5032     builder()->StoreAccumulatorInRegister(construct_args[1]);
5033     builder()->MoveRegister(constructor, construct_args[0]);
5034     VisitForRegisterValue(super->new_target_var(), construct_args[2]);
5035     builder()->CallJSRuntime(Context::REFLECT_CONSTRUCT_INDEX, construct_args);
5036   } else {
5037     RegisterList args_regs = register_allocator()->NewGrowableRegisterList();
5038     VisitArguments(args, &args_regs);
5039 
5040     // Check if the constructor is in fact a constructor.
5041     builder()->ThrowIfNotSuperConstructor(constructor);
5042 
5043     // The new target is loaded into the accumulator from the
5044     // {new.target} variable.
5045     VisitForAccumulatorValue(super->new_target_var());
5046     builder()->SetExpressionPosition(expr);
5047 
5048     int feedback_slot_index = feedback_index(feedback_spec()->AddCallICSlot());
5049 
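    // Illustrative examples (assumed, not from the original source):
    //   super(...args)  - the spread is the last argument, handled just below
    //                     with ConstructWithSpread;
    //   super(a, b)     - no spread at all, handled by the plain Construct
    //                     bytecode in the else branch.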
5050     if (first_spread_index == expr->arguments()->length() - 1) {
5051       builder()->ConstructWithSpread(constructor, args_regs,
5052                                      feedback_slot_index);
5053     } else {
5054       DCHECK_EQ(first_spread_index, expr->arguments()->length());
5055       // Call construct.
5056       // TODO(turbofan): For now we do gather feedback on super constructor
5057       // calls, utilizing the existing machinery to inline the actual call
5058       // target and the JSCreate for the implicit receiver allocation. This
5059       // is not an ideal solution for super constructor calls, but it gets
5060       // the job done for now. In the long run we might want to revisit this
5061       // and come up with a better way.
5062       builder()->Construct(constructor, args_regs, feedback_slot_index);
5063     }
5064   }
5065 
5066   // Explicit calls to the super constructor using super() perform an
5067   // implicit binding assignment to the 'this' variable.
5068   //
5069   // Default constructors don't have to do the assignment because
5070   // 'this' isn't accessed in default constructors.
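  // Illustrative example (not from the original source):
  //   class B extends A { constructor() { super(); this.x = 1; } }
  // Here the object produced by super() becomes the 'this' binding of the
  // derived constructor, which is what the assignment below implements.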
5071   if (!IsDefaultConstructor(info()->literal()->kind())) {
5072     Variable* var = closure_scope()->GetReceiverScope()->receiver();
5073     BuildVariableAssignment(var, Token::INIT, HoleCheckMode::kRequired);
5074   }
5075 
5076   Register instance = register_allocator()->NewRegister();
5077   builder()->StoreAccumulatorInRegister(instance);
5078 
5079   if (info()->literal()->class_scope_has_private_brand()) {
5080     BuildPrivateBrandInitialization(instance);
5081   }
5082 
5083   // The derived constructor always has the correct bit set, so we
5084   // don't emit code to load and call the initializer when it is not
5085   // required.
5086   //
5087   // For the arrow function or eval case, we always emit code to load
5088   // and call the initializer.
5089   //
5090   // TODO(gsathya): In the future, we could tag nested arrow functions
5091   // or eval with the correct bit so that we do the load conditionally
5092   // if required.
5093   if (info()->literal()->requires_instance_members_initializer() ||
5094       !IsDerivedConstructor(info()->literal()->kind())) {
5095     BuildInstanceMemberInitialization(this_function, instance);
5096   }
5097 
5098   builder()->LoadAccumulatorWithRegister(instance);
5099 }
5100 
5101 void BytecodeGenerator::VisitCallNew(CallNew* expr) {
5102   Register constructor = VisitForRegisterValue(expr->expression());
5103   RegisterList args = register_allocator()->NewGrowableRegisterList();
5104   VisitArguments(expr->arguments(), &args);
5105 
5106   // The accumulator holds the new target, which is the same as the
5107   // constructor for CallNew.
5108   builder()->SetExpressionPosition(expr);
5109   builder()->LoadAccumulatorWithRegister(constructor);
5110 
5111   int feedback_slot_index = feedback_index(feedback_spec()->AddCallICSlot());
5112   if (expr->only_last_arg_is_spread()) {
5113     builder()->ConstructWithSpread(constructor, args, feedback_slot_index);
5114   } else {
5115     builder()->Construct(constructor, args, feedback_slot_index);
5116   }
5117 }
5118 
5119 void BytecodeGenerator::VisitCallRuntime(CallRuntime* expr) {
5120   if (expr->is_jsruntime()) {
5121     RegisterList args = register_allocator()->NewGrowableRegisterList();
5122     VisitArguments(expr->arguments(), &args);
5123     builder()->CallJSRuntime(expr->context_index(), args);
5124   } else {
5125     // Evaluate all arguments to the runtime call.
5126     RegisterList args = register_allocator()->NewGrowableRegisterList();
5127     VisitArguments(expr->arguments(), &args);
5128     Runtime::FunctionId function_id = expr->function()->function_id;
5129     builder()->CallRuntime(function_id, args);
5130   }
5131 }
5132 
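// For illustration: `void f()` evaluates f() purely for its side effects and
// the whole expression yields undefined.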
5133 void BytecodeGenerator::VisitVoid(UnaryOperation* expr) {
5134   VisitForEffect(expr->expression());
5135   builder()->LoadUndefined();
5136 }
5137 
5138 void BytecodeGenerator::VisitForTypeOfValue(Expression* expr) {
5139   if (expr->IsVariableProxy()) {
5140     // Typeof does not throw a reference error on global variables, hence we
5141     // perform a non-contextual load in case the operand is a variable proxy.
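    // Illustrative example: `typeof someUndeclaredGlobal` yields "undefined"
    // instead of throwing a ReferenceError.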
5142     VariableProxy* proxy = expr->AsVariableProxy();
5143     BuildVariableLoadForAccumulatorValue(proxy->var(), proxy->hole_check_mode(),
5144                                          INSIDE_TYPEOF);
5145   } else {
5146     VisitForAccumulatorValue(expr);
5147   }
5148 }
5149 
5150 void BytecodeGenerator::VisitTypeOf(UnaryOperation* expr) {
5151   VisitForTypeOfValue(expr->expression());
5152   builder()->TypeOf();
5153 }
5154 
5155 void BytecodeGenerator::VisitNot(UnaryOperation* expr) {
5156   if (execution_result()->IsEffect()) {
5157     VisitForEffect(expr->expression());
5158   } else if (execution_result()->IsTest()) {
5159     // No actual logical negation is happening; we just invert the control
5160     // flow by swapping the target labels and the fallthrough branch, and
5161     // visit in the same test result context.
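    // Illustrative example: for `if (!x) { ... }` the condition `x` is visited
    // with its then/else targets swapped rather than materializing a boolean
    // and negating it.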
5162     TestResultScope* test_result = execution_result()->AsTest();
5163     test_result->InvertControlFlow();
5164     VisitInSameTestExecutionScope(expr->expression());
5165   } else {
5166     TypeHint type_hint = VisitForAccumulatorValue(expr->expression());
5167     builder()->LogicalNot(ToBooleanModeFromTypeHint(type_hint));
5168     // Always returns a boolean value.
5169     execution_result()->SetResultIsBoolean();
5170   }
5171 }
5172 
5173 void BytecodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
5174   switch (expr->op()) {
5175     case Token::Value::NOT:
5176       VisitNot(expr);
5177       break;
5178     case Token::Value::TYPEOF:
5179       VisitTypeOf(expr);
5180       break;
5181     case Token::Value::VOID:
5182       VisitVoid(expr);
5183       break;
5184     case Token::Value::DELETE:
5185       VisitDelete(expr);
5186       break;
5187     case Token::Value::ADD:
5188     case Token::Value::SUB:
5189     case Token::Value::BIT_NOT:
5190       VisitForAccumulatorValue(expr->expression());
5191       builder()->SetExpressionPosition(expr);
5192       builder()->UnaryOperation(
5193           expr->op(), feedback_index(feedback_spec()->AddBinaryOpICSlot()));
5194       break;
5195     default:
5196       UNREACHABLE();
5197   }
5198 }
5199 
5200 void BytecodeGenerator::VisitDelete(UnaryOperation* unary) {
5201   Expression* expr = unary->expression();
5202   if (expr->IsProperty()) {
5203     // Delete of an object property is allowed both in sloppy
5204     // and strict modes.
5205     Property* property = expr->AsProperty();
5206     DCHECK(!property->IsPrivateReference());
5207     Register object = VisitForRegisterValue(property->obj());
5208     VisitForAccumulatorValue(property->key());
5209     builder()->Delete(object, language_mode());
5210   } else if (expr->IsOptionalChain()) {
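    // Illustrative example: `delete a?.b` first evaluates `a`; if it is null
    // or undefined the chain short-circuits and the expression yields true
    // without evaluating the property access.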
5211     Expression* expr_inner = expr->AsOptionalChain()->expression();
5212     if (expr_inner->IsProperty()) {
5213       Property* property = expr_inner->AsProperty();
5214       DCHECK(!property->IsPrivateReference());
5215       BytecodeLabel done;
5216       OptionalChainNullLabelScope label_scope(this);
5217       VisitForAccumulatorValue(property->obj());
5218       if (property->is_optional_chain_link()) {
5219         builder()->JumpIfUndefinedOrNull(label_scope.labels()->New());
5220       }
5221       Register object = register_allocator()->NewRegister();
5222       builder()->StoreAccumulatorInRegister(object);
5223       VisitForAccumulatorValue(property->key());
5224       builder()->Delete(object, language_mode());
5225       builder()->Jump(&done);
5226       label_scope.labels()->Bind(builder());
5227       builder()->LoadTrue();
5228       builder()->Bind(&done);
5229     } else {
5230       VisitForEffect(expr);
5231       builder()->LoadTrue();
5232     }
5233   } else if (expr->IsVariableProxy() &&
5234              !expr->AsVariableProxy()->is_new_target()) {
5235     // Delete of an unqualified identifier is allowed in sloppy mode but is
5236     // not allowed in strict mode.
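    // Illustrative example: in sloppy mode `delete x` is legal and evaluates
    // to false for a declared variable, whereas strict-mode code rejects it
    // with an early SyntaxError.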
5237     DCHECK(is_sloppy(language_mode()));
5238     Variable* variable = expr->AsVariableProxy()->var();
5239     switch (variable->location()) {
5240       case VariableLocation::PARAMETER:
5241       case VariableLocation::LOCAL:
5242       case VariableLocation::CONTEXT:
5243       case VariableLocation::REPL_GLOBAL: {
5244         // Deleting local var/let/const, context variables, and arguments
5245         // does not have any effect.
5246         builder()->LoadFalse();
5247         break;
5248       }
5249       case VariableLocation::UNALLOCATED:
5250       // TODO(adamk): Falling through to the runtime results in correct
5251       // behavior, but does unnecessary context-walking (since scope
5252       // analysis has already proven that the variable doesn't exist in
5253       // any non-global scope). Consider adding a DeleteGlobal bytecode
5254       // that knows how to deal with ScriptContexts as well as global
5255       // object properties.
5256       case VariableLocation::LOOKUP: {
5257         Register name_reg = register_allocator()->NewRegister();
5258         builder()
5259             ->LoadLiteral(variable->raw_name())
5260             .StoreAccumulatorInRegister(name_reg)
5261             .CallRuntime(Runtime::kDeleteLookupSlot, name_reg);
5262         break;
5263       }
5264       case VariableLocation::MODULE:
5265         // Modules are always in strict mode and deleting unqualified
5266         // identifiers is not allowed in strict mode.
5267         UNREACHABLE();
5268     }
5269   } else {
5270     // Delete of an unresolvable reference, new.target, and this returns true.
5271     VisitForEffect(expr);
5272     builder()->LoadTrue();
5273   }
5274 }
5275 
5276 void BytecodeGenerator::VisitCountOperation(CountOperation* expr) {
5277   DCHECK(expr->expression()->IsValidReferenceExpression());
5278 
5279   // Left-hand side can only be a property, a global or a variable slot.
5280   Property* property = expr->expression()->AsProperty();
5281   AssignType assign_type = Property::GetAssignType(property);
5282 
5283   bool is_postfix = expr->is_postfix() && !execution_result()->IsEffect();
5284 
5285   // Evaluate LHS expression and get old value.
5286   Register object, key, old_value;
5287   RegisterList super_property_args;
5288   const AstRawString* name;
5289   switch (assign_type) {
5290     case NON_PROPERTY: {
5291       VariableProxy* proxy = expr->expression()->AsVariableProxy();
5292       BuildVariableLoadForAccumulatorValue(proxy->var(),
5293                                            proxy->hole_check_mode());
5294       break;
5295     }
5296     case NAMED_PROPERTY: {
5297       object = VisitForRegisterValue(property->obj());
5298       name = property->key()->AsLiteral()->AsRawPropertyName();
5299       builder()->LoadNamedProperty(
5300           object, name,
5301           feedback_index(GetCachedLoadICSlot(property->obj(), name)));
5302       break;
5303     }
5304     case KEYED_PROPERTY: {
5305       object = VisitForRegisterValue(property->obj());
5306       // Use VisitForAccumulatorValue here since we need the key in the
5307       // accumulator for LoadKeyedProperty.
5308       key = register_allocator()->NewRegister();
5309       VisitForAccumulatorValue(property->key());
5310       builder()->StoreAccumulatorInRegister(key).LoadKeyedProperty(
5311           object, feedback_index(feedback_spec()->AddKeyedLoadICSlot()));
5312       break;
5313     }
5314     case NAMED_SUPER_PROPERTY: {
5315       super_property_args = register_allocator()->NewRegisterList(4);
5316       RegisterList load_super_args = super_property_args.Truncate(3);
5317       SuperPropertyReference* super_property =
5318           property->obj()->AsSuperPropertyReference();
5319       BuildThisVariableLoad();
5320       builder()->StoreAccumulatorInRegister(load_super_args[0]);
5321       VisitForRegisterValue(super_property->home_object(), load_super_args[1]);
5322       builder()
5323           ->LoadLiteral(property->key()->AsLiteral()->AsRawPropertyName())
5324           .StoreAccumulatorInRegister(load_super_args[2])
5325           .CallRuntime(Runtime::kLoadFromSuper, load_super_args);
5326       break;
5327     }
5328     case KEYED_SUPER_PROPERTY: {
5329       super_property_args = register_allocator()->NewRegisterList(4);
5330       RegisterList load_super_args = super_property_args.Truncate(3);
5331       SuperPropertyReference* super_property =
5332           property->obj()->AsSuperPropertyReference();
5333       BuildThisVariableLoad();
5334       builder()->StoreAccumulatorInRegister(load_super_args[0]);
5335       VisitForRegisterValue(super_property->home_object(), load_super_args[1]);
5336       VisitForRegisterValue(property->key(), load_super_args[2]);
5337       builder()->CallRuntime(Runtime::kLoadKeyedFromSuper, load_super_args);
5338       break;
5339     }
5340     case PRIVATE_METHOD: {
5341       BuildInvalidPropertyAccess(MessageTemplate::kInvalidPrivateMethodWrite,
5342                                  property);
5343       return;
5344     }
5345     case PRIVATE_GETTER_ONLY: {
5346       BuildInvalidPropertyAccess(MessageTemplate::kInvalidPrivateSetterAccess,
5347                                  property);
5348       return;
5349     }
5350     case PRIVATE_SETTER_ONLY: {
5351       BuildInvalidPropertyAccess(MessageTemplate::kInvalidPrivateGetterAccess,
5352                                  property);
5353       return;
5354     }
5355     case PRIVATE_GETTER_AND_SETTER: {
5356       object = VisitForRegisterValue(property->obj());
5357       key = VisitForRegisterValue(property->key());
5358       BuildPrivateBrandCheck(property, object,
5359                              MessageTemplate::kInvalidPrivateMemberRead);
5360       BuildPrivateGetterAccess(object, key);
5361       break;
5362     }
5363   }
5364 
5365   // Save result for postfix expressions.
5366   FeedbackSlot count_slot = feedback_spec()->AddBinaryOpICSlot();
5367   if (is_postfix) {
5368     old_value = register_allocator()->NewRegister();
5369     // Convert old value into a number before saving it.
5370     // TODO(ignition): Think about adding proper PostInc/PostDec bytecodes
5371     // instead of this ToNumeric + Inc/Dec dance.
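    // Illustrative example: for `y = x++` the value of the whole expression is
    // the numeric old value of x, so it is converted with ToNumeric and saved
    // here before the increment is applied.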
5372     builder()
5373         ->ToNumeric(feedback_index(count_slot))
5374         .StoreAccumulatorInRegister(old_value);
5375   }
5376 
5377   // Perform +1/-1 operation.
5378   builder()->UnaryOperation(expr->op(), feedback_index(count_slot));
5379 
5380   // Store the value.
5381   builder()->SetExpressionPosition(expr);
5382   switch (assign_type) {
5383     case NON_PROPERTY: {
5384       VariableProxy* proxy = expr->expression()->AsVariableProxy();
5385       BuildVariableAssignment(proxy->var(), expr->op(),
5386                               proxy->hole_check_mode());
5387       break;
5388     }
5389     case NAMED_PROPERTY: {
5390       FeedbackSlot slot = GetCachedStoreICSlot(property->obj(), name);
5391       Register value;
5392       if (!execution_result()->IsEffect()) {
5393         value = register_allocator()->NewRegister();
5394         builder()->StoreAccumulatorInRegister(value);
5395       }
5396       builder()->StoreNamedProperty(object, name, feedback_index(slot),
5397                                     language_mode());
5398       if (!execution_result()->IsEffect()) {
5399         builder()->LoadAccumulatorWithRegister(value);
5400       }
5401       break;
5402     }
5403     case KEYED_PROPERTY: {
5404       FeedbackSlot slot = feedback_spec()->AddKeyedStoreICSlot(language_mode());
5405       Register value;
5406       if (!execution_result()->IsEffect()) {
5407         value = register_allocator()->NewRegister();
5408         builder()->StoreAccumulatorInRegister(value);
5409       }
5410       builder()->StoreKeyedProperty(object, key, feedback_index(slot),
5411                                     language_mode());
5412       if (!execution_result()->IsEffect()) {
5413         builder()->LoadAccumulatorWithRegister(value);
5414       }
5415       break;
5416     }
5417     case NAMED_SUPER_PROPERTY: {
5418       builder()
5419           ->StoreAccumulatorInRegister(super_property_args[3])
5420           .CallRuntime(Runtime::kStoreToSuper, super_property_args);
5421       break;
5422     }
5423     case KEYED_SUPER_PROPERTY: {
5424       builder()
5425           ->StoreAccumulatorInRegister(super_property_args[3])
5426           .CallRuntime(Runtime::kStoreKeyedToSuper, super_property_args);
5427       break;
5428     }
5429     case PRIVATE_SETTER_ONLY:
5430     case PRIVATE_GETTER_ONLY:
5431     case PRIVATE_METHOD: {
5432       UNREACHABLE();
5433     }
5434     case PRIVATE_GETTER_AND_SETTER: {
5435       Register value = register_allocator()->NewRegister();
5436       builder()->StoreAccumulatorInRegister(value);
5437       BuildPrivateSetterAccess(object, key, value);
5438       if (!execution_result()->IsEffect()) {
5439         builder()->LoadAccumulatorWithRegister(value);
5440       }
5441       break;
5442     }
5443   }
5444 
5445   // Restore old value for postfix expressions.
5446   if (is_postfix) {
5447     builder()->LoadAccumulatorWithRegister(old_value);
5448   }
5449 }
5450 
5451 void BytecodeGenerator::VisitBinaryOperation(BinaryOperation* binop) {
5452   switch (binop->op()) {
5453     case Token::COMMA:
5454       VisitCommaExpression(binop);
5455       break;
5456     case Token::OR:
5457       VisitLogicalOrExpression(binop);
5458       break;
5459     case Token::AND:
5460       VisitLogicalAndExpression(binop);
5461       break;
5462     case Token::NULLISH:
5463       VisitNullishExpression(binop);
5464       break;
5465     default:
5466       VisitArithmeticExpression(binop);
5467       break;
5468   }
5469 }
5470 
5471 void BytecodeGenerator::VisitNaryOperation(NaryOperation* expr) {
5472   switch (expr->op()) {
5473     case Token::COMMA:
5474       VisitNaryCommaExpression(expr);
5475       break;
5476     case Token::OR:
5477       VisitNaryLogicalOrExpression(expr);
5478       break;
5479     case Token::AND:
5480       VisitNaryLogicalAndExpression(expr);
5481       break;
5482     case Token::NULLISH:
5483       VisitNaryNullishExpression(expr);
5484       break;
5485     default:
5486       VisitNaryArithmeticExpression(expr);
5487       break;
5488   }
5489 }
5490 
5491 void BytecodeGenerator::BuildLiteralCompareNil(
5492     Token::Value op, BytecodeArrayBuilder::NilValue nil) {
5493   if (execution_result()->IsTest()) {
5494     TestResultScope* test_result = execution_result()->AsTest();
5495     switch (test_result->fallthrough()) {
5496       case TestFallthrough::kThen:
5497         builder()->JumpIfNotNil(test_result->NewElseLabel(), op, nil);
5498         break;
5499       case TestFallthrough::kElse:
5500         builder()->JumpIfNil(test_result->NewThenLabel(), op, nil);
5501         break;
5502       case TestFallthrough::kNone:
5503         builder()
5504             ->JumpIfNil(test_result->NewThenLabel(), op, nil)
5505             .Jump(test_result->NewElseLabel());
5506     }
5507     test_result->SetResultConsumedByTest();
5508   } else {
5509     builder()->CompareNil(op, nil);
5510   }
5511 }
5512 
5513 void BytecodeGenerator::VisitCompareOperation(CompareOperation* expr) {
5514   Expression* sub_expr;
5515   Literal* literal;
5516   if (expr->IsLiteralCompareTypeof(&sub_expr, &literal)) {
5517     // Emit a fast literal comparison for expressions of the form:
5518     // typeof(x) === 'string'.
5519     VisitForTypeOfValue(sub_expr);
5520     builder()->SetExpressionPosition(expr);
5521     TestTypeOfFlags::LiteralFlag literal_flag =
5522         TestTypeOfFlags::GetFlagForLiteral(ast_string_constants(), literal);
5523     if (literal_flag == TestTypeOfFlags::LiteralFlag::kOther) {
5524       builder()->LoadFalse();
5525     } else {
5526       builder()->CompareTypeOf(literal_flag);
5527     }
5528   } else if (expr->IsLiteralCompareUndefined(&sub_expr)) {
5529     VisitForAccumulatorValue(sub_expr);
5530     builder()->SetExpressionPosition(expr);
5531     BuildLiteralCompareNil(expr->op(), BytecodeArrayBuilder::kUndefinedValue);
5532   } else if (expr->IsLiteralCompareNull(&sub_expr)) {
5533     VisitForAccumulatorValue(sub_expr);
5534     builder()->SetExpressionPosition(expr);
5535     BuildLiteralCompareNil(expr->op(), BytecodeArrayBuilder::kNullValue);
5536   } else {
5537     Register lhs = VisitForRegisterValue(expr->left());
5538     VisitForAccumulatorValue(expr->right());
5539     builder()->SetExpressionPosition(expr);
5540     FeedbackSlot slot;
5541     if (expr->op() == Token::IN) {
5542       slot = feedback_spec()->AddKeyedHasICSlot();
5543     } else if (expr->op() == Token::INSTANCEOF) {
5544       slot = feedback_spec()->AddInstanceOfSlot();
5545     } else {
5546       slot = feedback_spec()->AddCompareICSlot();
5547     }
5548     builder()->CompareOperation(expr->op(), lhs, feedback_index(slot));
5549   }
5550   // Always returns a boolean value.
5551   execution_result()->SetResultIsBoolean();
5552 }
5553 
5554 void BytecodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
5555   FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
5556   Expression* subexpr;
5557   Smi literal;
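  // Illustrative example: `x + 1` is a Smi literal operation, so the dedicated
  // BinaryOperationSmiLiteral bytecode below can take the literal as an
  // immediate operand instead of placing it in a register.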
5558   if (expr->IsSmiLiteralOperation(&subexpr, &literal)) {
5559     TypeHint type_hint = VisitForAccumulatorValue(subexpr);
5560     builder()->SetExpressionPosition(expr);
5561     builder()->BinaryOperationSmiLiteral(expr->op(), literal,
5562                                          feedback_index(slot));
5563     if (expr->op() == Token::ADD && type_hint == TypeHint::kString) {
5564       execution_result()->SetResultIsString();
5565     }
5566   } else {
5567     TypeHint lhs_type = VisitForAccumulatorValue(expr->left());
5568     Register lhs = register_allocator()->NewRegister();
5569     builder()->StoreAccumulatorInRegister(lhs);
5570     TypeHint rhs_type = VisitForAccumulatorValue(expr->right());
5571     if (expr->op() == Token::ADD &&
5572         (lhs_type == TypeHint::kString || rhs_type == TypeHint::kString)) {
5573       execution_result()->SetResultIsString();
5574     }
5575 
5576     builder()->SetExpressionPosition(expr);
5577     builder()->BinaryOperation(expr->op(), lhs, feedback_index(slot));
5578   }
5579 }
5580 
5581 void BytecodeGenerator::VisitNaryArithmeticExpression(NaryOperation* expr) {
5582   // TODO(leszeks): Add support for lhs smi in commutative ops.
5583   TypeHint type_hint = VisitForAccumulatorValue(expr->first());
5584 
5585   for (size_t i = 0; i < expr->subsequent_length(); ++i) {
5586     RegisterAllocationScope register_scope(this);
5587     if (expr->subsequent(i)->IsSmiLiteral()) {
5588       builder()->SetExpressionPosition(expr->subsequent_op_position(i));
5589       builder()->BinaryOperationSmiLiteral(
5590           expr->op(), expr->subsequent(i)->AsLiteral()->AsSmiLiteral(),
5591           feedback_index(feedback_spec()->AddBinaryOpICSlot()));
5592     } else {
5593       Register lhs = register_allocator()->NewRegister();
5594       builder()->StoreAccumulatorInRegister(lhs);
5595       TypeHint rhs_hint = VisitForAccumulatorValue(expr->subsequent(i));
5596       if (rhs_hint == TypeHint::kString) type_hint = TypeHint::kString;
5597       builder()->SetExpressionPosition(expr->subsequent_op_position(i));
5598       builder()->BinaryOperation(
5599           expr->op(), lhs,
5600           feedback_index(feedback_spec()->AddBinaryOpICSlot()));
5601     }
5602   }
5603 
5604   if (type_hint == TypeHint::kString && expr->op() == Token::ADD) {
5605     // If any operand of an ADD is a String, a String is produced.
5606     execution_result()->SetResultIsString();
5607   }
5608 }
5609 
5610 // Note: the actual spreading is performed by the surrounding expression's
5611 // visitor.
5612 void BytecodeGenerator::VisitSpread(Spread* expr) { Visit(expr->expression()); }
5613 
5614 void BytecodeGenerator::VisitEmptyParentheses(EmptyParentheses* expr) {
5615   UNREACHABLE();
5616 }
5617 
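// Illustrative example: a dynamic `import(specifier)` expression is lowered to
// a %DynamicImportCall runtime call that receives the current closure and the
// specifier value.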
5618 void BytecodeGenerator::VisitImportCallExpression(ImportCallExpression* expr) {
5619   RegisterList args = register_allocator()->NewRegisterList(2);
5620   VisitForRegisterValue(expr->argument(), args[1]);
5621   builder()
5622       ->MoveRegister(Register::function_closure(), args[0])
5623       .CallRuntime(Runtime::kDynamicImportCall, args);
5624 }
5625 
5626 void BytecodeGenerator::BuildGetIterator(IteratorType hint) {
5627   if (hint == IteratorType::kAsync) {
5628     RegisterAllocationScope scope(this);
5629 
5630     Register obj = register_allocator()->NewRegister();
5631     Register method = register_allocator()->NewRegister();
5632 
5633     // Set method to GetMethod(obj, @@asyncIterator)
5634     builder()->StoreAccumulatorInRegister(obj).LoadAsyncIteratorProperty(
5635         obj, feedback_index(feedback_spec()->AddLoadICSlot()));
5636 
5637     BytecodeLabel async_iterator_undefined, done;
5638     builder()->JumpIfUndefinedOrNull(&async_iterator_undefined);
5639 
5640     // Let iterator be Call(method, obj)
5641     builder()->StoreAccumulatorInRegister(method).CallProperty(
5642         method, RegisterList(obj),
5643         feedback_index(feedback_spec()->AddCallICSlot()));
5644 
5645     // If Type(iterator) is not Object, throw a TypeError exception.
5646     builder()->JumpIfJSReceiver(&done);
5647     builder()->CallRuntime(Runtime::kThrowSymbolAsyncIteratorInvalid);
5648 
5649     builder()->Bind(&async_iterator_undefined);
5650     // If method is undefined,
5651     //     Let syncMethod be GetMethod(obj, @@iterator)
5652     builder()
5653         ->LoadIteratorProperty(obj,
5654                                feedback_index(feedback_spec()->AddLoadICSlot()))
5655         .StoreAccumulatorInRegister(method);
5656 
5657     //     Let syncIterator be Call(syncMethod, obj)
5658     builder()->CallProperty(method, RegisterList(obj),
5659                             feedback_index(feedback_spec()->AddCallICSlot()));
5660 
5661     // Return CreateAsyncFromSyncIterator(syncIterator)
5662     // Alias the `method` register as it is no longer used.
5663     Register sync_iter = method;
5664     builder()->StoreAccumulatorInRegister(sync_iter).CallRuntime(
5665         Runtime::kInlineCreateAsyncFromSyncIterator, sync_iter);
5666 
5667     builder()->Bind(&done);
5668   } else {
5669     {
5670       RegisterAllocationScope scope(this);
5671 
5672       Register obj = register_allocator()->NewRegister();
5673       int load_feedback_index =
5674           feedback_index(feedback_spec()->AddLoadICSlot());
5675       int call_feedback_index =
5676           feedback_index(feedback_spec()->AddCallICSlot());
5677 
5678       // Let method be GetMethod(obj, @@iterator) and
5679       // iterator be Call(method, obj).
5680       builder()->StoreAccumulatorInRegister(obj).GetIterator(
5681           obj, load_feedback_index, call_feedback_index);
5682     }
5683 
5684     // If Type(iterator) is not Object, throw a TypeError exception.
5685     BytecodeLabel no_type_error;
5686     builder()->JumpIfJSReceiver(&no_type_error);
5687     builder()->CallRuntime(Runtime::kThrowSymbolIteratorInvalid);
5688     builder()->Bind(&no_type_error);
5689   }
5690 }
5691 
5692 // Returns an IteratorRecord which is valid for the lifetime of the current
5693 // register_allocation_scope.
5694 BytecodeGenerator::IteratorRecord BytecodeGenerator::BuildGetIteratorRecord(
5695     Register next, Register object, IteratorType hint) {
5696   DCHECK(next.is_valid() && object.is_valid());
5697   BuildGetIterator(hint);
5698 
5699   builder()
5700       ->StoreAccumulatorInRegister(object)
5701       .LoadNamedProperty(object, ast_string_constants()->next_string(),
5702                          feedback_index(feedback_spec()->AddLoadICSlot()))
5703       .StoreAccumulatorInRegister(next);
5704   return IteratorRecord(object, next, hint);
5705 }
5706 
5707 BytecodeGenerator::IteratorRecord BytecodeGenerator::BuildGetIteratorRecord(
5708     IteratorType hint) {
5709   Register next = register_allocator()->NewRegister();
5710   Register object = register_allocator()->NewRegister();
5711   return BuildGetIteratorRecord(next, object, hint);
5712 }
5713 
5714 void BytecodeGenerator::BuildIteratorNext(const IteratorRecord& iterator,
5715                                           Register next_result) {
5716   DCHECK(next_result.is_valid());
5717   builder()->CallProperty(iterator.next(), RegisterList(iterator.object()),
5718                           feedback_index(feedback_spec()->AddCallICSlot()));
5719 
5720   if (iterator.type() == IteratorType::kAsync) {
5721     BuildAwait();
5722   }
5723 
5724   BytecodeLabel is_object;
5725   builder()
5726       ->StoreAccumulatorInRegister(next_result)
5727       .JumpIfJSReceiver(&is_object)
5728       .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, next_result)
5729       .Bind(&is_object);
5730 }
5731 
5732 void BytecodeGenerator::BuildCallIteratorMethod(Register iterator,
5733                                                 const AstRawString* method_name,
5734                                                 RegisterList receiver_and_args,
5735                                                 BytecodeLabel* if_called,
5736                                                 BytecodeLabels* if_notcalled) {
5737   RegisterAllocationScope register_scope(this);
5738 
5739   Register method = register_allocator()->NewRegister();
5740   FeedbackSlot slot = feedback_spec()->AddLoadICSlot();
5741   builder()
5742       ->LoadNamedProperty(iterator, method_name, feedback_index(slot))
5743       .JumpIfUndefinedOrNull(if_notcalled->New())
5744       .StoreAccumulatorInRegister(method)
5745       .CallProperty(method, receiver_and_args,
5746                     feedback_index(feedback_spec()->AddCallICSlot()))
5747       .Jump(if_called);
5748 }
5749 
5750 void BytecodeGenerator::BuildIteratorClose(const IteratorRecord& iterator,
5751                                            Expression* expr) {
5752   RegisterAllocationScope register_scope(this);
5753   BytecodeLabels done(zone());
5754   BytecodeLabel if_called;
5755   RegisterList args = RegisterList(iterator.object());
5756   BuildCallIteratorMethod(iterator.object(),
5757                           ast_string_constants()->return_string(), args,
5758                           &if_called, &done);
5759   builder()->Bind(&if_called);
5760 
5761   if (iterator.type() == IteratorType::kAsync) {
5762     DCHECK_NOT_NULL(expr);
5763     BuildAwait(expr->position());
5764   }
5765 
5766   builder()->JumpIfJSReceiver(done.New());
5767   {
5768     RegisterAllocationScope register_scope(this);
5769     Register return_result = register_allocator()->NewRegister();
5770     builder()
5771         ->StoreAccumulatorInRegister(return_result)
5772         .CallRuntime(Runtime::kThrowIteratorResultNotAnObject, return_result);
5773   }
5774 
5775   done.Bind(builder());
5776 }
5777 
5778 void BytecodeGenerator::VisitGetTemplateObject(GetTemplateObject* expr) {
5779   builder()->SetExpressionPosition(expr);
5780   size_t entry = builder()->AllocateDeferredConstantPoolEntry();
5781   template_objects_.push_back(std::make_pair(expr, entry));
5782   FeedbackSlot literal_slot = feedback_spec()->AddLiteralSlot();
5783   builder()->GetTemplateObject(entry, feedback_index(literal_slot));
5784 }
5785 
5786 void BytecodeGenerator::VisitTemplateLiteral(TemplateLiteral* expr) {
5787   const ZonePtrList<const AstRawString>& parts = *expr->string_parts();
5788   const ZonePtrList<Expression>& substitutions = *expr->substitutions();
5789   // Template strings with no substitutions are turned into StringLiterals.
5790   DCHECK_GT(substitutions.length(), 0);
5791   DCHECK_EQ(parts.length(), substitutions.length() + 1);
5792 
5793   // Generate string concatenation
5794   // TODO(caitp): Don't generate feedback slot if it's not used --- introduce
5795   // a simple, concise, reusable mechanism to lazily create reusable slots.
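  // Illustrative example: `${x}a${y}` is lowered to roughly
  // ToString(x) + "a" + ToString(y), where the ToString is skipped when the
  // substitution is already known to be a string.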
5796   FeedbackSlot slot = feedback_spec()->AddBinaryOpICSlot();
5797   Register last_part = register_allocator()->NewRegister();
5798   bool last_part_valid = false;
5799 
5800   builder()->SetExpressionPosition(expr);
5801   for (int i = 0; i < substitutions.length(); ++i) {
5802     if (i != 0) {
5803       builder()->StoreAccumulatorInRegister(last_part);
5804       last_part_valid = true;
5805     }
5806 
5807     if (!parts[i]->IsEmpty()) {
5808       builder()->LoadLiteral(parts[i]);
5809       if (last_part_valid) {
5810         builder()->BinaryOperation(Token::ADD, last_part, feedback_index(slot));
5811       }
5812       builder()->StoreAccumulatorInRegister(last_part);
5813       last_part_valid = true;
5814     }
5815 
5816     TypeHint type_hint = VisitForAccumulatorValue(substitutions[i]);
5817     if (type_hint != TypeHint::kString) {
5818       builder()->ToString();
5819     }
5820     if (last_part_valid) {
5821       builder()->BinaryOperation(Token::ADD, last_part, feedback_index(slot));
5822     }
5823     last_part_valid = false;
5824   }
5825 
5826   if (!parts.last()->IsEmpty()) {
5827     builder()->StoreAccumulatorInRegister(last_part);
5828     builder()->LoadLiteral(parts.last());
5829     builder()->BinaryOperation(Token::ADD, last_part, feedback_index(slot));
5830   }
5831 }
5832 
5833 void BytecodeGenerator::BuildThisVariableLoad() {
5834   DeclarationScope* receiver_scope = closure_scope()->GetReceiverScope();
5835   Variable* var = receiver_scope->receiver();
5836   // TODO(littledan): implement 'this' hole check elimination.
5837   HoleCheckMode hole_check_mode =
5838       IsDerivedConstructor(receiver_scope->function_kind())
5839           ? HoleCheckMode::kRequired
5840           : HoleCheckMode::kElided;
5841   BuildVariableLoad(var, hole_check_mode);
5842 }
5843 
5844 void BytecodeGenerator::VisitThisExpression(ThisExpression* expr) {
5845   BuildThisVariableLoad();
5846 }
5847 
5848 void BytecodeGenerator::VisitSuperCallReference(SuperCallReference* expr) {
5849   // Handled by VisitCall().
5850   UNREACHABLE();
5851 }
5852 
5853 void BytecodeGenerator::VisitSuperPropertyReference(
5854     SuperPropertyReference* expr) {
5855   builder()->CallRuntime(Runtime::kThrowUnsupportedSuperError);
5856 }
5857 
5858 void BytecodeGenerator::VisitCommaExpression(BinaryOperation* binop) {
5859   VisitForEffect(binop->left());
5860   Visit(binop->right());
5861 }
5862 
5863 void BytecodeGenerator::VisitNaryCommaExpression(NaryOperation* expr) {
5864   DCHECK_GT(expr->subsequent_length(), 0);
5865 
5866   VisitForEffect(expr->first());
5867   for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
5868     VisitForEffect(expr->subsequent(i));
5869   }
5870   Visit(expr->subsequent(expr->subsequent_length() - 1));
5871 }
5872 
5873 void BytecodeGenerator::VisitLogicalTestSubExpression(
5874     Token::Value token, Expression* expr, BytecodeLabels* then_labels,
5875     BytecodeLabels* else_labels, int coverage_slot) {
5876   DCHECK(token == Token::OR || token == Token::AND || token == Token::NULLISH);
5877 
5878   BytecodeLabels test_next(zone());
5879   if (token == Token::OR) {
5880     VisitForTest(expr, then_labels, &test_next, TestFallthrough::kElse);
5881   } else if (token == Token::AND) {
5882     VisitForTest(expr, &test_next, else_labels, TestFallthrough::kThen);
5883   } else {
5884     DCHECK_EQ(Token::NULLISH, token);
5885     VisitForNullishTest(expr, then_labels, &test_next, else_labels);
5886   }
5887   test_next.Bind(builder());
5888 
5889   BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
5890 }
5891 
5892 void BytecodeGenerator::VisitLogicalTest(Token::Value token, Expression* left,
5893                                          Expression* right,
5894                                          int right_coverage_slot) {
5895   DCHECK(token == Token::OR || token == Token::AND || token == Token::NULLISH);
5896   TestResultScope* test_result = execution_result()->AsTest();
5897   BytecodeLabels* then_labels = test_result->then_labels();
5898   BytecodeLabels* else_labels = test_result->else_labels();
5899   TestFallthrough fallthrough = test_result->fallthrough();
5900 
5901   VisitLogicalTestSubExpression(token, left, then_labels, else_labels,
5902                                 right_coverage_slot);
5903   // The last test has the same then, else and fallthrough as the parent test.
5904   VisitForTest(right, then_labels, else_labels, fallthrough);
5905 }
5906 
5907 void BytecodeGenerator::VisitNaryLogicalTest(
5908     Token::Value token, NaryOperation* expr,
5909     const NaryCodeCoverageSlots* coverage_slots) {
5910   DCHECK(token == Token::OR || token == Token::AND || token == Token::NULLISH);
5911   DCHECK_GT(expr->subsequent_length(), 0);
5912 
5913   TestResultScope* test_result = execution_result()->AsTest();
5914   BytecodeLabels* then_labels = test_result->then_labels();
5915   BytecodeLabels* else_labels = test_result->else_labels();
5916   TestFallthrough fallthrough = test_result->fallthrough();
5917 
5918   VisitLogicalTestSubExpression(token, expr->first(), then_labels, else_labels,
5919                                 coverage_slots->GetSlotFor(0));
5920   for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
5921     VisitLogicalTestSubExpression(token, expr->subsequent(i), then_labels,
5922                                   else_labels,
5923                                   coverage_slots->GetSlotFor(i + 1));
5924   }
5925   // The last test has the same then, else and fallthrough as the parent test.
5926   VisitForTest(expr->subsequent(expr->subsequent_length() - 1), then_labels,
5927                else_labels, fallthrough);
5928 }
5929 
5930 bool BytecodeGenerator::VisitLogicalOrSubExpression(Expression* expr,
5931                                                     BytecodeLabels* end_labels,
5932                                                     int coverage_slot) {
5933   if (expr->ToBooleanIsTrue()) {
5934     VisitForAccumulatorValue(expr);
5935     end_labels->Bind(builder());
5936     return true;
5937   } else if (!expr->ToBooleanIsFalse()) {
5938     TypeHint type_hint = VisitForAccumulatorValue(expr);
5939     builder()->JumpIfTrue(ToBooleanModeFromTypeHint(type_hint),
5940                           end_labels->New());
5941   }
5942 
5943   BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
5944 
5945   return false;
5946 }
5947 
5948 bool BytecodeGenerator::VisitLogicalAndSubExpression(Expression* expr,
5949                                                      BytecodeLabels* end_labels,
5950                                                      int coverage_slot) {
5951   if (expr->ToBooleanIsFalse()) {
5952     VisitForAccumulatorValue(expr);
5953     end_labels->Bind(builder());
5954     return true;
5955   } else if (!expr->ToBooleanIsTrue()) {
5956     TypeHint type_hint = VisitForAccumulatorValue(expr);
5957     builder()->JumpIfFalse(ToBooleanModeFromTypeHint(type_hint),
5958                            end_labels->New());
5959   }
5960 
5961   BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
5962 
5963   return false;
5964 }
5965 
5966 bool BytecodeGenerator::VisitNullishSubExpression(Expression* expr,
5967                                                   BytecodeLabels* end_labels,
5968                                                   int coverage_slot) {
5969   if (expr->IsLiteralButNotNullOrUndefined()) {
5970     VisitForAccumulatorValue(expr);
5971     end_labels->Bind(builder());
5972     return true;
5973   } else if (!expr->IsNullOrUndefinedLiteral()) {
5974     VisitForAccumulatorValue(expr);
5975     BytecodeLabel is_null_or_undefined;
5976     builder()
5977         ->JumpIfUndefinedOrNull(&is_null_or_undefined)
5978         .Jump(end_labels->New());
5979     builder()->Bind(&is_null_or_undefined);
5980   }
5981 
5982   BuildIncrementBlockCoverageCounterIfEnabled(coverage_slot);
5983 
5984   return false;
5985 }
5986 
5987 void BytecodeGenerator::VisitLogicalOrExpression(BinaryOperation* binop) {
5988   Expression* left = binop->left();
5989   Expression* right = binop->right();
5990 
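  // Illustrative example: in `a || b` the right-hand side is only evaluated
  // when a is falsy; when a's truthiness is statically known, the code below
  // jumps straight to the corresponding branch without testing it.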
5991   int right_coverage_slot =
5992       AllocateBlockCoverageSlotIfEnabled(binop, SourceRangeKind::kRight);
5993 
5994   if (execution_result()->IsTest()) {
5995     TestResultScope* test_result = execution_result()->AsTest();
5996     if (left->ToBooleanIsTrue()) {
5997       builder()->Jump(test_result->NewThenLabel());
5998     } else if (left->ToBooleanIsFalse() && right->ToBooleanIsFalse()) {
5999       BuildIncrementBlockCoverageCounterIfEnabled(right_coverage_slot);
6000       builder()->Jump(test_result->NewElseLabel());
6001     } else {
6002       VisitLogicalTest(Token::OR, left, right, right_coverage_slot);
6003     }
6004     test_result->SetResultConsumedByTest();
6005   } else {
6006     BytecodeLabels end_labels(zone());
6007     if (VisitLogicalOrSubExpression(left, &end_labels, right_coverage_slot)) {
6008       return;
6009     }
6010     VisitForAccumulatorValue(right);
6011     end_labels.Bind(builder());
6012   }
6013 }
6014 
6015 void BytecodeGenerator::VisitNaryLogicalOrExpression(NaryOperation* expr) {
6016   Expression* first = expr->first();
6017   DCHECK_GT(expr->subsequent_length(), 0);
6018 
6019   NaryCodeCoverageSlots coverage_slots(this, expr);
6020 
6021   if (execution_result()->IsTest()) {
6022     TestResultScope* test_result = execution_result()->AsTest();
6023     if (first->ToBooleanIsTrue()) {
6024       builder()->Jump(test_result->NewThenLabel());
6025     } else {
6026       VisitNaryLogicalTest(Token::OR, expr, &coverage_slots);
6027     }
6028     test_result->SetResultConsumedByTest();
6029   } else {
6030     BytecodeLabels end_labels(zone());
6031     if (VisitLogicalOrSubExpression(first, &end_labels,
6032                                     coverage_slots.GetSlotFor(0))) {
6033       return;
6034     }
6035     for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
6036       if (VisitLogicalOrSubExpression(expr->subsequent(i), &end_labels,
6037                                       coverage_slots.GetSlotFor(i + 1))) {
6038         return;
6039       }
6040     }
6041     // We have to visit the last value even if it's true, because we need its
6042     // actual value.
6043     VisitForAccumulatorValue(expr->subsequent(expr->subsequent_length() - 1));
6044     end_labels.Bind(builder());
6045   }
6046 }
6047 
6048 void BytecodeGenerator::VisitLogicalAndExpression(BinaryOperation* binop) {
6049   Expression* left = binop->left();
6050   Expression* right = binop->right();
6051 
6052   int right_coverage_slot =
6053       AllocateBlockCoverageSlotIfEnabled(binop, SourceRangeKind::kRight);
6054 
6055   if (execution_result()->IsTest()) {
6056     TestResultScope* test_result = execution_result()->AsTest();
6057     if (left->ToBooleanIsFalse()) {
6058       builder()->Jump(test_result->NewElseLabel());
6059     } else if (left->ToBooleanIsTrue() && right->ToBooleanIsTrue()) {
6060       BuildIncrementBlockCoverageCounterIfEnabled(right_coverage_slot);
6061       builder()->Jump(test_result->NewThenLabel());
6062     } else {
6063       VisitLogicalTest(Token::AND, left, right, right_coverage_slot);
6064     }
6065     test_result->SetResultConsumedByTest();
6066   } else {
6067     BytecodeLabels end_labels(zone());
6068     if (VisitLogicalAndSubExpression(left, &end_labels, right_coverage_slot)) {
6069       return;
6070     }
6071     VisitForAccumulatorValue(right);
6072     end_labels.Bind(builder());
6073   }
6074 }
6075 
6076 void BytecodeGenerator::VisitNaryLogicalAndExpression(NaryOperation* expr) {
6077   Expression* first = expr->first();
6078   DCHECK_GT(expr->subsequent_length(), 0);
6079 
6080   NaryCodeCoverageSlots coverage_slots(this, expr);
6081 
6082   if (execution_result()->IsTest()) {
6083     TestResultScope* test_result = execution_result()->AsTest();
6084     if (first->ToBooleanIsFalse()) {
6085       builder()->Jump(test_result->NewElseLabel());
6086     } else {
6087       VisitNaryLogicalTest(Token::AND, expr, &coverage_slots);
6088     }
6089     test_result->SetResultConsumedByTest();
6090   } else {
6091     BytecodeLabels end_labels(zone());
6092     if (VisitLogicalAndSubExpression(first, &end_labels,
6093                                      coverage_slots.GetSlotFor(0))) {
6094       return;
6095     }
6096     for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
6097       if (VisitLogicalAndSubExpression(expr->subsequent(i), &end_labels,
6098                                        coverage_slots.GetSlotFor(i + 1))) {
6099         return;
6100       }
6101     }
6102     // We have to visit the last value even if it's false, because we need its
6103     // actual value.
6104     VisitForAccumulatorValue(expr->subsequent(expr->subsequent_length() - 1));
6105     end_labels.Bind(builder());
6106   }
6107 }
6108 
6109 void BytecodeGenerator::VisitNullishExpression(BinaryOperation* binop) {
6110   Expression* left = binop->left();
6111   Expression* right = binop->right();
6112 
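  // Illustrative example: `a ?? b` evaluates to a unless a is null or
  // undefined, in which case b is evaluated instead; falsy values such as 0 or
  // '' do not trigger the right-hand side.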
6113   int right_coverage_slot =
6114       AllocateBlockCoverageSlotIfEnabled(binop, SourceRangeKind::kRight);
6115 
6116   if (execution_result()->IsTest()) {
6117     TestResultScope* test_result = execution_result()->AsTest();
6118     if (left->IsLiteralButNotNullOrUndefined() && left->ToBooleanIsTrue()) {
6119       builder()->Jump(test_result->NewThenLabel());
6120     } else if (left->IsNullOrUndefinedLiteral() &&
6121                right->IsNullOrUndefinedLiteral()) {
6122       BuildIncrementBlockCoverageCounterIfEnabled(right_coverage_slot);
6123       builder()->Jump(test_result->NewElseLabel());
6124     } else {
6125       VisitLogicalTest(Token::NULLISH, left, right, right_coverage_slot);
6126     }
6127     test_result->SetResultConsumedByTest();
6128   } else {
6129     BytecodeLabels end_labels(zone());
6130     if (VisitNullishSubExpression(left, &end_labels, right_coverage_slot)) {
6131       return;
6132     }
6133     VisitForAccumulatorValue(right);
6134     end_labels.Bind(builder());
6135   }
6136 }
6137 
6138 void BytecodeGenerator::VisitNaryNullishExpression(NaryOperation* expr) {
6139   Expression* first = expr->first();
6140   DCHECK_GT(expr->subsequent_length(), 0);
6141 
6142   NaryCodeCoverageSlots coverage_slots(this, expr);
6143 
6144   if (execution_result()->IsTest()) {
6145     TestResultScope* test_result = execution_result()->AsTest();
6146     if (first->IsLiteralButNotNullOrUndefined() && first->ToBooleanIsTrue()) {
6147       builder()->Jump(test_result->NewThenLabel());
6148     } else {
6149       VisitNaryLogicalTest(Token::NULLISH, expr, &coverage_slots);
6150     }
6151     test_result->SetResultConsumedByTest();
6152   } else {
6153     BytecodeLabels end_labels(zone());
6154     if (VisitNullishSubExpression(first, &end_labels,
6155                                   coverage_slots.GetSlotFor(0))) {
6156       return;
6157     }
6158     for (size_t i = 0; i < expr->subsequent_length() - 1; ++i) {
6159       if (VisitNullishSubExpression(expr->subsequent(i), &end_labels,
6160                                     coverage_slots.GetSlotFor(i + 1))) {
6161         return;
6162       }
6163     }
6164     // We have to visit the last value even if it's nullish, because we need its
6165     // actual value.
6166     VisitForAccumulatorValue(expr->subsequent(expr->subsequent_length() - 1));
6167     end_labels.Bind(builder());
6168   }
6169 }
6170 
6171 void BytecodeGenerator::BuildNewLocalActivationContext() {
6172   ValueResultScope value_execution_result(this);
6173   Scope* scope = closure_scope();
6174   DCHECK_EQ(current_scope(), closure_scope());
6175 
6176   // Create the appropriate context.
6177   DCHECK(scope->is_function_scope() || scope->is_eval_scope());
6178   int slot_count = scope->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
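  // Illustrative note: small contexts use the fast CreateFunctionContext /
  // CreateEvalContext bytecodes below, while contexts with more slots than the
  // builtin supports fall back to the %NewFunctionContext runtime call.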
6179   if (slot_count <= ConstructorBuiltins::MaximumFunctionContextSlots()) {
6180     switch (scope->scope_type()) {
6181       case EVAL_SCOPE:
6182         builder()->CreateEvalContext(scope, slot_count);
6183         break;
6184       case FUNCTION_SCOPE:
6185         builder()->CreateFunctionContext(scope, slot_count);
6186         break;
6187       default:
6188         UNREACHABLE();
6189     }
6190   } else {
6191     Register arg = register_allocator()->NewRegister();
6192     builder()->LoadLiteral(scope).StoreAccumulatorInRegister(arg).CallRuntime(
6193         Runtime::kNewFunctionContext, arg);
6194   }
6195 }
6196 
6197 void BytecodeGenerator::BuildLocalActivationContextInitialization() {
6198   DeclarationScope* scope = closure_scope();
6199 
6200   if (scope->has_this_declaration() && scope->receiver()->IsContextSlot()) {
6201     Variable* variable = scope->receiver();
6202     Register receiver(builder()->Receiver());
6203     // Context variable (at bottom of the context chain).
6204     DCHECK_EQ(0, scope->ContextChainLength(variable->scope()));
6205     builder()->LoadAccumulatorWithRegister(receiver).StoreContextSlot(
6206         execution_context()->reg(), variable->index(), 0);
6207   }
6208 
6209   // Copy parameters into context if necessary.
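  // Illustrative example: in `function f(a) { return () => a; }` the parameter
  // `a` is captured by the closure, so it lives in a context slot and its
  // incoming value is copied there below.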
6210   int num_parameters = scope->num_parameters();
6211   for (int i = 0; i < num_parameters; i++) {
6212     Variable* variable = scope->parameter(i);
6213     if (!variable->IsContextSlot()) continue;
6214 
6215     Register parameter(builder()->Parameter(i));
6216     // Context variable (at bottom of the context chain).
6217     DCHECK_EQ(0, scope->ContextChainLength(variable->scope()));
6218     builder()->LoadAccumulatorWithRegister(parameter).StoreContextSlot(
6219         execution_context()->reg(), variable->index(), 0);
6220   }
6221 }
6222 
6223 void BytecodeGenerator::BuildNewLocalBlockContext(Scope* scope) {
6224   ValueResultScope value_execution_result(this);
6225   DCHECK(scope->is_block_scope());
6226 
6227   builder()->CreateBlockContext(scope);
6228 }
6229 
6230 void BytecodeGenerator::BuildNewLocalWithContext(Scope* scope) {
6231   ValueResultScope value_execution_result(this);
6232 
6233   Register extension_object = register_allocator()->NewRegister();
6234 
6235   builder()->ToObject(extension_object);
6236   builder()->CreateWithContext(extension_object, scope);
6237 }
6238 
6239 void BytecodeGenerator::BuildNewLocalCatchContext(Scope* scope) {
6240   ValueResultScope value_execution_result(this);
6241   DCHECK(scope->catch_variable()->IsContextSlot());
6242 
6243   Register exception = register_allocator()->NewRegister();
6244   builder()->StoreAccumulatorInRegister(exception);
6245   builder()->CreateCatchContext(exception, scope);
6246 }
6247 
6248 void BytecodeGenerator::VisitLiteralAccessor(Register home_object,
6249                                              LiteralProperty* property,
6250                                              Register value_out) {
6251   if (property == nullptr) {
6252     builder()->LoadNull().StoreAccumulatorInRegister(value_out);
6253   } else {
6254     VisitForRegisterValue(property->value(), value_out);
6255     VisitSetHomeObject(value_out, home_object, property);
6256   }
6257 }
6258 
6259 void BytecodeGenerator::VisitSetHomeObject(Register value, Register home_object,
6260                                            LiteralProperty* property) {
6261   Expression* expr = property->value();
6262   if (FunctionLiteral::NeedsHomeObject(expr)) {
6263     FeedbackSlot slot = feedback_spec()->AddStoreICSlot(language_mode());
6264     builder()
6265         ->LoadAccumulatorWithRegister(home_object)
6266         .StoreHomeObjectProperty(value, feedback_index(slot), language_mode());
6267   }
6268 }
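// Example (sketch): in an object literal such as
//   let o = { m() { return super.toString(); } };
// the method m uses super, so its home object is set to o here, letting
// super property loads resolve against o's prototype.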
6269 
6270 void BytecodeGenerator::VisitArgumentsObject(Variable* variable) {
6271   if (variable == nullptr) return;
6272 
6273   DCHECK(variable->IsContextSlot() || variable->IsStackAllocated());
6274 
6275   // Allocate and initialize a new arguments object and assign to the
6276   // {arguments} variable.
6277   builder()->CreateArguments(closure_scope()->GetArgumentsType());
6278   BuildVariableAssignment(variable, Token::ASSIGN, HoleCheckMode::kElided);
6279 }
6280 
6281 void BytecodeGenerator::VisitRestArgumentsArray(Variable* rest) {
6282   if (rest == nullptr) return;
6283 
6284   // Allocate and initialize a new rest parameter and assign to the {rest}
6285   // variable.
6286   builder()->CreateArguments(CreateArgumentsType::kRestParameter);
6287   DCHECK(rest->IsContextSlot() || rest->IsStackAllocated());
6288   BuildVariableAssignment(rest, Token::ASSIGN, HoleCheckMode::kElided);
6289 }
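// Example (sketch): for
//   function f(a, ...rest) {}
// a kRestParameter arguments object (an array holding the trailing
// arguments) is created and assigned to rest.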
6290 
6291 void BytecodeGenerator::VisitThisFunctionVariable(Variable* variable) {
6292   if (variable == nullptr) return;
6293 
6294   // Store the closure we were called with in the given variable.
6295   builder()->LoadAccumulatorWithRegister(Register::function_closure());
6296   BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
6297 }
6298 
6299 void BytecodeGenerator::VisitNewTargetVariable(Variable* variable) {
6300   if (variable == nullptr) return;
6301 
6302   // The generator resume trampoline abuses the new.target register
6303   // to pass in the generator object.  In ordinary calls, new.target is always
6304   // undefined because generator functions are non-constructible, so don't
6305   // assign anything to the new.target variable.
6306   if (IsResumableFunction(info()->literal()->kind())) return;
6307 
6308   if (variable->location() == VariableLocation::LOCAL) {
6309     // The new.target register was already assigned by the entry trampoline.
6310     DCHECK_EQ(incoming_new_target_or_generator_.index(),
6311               GetRegisterForLocalVariable(variable).index());
6312     return;
6313   }
6314 
6315   // Store the new target we were called with in the given variable.
6316   builder()->LoadAccumulatorWithRegister(incoming_new_target_or_generator_);
6317   BuildVariableAssignment(variable, Token::INIT, HoleCheckMode::kElided);
6318 }
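// Example (sketch): for
//   function f() { return new.target; }
// calling f with new observes f via new.target, while a plain call observes
// undefined; the value arrives in the incoming new.target register and is
// copied into the variable above unless the variable is a local whose
// register was already set up by the entry trampoline.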
6319 
6320 void BytecodeGenerator::BuildGeneratorObjectVariableInitialization() {
6321   DCHECK(IsResumableFunction(info()->literal()->kind()));
6322 
6323   Variable* generator_object_var = closure_scope()->generator_object_var();
6324   RegisterAllocationScope register_scope(this);
6325   RegisterList args = register_allocator()->NewRegisterList(2);
6326   Runtime::FunctionId function_id =
6327       ((IsAsyncFunction(info()->literal()->kind()) &&
6328         !IsAsyncGeneratorFunction(info()->literal()->kind())) ||
6329        IsAsyncModule(info()->literal()->kind()))
6330           ? Runtime::kInlineAsyncFunctionEnter
6331           : Runtime::kInlineCreateJSGeneratorObject;
6332   builder()
6333       ->MoveRegister(Register::function_closure(), args[0])
6334       .MoveRegister(builder()->Receiver(), args[1])
6335       .CallRuntime(function_id, args)
6336       .StoreAccumulatorInRegister(generator_object());
6337 
6338   if (generator_object_var->location() == VariableLocation::LOCAL) {
6339     // The generator object register is already set to the variable's local
6340     // register.
6341     DCHECK_EQ(generator_object().index(),
6342               GetRegisterForLocalVariable(generator_object_var).index());
6343   } else {
6344     BuildVariableAssignment(generator_object_var, Token::INIT,
6345                             HoleCheckMode::kElided);
6346   }
6347 }
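// Sketch of the runtime-function choice implied by the condition above:
//   async function f() {}       -> Runtime::kInlineAsyncFunctionEnter
//   module with top-level await -> Runtime::kInlineAsyncFunctionEnter
//   function* g() {}            -> Runtime::kInlineCreateJSGeneratorObject
//   async function* g() {}      -> Runtime::kInlineCreateJSGeneratorObject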
6348 
6349 void BytecodeGenerator::BuildPushUndefinedIntoRegisterList(
6350     RegisterList* reg_list) {
6351   Register reg = register_allocator()->GrowRegisterList(reg_list);
6352   builder()->LoadUndefined().StoreAccumulatorInRegister(reg);
6353 }
6354 
6355 void BytecodeGenerator::BuildLoadPropertyKey(LiteralProperty* property,
6356                                              Register out_reg) {
6357   if (property->key()->IsStringLiteral()) {
6358     builder()
6359         ->LoadLiteral(property->key()->AsLiteral()->AsRawString())
6360         .StoreAccumulatorInRegister(out_reg);
6361   } else {
6362     VisitForAccumulatorValue(property->key());
6363     builder()->ToName(out_reg);
6364   }
6365 }
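// Example (sketch): in an object literal like
//   { "a": 1, [f()]: 2 }
// the string key "a" is loaded as a constant, while the computed key f() is
// evaluated into the accumulator and converted with ToName before use.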
6366 
6367 int BytecodeGenerator::AllocateBlockCoverageSlotIfEnabled(
6368     AstNode* node, SourceRangeKind kind) {
6369   return (block_coverage_builder_ == nullptr)
6370              ? BlockCoverageBuilder::kNoCoverageArraySlot
6371              : block_coverage_builder_->AllocateBlockCoverageSlot(node, kind);
6372 }
6373 
6374 int BytecodeGenerator::AllocateNaryBlockCoverageSlotIfEnabled(
6375     NaryOperation* node, size_t index) {
6376   return (block_coverage_builder_ == nullptr)
6377              ? BlockCoverageBuilder::kNoCoverageArraySlot
6378              : block_coverage_builder_->AllocateNaryBlockCoverageSlot(node,
6379                                                                       index);
6380 }
6381 
6382 void BytecodeGenerator::BuildIncrementBlockCoverageCounterIfEnabled(
6383     AstNode* node, SourceRangeKind kind) {
6384   if (block_coverage_builder_ == nullptr) return;
6385   block_coverage_builder_->IncrementBlockCounter(node, kind);
6386 }
6387 
6388 void BytecodeGenerator::BuildIncrementBlockCoverageCounterIfEnabled(
6389     int coverage_array_slot) {
6390   if (block_coverage_builder_ != nullptr) {
6391     block_coverage_builder_->IncrementBlockCounter(coverage_array_slot);
6392   }
6393 }
6394 
6395 // Visits the expression |expr| and places the result in the accumulator.
6396 BytecodeGenerator::TypeHint BytecodeGenerator::VisitForAccumulatorValue(
6397     Expression* expr) {
6398   ValueResultScope accumulator_scope(this);
6399   Visit(expr);
6400   return accumulator_scope.type_hint();
6401 }
6402 
6403 void BytecodeGenerator::VisitForAccumulatorValueOrTheHole(Expression* expr) {
6404   if (expr == nullptr) {
6405     builder()->LoadTheHole();
6406   } else {
6407     VisitForAccumulatorValue(expr);
6408   }
6409 }
6410 
6411 // Visits the expression |expr| and discards the result.
6412 void BytecodeGenerator::VisitForEffect(Expression* expr) {
6413   EffectResultScope effect_scope(this);
6414   Visit(expr);
6415 }
6416 
6417 // Visits the expression |expr| and returns the register containing
6418 // the expression result.
6419 Register BytecodeGenerator::VisitForRegisterValue(Expression* expr) {
6420   VisitForAccumulatorValue(expr);
6421   Register result = register_allocator()->NewRegister();
6422   builder()->StoreAccumulatorInRegister(result);
6423   return result;
6424 }
6425 
6426 // Visits the expression |expr| and stores the expression result in
6427 // |destination|.
6428 void BytecodeGenerator::VisitForRegisterValue(Expression* expr,
6429                                               Register destination) {
6430   ValueResultScope register_scope(this);
6431   Visit(expr);
6432   builder()->StoreAccumulatorInRegister(destination);
6433 }
6434 
6435 // Visits the expression |expr| and pushes the result into a new register
6436 // added to the end of |reg_list|.
6437 void BytecodeGenerator::VisitAndPushIntoRegisterList(Expression* expr,
6438                                                      RegisterList* reg_list) {
6439   {
6440     ValueResultScope register_scope(this);
6441     Visit(expr);
6442   }
6443   // Grow the register list after visiting the expression to avoid reserving
6444   // the register across the expression evaluation, which could cause memory
6445   // leaks for deep expressions due to dead objects being kept alive by pointers
6446   // in registers.
6447   Register destination = register_allocator()->GrowRegisterList(reg_list);
6448   builder()->StoreAccumulatorInRegister(destination);
6449 }
6450 
6451 void BytecodeGenerator::BuildTest(ToBooleanMode mode,
6452                                   BytecodeLabels* then_labels,
6453                                   BytecodeLabels* else_labels,
6454                                   TestFallthrough fallthrough) {
6455   switch (fallthrough) {
6456     case TestFallthrough::kThen:
6457       builder()->JumpIfFalse(mode, else_labels->New());
6458       break;
6459     case TestFallthrough::kElse:
6460       builder()->JumpIfTrue(mode, then_labels->New());
6461       break;
6462     case TestFallthrough::kNone:
6463       builder()->JumpIfTrue(mode, then_labels->New());
6464       builder()->Jump(else_labels->New());
6465       break;
6466   }
6467 }
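// Sketch of the jump pattern emitted above, depending on which branch the
// caller emits immediately afterwards:
//   kThen fallthrough: JumpIfFalse -> else_labels  (then code follows inline)
//   kElse fallthrough: JumpIfTrue  -> then_labels  (else code follows inline)
//   kNone fallthrough: JumpIfTrue  -> then_labels, then Jump -> else_labels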
6468 
6469 // Visits the expression |expr| for testing its boolean value and jumping to
6470 // the |then| or |else| label depending on its value and short-circuit semantics.
6471 void BytecodeGenerator::VisitForTest(Expression* expr,
6472                                      BytecodeLabels* then_labels,
6473                                      BytecodeLabels* else_labels,
6474                                      TestFallthrough fallthrough) {
6475   bool result_consumed;
6476   TypeHint type_hint;
6477   {
6478     // To make sure that all temporary registers are returned before generating
6479     // jumps below, we ensure that the result scope is deleted before doing so.
6480     // Dead registers might be materialized otherwise.
6481     TestResultScope test_result(this, then_labels, else_labels, fallthrough);
6482     Visit(expr);
6483     result_consumed = test_result.result_consumed_by_test();
6484     type_hint = test_result.type_hint();
6485     // Labels and fallthrough might have been mutated, so update based on
6486     // TestResultScope.
6487     then_labels = test_result.then_labels();
6488     else_labels = test_result.else_labels();
6489     fallthrough = test_result.fallthrough();
6490   }
6491   if (!result_consumed) {
6492     BuildTest(ToBooleanModeFromTypeHint(type_hint), then_labels, else_labels,
6493               fallthrough);
6494   }
6495 }
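// Example (sketch): for a condition such as
//   if (a && b) { ... }
// the logical-and visitor can typically consume the test itself, emitting the
// jumps for a and b directly; in that case result_consumed_by_test() is true
// and no additional BuildTest is emitted here.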
6496 
6497 // Visits the expression |expr| for testing its nullish value and jumping to the
6498 // |then|, |else|, or |test_next| labels depending on its value and short-circuit semantics.
6499 void BytecodeGenerator::VisitForNullishTest(Expression* expr,
6500                                             BytecodeLabels* then_labels,
6501                                             BytecodeLabels* test_next_labels,
6502                                             BytecodeLabels* else_labels) {
6503   // Nullish short circuits on undefined or null, otherwise we fall back to
6504   // BuildTest with no fallthrough.
6505   // TODO(joshualitt): We should do this in a TestResultScope.
6506   TypeHint type_hint = VisitForAccumulatorValue(expr);
6507   ToBooleanMode mode = ToBooleanModeFromTypeHint(type_hint);
6508 
6509   // Skip the nullish shortcircuit if we already have a boolean.
6510   if (mode != ToBooleanMode::kAlreadyBoolean) {
6511     builder()->JumpIfUndefinedOrNull(test_next_labels->New());
6512   }
6513   BuildTest(mode, then_labels, else_labels, TestFallthrough::kNone);
6514 }
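// Example (sketch): for
//   if (a ?? b) { ... }
// a is evaluated here; if it is undefined or null, control jumps to
// |test_next_labels| to evaluate b, otherwise a's boolean value selects the
// |then| or |else| branch.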
6515 
6516 void BytecodeGenerator::VisitInSameTestExecutionScope(Expression* expr) {
6517   DCHECK(execution_result()->IsTest());
6518   {
6519     RegisterAllocationScope reg_scope(this);
6520     Visit(expr);
6521   }
6522   if (!execution_result()->AsTest()->result_consumed_by_test()) {
6523     TestResultScope* result_scope = execution_result()->AsTest();
6524     BuildTest(ToBooleanModeFromTypeHint(result_scope->type_hint()),
6525               result_scope->then_labels(), result_scope->else_labels(),
6526               result_scope->fallthrough());
6527     result_scope->SetResultConsumedByTest();
6528   }
6529 }
6530 
6531 void BytecodeGenerator::VisitInScope(Statement* stmt, Scope* scope) {
6532   DCHECK(scope->declarations()->is_empty());
6533   CurrentScope current_scope(this, scope);
6534   ContextScope context_scope(this, scope);
6535   Visit(stmt);
6536 }
6537 
6538 Register BytecodeGenerator::GetRegisterForLocalVariable(Variable* variable) {
6539   DCHECK_EQ(VariableLocation::LOCAL, variable->location());
6540   return builder()->Local(variable->index());
6541 }
6542 
6543 FunctionKind BytecodeGenerator::function_kind() const {
6544   return info()->literal()->kind();
6545 }
6546 
6547 LanguageMode BytecodeGenerator::language_mode() const {
6548   return current_scope()->language_mode();
6549 }
6550 
6551 Register BytecodeGenerator::generator_object() const {
6552   DCHECK(IsResumableFunction(info()->literal()->kind()));
6553   return incoming_new_target_or_generator_;
6554 }
6555 
6556 FeedbackVectorSpec* BytecodeGenerator::feedback_spec() {
6557   return info()->feedback_vector_spec();
6558 }
6559 
6560 int BytecodeGenerator::feedback_index(FeedbackSlot slot) const {
6561   DCHECK(!slot.IsInvalid());
6562   return FeedbackVector::GetIndex(slot);
6563 }
6564 
6565 FeedbackSlot BytecodeGenerator::GetCachedLoadGlobalICSlot(
6566     TypeofMode typeof_mode, Variable* variable) {
6567   FeedbackSlotCache::SlotKind slot_kind =
6568       typeof_mode == INSIDE_TYPEOF
6569           ? FeedbackSlotCache::SlotKind::kLoadGlobalInsideTypeof
6570           : FeedbackSlotCache::SlotKind::kLoadGlobalNotInsideTypeof;
6571   FeedbackSlot slot(feedback_slot_cache()->Get(slot_kind, variable));
6572   if (!slot.IsInvalid()) {
6573     return slot;
6574   }
6575   slot = feedback_spec()->AddLoadGlobalICSlot(typeof_mode);
6576   feedback_slot_cache()->Put(slot_kind, variable, feedback_index(slot));
6577   return slot;
6578 }
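// Example (sketch): in
//   g + g
// both loads of the global g share a single load-global feedback slot via
// the cache above, while a load inside typeof (e.g. typeof g) is keyed
// separately and gets its own slot.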
6579 
6580 FeedbackSlot BytecodeGenerator::GetCachedStoreGlobalICSlot(
6581     LanguageMode language_mode, Variable* variable) {
6582   FeedbackSlotCache::SlotKind slot_kind =
6583       is_strict(language_mode)
6584           ? FeedbackSlotCache::SlotKind::kStoreGlobalStrict
6585           : FeedbackSlotCache::SlotKind::kStoreGlobalSloppy;
6586   FeedbackSlot slot(feedback_slot_cache()->Get(slot_kind, variable));
6587   if (!slot.IsInvalid()) {
6588     return slot;
6589   }
6590   slot = feedback_spec()->AddStoreGlobalICSlot(language_mode);
6591   feedback_slot_cache()->Put(slot_kind, variable, feedback_index(slot));
6592   return slot;
6593 }
6594 
6595 FeedbackSlot BytecodeGenerator::GetCachedLoadICSlot(const Expression* expr,
6596                                                     const AstRawString* name) {
6597   DCHECK(!expr->IsSuperPropertyReference());
6598   if (!FLAG_ignition_share_named_property_feedback) {
6599     return feedback_spec()->AddLoadICSlot();
6600   }
6601   FeedbackSlotCache::SlotKind slot_kind =
6602       FeedbackSlotCache::SlotKind::kLoadProperty;
6603   if (!expr->IsVariableProxy()) {
6604     return feedback_spec()->AddLoadICSlot();
6605   }
6606   const VariableProxy* proxy = expr->AsVariableProxy();
6607   FeedbackSlot slot(
6608       feedback_slot_cache()->Get(slot_kind, proxy->var()->index(), name));
6609   if (!slot.IsInvalid()) {
6610     return slot;
6611   }
6612   slot = feedback_spec()->AddLoadICSlot();
6613   feedback_slot_cache()->Put(slot_kind, proxy->var()->index(), name,
6614                              feedback_index(slot));
6615   return slot;
6616 }
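// Example (sketch): in
//   o.x + o.x   // with o a variable
// both named loads can share one LoadIC feedback slot, keyed on o's variable
// index and the name x; when FLAG_ignition_share_named_property_feedback is
// disabled each load gets its own slot.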
6617 
6618 FeedbackSlot BytecodeGenerator::GetCachedLoadSuperICSlot(
6619     const AstRawString* name) {
6620   if (!FLAG_ignition_share_named_property_feedback) {
6621     return feedback_spec()->AddLoadICSlot();
6622   }
6623   FeedbackSlotCache::SlotKind slot_kind =
6624       FeedbackSlotCache::SlotKind::kLoadSuperProperty;
6625 
6626   FeedbackSlot slot(feedback_slot_cache()->Get(slot_kind, name));
6627   if (!slot.IsInvalid()) {
6628     return slot;
6629   }
6630   slot = feedback_spec()->AddLoadICSlot();
6631   feedback_slot_cache()->Put(slot_kind, name, feedback_index(slot));
6632   return slot;
6633 }
6634 
6635 FeedbackSlot BytecodeGenerator::GetCachedStoreICSlot(const Expression* expr,
6636                                                      const AstRawString* name) {
6637   if (!FLAG_ignition_share_named_property_feedback) {
6638     return feedback_spec()->AddStoreICSlot(language_mode());
6639   }
6640   FeedbackSlotCache::SlotKind slot_kind =
6641       is_strict(language_mode())
6642           ? FeedbackSlotCache::SlotKind::kStoreNamedStrict
6643           : FeedbackSlotCache::SlotKind::kStoreNamedSloppy;
6644   if (!expr->IsVariableProxy()) {
6645     return feedback_spec()->AddStoreICSlot(language_mode());
6646   }
6647   const VariableProxy* proxy = expr->AsVariableProxy();
6648   FeedbackSlot slot(
6649       feedback_slot_cache()->Get(slot_kind, proxy->var()->index(), name));
6650   if (!slot.IsInvalid()) {
6651     return slot;
6652   }
6653   slot = feedback_spec()->AddStoreICSlot(language_mode());
6654   feedback_slot_cache()->Put(slot_kind, proxy->var()->index(), name,
6655                              feedback_index(slot));
6656   return slot;
6657 }
6658 
6659 int BytecodeGenerator::GetCachedCreateClosureSlot(FunctionLiteral* literal) {
6660   FeedbackSlotCache::SlotKind slot_kind =
6661       FeedbackSlotCache::SlotKind::kClosureFeedbackCell;
6662   int index = feedback_slot_cache()->Get(slot_kind, literal);
6663   if (index != -1) {
6664     return index;
6665   }
6666   index = feedback_spec()->AddCreateClosureSlot();
6667   feedback_slot_cache()->Put(slot_kind, literal, index);
6668   return index;
6669 }
6670 
6671 FeedbackSlot BytecodeGenerator::GetDummyCompareICSlot() {
6672   return dummy_feedback_slot_.Get();
6673 }
6674 
6675 }  // namespace interpreter
6676 }  // namespace internal
6677 }  // namespace v8
6678