// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_FULL_CODEGEN_H_
#define V8_FULL_CODEGEN_H_

#include "src/v8.h"

#include "src/allocation.h"
#include "src/assert-scope.h"
#include "src/ast.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/data-flow.h"
#include "src/globals.h"
#include "src/objects.h"

namespace v8 {
namespace internal {

// Forward declarations.
class JumpPatchSite;

// AST node visitor which can tell whether a given statement will be breakable
// when the code is compiled by the full compiler in the debugger. This means
// that there will be an IC (load/store/call) in the code generated for the
// debugger to piggyback on.
class BreakableStatementChecker: public AstVisitor {
 public:
  explicit BreakableStatementChecker(Zone* zone) : is_breakable_(false) {
    InitializeAstVisitor(zone);
  }

  void Check(Statement* stmt);
  void Check(Expression* stmt);

  bool is_breakable() { return is_breakable_; }

 private:
  // AST node visit functions.
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
  AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT

  bool is_breakable_;

  DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
  DISALLOW_COPY_AND_ASSIGN(BreakableStatementChecker);
};


// -----------------------------------------------------------------------------
// Full code generator.

class FullCodeGenerator: public AstVisitor {
 public:
  enum State {
    NO_REGISTERS,
    TOS_REG
  };

  FullCodeGenerator(MacroAssembler* masm, CompilationInfo* info)
      : masm_(masm),
        info_(info),
        scope_(info->scope()),
        nesting_stack_(NULL),
        loop_depth_(0),
        globals_(NULL),
        context_(NULL),
        bailout_entries_(info->HasDeoptimizationSupport()
                         ? info->function()->ast_node_count() : 0,
                         info->zone()),
        back_edges_(2, info->zone()),
        ic_total_count_(0) {
    ASSERT(!info->IsStub());
    Initialize();
  }

  void Initialize();

  static bool MakeCode(CompilationInfo* info);

  // Encode state and pc-offset as a BitField<type, start, size>.
  // Only use 30 bits because we encode the result as a smi.
  class StateField : public BitField<State, 0, 1> { };
  class PcField    : public BitField<unsigned, 1, 30-1> { };
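  // A minimal sketch (assuming the usual BitField encode()/decode() helpers)
  // of how a bailout entry's pc_and_state word is packed and unpacked:
  //
  //   unsigned packed = StateField::encode(TOS_REG) | PcField::encode(pc);
  //   State state = StateField::decode(packed);      // TOS_REG
  //   unsigned pc_offset = PcField::decode(packed);  // pc
  //
  // The packed value uses at most 30 bits, so it can be stored as a smi.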

  static const char* State2String(State state) {
    switch (state) {
      case NO_REGISTERS: return "NO_REGISTERS";
      case TOS_REG: return "TOS_REG";
    }
    UNREACHABLE();
    return NULL;
  }

  static const int kMaxBackEdgeWeight = 127;

  // Platform-specific code size multiplier.
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
  static const int kCodeSizeMultiplier = 105;
  static const int kBootCodeSizeMultiplier = 100;
#elif V8_TARGET_ARCH_X64
  static const int kCodeSizeMultiplier = 170;
  static const int kBootCodeSizeMultiplier = 140;
#elif V8_TARGET_ARCH_ARM
  static const int kCodeSizeMultiplier = 149;
  static const int kBootCodeSizeMultiplier = 110;
#elif V8_TARGET_ARCH_ARM64
// TODO(all): Copied ARM value. Check this is sensible for ARM64.
  static const int kCodeSizeMultiplier = 149;
  static const int kBootCodeSizeMultiplier = 110;
#elif V8_TARGET_ARCH_MIPS
  static const int kCodeSizeMultiplier = 149;
  static const int kBootCodeSizeMultiplier = 120;
#else
#error Unsupported target architecture.
#endif

 private:
  class Breakable;
  class Iteration;

  class TestContext;

  class NestedStatement BASE_EMBEDDED {
   public:
    explicit NestedStatement(FullCodeGenerator* codegen) : codegen_(codegen) {
      // Link into codegen's nesting stack.
      previous_ = codegen->nesting_stack_;
      codegen->nesting_stack_ = this;
    }
    virtual ~NestedStatement() {
      // Unlink from codegen's nesting stack.
      ASSERT_EQ(this, codegen_->nesting_stack_);
      codegen_->nesting_stack_ = previous_;
    }

    virtual Breakable* AsBreakable() { return NULL; }
    virtual Iteration* AsIteration() { return NULL; }

    virtual bool IsContinueTarget(Statement* target) { return false; }
    virtual bool IsBreakTarget(Statement* target) { return false; }

    // Notify the statement that we are exiting it via break, continue, or
    // return and give it a chance to generate cleanup code.  Return the
    // next outer statement in the nesting stack.  We accumulate in
    // *stack_depth the amount to drop the stack and in *context_length the
    // number of context chain links to unwind as we traverse the nesting
    // stack from an exit to its target.
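    //
    // A minimal sketch (an assumption about the emitter, not a declaration
    // here) of how a break is compiled using this protocol:
    //
    //   int stack_depth = 0;
    //   int context_length = 0;
    //   NestedStatement* current = nesting_stack_;
    //   while (!current->IsBreakTarget(target)) {
    //     current = current->Exit(&stack_depth, &context_length);
    //   }
    //   // Drop |stack_depth| stack slots, unwind |context_length| context
    //   // chain links, then jump to current->AsBreakable()->break_label().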
    virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
      return previous_;
    }

   protected:
    MacroAssembler* masm() { return codegen_->masm(); }

    FullCodeGenerator* codegen_;
    NestedStatement* previous_;

   private:
    DISALLOW_COPY_AND_ASSIGN(NestedStatement);
  };

  // A breakable statement such as a block.
  class Breakable : public NestedStatement {
   public:
    Breakable(FullCodeGenerator* codegen, BreakableStatement* statement)
        : NestedStatement(codegen), statement_(statement) {
    }
    virtual ~Breakable() {}

    virtual Breakable* AsBreakable() { return this; }
    virtual bool IsBreakTarget(Statement* target) {
      return statement() == target;
    }

    BreakableStatement* statement() { return statement_; }
    Label* break_label() { return &break_label_; }

   private:
    BreakableStatement* statement_;
    Label break_label_;
  };

  // An iteration statement such as a while, for, or do loop.
  class Iteration : public Breakable {
   public:
    Iteration(FullCodeGenerator* codegen, IterationStatement* statement)
        : Breakable(codegen, statement) {
    }
    virtual ~Iteration() {}

    virtual Iteration* AsIteration() { return this; }
    virtual bool IsContinueTarget(Statement* target) {
      return statement() == target;
    }

    Label* continue_label() { return &continue_label_; }

   private:
    Label continue_label_;
  };

  // A nested block statement.
  class NestedBlock : public Breakable {
   public:
    NestedBlock(FullCodeGenerator* codegen, Block* block)
        : Breakable(codegen, block) {
    }
    virtual ~NestedBlock() {}

    virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
      if (statement()->AsBlock()->scope() != NULL) {
        ++(*context_length);
      }
      return previous_;
    }
  };

  // The try block of a try/catch statement.
  class TryCatch : public NestedStatement {
   public:
    explicit TryCatch(FullCodeGenerator* codegen) : NestedStatement(codegen) {
    }
    virtual ~TryCatch() {}

    virtual NestedStatement* Exit(int* stack_depth, int* context_length);
  };

  // The try block of a try/finally statement.
  class TryFinally : public NestedStatement {
   public:
    TryFinally(FullCodeGenerator* codegen, Label* finally_entry)
        : NestedStatement(codegen), finally_entry_(finally_entry) {
    }
    virtual ~TryFinally() {}

    virtual NestedStatement* Exit(int* stack_depth, int* context_length);

   private:
    Label* finally_entry_;
  };

  // The finally block of a try/finally statement.
  class Finally : public NestedStatement {
   public:
    static const int kElementCount = 5;

    explicit Finally(FullCodeGenerator* codegen) : NestedStatement(codegen) { }
    virtual ~Finally() {}

    virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
      *stack_depth += kElementCount;
      return previous_;
    }
  };

  // The body of a for/in loop.
  class ForIn : public Iteration {
   public:
    static const int kElementCount = 5;

    ForIn(FullCodeGenerator* codegen, ForInStatement* statement)
        : Iteration(codegen, statement) {
    }
    virtual ~ForIn() {}

    virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
      *stack_depth += kElementCount;
      return previous_;
    }
  };


  // The body of a with or catch.
  class WithOrCatch : public NestedStatement {
   public:
    explicit WithOrCatch(FullCodeGenerator* codegen)
        : NestedStatement(codegen) {
    }
    virtual ~WithOrCatch() {}

    virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
      ++(*context_length);
      return previous_;
    }
  };

  // Type of a member function that generates inline code for a native function.
  typedef void (FullCodeGenerator::*InlineFunctionGenerator)(CallRuntime* expr);

  static const InlineFunctionGenerator kInlineFunctionGenerators[];

  // A platform-specific utility to overwrite the accumulator register
  // with a GC-safe value.
  void ClearAccumulator();

  // Determine whether or not to inline the smi case for the given
  // operation.
  bool ShouldInlineSmiCase(Token::Value op);

  // Helper function to convert a pure value into a test context.  The value
  // is expected on the stack or the accumulator, depending on the platform.
  // See the platform-specific implementation for details.
  void DoTest(Expression* condition,
              Label* if_true,
              Label* if_false,
              Label* fall_through);
  void DoTest(const TestContext* context);

  // Helper function to split control flow and avoid a branch to the
  // fall-through label if it is set up.
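  //
  // A minimal usage sketch (an assumption drawn from the platform back ends,
  // not a declaration): a comparison first asks the current expression
  // context for its branch labels, emits the compare, and then splits
  // control flow so the fall-through case needs no extra jump:
  //
  //   Label materialize_true, materialize_false;
  //   Label* if_true = NULL;
  //   Label* if_false = NULL;
  //   Label* fall_through = NULL;
  //   context()->PrepareTest(&materialize_true, &materialize_false,
  //                          &if_true, &if_false, &fall_through);
  //   // ... emit the comparison, setting the condition flags ...
  //   Split(eq, if_true, if_false, fall_through);  // non-MIPS form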
#if V8_TARGET_ARCH_MIPS
  void Split(Condition cc,
             Register lhs,
             const Operand&  rhs,
             Label* if_true,
             Label* if_false,
             Label* fall_through);
#else  // All non-mips arch.
  void Split(Condition cc,
             Label* if_true,
             Label* if_false,
             Label* fall_through);
#endif  // V8_TARGET_ARCH_MIPS

  // Load the value of a known (PARAMETER, LOCAL, or CONTEXT) variable into
  // a register.  Emits a context chain walk if necessary (so does
  // SetVar) so avoid calling both on the same variable.
  void GetVar(Register destination, Variable* var);

  // Assign to a known (PARAMETER, LOCAL, or CONTEXT) variable.  If it's in
  // the context, the write barrier will be emitted and source, scratch0,
  // scratch1 will be clobbered.  Emits a context chain walk if necessary
  // (so does GetVar) so avoid calling both on the same variable.
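  //
  // A minimal sketch (the variables and scratch registers here are purely
  // illustrative) of copying one known variable into another:
  //
  //   GetVar(result_register(), source_var);
  //   SetVar(destination_var, result_register(), scratch0, scratch1);
  //
  // If destination_var lives in a context, SetVar also emits the write
  // barrier and may clobber all three registers passed to it.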
  void SetVar(Variable* var,
              Register source,
              Register scratch0,
              Register scratch1);

  // An operand used to read/write a stack-allocated (PARAMETER or LOCAL)
  // variable.  Writing does not need the write barrier.
  MemOperand StackOperand(Variable* var);

  // An operand used to read/write a known (PARAMETER, LOCAL, or CONTEXT)
  // variable.  May emit code to traverse the context chain, loading the
  // found context into the scratch register.  Writing to this operand will
  // need the write barrier if location is CONTEXT.
  MemOperand VarOperand(Variable* var, Register scratch);

  void VisitForEffect(Expression* expr) {
    EffectContext context(this);
    Visit(expr);
    PrepareForBailout(expr, NO_REGISTERS);
  }

  void VisitForAccumulatorValue(Expression* expr) {
    AccumulatorValueContext context(this);
    Visit(expr);
    PrepareForBailout(expr, TOS_REG);
  }

  void VisitForStackValue(Expression* expr) {
    StackValueContext context(this);
    Visit(expr);
    PrepareForBailout(expr, NO_REGISTERS);
  }

  void VisitForControl(Expression* expr,
                       Label* if_true,
                       Label* if_false,
                       Label* fall_through) {
    TestContext context(this, expr, if_true, if_false, fall_through);
    Visit(expr);
    // For test contexts, we prepare for bailout before branching, not at
    // the end of the entire expression.  This happens as part of visiting
    // the expression.
  }
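  // A minimal sketch (an assumption, not a declaration) of how these helpers
  // combine when compiling a conditional: the condition is visited purely for
  // control flow, and each branch is then visited in the conditional's own
  // context:
  //
  //   Label true_case, false_case, done;
  //   VisitForControl(expr->condition(), &true_case, &false_case, &true_case);
  //   // Bind true_case, visit the then-expression in the current context,
  //   // jump to done; bind false_case, visit the else-expression; bind done.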
  void VisitInDuplicateContext(Expression* expr);

  void VisitDeclarations(ZoneList<Declaration*>* declarations);
  void DeclareModules(Handle<FixedArray> descriptions);
  void DeclareGlobals(Handle<FixedArray> pairs);
  int DeclareGlobalsFlags();

  // Generate code to allocate all (including nested) modules and contexts.
  // Because of recursive linking and the presence of module alias declarations,
  // this has to be a separate pass _before_ populating or executing any module.
  void AllocateModules(ZoneList<Declaration*>* declarations);

  // Generate code to create an iterator result object.  The "value" property is
  // set to a value popped from the stack, and "done" is set according to the
  // argument.  The result object is left in the result register.
  void EmitCreateIteratorResult(bool done);

  // Try to perform a comparison as a fast inlined literal compare if
  // the operands allow it.  Returns true if the compare operation
  // has been matched and all code generated; false otherwise.
  bool TryLiteralCompare(CompareOperation* compare);

  // Platform-specific code for comparing the type of a value with
  // a given literal string.
  void EmitLiteralCompareTypeof(Expression* expr,
                                Expression* sub_expr,
                                Handle<String> check);

  // Platform-specific code for equality comparison with a nil-like value.
  void EmitLiteralCompareNil(CompareOperation* expr,
                             Expression* sub_expr,
                             NilValue nil);

  // Bailout support.
  void PrepareForBailout(Expression* node, State state);
  void PrepareForBailoutForId(BailoutId id, State state);

  // Feedback slot support. The feedback vector will be cleared during gc and
  // collected by the type-feedback oracle.
  Handle<FixedArray> FeedbackVector() {
    return info_->feedback_vector();
  }
  void EnsureSlotContainsAllocationSite(int slot);

  // Record a call's return site offset, used to rebuild the frame if the
  // called function was inlined at the site.
  void RecordJSReturnSite(Call* call);

  // Prepare for bailout before a test (or compare) and branch.  If
  // should_normalize, then the following comparison will not handle the
  // canonical JS true value so we will insert a (dead) test against true at
  // the actual bailout target from the optimized code. If not
  // should_normalize, the true and false labels are ignored.
  void PrepareForBailoutBeforeSplit(Expression* expr,
                                    bool should_normalize,
                                    Label* if_true,
                                    Label* if_false);

  // If enabled, emit debug code for checking that the current context is
  // neither a with nor a catch context.
  void EmitDebugCheckDeclarationContext(Variable* variable);

  // This is meant to be called at loop back edges, |back_edge_target| is
  // the jump target of the back edge and is used to approximate the amount
  // of code inside the loop.
  void EmitBackEdgeBookkeeping(IterationStatement* stmt,
                               Label* back_edge_target);
  // Record the OSR AST id corresponding to a back edge in the code.
  void RecordBackEdge(BailoutId osr_ast_id);
  // Emit a table of back edge ids, pcs and loop depths into the code stream.
  // Return the offset of the start of the table.
  unsigned EmitBackEdgeTable();

  void EmitProfilingCounterDecrement(int delta);
  void EmitProfilingCounterReset();

  // Emit code to pop values from the stack associated with nested statements
  // like try/catch, try/finally, etc, running the finallies and unwinding the
  // handlers as needed.
  void EmitUnwindBeforeReturn();

  // Platform-specific return sequence
  void EmitReturnSequence();

  // Platform-specific code sequences for calls
  void EmitCall(Call* expr, CallIC::CallType = CallIC::FUNCTION);
  void EmitCallWithLoadIC(Call* expr);
  void EmitKeyedCallWithLoadIC(Call* expr, Expression* key);

  // Platform-specific code for inline runtime calls.
  InlineFunctionGenerator FindInlineFunctionGenerator(Runtime::FunctionId id);

  void EmitInlineRuntimeCall(CallRuntime* expr);

#define EMIT_INLINE_RUNTIME_CALL(name, x, y) \
  void Emit##name(CallRuntime* expr);
  INLINE_FUNCTION_LIST(EMIT_INLINE_RUNTIME_CALL)
#undef EMIT_INLINE_RUNTIME_CALL

  // Platform-specific code for resuming generators.
  void EmitGeneratorResume(Expression *generator,
                           Expression *value,
                           JSGeneratorObject::ResumeMode resume_mode);

  // Platform-specific code for loading variables.
  void EmitLoadGlobalCheckExtensions(Variable* var,
                                     TypeofState typeof_state,
                                     Label* slow);
  MemOperand ContextSlotOperandCheckExtensions(Variable* var, Label* slow);
  void EmitDynamicLookupFastCase(Variable* var,
                                 TypeofState typeof_state,
                                 Label* slow,
                                 Label* done);
  void EmitVariableLoad(VariableProxy* proxy);

  void EmitAccessor(Expression* expression);

  // Expects the arguments and the function already pushed.
  void EmitResolvePossiblyDirectEval(int arg_count);

  // Platform-specific support for allocating a new closure based on
  // the given function info.
  void EmitNewClosure(Handle<SharedFunctionInfo> info, bool pretenure);

  // Platform-specific support for compiling assignments.

  // Load a value from a named property.
  // The receiver is left on the stack by the IC.
  void EmitNamedPropertyLoad(Property* expr);

  // Load a value from a keyed property.
  // The receiver and the key are left on the stack by the IC.
  void EmitKeyedPropertyLoad(Property* expr);

  // Apply the compound assignment operator. Expects the left operand on top
  // of the stack and the right one in the accumulator.
  void EmitBinaryOp(BinaryOperation* expr,
                    Token::Value op,
                    OverwriteMode mode);

  // Helper functions for generating inlined smi code for certain
  // binary operations.
  void EmitInlineSmiBinaryOp(BinaryOperation* expr,
                             Token::Value op,
                             OverwriteMode mode,
                             Expression* left,
                             Expression* right);

  // Assign to the given expression as if via '='. The right-hand-side value
  // is expected in the accumulator.
  void EmitAssignment(Expression* expr);

  // Complete a variable assignment.  The right-hand-side value is expected
  // in the accumulator.
  void EmitVariableAssignment(Variable* var,
                              Token::Value op);

  // Helper functions for EmitVariableAssignment.
  void EmitStoreToStackLocalOrContextSlot(Variable* var,
                                          MemOperand location);
  void EmitCallStoreContextSlot(Handle<String> name, StrictMode strict_mode);

  // Complete a named property assignment.  The receiver is expected on top
  // of the stack and the right-hand-side value in the accumulator.
  void EmitNamedPropertyAssignment(Assignment* expr);

  // Complete a keyed property assignment.  The receiver and key are
  // expected on top of the stack and the right-hand-side value in the
  // accumulator.
  void EmitKeyedPropertyAssignment(Assignment* expr);

  void CallIC(Handle<Code> code,
              TypeFeedbackId id = TypeFeedbackId::None());

  void CallLoadIC(ContextualMode mode,
                  TypeFeedbackId id = TypeFeedbackId::None());
  void CallStoreIC(TypeFeedbackId id = TypeFeedbackId::None());

  void SetFunctionPosition(FunctionLiteral* fun);
  void SetReturnPosition(FunctionLiteral* fun);
  void SetStatementPosition(Statement* stmt);
  void SetExpressionPosition(Expression* expr);
  void SetStatementPosition(int pos);
  void SetSourcePosition(int pos);

  // Non-local control flow support.
  void EnterFinallyBlock();
  void ExitFinallyBlock();

  // Loop nesting counter.
  int loop_depth() { return loop_depth_; }
  void increment_loop_depth() { loop_depth_++; }
  void decrement_loop_depth() {
    ASSERT(loop_depth_ > 0);
    loop_depth_--;
  }

  MacroAssembler* masm() { return masm_; }

  class ExpressionContext;
  const ExpressionContext* context() { return context_; }
  void set_new_context(const ExpressionContext* context) { context_ = context; }

  Handle<Script> script() { return info_->script(); }
  bool is_eval() { return info_->is_eval(); }
  bool is_native() { return info_->is_native(); }
  StrictMode strict_mode() { return function()->strict_mode(); }
  FunctionLiteral* function() { return info_->function(); }
  Scope* scope() { return scope_; }

  static Register result_register();
  static Register context_register();

  // Set fields in the stack frame. Offsets are the frame pointer relative
  // offsets defined in, e.g., StandardFrameConstants.
  void StoreToFrameField(int frame_offset, Register value);

  // Load a value from the current context. Indices are defined as an enum
  // in v8::internal::Context.
  void LoadContextField(Register dst, int context_index);

  // Push the function argument for the runtime functions PushWithContext
  // and PushCatchContext.
  void PushFunctionArgumentForContextAllocation();

  // AST node visit functions.
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
  AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT

  void VisitComma(BinaryOperation* expr);
  void VisitLogicalExpression(BinaryOperation* expr);
  void VisitArithmeticExpression(BinaryOperation* expr);

  void VisitForTypeofValue(Expression* expr);

  void Generate();
  void PopulateDeoptimizationData(Handle<Code> code);
  void PopulateTypeFeedbackInfo(Handle<Code> code);

  Handle<FixedArray> handler_table() { return handler_table_; }

  struct BailoutEntry {
    BailoutId id;
    unsigned pc_and_state;
  };

  struct BackEdgeEntry {
    BailoutId id;
    unsigned pc;
    uint32_t loop_depth;
  };

  class ExpressionContext BASE_EMBEDDED {
   public:
    explicit ExpressionContext(FullCodeGenerator* codegen)
        : masm_(codegen->masm()), old_(codegen->context()), codegen_(codegen) {
      codegen->set_new_context(this);
    }

    virtual ~ExpressionContext() {
      codegen_->set_new_context(old_);
    }

    Isolate* isolate() const { return codegen_->isolate(); }

    // Convert constant control flow (true or false) to the result expected for
    // this expression context.
    virtual void Plug(bool flag) const = 0;

    // Emit code to convert a pure value (in a register, known variable
    // location, as a literal, or on top of the stack) into the result
    // expected according to this expression context.
    virtual void Plug(Register reg) const = 0;
    virtual void Plug(Variable* var) const = 0;
    virtual void Plug(Handle<Object> lit) const = 0;
    virtual void Plug(Heap::RootListIndex index) const = 0;
    virtual void PlugTOS() const = 0;

    // Emit code to convert pure control flow to a pair of unbound labels into
    // the result expected according to this expression context.  The
    // implementation will bind both labels unless it's a TestContext, which
    // won't bind them at this point.
    virtual void Plug(Label* materialize_true,
                      Label* materialize_false) const = 0;

    // Emit code to discard count elements from the top of stack, then convert
    // a pure value into the result expected according to this expression
    // context.
    virtual void DropAndPlug(int count, Register reg) const = 0;

    // Set up branch labels for a test expression.  The three Label** parameters
    // are output parameters.
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const = 0;

    // Returns true if we are evaluating only for side effects (i.e. if the
    // result will be discarded).
    virtual bool IsEffect() const { return false; }

    // Returns true if we are evaluating for the value (in the accumulator or
    // on the stack).
    virtual bool IsAccumulatorValue() const { return false; }
    virtual bool IsStackValue() const { return false; }

    // Returns true if we are branching on the value rather than materializing
    // it.  Only used for asserts.
    virtual bool IsTest() const { return false; }

   protected:
    FullCodeGenerator* codegen() const { return codegen_; }
    MacroAssembler* masm() const { return masm_; }
    MacroAssembler* masm_;

   private:
    const ExpressionContext* old_;
    FullCodeGenerator* codegen_;
  };

  class AccumulatorValueContext : public ExpressionContext {
   public:
    explicit AccumulatorValueContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    virtual void Plug(bool flag) const;
    virtual void Plug(Register reg) const;
    virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    virtual void Plug(Variable* var) const;
    virtual void Plug(Handle<Object> lit) const;
    virtual void Plug(Heap::RootListIndex) const;
    virtual void PlugTOS() const;
    virtual void DropAndPlug(int count, Register reg) const;
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const;
    virtual bool IsAccumulatorValue() const { return true; }
  };

  class StackValueContext : public ExpressionContext {
   public:
    explicit StackValueContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    virtual void Plug(bool flag) const;
    virtual void Plug(Register reg) const;
    virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    virtual void Plug(Variable* var) const;
    virtual void Plug(Handle<Object> lit) const;
    virtual void Plug(Heap::RootListIndex) const;
    virtual void PlugTOS() const;
    virtual void DropAndPlug(int count, Register reg) const;
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const;
    virtual bool IsStackValue() const { return true; }
  };

  class TestContext : public ExpressionContext {
   public:
    TestContext(FullCodeGenerator* codegen,
                Expression* condition,
                Label* true_label,
                Label* false_label,
                Label* fall_through)
        : ExpressionContext(codegen),
          condition_(condition),
          true_label_(true_label),
          false_label_(false_label),
          fall_through_(fall_through) { }

    static const TestContext* cast(const ExpressionContext* context) {
      ASSERT(context->IsTest());
      return reinterpret_cast<const TestContext*>(context);
    }

    Expression* condition() const { return condition_; }
    Label* true_label() const { return true_label_; }
    Label* false_label() const { return false_label_; }
    Label* fall_through() const { return fall_through_; }

    virtual void Plug(bool flag) const;
    virtual void Plug(Register reg) const;
    virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    virtual void Plug(Variable* var) const;
    virtual void Plug(Handle<Object> lit) const;
    virtual void Plug(Heap::RootListIndex) const;
    virtual void PlugTOS() const;
    virtual void DropAndPlug(int count, Register reg) const;
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const;
    virtual bool IsTest() const { return true; }

   private:
    Expression* condition_;
    Label* true_label_;
    Label* false_label_;
    Label* fall_through_;
  };

  class EffectContext : public ExpressionContext {
   public:
    explicit EffectContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    virtual void Plug(bool flag) const;
    virtual void Plug(Register reg) const;
    virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    virtual void Plug(Variable* var) const;
    virtual void Plug(Handle<Object> lit) const;
    virtual void Plug(Heap::RootListIndex) const;
    virtual void PlugTOS() const;
    virtual void DropAndPlug(int count, Register reg) const;
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const;
    virtual bool IsEffect() const { return true; }
  };

  MacroAssembler* masm_;
  CompilationInfo* info_;
  Scope* scope_;
  Label return_label_;
  NestedStatement* nesting_stack_;
  int loop_depth_;
  ZoneList<Handle<Object> >* globals_;
  Handle<FixedArray> modules_;
  int module_index_;
  const ExpressionContext* context_;
  ZoneList<BailoutEntry> bailout_entries_;
  ZoneList<BackEdgeEntry> back_edges_;
  int ic_total_count_;
  Handle<FixedArray> handler_table_;
  Handle<Cell> profiling_counter_;
  bool generate_debug_code_;

  friend class NestedStatement;

  DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
  DISALLOW_COPY_AND_ASSIGN(FullCodeGenerator);
};


// A map from property names to getter/setter pairs allocated in the zone.
class AccessorTable: public TemplateHashMap<Literal,
                                            ObjectLiteral::Accessors,
                                            ZoneAllocationPolicy> {
 public:
  explicit AccessorTable(Zone* zone) :
      TemplateHashMap<Literal, ObjectLiteral::Accessors,
                      ZoneAllocationPolicy>(Literal::Match,
                                            ZoneAllocationPolicy(zone)),
      zone_(zone) { }

  Iterator lookup(Literal* literal) {
    Iterator it = find(literal, true, ZoneAllocationPolicy(zone_));
    if (it->second == NULL) it->second = new(zone_) ObjectLiteral::Accessors();
    return it;
  }

 private:
  Zone* zone_;
};


class BackEdgeTable {
 public:
  BackEdgeTable(Code* code, DisallowHeapAllocation* required) {
    ASSERT(code->kind() == Code::FUNCTION);
    instruction_start_ = code->instruction_start();
    Address table_address = instruction_start_ + code->back_edge_table_offset();
    length_ = Memory::uint32_at(table_address);
    start_ = table_address + kTableLengthSize;
  }

  uint32_t length() { return length_; }

  BailoutId ast_id(uint32_t index) {
    return BailoutId(static_cast<int>(
        Memory::uint32_at(entry_at(index) + kAstIdOffset)));
  }

  uint32_t loop_depth(uint32_t index) {
    return Memory::uint32_at(entry_at(index) + kLoopDepthOffset);
  }

  uint32_t pc_offset(uint32_t index) {
    return Memory::uint32_at(entry_at(index) + kPcOffsetOffset);
  }

  Address pc(uint32_t index) {
    return instruction_start_ + pc_offset(index);
  }

  enum BackEdgeState {
    INTERRUPT,
    ON_STACK_REPLACEMENT,
    OSR_AFTER_STACK_CHECK
  };

  // Patch all interrupts with allowed loop depth in the unoptimized code to
  // unconditionally call replacement_code.
  static void Patch(Isolate* isolate,
                    Code* unoptimized_code);

  // Patch the back edge to the target state, provided the correct callee.
  static void PatchAt(Code* unoptimized_code,
                      Address pc,
                      BackEdgeState target_state,
                      Code* replacement_code);

  // Change all patched back edges back to normal interrupts.
  static void Revert(Isolate* isolate,
                     Code* unoptimized_code);

  // Change a back edge patched for on-stack replacement to perform a
  // stack check first.
  static void AddStackCheck(Handle<Code> code, uint32_t pc_offset);

  // Revert the patch by AddStackCheck.
  static void RemoveStackCheck(Handle<Code> code, uint32_t pc_offset);

  // Return the current patch state of the back edge.
  static BackEdgeState GetBackEdgeState(Isolate* isolate,
                                        Code* unoptimized_code,
                                        Address pc_after);

#ifdef DEBUG
  // Verify that all back edges of a certain loop depth are patched.
  static bool Verify(Isolate* isolate,
                     Code* unoptimized_code,
                     int loop_nesting_level);
#endif  // DEBUG

 private:
  Address entry_at(uint32_t index) {
    ASSERT(index < length_);
    return start_ + index * kEntrySize;
  }

  static const int kTableLengthSize = kIntSize;
  static const int kAstIdOffset = 0 * kIntSize;
  static const int kPcOffsetOffset = 1 * kIntSize;
  static const int kLoopDepthOffset = 2 * kIntSize;
  static const int kEntrySize = 3 * kIntSize;

  Address start_;
  Address instruction_start_;
  uint32_t length_;
};

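// A minimal usage sketch (an assumption about callers such as the patching
// helpers, not part of this interface): walk the back edge table of an
// unoptimized code object and visit every back edge at or below a given
// loop depth.
//
//   DisallowHeapAllocation no_gc;
//   BackEdgeTable back_edges(unoptimized_code, &no_gc);
//   for (uint32_t i = 0; i < back_edges.length(); i++) {
//     if (back_edges.loop_depth(i) <= allowed_loop_depth) {
//       Address pc = back_edges.pc(i);
//       // Patch, revert, or inspect the interrupt at pc here.
//     }
//   }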

} }  // namespace v8::internal

#endif  // V8_FULL_CODEGEN_H_