• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 //     * Redistributions of source code must retain the above copyright
7 //       notice, this list of conditions and the following disclaimer.
8 //     * Redistributions in binary form must reproduce the above
9 //       copyright notice, this list of conditions and the following
10 //       disclaimer in the documentation and/or other materials provided
11 //       with the distribution.
12 //     * Neither the name of Google Inc. nor the names of its
13 //       contributors may be used to endorse or promote products derived
14 //       from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #ifndef V8_FULL_CODEGEN_H_
29 #define V8_FULL_CODEGEN_H_
30 
31 #include "v8.h"
32 
33 #include "allocation.h"
34 #include "assert-scope.h"
35 #include "ast.h"
36 #include "code-stubs.h"
37 #include "codegen.h"
38 #include "compiler.h"
39 #include "data-flow.h"
40 #include "globals.h"
41 #include "objects.h"
42 
43 namespace v8 {
44 namespace internal {
45 
46 // Forward declarations.
47 class JumpPatchSite;
48 
49 // AST node visitor which can tell whether a given statement will be breakable
50 // when the code is compiled by the full compiler in the debugger. This means
51 // that there will be an IC (load/store/call) in the code generated for the
52 // debugger to piggyback on.
class BreakableStatementChecker: public AstVisitor {
 public:
  // Starts out assuming "not breakable"; the Visit* overrides set
  // is_breakable_ when they find a construct the debugger can break on.
  explicit BreakableStatementChecker(Isolate* isolate) : is_breakable_(false) {
    InitializeAstVisitor(isolate);
  }

  // Visit the given statement/expression; the verdict accumulates in
  // is_breakable_ and is queried afterwards via is_breakable().
  void Check(Statement* stmt);
  void Check(Expression* stmt);

  // True if any node checked so far was found to be breakable.
  bool is_breakable() { return is_breakable_; }

 private:
  // AST node visit functions.
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
  AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT

  bool is_breakable_;

  DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
  DISALLOW_COPY_AND_ASSIGN(BreakableStatementChecker);
};
75 
76 
77 // -----------------------------------------------------------------------------
78 // Full code generator.
79 
80 class FullCodeGenerator: public AstVisitor {
81  public:
  // Register state expected at a bailout point: either no registers hold
  // live values, or the result register caches the top-of-stack value
  // (see PrepareForBailout and StateField below).
  enum State {
    NO_REGISTERS,
    TOS_REG
  };
86 
  // Sets up a generator emitting into |masm| for |info|'s function.
  // The bailout and type-feedback lists are pre-sized to the function's
  // AST node count only when deoptimization support is on; otherwise they
  // start empty.  All lists are allocated in |info|'s zone.
  FullCodeGenerator(MacroAssembler* masm, CompilationInfo* info)
      : masm_(masm),
        info_(info),
        scope_(info->scope()),
        nesting_stack_(NULL),
        loop_depth_(0),
        globals_(NULL),
        context_(NULL),
        bailout_entries_(info->HasDeoptimizationSupport()
                         ? info->function()->ast_node_count() : 0,
                         info->zone()),
        back_edges_(2, info->zone()),
        type_feedback_cells_(info->HasDeoptimizationSupport()
                             ? info->function()->ast_node_count() : 0,
                             info->zone()),
        ic_total_count_(0),
        zone_(info->zone()) {
    Initialize();
  }
106 
107   void Initialize();
108 
109   static bool MakeCode(CompilationInfo* info);
110 
  // Encode state and pc-offset as a BitField<type, start, size>.
  // Only use 30 bits because we encode the result as a smi.
  // StateField holds the State enum (1 bit); PcField holds the pc offset
  // in the remaining 29 bits.
  class StateField : public BitField<State, 0, 1> { };
  class PcField    : public BitField<unsigned, 1, 30-1> { };
115 
State2String(State state)116   static const char* State2String(State state) {
117     switch (state) {
118       case NO_REGISTERS: return "NO_REGISTERS";
119       case TOS_REG: return "TOS_REG";
120     }
121     UNREACHABLE();
122     return NULL;
123   }
124 
  // Zone all generator-owned lists are allocated in (same as info's zone).
  Zone* zone() const { return zone_; }

  // Largest weight a single back edge may contribute.
  // NOTE(review): exact use is in the platform EmitBackEdgeBookkeeping
  // implementations — confirm there.
  static const int kMaxBackEdgeWeight = 127;

  // Platform-specific code size multiplier.
#if V8_TARGET_ARCH_IA32
  static const int kCodeSizeMultiplier = 100;
#elif V8_TARGET_ARCH_X64
  static const int kCodeSizeMultiplier = 162;
#elif V8_TARGET_ARCH_ARM
  static const int kCodeSizeMultiplier = 142;
#elif V8_TARGET_ARCH_MIPS
  static const int kCodeSizeMultiplier = 142;
#else
#error Unsupported target architecture.
#endif
141 
142  private:
143   class Breakable;
144   class Iteration;
145 
146   class TestContext;
147 
  // Base of the nesting stack entries; one entry per statement that needs
  // cleanup code when exited nonlocally (break/continue/return).  Entries
  // are stack-allocated in the visit functions, so nesting strictly
  // follows C++ scoping.
  class NestedStatement BASE_EMBEDDED {
   public:
    // Pushes this entry onto the code generator's nesting stack.
    explicit NestedStatement(FullCodeGenerator* codegen) : codegen_(codegen) {
      // Link into codegen's nesting stack.
      previous_ = codegen->nesting_stack_;
      codegen->nesting_stack_ = this;
    }
    // Pops this entry; it must be the current top of the stack.
    virtual ~NestedStatement() {
      // Unlink from codegen's nesting stack.
      ASSERT_EQ(this, codegen_->nesting_stack_);
      codegen_->nesting_stack_ = previous_;
    }

    // Safe downcasts; return NULL unless overridden by a subclass.
    virtual Breakable* AsBreakable() { return NULL; }
    virtual Iteration* AsIteration() { return NULL; }

    // Whether a continue/break naming |target| should stop unwinding here.
    virtual bool IsContinueTarget(Statement* target) { return false; }
    virtual bool IsBreakTarget(Statement* target) { return false; }

    // Notify the statement that we are exiting it via break, continue, or
    // return and give it a chance to generate cleanup code.  Return the
    // next outer statement in the nesting stack.  We accumulate in
    // *stack_depth the amount to drop the stack and in *context_length the
    // number of context chain links to unwind as we traverse the nesting
    // stack from an exit to its target.
    virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
      return previous_;
    }

   protected:
    MacroAssembler* masm() { return codegen_->masm(); }

    FullCodeGenerator* codegen_;
    NestedStatement* previous_;

   private:
    DISALLOW_COPY_AND_ASSIGN(NestedStatement);
  };
186 
  // A breakable statement such as a block.
  class Breakable : public NestedStatement {
   public:
    Breakable(FullCodeGenerator* codegen, BreakableStatement* statement)
        : NestedStatement(codegen), statement_(statement) {
    }
    virtual ~Breakable() {}

    virtual Breakable* AsBreakable() { return this; }
    // A break targets this entry when it names the statement we wrap.
    virtual bool IsBreakTarget(Statement* target) {
      return statement() == target;
    }

    BreakableStatement* statement() { return statement_; }
    // Label bound where a 'break' out of this statement jumps to.
    Label* break_label() { return &break_label_; }

   private:
    BreakableStatement* statement_;
    Label break_label_;
  };
207 
  // An iteration statement such as a while, for, or do loop.
  class Iteration : public Breakable {
   public:
    Iteration(FullCodeGenerator* codegen, IterationStatement* statement)
        : Breakable(codegen, statement) {
    }
    virtual ~Iteration() {}

    virtual Iteration* AsIteration() { return this; }
    // A continue targets this entry when it names the loop we wrap.
    virtual bool IsContinueTarget(Statement* target) {
      return statement() == target;
    }

    // Label bound where a 'continue' of this loop jumps to.
    Label* continue_label() { return &continue_label_; }

   private:
    Label continue_label_;
  };
226 
227   // A nested block statement.
228   class NestedBlock : public Breakable {
229    public:
NestedBlock(FullCodeGenerator * codegen,Block * block)230     NestedBlock(FullCodeGenerator* codegen, Block* block)
231         : Breakable(codegen, block) {
232     }
~NestedBlock()233     virtual ~NestedBlock() {}
234 
Exit(int * stack_depth,int * context_length)235     virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
236       if (statement()->AsBlock()->scope() != NULL) {
237         ++(*context_length);
238       }
239       return previous_;
240     };
241   };
242 
  // The try block of a try/catch statement.
  class TryCatch : public NestedStatement {
   public:
    explicit TryCatch(FullCodeGenerator* codegen) : NestedStatement(codegen) {
    }
    virtual ~TryCatch() {}

    // Defined out of line; emits cleanup for leaving the protected block
    // (handler unwinding — confirm in the implementation).
    virtual NestedStatement* Exit(int* stack_depth, int* context_length);
  };
252 
  // The try block of a try/finally statement.
  class TryFinally : public NestedStatement {
   public:
    TryFinally(FullCodeGenerator* codegen, Label* finally_entry)
        : NestedStatement(codegen), finally_entry_(finally_entry) {
    }
    virtual ~TryFinally() {}

    // Defined out of line; exits across a try block must run the finally
    // code at finally_entry_ before unwinding further.
    virtual NestedStatement* Exit(int* stack_depth, int* context_length);

   private:
    // Entry label of the associated finally block.
    Label* finally_entry_;
  };
266 
  // The finally block of a try/finally statement.
  class Finally : public NestedStatement {
   public:
    // Number of stack slots a live finally block occupies; they are
    // dropped when an exit path crosses it.
    static const int kElementCount = 5;

    explicit Finally(FullCodeGenerator* codegen) : NestedStatement(codegen) { }
    virtual ~Finally() {}

    virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
      *stack_depth += kElementCount;
      return previous_;
    }
  };
280 
  // The body of a for/in loop.
  class ForIn : public Iteration {
   public:
    // Number of stack slots the for/in iteration state occupies; they are
    // dropped when an exit path crosses the loop.
    static const int kElementCount = 5;

    ForIn(FullCodeGenerator* codegen, ForInStatement* statement)
        : Iteration(codegen, statement) {
    }
    virtual ~ForIn() {}

    virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
      *stack_depth += kElementCount;
      return previous_;
    }
  };
296 
297 
  // The body of a with or catch.
  class WithOrCatch : public NestedStatement {
   public:
    explicit WithOrCatch(FullCodeGenerator* codegen)
        : NestedStatement(codegen) {
    }
    virtual ~WithOrCatch() {}

    // With and catch bodies each push one context, so exiting across one
    // unwinds a single context-chain link.
    virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
      ++(*context_length);
      return previous_;
    }
  };
311 
312   // Type of a member function that generates inline code for a native function.
313   typedef void (FullCodeGenerator::*InlineFunctionGenerator)(CallRuntime* expr);
314 
315   static const InlineFunctionGenerator kInlineFunctionGenerators[];
316 
317   // A platform-specific utility to overwrite the accumulator register
318   // with a GC-safe value.
319   void ClearAccumulator();
320 
321   // Determine whether or not to inline the smi case for the given
322   // operation.
323   bool ShouldInlineSmiCase(Token::Value op);
324 
325   // Helper function to convert a pure value into a test context.  The value
326   // is expected on the stack or the accumulator, depending on the platform.
327   // See the platform-specific implementation for details.
328   void DoTest(Expression* condition,
329               Label* if_true,
330               Label* if_false,
331               Label* fall_through);
332   void DoTest(const TestContext* context);
333 
334   // Helper function to split control flow and avoid a branch to the
335   // fall-through label if it is set up.
336 #if V8_TARGET_ARCH_MIPS
337   void Split(Condition cc,
338              Register lhs,
339              const Operand&  rhs,
340              Label* if_true,
341              Label* if_false,
342              Label* fall_through);
343 #else  // All non-mips arch.
344   void Split(Condition cc,
345              Label* if_true,
346              Label* if_false,
347              Label* fall_through);
348 #endif  // V8_TARGET_ARCH_MIPS
349 
350   // Load the value of a known (PARAMETER, LOCAL, or CONTEXT) variable into
351 // a register.  Emits a context chain walk if necessary (so does
352   // SetVar) so avoid calling both on the same variable.
353   void GetVar(Register destination, Variable* var);
354 
355   // Assign to a known (PARAMETER, LOCAL, or CONTEXT) variable.  If it's in
356   // the context, the write barrier will be emitted and source, scratch0,
357 // scratch1 will be clobbered.  Emits a context chain walk if necessary
358   // (so does GetVar) so avoid calling both on the same variable.
359   void SetVar(Variable* var,
360               Register source,
361               Register scratch0,
362               Register scratch1);
363 
364   // An operand used to read/write a stack-allocated (PARAMETER or LOCAL)
365   // variable.  Writing does not need the write barrier.
366   MemOperand StackOperand(Variable* var);
367 
368   // An operand used to read/write a known (PARAMETER, LOCAL, or CONTEXT)
369   // variable.  May emit code to traverse the context chain, loading the
370   // found context into the scratch register.  Writing to this operand will
371   // need the write barrier if location is CONTEXT.
372   MemOperand VarOperand(Variable* var, Register scratch);
373 
  // Visit an expression for its side effects only; the value is discarded.
  // Records a NO_REGISTERS bailout point after the visit.
  void VisitForEffect(Expression* expr) {
    EffectContext context(this);
    Visit(expr);
    PrepareForBailout(expr, NO_REGISTERS);
  }
379 
  // Visit an expression and leave its value in the accumulator register.
  // Records a TOS_REG bailout point after the visit.
  void VisitForAccumulatorValue(Expression* expr) {
    AccumulatorValueContext context(this);
    Visit(expr);
    PrepareForBailout(expr, TOS_REG);
  }
385 
  // Visit an expression and push its value on the stack.
  // Records a NO_REGISTERS bailout point after the visit.
  void VisitForStackValue(Expression* expr) {
    StackValueContext context(this);
    Visit(expr);
    PrepareForBailout(expr, NO_REGISTERS);
  }
391 
  // Visit an expression as a test, branching to |if_true|/|if_false|;
  // |fall_through| is the label that immediately follows the emitted code.
  void VisitForControl(Expression* expr,
                       Label* if_true,
                       Label* if_false,
                       Label* fall_through) {
    TestContext context(this, expr, if_true, if_false, fall_through);
    Visit(expr);
    // For test contexts, we prepare for bailout before branching, not at
    // the end of the entire expression.  This happens as part of visiting
    // the expression.
  }
402 
403   void VisitInDuplicateContext(Expression* expr);
404 
405   void VisitDeclarations(ZoneList<Declaration*>* declarations);
406   void DeclareModules(Handle<FixedArray> descriptions);
407   void DeclareGlobals(Handle<FixedArray> pairs);
408   int DeclareGlobalsFlags();
409 
410   // Generate code to allocate all (including nested) modules and contexts.
411   // Because of recursive linking and the presence of module alias declarations,
412   // this has to be a separate pass _before_ populating or executing any module.
413   void AllocateModules(ZoneList<Declaration*>* declarations);
414 
415   // Generate code to create an iterator result object.  The "value" property is
416   // set to a value popped from the stack, and "done" is set according to the
417   // argument.  The result object is left in the result register.
418   void EmitCreateIteratorResult(bool done);
419 
420   // Try to perform a comparison as a fast inlined literal compare if
421   // the operands allow it.  Returns true if the compare operations
422   // has been matched and all code generated; false otherwise.
423   bool TryLiteralCompare(CompareOperation* compare);
424 
425   // Platform-specific code for comparing the type of a value with
426   // a given literal string.
427   void EmitLiteralCompareTypeof(Expression* expr,
428                                 Expression* sub_expr,
429                                 Handle<String> check);
430 
431   // Platform-specific code for equality comparison with a nil-like value.
432   void EmitLiteralCompareNil(CompareOperation* expr,
433                              Expression* sub_expr,
434                              NilValue nil);
435 
436   // Bailout support.
437   void PrepareForBailout(Expression* node, State state);
438   void PrepareForBailoutForId(BailoutId id, State state);
439 
440   // Cache cell support.  This associates AST ids with global property cells
441   // that will be cleared during GC and collected by the type-feedback oracle.
442   void RecordTypeFeedbackCell(TypeFeedbackId id, Handle<Cell> cell);
443 
444   // Record a call's return site offset, used to rebuild the frame if the
445   // called function was inlined at the site.
446   void RecordJSReturnSite(Call* call);
447 
448   // Prepare for bailout before a test (or compare) and branch.  If
449   // should_normalize, then the following comparison will not handle the
450   // canonical JS true value so we will insert a (dead) test against true at
451   // the actual bailout target from the optimized code. If not
452   // should_normalize, the true and false labels are ignored.
453   void PrepareForBailoutBeforeSplit(Expression* expr,
454                                     bool should_normalize,
455                                     Label* if_true,
456                                     Label* if_false);
457 
458   // If enabled, emit debug code for checking that the current context is
459   // neither a with nor a catch context.
460   void EmitDebugCheckDeclarationContext(Variable* variable);
461 
462   // This is meant to be called at loop back edges, |back_edge_target| is
463   // the jump target of the back edge and is used to approximate the amount
464   // of code inside the loop.
465   void EmitBackEdgeBookkeeping(IterationStatement* stmt,
466                                Label* back_edge_target);
467   // Record the OSR AST id corresponding to a back edge in the code.
468   void RecordBackEdge(BailoutId osr_ast_id);
469   // Emit a table of back edge ids, pcs and loop depths into the code stream.
470   // Return the offset of the start of the table.
471   unsigned EmitBackEdgeTable();
472 
473   void EmitProfilingCounterDecrement(int delta);
474   void EmitProfilingCounterReset();
475 
476   // Emit code to pop values from the stack associated with nested statements
477   // like try/catch, try/finally, etc, running the finallies and unwinding the
478   // handlers as needed.
479   void EmitUnwindBeforeReturn();
480 
481   // Platform-specific return sequence
482   void EmitReturnSequence();
483 
484   // Platform-specific code sequences for calls
485   void EmitCallWithStub(Call* expr, CallFunctionFlags flags);
486   void EmitCallWithIC(Call* expr, Handle<Object> name, RelocInfo::Mode mode);
487   void EmitKeyedCallWithIC(Call* expr, Expression* key);
488 
489   // Platform-specific code for inline runtime calls.
490   InlineFunctionGenerator FindInlineFunctionGenerator(Runtime::FunctionId id);
491 
492   void EmitInlineRuntimeCall(CallRuntime* expr);
493 
494 #define EMIT_INLINE_RUNTIME_CALL(name, x, y) \
495   void Emit##name(CallRuntime* expr);
496   INLINE_FUNCTION_LIST(EMIT_INLINE_RUNTIME_CALL)
497   INLINE_RUNTIME_FUNCTION_LIST(EMIT_INLINE_RUNTIME_CALL)
498 #undef EMIT_INLINE_RUNTIME_CALL
499 
500   // Platform-specific code for resuming generators.
501   void EmitGeneratorResume(Expression *generator,
502                            Expression *value,
503                            JSGeneratorObject::ResumeMode resume_mode);
504 
505   // Platform-specific code for loading variables.
506   void EmitLoadGlobalCheckExtensions(Variable* var,
507                                      TypeofState typeof_state,
508                                      Label* slow);
509   MemOperand ContextSlotOperandCheckExtensions(Variable* var, Label* slow);
510   void EmitDynamicLookupFastCase(Variable* var,
511                                  TypeofState typeof_state,
512                                  Label* slow,
513                                  Label* done);
514   void EmitVariableLoad(VariableProxy* proxy);
515 
516   void EmitAccessor(Expression* expression);
517 
518   // Expects the arguments and the function already pushed.
519   void EmitResolvePossiblyDirectEval(int arg_count);
520 
521   // Platform-specific support for allocating a new closure based on
522   // the given function info.
523   void EmitNewClosure(Handle<SharedFunctionInfo> info, bool pretenure);
524 
525   // Platform-specific support for compiling assignments.
526 
527   // Load a value from a named property.
528   // The receiver is left on the stack by the IC.
529   void EmitNamedPropertyLoad(Property* expr);
530 
531   // Load a value from a keyed property.
532   // The receiver and the key is left on the stack by the IC.
533   void EmitKeyedPropertyLoad(Property* expr);
534 
535   // Apply the compound assignment operator. Expects the left operand on top
536   // of the stack and the right one in the accumulator.
537   void EmitBinaryOp(BinaryOperation* expr,
538                     Token::Value op,
539                     OverwriteMode mode);
540 
541   // Helper functions for generating inlined smi code for certain
542   // binary operations.
543   void EmitInlineSmiBinaryOp(BinaryOperation* expr,
544                              Token::Value op,
545                              OverwriteMode mode,
546                              Expression* left,
547                              Expression* right);
548 
549   // Assign to the given expression as if via '='. The right-hand-side value
550   // is expected in the accumulator.
551   void EmitAssignment(Expression* expr);
552 
553   // Complete a variable assignment.  The right-hand-side value is expected
554   // in the accumulator.
555   void EmitVariableAssignment(Variable* var,
556                               Token::Value op);
557 
558   // Complete a named property assignment.  The receiver is expected on top
559   // of the stack and the right-hand-side value in the accumulator.
560   void EmitNamedPropertyAssignment(Assignment* expr);
561 
562   // Complete a keyed property assignment.  The receiver and key are
563   // expected on top of the stack and the right-hand-side value in the
564   // accumulator.
565   void EmitKeyedPropertyAssignment(Assignment* expr);
566 
567   void CallIC(Handle<Code> code,
568               RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
569               TypeFeedbackId id = TypeFeedbackId::None());
570 
571   void SetFunctionPosition(FunctionLiteral* fun);
572   void SetReturnPosition(FunctionLiteral* fun);
573   void SetStatementPosition(Statement* stmt);
574   void SetExpressionPosition(Expression* expr);
575   void SetStatementPosition(int pos);
576   void SetSourcePosition(int pos);
577 
578   // Non-local control flow support.
579   void EnterFinallyBlock();
580   void ExitFinallyBlock();
581 
  // Loop nesting counter.
  // Current static loop nesting depth; maintained by the loop visitors.
  int loop_depth() { return loop_depth_; }
  void increment_loop_depth() { loop_depth_++; }
  void decrement_loop_depth() {
    ASSERT(loop_depth_ > 0);
    loop_depth_--;
  }
589 
  MacroAssembler* masm() { return masm_; }

  class ExpressionContext;
  // Current expression context; contexts form a stack maintained by
  // ExpressionContext's constructor/destructor.
  const ExpressionContext* context() { return context_; }
  void set_new_context(const ExpressionContext* context) { context_ = context; }

  // Shorthands over the CompilationInfo this generator was built with.
  Handle<Script> script() { return info_->script(); }
  bool is_eval() { return info_->is_eval(); }
  bool is_native() { return info_->is_native(); }
  bool is_classic_mode() { return language_mode() == CLASSIC_MODE; }
  LanguageMode language_mode() { return function()->language_mode(); }
  FunctionLiteral* function() { return info_->function(); }
  Scope* scope() { return scope_; }
603 
604   static Register result_register();
605   static Register context_register();
606 
607   // Set fields in the stack frame. Offsets are the frame pointer relative
608   // offsets defined in, e.g., StandardFrameConstants.
609   void StoreToFrameField(int frame_offset, Register value);
610 
611   // Load a value from the current context. Indices are defined as an enum
612   // in v8::internal::Context.
613   void LoadContextField(Register dst, int context_index);
614 
615   // Push the function argument for the runtime functions PushWithContext
616   // and PushCatchContext.
617   void PushFunctionArgumentForContextAllocation();
618 
619   // AST node visit functions.
620 #define DECLARE_VISIT(type) virtual void Visit##type(type* node);
621   AST_NODE_LIST(DECLARE_VISIT)
622 #undef DECLARE_VISIT
623 
624   void VisitComma(BinaryOperation* expr);
625   void VisitLogicalExpression(BinaryOperation* expr);
626   void VisitArithmeticExpression(BinaryOperation* expr);
627 
628   void VisitForTypeofValue(Expression* expr);
629 
630   void Generate();
631   void PopulateDeoptimizationData(Handle<Code> code);
632   void PopulateTypeFeedbackInfo(Handle<Code> code);
633   void PopulateTypeFeedbackCells(Handle<Code> code);
634 
  Handle<FixedArray> handler_table() { return handler_table_; }

  // One bailout point: AST id plus pc offset and register state packed
  // via PcField/StateField.
  struct BailoutEntry {
    BailoutId id;
    unsigned pc_and_state;
  };

  // One recorded loop back edge: AST id, code offset and loop depth
  // (see RecordBackEdge / EmitBackEdgeTable).
  struct BackEdgeEntry {
    BailoutId id;
    unsigned pc;
    uint32_t loop_depth;
  };

  // Associates an AST id with its global property cell for type feedback
  // (see RecordTypeFeedbackCell).
  struct TypeFeedbackCellEntry {
    TypeFeedbackId ast_id;
    Handle<Cell> cell;
  };
652 
653 
  // Base of the expression-context stack: each context describes where the
  // value of the expression being visited is needed (discarded, in the
  // accumulator, on the stack, or as control flow).
  class ExpressionContext BASE_EMBEDDED {
   public:
    // Pushes this as the generator's current context, remembering the old
    // one; the destructor restores it, so contexts follow C++ scoping.
    explicit ExpressionContext(FullCodeGenerator* codegen)
        : masm_(codegen->masm()), old_(codegen->context()), codegen_(codegen) {
      codegen->set_new_context(this);
    }

    virtual ~ExpressionContext() {
      codegen_->set_new_context(old_);
    }

    Isolate* isolate() const { return codegen_->isolate(); }

    // Convert constant control flow (true or false) to the result expected for
    // this expression context.
    virtual void Plug(bool flag) const = 0;

    // Emit code to convert a pure value (in a register, known variable
    // location, as a literal, or on top of the stack) into the result
    // expected according to this expression context.
    virtual void Plug(Register reg) const = 0;
    virtual void Plug(Variable* var) const = 0;
    virtual void Plug(Handle<Object> lit) const = 0;
    virtual void Plug(Heap::RootListIndex index) const = 0;
    virtual void PlugTOS() const = 0;

    // Emit code to convert pure control flow to a pair of unbound labels into
    // the result expected according to this expression context.  The
    // implementation will bind both labels unless it's a TestContext, which
    // won't bind them at this point.
    virtual void Plug(Label* materialize_true,
                      Label* materialize_false) const = 0;

    // Emit code to discard count elements from the top of stack, then convert
    // a pure value into the result expected according to this expression
    // context.
    virtual void DropAndPlug(int count, Register reg) const = 0;

    // Set up branch labels for a test expression.  The three Label** parameters
    // are output parameters.
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const = 0;

    // Returns true if we are evaluating only for side effects (i.e. if the
    // result will be discarded).
    virtual bool IsEffect() const { return false; }

    // Returns true if we are evaluating for the value (in accu/on stack).
    virtual bool IsAccumulatorValue() const { return false; }
    virtual bool IsStackValue() const { return false; }

    // Returns true if we are branching on the value rather than materializing
    // it.  Only used for asserts.
    virtual bool IsTest() const { return false; }

   protected:
    FullCodeGenerator* codegen() const { return codegen_; }
    MacroAssembler* masm() const { return masm_; }
    MacroAssembler* masm_;

   private:
    const ExpressionContext* old_;
    FullCodeGenerator* codegen_;
  };
721 
  // Context that materializes the expression's value in the accumulator
  // (result) register.
  class AccumulatorValueContext : public ExpressionContext {
   public:
    explicit AccumulatorValueContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    virtual void Plug(bool flag) const;
    virtual void Plug(Register reg) const;
    virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    virtual void Plug(Variable* var) const;
    virtual void Plug(Handle<Object> lit) const;
    virtual void Plug(Heap::RootListIndex) const;
    virtual void PlugTOS() const;
    virtual void DropAndPlug(int count, Register reg) const;
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const;
    virtual bool IsAccumulatorValue() const { return true; }
  };
742 
  // Context that materializes the expression's value on top of the stack.
  class StackValueContext : public ExpressionContext {
   public:
    explicit StackValueContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    virtual void Plug(bool flag) const;
    virtual void Plug(Register reg) const;
    virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    virtual void Plug(Variable* var) const;
    virtual void Plug(Handle<Object> lit) const;
    virtual void Plug(Heap::RootListIndex) const;
    virtual void PlugTOS() const;
    virtual void DropAndPlug(int count, Register reg) const;
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const;
    virtual bool IsStackValue() const { return true; }
  };
763 
764   class TestContext : public ExpressionContext {
765    public:
TestContext(FullCodeGenerator * codegen,Expression * condition,Label * true_label,Label * false_label,Label * fall_through)766     TestContext(FullCodeGenerator* codegen,
767                 Expression* condition,
768                 Label* true_label,
769                 Label* false_label,
770                 Label* fall_through)
771         : ExpressionContext(codegen),
772           condition_(condition),
773           true_label_(true_label),
774           false_label_(false_label),
775           fall_through_(fall_through) { }
776 
cast(const ExpressionContext * context)777     static const TestContext* cast(const ExpressionContext* context) {
778       ASSERT(context->IsTest());
779       return reinterpret_cast<const TestContext*>(context);
780     }
781 
condition()782     Expression* condition() const { return condition_; }
true_label()783     Label* true_label() const { return true_label_; }
false_label()784     Label* false_label() const { return false_label_; }
fall_through()785     Label* fall_through() const { return fall_through_; }
786 
787     virtual void Plug(bool flag) const;
788     virtual void Plug(Register reg) const;
789     virtual void Plug(Label* materialize_true, Label* materialize_false) const;
790     virtual void Plug(Variable* var) const;
791     virtual void Plug(Handle<Object> lit) const;
792     virtual void Plug(Heap::RootListIndex) const;
793     virtual void PlugTOS() const;
794     virtual void DropAndPlug(int count, Register reg) const;
795     virtual void PrepareTest(Label* materialize_true,
796                              Label* materialize_false,
797                              Label** if_true,
798                              Label** if_false,
799                              Label** fall_through) const;
IsTest()800     virtual bool IsTest() const { return true; }
801 
802    private:
803     Expression* condition_;
804     Label* true_label_;
805     Label* false_label_;
806     Label* fall_through_;
807   };
808 
  // Expression context in which the expression's value is ignored: the
  // expression is evaluated only for its side effects (IsEffect() is true).
  class EffectContext : public ExpressionContext {
   public:
    explicit EffectContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    // The Plug overloads discard the given value; implemented per
    // platform (typically emitting little or no code).
    virtual void Plug(bool flag) const;
    virtual void Plug(Register reg) const;
    virtual void Plug(Label* materialize_true, Label* materialize_false) const;
    virtual void Plug(Variable* var) const;
    virtual void Plug(Handle<Object> lit) const;
    virtual void Plug(Heap::RootListIndex) const;
    virtual void PlugTOS() const;
    // Drop |count| stack slots; |reg| is ignored in an effect context.
    virtual void DropAndPlug(int count, Register reg) const;
    // Set up labels for a test of the expression's value.
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const;
    virtual bool IsEffect() const { return true; }
  };
829 
  // Assembler used to emit the unoptimized code.
  MacroAssembler* masm_;
  // Compilation info for the function being compiled.
  CompilationInfo* info_;
  // NOTE(review): presumably the scope of the function being compiled —
  // confirm against the (not visible) constructor.
  Scope* scope_;
  // Label bound at the common return sequence.
  Label return_label_;
  // NOTE(review): looks like the innermost entry of a stack of nested
  // statements (loops, try blocks) used for break/continue/unwind —
  // confirm in NestedStatement (declared as a friend below).
  NestedStatement* nesting_stack_;
  // Current loop nesting depth while generating code.
  int loop_depth_;
  // Accumulated global declarations (zone-allocated list).
  ZoneList<Handle<Object> >* globals_;
  Handle<FixedArray> modules_;
  int module_index_;
  // The currently active expression context (see the context classes above).
  const ExpressionContext* context_;
  // Recorded bailout entries for deoptimization support.
  ZoneList<BailoutEntry> bailout_entries_;
  GrowableBitVector prepared_bailout_ids_;
  // Recorded loop back edges; see BackEdgeTable below for the emitted
  // {AST id, pc offset, loop depth} table format.
  ZoneList<BackEdgeEntry> back_edges_;
  // Recorded type feedback cells for the type-feedback oracle.
  ZoneList<TypeFeedbackCellEntry> type_feedback_cells_;
  // NOTE(review): presumably a running count of inline caches emitted.
  int ic_total_count_;
  Handle<FixedArray> handler_table_;
  // Counter cell used to drive profiler-based optimization decisions.
  Handle<Cell> profiling_counter_;
  bool generate_debug_code_;
  // Zone used for the zone-allocated members above.
  Zone* zone_;

  friend class NestedStatement;

  DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
  DISALLOW_COPY_AND_ASSIGN(FullCodeGenerator);
};
855 
856 
857 // A map from property names to getter/setter pairs allocated in the zone.
858 class AccessorTable: public TemplateHashMap<Literal,
859                                             ObjectLiteral::Accessors,
860                                             ZoneAllocationPolicy> {
861  public:
AccessorTable(Zone * zone)862   explicit AccessorTable(Zone* zone) :
863       TemplateHashMap<Literal, ObjectLiteral::Accessors,
864                       ZoneAllocationPolicy>(Literal::Match,
865                                             ZoneAllocationPolicy(zone)),
866       zone_(zone) { }
867 
lookup(Literal * literal)868   Iterator lookup(Literal* literal) {
869     Iterator it = find(literal, true, ZoneAllocationPolicy(zone_));
870     if (it->second == NULL) it->second = new(zone_) ObjectLiteral::Accessors();
871     return it;
872   }
873 
874  private:
875   Zone* zone_;
876 };
877 
878 
879 class BackEdgeTable {
880  public:
BackEdgeTable(Code * code,DisallowHeapAllocation * required)881   BackEdgeTable(Code* code, DisallowHeapAllocation* required) {
882     ASSERT(code->kind() == Code::FUNCTION);
883     instruction_start_ = code->instruction_start();
884     Address table_address = instruction_start_ + code->back_edge_table_offset();
885     length_ = Memory::uint32_at(table_address);
886     start_ = table_address + kTableLengthSize;
887   }
888 
length()889   uint32_t length() { return length_; }
890 
ast_id(uint32_t index)891   BailoutId ast_id(uint32_t index) {
892     return BailoutId(static_cast<int>(
893         Memory::uint32_at(entry_at(index) + kAstIdOffset)));
894   }
895 
loop_depth(uint32_t index)896   uint32_t loop_depth(uint32_t index) {
897     return Memory::uint32_at(entry_at(index) + kLoopDepthOffset);
898   }
899 
pc_offset(uint32_t index)900   uint32_t pc_offset(uint32_t index) {
901     return Memory::uint32_at(entry_at(index) + kPcOffsetOffset);
902   }
903 
pc(uint32_t index)904   Address pc(uint32_t index) {
905     return instruction_start_ + pc_offset(index);
906   }
907 
908   enum BackEdgeState {
909     INTERRUPT,
910     ON_STACK_REPLACEMENT,
911     OSR_AFTER_STACK_CHECK
912   };
913 
914   // Patch all interrupts with allowed loop depth in the unoptimized code to
915   // unconditionally call replacement_code.
916   static void Patch(Isolate* isolate,
917                     Code* unoptimized_code);
918 
919   // Patch the back edge to the target state, provided the correct callee.
920   static void PatchAt(Code* unoptimized_code,
921                       Address pc,
922                       BackEdgeState target_state,
923                       Code* replacement_code);
924 
925   // Change all patched back edges back to normal interrupts.
926   static void Revert(Isolate* isolate,
927                      Code* unoptimized_code);
928 
929   // Change a back edge patched for on-stack replacement to perform a
930   // stack check first.
931   static void AddStackCheck(CompilationInfo* info);
932 
933   // Remove the stack check, if available, and replace by on-stack replacement.
934   static void RemoveStackCheck(CompilationInfo* info);
935 
936   // Return the current patch state of the back edge.
937   static BackEdgeState GetBackEdgeState(Isolate* isolate,
938                                         Code* unoptimized_code,
939                                         Address pc_after);
940 
941 #ifdef DEBUG
942   // Verify that all back edges of a certain loop depth are patched.
943   static bool Verify(Isolate* isolate,
944                      Code* unoptimized_code,
945                      int loop_nesting_level);
946 #endif  // DEBUG
947 
948  private:
entry_at(uint32_t index)949   Address entry_at(uint32_t index) {
950     ASSERT(index < length_);
951     return start_ + index * kEntrySize;
952   }
953 
954   static const int kTableLengthSize = kIntSize;
955   static const int kAstIdOffset = 0 * kIntSize;
956   static const int kPcOffsetOffset = 1 * kIntSize;
957   static const int kLoopDepthOffset = 2 * kIntSize;
958   static const int kEntrySize = 3 * kIntSize;
959 
960   Address start_;
961   Address instruction_start_;
962   uint32_t length_;
963 };
964 
965 
966 } }  // namespace v8::internal
967 
968 #endif  // V8_FULL_CODEGEN_H_
969