1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/full-codegen/full-codegen.h"
6 
7 #include "src/ast/ast-numbering.h"
8 #include "src/ast/ast.h"
9 #include "src/ast/prettyprinter.h"
10 #include "src/ast/scopeinfo.h"
11 #include "src/ast/scopes.h"
12 #include "src/code-factory.h"
13 #include "src/codegen.h"
14 #include "src/compiler.h"
15 #include "src/debug/debug.h"
16 #include "src/debug/liveedit.h"
17 #include "src/frames-inl.h"
18 #include "src/isolate-inl.h"
19 #include "src/macro-assembler.h"
20 #include "src/snapshot/snapshot.h"
21 #include "src/tracing/trace-event.h"
22 
23 namespace v8 {
24 namespace internal {
25 
26 #define __ ACCESS_MASM(masm())
27 
28 bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
29   Isolate* isolate = info->isolate();
30 
31   RuntimeCallTimerScope runtimeTimer(isolate,
32                                      &RuntimeCallStats::CompileFullCode);
33   TimerEventScope<TimerEventCompileFullCode> timer(info->isolate());
34   TRACE_EVENT0("v8", "V8.CompileFullCode");
35 
36   Handle<Script> script = info->script();
37   if (!script->IsUndefined(isolate) &&
38       !script->source()->IsUndefined(isolate)) {
39     int len = String::cast(script->source())->length();
40     isolate->counters()->total_full_codegen_source_size()->Increment(len);
41   }
42   CodeGenerator::MakeCodePrologue(info, "full");
43   const int kInitialBufferSize = 4 * KB;
44   MacroAssembler masm(info->isolate(), NULL, kInitialBufferSize,
45                       CodeObjectRequired::kYes);
46   if (info->will_serialize()) masm.enable_serializer();
47 
48   LOG_CODE_EVENT(isolate,
49                  CodeStartLinePosInfoRecordEvent(masm.positions_recorder()));
50 
51   FullCodeGenerator cgen(&masm, info);
52   cgen.Generate();
53   if (cgen.HasStackOverflow()) {
54     DCHECK(!isolate->has_pending_exception());
55     return false;
56   }
57   unsigned table_offset = cgen.EmitBackEdgeTable();
58 
59   Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&masm, info);
60   cgen.PopulateDeoptimizationData(code);
61   cgen.PopulateTypeFeedbackInfo(code);
62   cgen.PopulateHandlerTable(code);
63   code->set_has_deoptimization_support(info->HasDeoptimizationSupport());
64   code->set_has_reloc_info_for_serialization(info->will_serialize());
65   code->set_allow_osr_at_loop_nesting_level(0);
66   code->set_profiler_ticks(0);
67   code->set_back_edge_table_offset(table_offset);
68   CodeGenerator::PrintCode(code, info);
69   info->SetCode(code);
70   void* line_info = masm.positions_recorder()->DetachJITHandlerData();
71   LOG_CODE_EVENT(isolate, CodeEndLinePosInfoRecordEvent(
72                               AbstractCode::cast(*code), line_info));
73 
74 #ifdef DEBUG
75   // Check that no context-specific object has been embedded.
76   code->VerifyEmbeddedObjects(Code::kNoContextSpecificPointers);
77 #endif  // DEBUG
78   return true;
79 }
80 
81 
82 unsigned FullCodeGenerator::EmitBackEdgeTable() {
83   // The back edge table consists of a length (in number of entries)
84   // field, and then a sequence of entries.  Each entry is a pair of AST id
85   // and code-relative pc offset.
86   masm()->Align(kPointerSize);
87   unsigned offset = masm()->pc_offset();
88   unsigned length = back_edges_.length();
89   __ dd(length);
90   for (unsigned i = 0; i < length; ++i) {
91     __ dd(back_edges_[i].id.ToInt());
92     __ dd(back_edges_[i].pc);
93     __ dd(back_edges_[i].loop_depth);
94   }
95   return offset;
96 }
97 
98 
99 void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
100   // Fill in the deoptimization information.
101   DCHECK(info_->HasDeoptimizationSupport() || bailout_entries_.is_empty());
102   if (!info_->HasDeoptimizationSupport()) return;
103   int length = bailout_entries_.length();
104   Handle<DeoptimizationOutputData> data =
105       DeoptimizationOutputData::New(isolate(), length, TENURED);
106   for (int i = 0; i < length; i++) {
107     data->SetAstId(i, bailout_entries_[i].id);
108     data->SetPcAndState(i, Smi::FromInt(bailout_entries_[i].pc_and_state));
109   }
110   code->set_deoptimization_data(*data);
111 }
112 
113 
114 void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
115   Handle<TypeFeedbackInfo> info = isolate()->factory()->NewTypeFeedbackInfo();
116   info->set_ic_total_count(ic_total_count_);
117   DCHECK(!isolate()->heap()->InNewSpace(*info));
118   code->set_type_feedback_info(*info);
119 }
120 
121 
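// Builds a HandlerTable from the collected handler ranges and attaches it to
// the code object, marking each range CAUGHT or UNCAUGHT from its try-catch depth.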
122 void FullCodeGenerator::PopulateHandlerTable(Handle<Code> code) {
123   int handler_table_size = static_cast<int>(handler_table_.size());
124   Handle<HandlerTable> table =
125       Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
126           HandlerTable::LengthForRange(handler_table_size), TENURED));
127   for (int i = 0; i < handler_table_size; ++i) {
128     HandlerTable::CatchPrediction prediction =
129         handler_table_[i].try_catch_depth > 0 ? HandlerTable::CAUGHT
130                                               : HandlerTable::UNCAUGHT;
131     table->SetRangeStart(i, handler_table_[i].range_start);
132     table->SetRangeEnd(i, handler_table_[i].range_end);
133     table->SetRangeHandler(i, handler_table_[i].handler_offset, prediction);
134     table->SetRangeData(i, handler_table_[i].stack_depth);
135   }
136   code->set_handler_table(*table);
137 }
138 
139 
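// Reserves a zero-initialized entry in the handler table and returns its index.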
140 int FullCodeGenerator::NewHandlerTableEntry() {
141   int index = static_cast<int>(handler_table_.size());
142   HandlerTableEntry entry = {0, 0, 0, 0, 0};
143   handler_table_.push_back(entry);
144   return index;
145 }
146 
147 
148 bool FullCodeGenerator::MustCreateObjectLiteralWithRuntime(
149     ObjectLiteral* expr) const {
150   return masm()->serializer_enabled() ||
151          !FastCloneShallowObjectStub::IsSupported(expr);
152 }
153 
154 
155 bool FullCodeGenerator::MustCreateArrayLiteralWithRuntime(
156     ArrayLiteral* expr) const {
157   return expr->depth() > 1 ||
158          expr->values()->length() > JSArray::kInitialMaxFastElementArray;
159 }
160 
161 
162 void FullCodeGenerator::Initialize() {
163   InitializeAstVisitor(info_->isolate());
164   masm_->set_emit_debug_code(FLAG_debug_code);
165   masm_->set_predictable_code_size(true);
166 }
167 
168 void FullCodeGenerator::PrepareForBailout(Expression* node,
169                                           BailoutState state) {
170   PrepareForBailoutForId(node->id(), state);
171 }
172 
173 void FullCodeGenerator::CallLoadIC(TypeFeedbackId id) {
174   Handle<Code> ic = CodeFactory::LoadIC(isolate()).code();
175   CallIC(ic, id);
176 }
177 
178 void FullCodeGenerator::CallLoadGlobalIC(TypeofMode typeof_mode,
179                                          TypeFeedbackId id) {
180   Handle<Code> ic = CodeFactory::LoadGlobalIC(isolate(), typeof_mode).code();
181   CallIC(ic, id);
182 }
183 
184 void FullCodeGenerator::CallStoreIC(TypeFeedbackId id) {
185   Handle<Code> ic = CodeFactory::StoreIC(isolate(), language_mode()).code();
186   CallIC(ic, id);
187 }
188 
189 
190 void FullCodeGenerator::RecordJSReturnSite(Call* call) {
191   // We record the offset of the function return so we can rebuild the frame
192   // if the function was inlined, i.e., this is the return address in the
193   // inlined function's frame.
194   //
195   // The bailout state is ignored.  We defensively set it to TOS_REGISTER, which
196   // is the real state of the unoptimized code at the return site.
197   PrepareForBailoutForId(call->ReturnId(), BailoutState::TOS_REGISTER);
198 #ifdef DEBUG
199   // In debug builds, mark the return so we can verify that this function
200   // was called.
201   DCHECK(!call->return_is_recorded_);
202   call->return_is_recorded_ = true;
203 #endif
204 }
205 
206 void FullCodeGenerator::PrepareForBailoutForId(BailoutId id,
207                                                BailoutState state) {
208   // There's no need to prepare this code for bailouts from already optimized
209   // code or code that can't be optimized.
210   if (!info_->HasDeoptimizationSupport()) return;
211   unsigned pc_and_state =
212       BailoutStateField::encode(state) | PcField::encode(masm_->pc_offset());
213   DCHECK(Smi::IsValid(pc_and_state));
214 #ifdef DEBUG
215   for (int i = 0; i < bailout_entries_.length(); ++i) {
216     DCHECK(bailout_entries_[i].id != id);
217   }
218 #endif
219   BailoutEntry entry = { id, pc_and_state };
220   bailout_entries_.Add(entry, zone());
221 }
222 
223 
224 void FullCodeGenerator::RecordBackEdge(BailoutId ast_id) {
225   // The pc offset does not need to be encoded and packed together with a state.
226   DCHECK(masm_->pc_offset() > 0);
227   DCHECK(loop_depth() > 0);
228   uint8_t depth = Min(loop_depth(), Code::kMaxLoopNestingMarker);
229   BackEdgeEntry entry =
230       { ast_id, static_cast<unsigned>(masm_->pc_offset()), depth };
231   back_edges_.Add(entry, zone());
232 }
233 
234 
235 bool FullCodeGenerator::ShouldInlineSmiCase(Token::Value op) {
236   // Inline smi case inside loops, but not division and modulo which
237   // are too complicated and take up too much space.
238   if (op == Token::DIV || op == Token::MOD) return false;
239   if (FLAG_always_inline_smi_code) return true;
240   return loop_depth_ > 0;
241 }
242 
243 
244 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
245   DCHECK(var->IsStackAllocated() || var->IsContextSlot());
246 }
247 
248 
249 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
250   DCHECK(var->IsStackAllocated() || var->IsContextSlot());
251   codegen()->GetVar(result_register(), var);
252 }
253 
254 
255 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
256   DCHECK(var->IsStackAllocated() || var->IsContextSlot());
257   // For simplicity we always test the accumulator register.
258   codegen()->GetVar(result_register(), var);
259   codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
260   codegen()->DoTest(this);
261 }
262 
263 
264 void FullCodeGenerator::EffectContext::Plug(Register reg) const {
265 }
266 
267 
268 void FullCodeGenerator::AccumulatorValueContext::Plug(Register reg) const {
269   __ Move(result_register(), reg);
270 }
271 
272 
273 void FullCodeGenerator::StackValueContext::Plug(Register reg) const {
274   codegen()->PushOperand(reg);
275 }
276 
277 
278 void FullCodeGenerator::TestContext::Plug(Register reg) const {
279   // For simplicity we always test the accumulator register.
280   __ Move(result_register(), reg);
281   codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
282   codegen()->DoTest(this);
283 }
284 
285 
286 void FullCodeGenerator::EffectContext::Plug(bool flag) const {}
287 
288 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
289                                                    Register reg) const {
290   DCHECK(count > 0);
291   codegen()->DropOperands(count);
292 }
293 
294 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
295     int count, Register reg) const {
296   DCHECK(count > 0);
297   codegen()->DropOperands(count);
298   __ Move(result_register(), reg);
299 }
300 
301 void FullCodeGenerator::TestContext::DropAndPlug(int count,
302                                                  Register reg) const {
303   DCHECK(count > 0);
304   // For simplicity we always test the accumulator register.
305   codegen()->DropOperands(count);
306   __ Move(result_register(), reg);
307   codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
308   codegen()->DoTest(this);
309 }
310 
311 void FullCodeGenerator::EffectContext::PlugTOS() const {
312   codegen()->DropOperands(1);
313 }
314 
315 
316 void FullCodeGenerator::AccumulatorValueContext::PlugTOS() const {
317   codegen()->PopOperand(result_register());
318 }
319 
320 
321 void FullCodeGenerator::StackValueContext::PlugTOS() const {
322 }
323 
324 
325 void FullCodeGenerator::TestContext::PlugTOS() const {
326   // For simplicity we always test the accumulator register.
327   codegen()->PopOperand(result_register());
328   codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
329   codegen()->DoTest(this);
330 }
331 
332 
333 void FullCodeGenerator::EffectContext::PrepareTest(
334     Label* materialize_true,
335     Label* materialize_false,
336     Label** if_true,
337     Label** if_false,
338     Label** fall_through) const {
339   // In an effect context, the true and the false case branch to the
340   // same label.
341   *if_true = *if_false = *fall_through = materialize_true;
342 }
343 
344 
345 void FullCodeGenerator::AccumulatorValueContext::PrepareTest(
346     Label* materialize_true,
347     Label* materialize_false,
348     Label** if_true,
349     Label** if_false,
350     Label** fall_through) const {
351   *if_true = *fall_through = materialize_true;
352   *if_false = materialize_false;
353 }
354 
355 
356 void FullCodeGenerator::StackValueContext::PrepareTest(
357     Label* materialize_true,
358     Label* materialize_false,
359     Label** if_true,
360     Label** if_false,
361     Label** fall_through) const {
362   *if_true = *fall_through = materialize_true;
363   *if_false = materialize_false;
364 }
365 
366 
367 void FullCodeGenerator::TestContext::PrepareTest(
368     Label* materialize_true,
369     Label* materialize_false,
370     Label** if_true,
371     Label** if_false,
372     Label** fall_through) const {
373   *if_true = true_label_;
374   *if_false = false_label_;
375   *fall_through = fall_through_;
376 }
377 
378 
379 void FullCodeGenerator::DoTest(const TestContext* context) {
380   DoTest(context->condition(),
381          context->true_label(),
382          context->false_label(),
383          context->fall_through());
384 }
385 
386 
387 void FullCodeGenerator::VisitDeclarations(
388     ZoneList<Declaration*>* declarations) {
389   ZoneList<Handle<Object> >* saved_globals = globals_;
390   ZoneList<Handle<Object> > inner_globals(10, zone());
391   globals_ = &inner_globals;
392 
393   AstVisitor::VisitDeclarations(declarations);
394 
395   if (!globals_->is_empty()) {
396     // Invoke the platform-dependent code generator to do the actual
397     // declaration of the global functions and variables.
398     Handle<FixedArray> array =
399        isolate()->factory()->NewFixedArray(globals_->length(), TENURED);
400     for (int i = 0; i < globals_->length(); ++i)
401       array->set(i, *globals_->at(i));
402     DeclareGlobals(array);
403   }
404 
405   globals_ = saved_globals;
406 }
407 
408 
409 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
410   VariableProxy* proxy = declaration->proxy();
411   Variable* variable = proxy->var();
412   switch (variable->location()) {
413     case VariableLocation::GLOBAL:
414     case VariableLocation::UNALLOCATED:
415       // TODO(rossberg)
416       break;
417 
418     case VariableLocation::CONTEXT: {
419       Comment cmnt(masm_, "[ ImportDeclaration");
420       EmitDebugCheckDeclarationContext(variable);
421       // TODO(rossberg)
422       break;
423     }
424 
425     case VariableLocation::PARAMETER:
426     case VariableLocation::LOCAL:
427     case VariableLocation::LOOKUP:
428       UNREACHABLE();
429   }
430 }
431 
432 
433 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
434   // TODO(rossberg)
435 }
436 
437 
438 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
439   Comment cmnt(masm_, "[ VariableProxy");
440   EmitVariableLoad(expr);
441 }
442 
443 
444 void FullCodeGenerator::VisitSloppyBlockFunctionStatement(
445     SloppyBlockFunctionStatement* declaration) {
446   Visit(declaration->statement());
447 }
448 
449 
450 int FullCodeGenerator::DeclareGlobalsFlags() {
451   return info_->GetDeclareGlobalsFlags();
452 }
453 
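// Operand stack helpers: every push, pop and drop below also updates
// operand_stack_depth_ so the tracked depth stays in sync with the real stack.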
454 void FullCodeGenerator::PushOperand(Handle<Object> handle) {
455   OperandStackDepthIncrement(1);
456   __ Push(handle);
457 }
458 
459 void FullCodeGenerator::PushOperand(Smi* smi) {
460   OperandStackDepthIncrement(1);
461   __ Push(smi);
462 }
463 
464 void FullCodeGenerator::PushOperand(Register reg) {
465   OperandStackDepthIncrement(1);
466   __ Push(reg);
467 }
468 
469 void FullCodeGenerator::PopOperand(Register reg) {
470   OperandStackDepthDecrement(1);
471   __ Pop(reg);
472 }
473 
474 void FullCodeGenerator::DropOperands(int count) {
475   OperandStackDepthDecrement(count);
476   __ Drop(count);
477 }
478 
479 void FullCodeGenerator::CallRuntimeWithOperands(Runtime::FunctionId id) {
480   OperandStackDepthDecrement(Runtime::FunctionForId(id)->nargs);
481   __ CallRuntime(id);
482 }
483 
484 void FullCodeGenerator::OperandStackDepthIncrement(int count) {
485   DCHECK_IMPLIES(!HasStackOverflow(), operand_stack_depth_ >= 0);
486   DCHECK_GE(count, 0);
487   operand_stack_depth_ += count;
488 }
489 
490 void FullCodeGenerator::OperandStackDepthDecrement(int count) {
491   DCHECK_IMPLIES(!HasStackOverflow(), operand_stack_depth_ >= count);
492   DCHECK_GE(count, 0);
493   operand_stack_depth_ -= count;
494 }
495 
496 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
497   // Load the arguments on the stack and call the stub.
498   SubStringStub stub(isolate());
499   ZoneList<Expression*>* args = expr->arguments();
500   DCHECK(args->length() == 3);
501   VisitForStackValue(args->at(0));
502   VisitForStackValue(args->at(1));
503   VisitForStackValue(args->at(2));
504   __ CallStub(&stub);
505   OperandStackDepthDecrement(3);
506   context()->Plug(result_register());
507 }
508 
509 
510 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
511   // Load the arguments on the stack and call the stub.
512   RegExpExecStub stub(isolate());
513   ZoneList<Expression*>* args = expr->arguments();
514   DCHECK(args->length() == 4);
515   VisitForStackValue(args->at(0));
516   VisitForStackValue(args->at(1));
517   VisitForStackValue(args->at(2));
518   VisitForStackValue(args->at(3));
519   __ CallStub(&stub);
520   OperandStackDepthDecrement(4);
521   context()->Plug(result_register());
522 }
523 
524 
525 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
526   // Load the arguments on the stack and call the runtime function.
527   MathPowStub stub(isolate(), MathPowStub::ON_STACK);
528   ZoneList<Expression*>* args = expr->arguments();
529   DCHECK(args->length() == 2);
530   VisitForStackValue(args->at(0));
531   VisitForStackValue(args->at(1));
532   __ CallStub(&stub);
533   OperandStackDepthDecrement(2);
534   context()->Plug(result_register());
535 }
536 
537 
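// Emits an intrinsic as a direct stub call: all but the last argument are
// pushed, the last is evaluated into the accumulator, and the values are then
// moved into the registers demanded by the callable's descriptor.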
538 void FullCodeGenerator::EmitIntrinsicAsStubCall(CallRuntime* expr,
539                                                 const Callable& callable) {
540   ZoneList<Expression*>* args = expr->arguments();
541   int param_count = callable.descriptor().GetRegisterParameterCount();
542   DCHECK_EQ(args->length(), param_count);
543 
544   if (param_count > 0) {
545     int last = param_count - 1;
546     // Put all but last arguments on stack.
547     for (int i = 0; i < last; i++) {
548       VisitForStackValue(args->at(i));
549     }
550     // The last argument goes to the accumulator.
551     VisitForAccumulatorValue(args->at(last));
552 
553     // Move the arguments to the registers, as required by the stub.
554     __ Move(callable.descriptor().GetRegisterParameter(last),
555             result_register());
556     for (int i = last; i-- > 0;) {
557       PopOperand(callable.descriptor().GetRegisterParameter(i));
558     }
559   }
560   __ Call(callable.code(), RelocInfo::CODE_TARGET);
561 
562   // Reload the context register after the call, as e.g. TurboFan code stubs
563   // won't preserve the context register.
564   LoadFromFrameField(StandardFrameConstants::kContextOffset,
565                      context_register());
566   context()->Plug(result_register());
567 }
568 
569 void FullCodeGenerator::EmitNewObject(CallRuntime* expr) {
570   EmitIntrinsicAsStubCall(expr, CodeFactory::FastNewObject(isolate()));
571 }
572 
573 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
574   EmitIntrinsicAsStubCall(expr, CodeFactory::NumberToString(isolate()));
575 }
576 
577 
578 void FullCodeGenerator::EmitToString(CallRuntime* expr) {
579   EmitIntrinsicAsStubCall(expr, CodeFactory::ToString(isolate()));
580 }
581 
582 
583 void FullCodeGenerator::EmitToName(CallRuntime* expr) {
584   EmitIntrinsicAsStubCall(expr, CodeFactory::ToName(isolate()));
585 }
586 
587 
588 void FullCodeGenerator::EmitToLength(CallRuntime* expr) {
589   EmitIntrinsicAsStubCall(expr, CodeFactory::ToLength(isolate()));
590 }
591 
592 void FullCodeGenerator::EmitToInteger(CallRuntime* expr) {
593   EmitIntrinsicAsStubCall(expr, CodeFactory::ToInteger(isolate()));
594 }
595 
596 void FullCodeGenerator::EmitToNumber(CallRuntime* expr) {
597   EmitIntrinsicAsStubCall(expr, CodeFactory::ToNumber(isolate()));
598 }
599 
600 
601 void FullCodeGenerator::EmitToObject(CallRuntime* expr) {
602   EmitIntrinsicAsStubCall(expr, CodeFactory::ToObject(isolate()));
603 }
604 
605 
606 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
607   EmitIntrinsicAsStubCall(expr, CodeFactory::RegExpConstructResult(isolate()));
608 }
609 
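// Pops the two operands into the registers expected by the HasProperty stub,
// calls it, and restores the context register afterwards.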
610 void FullCodeGenerator::EmitHasProperty() {
611   Callable callable = CodeFactory::HasProperty(isolate());
612   PopOperand(callable.descriptor().GetRegisterParameter(1));
613   PopOperand(callable.descriptor().GetRegisterParameter(0));
614   __ Call(callable.code(), RelocInfo::CODE_TARGET);
615   RestoreContext();
616 }
617 
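// Helpers that forward source positions to the assembler's position recorder,
// ignoring RelocInfo::kNoPosition.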
618 void RecordStatementPosition(MacroAssembler* masm, int pos) {
619   if (pos == RelocInfo::kNoPosition) return;
620   masm->positions_recorder()->RecordStatementPosition(pos);
621 }
622 
623 void RecordPosition(MacroAssembler* masm, int pos) {
624   if (pos == RelocInfo::kNoPosition) return;
625   masm->positions_recorder()->RecordPosition(pos);
626 }
627 
628 
629 void FullCodeGenerator::SetFunctionPosition(FunctionLiteral* fun) {
630   RecordPosition(masm_, fun->start_position());
631 }
632 
633 
634 void FullCodeGenerator::SetReturnPosition(FunctionLiteral* fun) {
635   // For default constructors, start position equals end position, and there
636   // is no source code besides the class literal.
637   int pos = std::max(fun->start_position(), fun->end_position() - 1);
638   RecordStatementPosition(masm_, pos);
639   if (info_->is_debug()) {
640     // Always emit a debug break slot before a return.
641     DebugCodegen::GenerateSlot(masm_, RelocInfo::DEBUG_BREAK_SLOT_AT_RETURN);
642   }
643 }
644 
645 
646 void FullCodeGenerator::SetStatementPosition(
647     Statement* stmt, FullCodeGenerator::InsertBreak insert_break) {
648   if (stmt->position() == RelocInfo::kNoPosition) return;
649   RecordStatementPosition(masm_, stmt->position());
650   if (insert_break == INSERT_BREAK && info_->is_debug() &&
651       !stmt->IsDebuggerStatement()) {
652     DebugCodegen::GenerateSlot(masm_, RelocInfo::DEBUG_BREAK_SLOT_AT_POSITION);
653   }
654 }
655 
656 void FullCodeGenerator::SetExpressionPosition(Expression* expr) {
657   if (expr->position() == RelocInfo::kNoPosition) return;
658   RecordPosition(masm_, expr->position());
659 }
660 
661 
662 void FullCodeGenerator::SetExpressionAsStatementPosition(Expression* expr) {
663   if (expr->position() == RelocInfo::kNoPosition) return;
664   RecordStatementPosition(masm_, expr->position());
665   if (info_->is_debug()) {
666     DebugCodegen::GenerateSlot(masm_, RelocInfo::DEBUG_BREAK_SLOT_AT_POSITION);
667   }
668 }
669 
670 void FullCodeGenerator::SetCallPosition(Expression* expr,
671                                         TailCallMode tail_call_mode) {
672   if (expr->position() == RelocInfo::kNoPosition) return;
673   RecordPosition(masm_, expr->position());
674   if (info_->is_debug()) {
675     RelocInfo::Mode mode = (tail_call_mode == TailCallMode::kAllow)
676                                ? RelocInfo::DEBUG_BREAK_SLOT_AT_TAIL_CALL
677                                : RelocInfo::DEBUG_BREAK_SLOT_AT_CALL;
678     // Always emit a debug break slot before a call.
679     DebugCodegen::GenerateSlot(masm_, mode);
680   }
681 }
682 
683 
684 void FullCodeGenerator::VisitSuperPropertyReference(
685     SuperPropertyReference* super) {
686   __ CallRuntime(Runtime::kThrowUnsupportedSuperError);
687   // Even though this expression doesn't produce a value, we need to simulate
688   // plugging of the value context to ensure stack depth tracking is in sync.
689   if (context()->IsStackValue()) OperandStackDepthIncrement(1);
690 }
691 
692 
693 void FullCodeGenerator::VisitSuperCallReference(SuperCallReference* super) {
694   // Handled by VisitCall
695   UNREACHABLE();
696 }
697 
698 
699 void FullCodeGenerator::EmitDebugBreakInOptimizedCode(CallRuntime* expr) {
700   context()->Plug(handle(Smi::FromInt(0), isolate()));
701 }
702 
703 
704 void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
705   switch (expr->op()) {
706     case Token::COMMA:
707       return VisitComma(expr);
708     case Token::OR:
709     case Token::AND:
710       return VisitLogicalExpression(expr);
711     default:
712       return VisitArithmeticExpression(expr);
713   }
714 }
715 
716 
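// Visits an expression in a duplicate of the current expression context
// (effect, accumulator value, stack value, or test).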
717 void FullCodeGenerator::VisitInDuplicateContext(Expression* expr) {
718   if (context()->IsEffect()) {
719     VisitForEffect(expr);
720   } else if (context()->IsAccumulatorValue()) {
721     VisitForAccumulatorValue(expr);
722   } else if (context()->IsStackValue()) {
723     VisitForStackValue(expr);
724   } else if (context()->IsTest()) {
725     const TestContext* test = TestContext::cast(context());
726     VisitForControl(expr, test->true_label(), test->false_label(),
727                     test->fall_through());
728   }
729 }
730 
731 
732 void FullCodeGenerator::VisitComma(BinaryOperation* expr) {
733   Comment cmnt(masm_, "[ Comma");
734   VisitForEffect(expr->left());
735   VisitInDuplicateContext(expr->right());
736 }
737 
738 
739 void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
740   bool is_logical_and = expr->op() == Token::AND;
741   Comment cmnt(masm_, is_logical_and ? "[ Logical AND" : "[ Logical OR");
742   Expression* left = expr->left();
743   Expression* right = expr->right();
744   BailoutId right_id = expr->RightId();
745   Label done;
746 
747   if (context()->IsTest()) {
748     Label eval_right;
749     const TestContext* test = TestContext::cast(context());
750     if (is_logical_and) {
751       VisitForControl(left, &eval_right, test->false_label(), &eval_right);
752     } else {
753       VisitForControl(left, test->true_label(), &eval_right, &eval_right);
754     }
755     PrepareForBailoutForId(right_id, BailoutState::NO_REGISTERS);
756     __ bind(&eval_right);
757 
758   } else if (context()->IsAccumulatorValue()) {
759     VisitForAccumulatorValue(left);
760     // We want the value in the accumulator for the test, and on the stack in
761     // case we need it.
762     __ Push(result_register());
763     Label discard, restore;
764     if (is_logical_and) {
765       DoTest(left, &discard, &restore, &restore);
766     } else {
767       DoTest(left, &restore, &discard, &restore);
768     }
769     __ bind(&restore);
770     __ Pop(result_register());
771     __ jmp(&done);
772     __ bind(&discard);
773     __ Drop(1);
774     PrepareForBailoutForId(right_id, BailoutState::NO_REGISTERS);
775 
776   } else if (context()->IsStackValue()) {
777     VisitForAccumulatorValue(left);
778     // We want the value in the accumulator for the test, and on the stack in
779     // case we need it.
780     __ Push(result_register());
781     Label discard;
782     if (is_logical_and) {
783       DoTest(left, &discard, &done, &discard);
784     } else {
785       DoTest(left, &done, &discard, &discard);
786     }
787     __ bind(&discard);
788     __ Drop(1);
789     PrepareForBailoutForId(right_id, BailoutState::NO_REGISTERS);
790 
791   } else {
792     DCHECK(context()->IsEffect());
793     Label eval_right;
794     if (is_logical_and) {
795       VisitForControl(left, &eval_right, &done, &eval_right);
796     } else {
797       VisitForControl(left, &done, &eval_right, &eval_right);
798     }
799     PrepareForBailoutForId(right_id, BailoutState::NO_REGISTERS);
800     __ bind(&eval_right);
801   }
802 
803   VisitInDuplicateContext(right);
804   __ bind(&done);
805 }
806 
807 
808 void FullCodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
809   Token::Value op = expr->op();
810   Comment cmnt(masm_, "[ ArithmeticExpression");
811   Expression* left = expr->left();
812   Expression* right = expr->right();
813 
814   VisitForStackValue(left);
815   VisitForAccumulatorValue(right);
816 
817   SetExpressionPosition(expr);
818   if (ShouldInlineSmiCase(op)) {
819     EmitInlineSmiBinaryOp(expr, op, left, right);
820   } else {
821     EmitBinaryOp(expr, op);
822   }
823 }
824 
825 void FullCodeGenerator::VisitProperty(Property* expr) {
826   Comment cmnt(masm_, "[ Property");
827   SetExpressionPosition(expr);
828 
829   Expression* key = expr->key();
830 
831   if (key->IsPropertyName()) {
832     if (!expr->IsSuperAccess()) {
833       VisitForAccumulatorValue(expr->obj());
834       __ Move(LoadDescriptor::ReceiverRegister(), result_register());
835       EmitNamedPropertyLoad(expr);
836     } else {
837       VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
838       VisitForStackValue(
839           expr->obj()->AsSuperPropertyReference()->home_object());
840       EmitNamedSuperPropertyLoad(expr);
841     }
842   } else {
843     if (!expr->IsSuperAccess()) {
844       VisitForStackValue(expr->obj());
845       VisitForAccumulatorValue(expr->key());
846       __ Move(LoadDescriptor::NameRegister(), result_register());
847       PopOperand(LoadDescriptor::ReceiverRegister());
848       EmitKeyedPropertyLoad(expr);
849     } else {
850       VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
851       VisitForStackValue(
852           expr->obj()->AsSuperPropertyReference()->home_object());
853       VisitForStackValue(expr->key());
854       EmitKeyedSuperPropertyLoad(expr);
855     }
856   }
857   PrepareForBailoutForId(expr->LoadId(), BailoutState::TOS_REGISTER);
858   context()->Plug(result_register());
859 }
860 
861 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
862   VariableProxy* proxy = expr->AsVariableProxy();
863   DCHECK(!context()->IsEffect());
864   DCHECK(!context()->IsTest());
865 
866   if (proxy != NULL && (proxy->var()->IsUnallocatedOrGlobalSlot() ||
867                         proxy->var()->IsLookupSlot())) {
868     EmitVariableLoad(proxy, INSIDE_TYPEOF);
869     PrepareForBailout(proxy, BailoutState::TOS_REGISTER);
870   } else {
871     // This expression cannot throw a reference error at the top level.
872     VisitInDuplicateContext(expr);
873   }
874 }
875 
876 
877 void FullCodeGenerator::VisitBlock(Block* stmt) {
878   Comment cmnt(masm_, "[ Block");
879   NestedBlock nested_block(this, stmt);
880 
881   {
882     EnterBlockScopeIfNeeded block_scope_state(
883         this, stmt->scope(), stmt->EntryId(), stmt->DeclsId(), stmt->ExitId());
884     VisitStatements(stmt->statements());
885     __ bind(nested_block.break_label());
886   }
887 }
888 
889 
890 void FullCodeGenerator::VisitDoExpression(DoExpression* expr) {
891   Comment cmnt(masm_, "[ Do Expression");
892   SetExpressionPosition(expr);
893   VisitBlock(expr->block());
894   EmitVariableLoad(expr->result());
895 }
896 
897 
898 void FullCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
899   Comment cmnt(masm_, "[ ExpressionStatement");
900   SetStatementPosition(stmt);
901   VisitForEffect(stmt->expression());
902 }
903 
904 
905 void FullCodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
906   Comment cmnt(masm_, "[ EmptyStatement");
907 }
908 
909 
910 void FullCodeGenerator::VisitIfStatement(IfStatement* stmt) {
911   Comment cmnt(masm_, "[ IfStatement");
912   SetStatementPosition(stmt);
913   Label then_part, else_part, done;
914 
915   if (stmt->HasElseStatement()) {
916     VisitForControl(stmt->condition(), &then_part, &else_part, &then_part);
917     PrepareForBailoutForId(stmt->ThenId(), BailoutState::NO_REGISTERS);
918     __ bind(&then_part);
919     Visit(stmt->then_statement());
920     __ jmp(&done);
921 
922     PrepareForBailoutForId(stmt->ElseId(), BailoutState::NO_REGISTERS);
923     __ bind(&else_part);
924     Visit(stmt->else_statement());
925   } else {
926     VisitForControl(stmt->condition(), &then_part, &done, &then_part);
927     PrepareForBailoutForId(stmt->ThenId(), BailoutState::NO_REGISTERS);
928     __ bind(&then_part);
929     Visit(stmt->then_statement());
930 
931     PrepareForBailoutForId(stmt->ElseId(), BailoutState::NO_REGISTERS);
932   }
933   __ bind(&done);
934   PrepareForBailoutForId(stmt->IfId(), BailoutState::NO_REGISTERS);
935 }
936 
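// Unwinds the nesting stack up to the continue target, dropping excess operands
// and restoring contexts on the way; a continue that crosses a try-finally is
// recorded as a deferred command and resolved after the finally block.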
937 void FullCodeGenerator::EmitContinue(Statement* target) {
938   NestedStatement* current = nesting_stack_;
939   int context_length = 0;
940   // When continuing, we clobber the unpredictable value in the accumulator
941   // with one that's safe for GC.  If we hit an exit from the try block of
942   // try...finally on our way out, we will unconditionally preserve the
943   // accumulator on the stack.
944   ClearAccumulator();
945   while (!current->IsContinueTarget(target)) {
946     if (current->IsTryFinally()) {
947       Comment cmnt(masm(), "[ Deferred continue through finally");
948       current->Exit(&context_length);
949       DCHECK_EQ(-1, context_length);
950       current->AsTryFinally()->deferred_commands()->RecordContinue(target);
951       return;
952     }
953     current = current->Exit(&context_length);
954   }
955   int stack_depth = current->GetStackDepthAtTarget();
956   int stack_drop = operand_stack_depth_ - stack_depth;
957   DCHECK_GE(stack_drop, 0);
958   __ Drop(stack_drop);
959   if (context_length > 0) {
960     while (context_length > 0) {
961       LoadContextField(context_register(), Context::PREVIOUS_INDEX);
962       --context_length;
963     }
964     StoreToFrameField(StandardFrameConstants::kContextOffset,
965                       context_register());
966   }
967 
968   __ jmp(current->AsIteration()->continue_label());
969 }
970 
971 void FullCodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
972   Comment cmnt(masm_, "[ ContinueStatement");
973   SetStatementPosition(stmt);
974   EmitContinue(stmt->target());
975 }
976 
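// Same unwinding as EmitContinue above, but targets the break label of the
// enclosing breakable statement.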
977 void FullCodeGenerator::EmitBreak(Statement* target) {
978   NestedStatement* current = nesting_stack_;
979   int context_length = 0;
980   // When breaking, we clobber the unpredictable value in the accumulator
981   // with one that's safe for GC.  If we hit an exit from the try block of
982   // try...finally on our way out, we will unconditionally preserve the
983   // accumulator on the stack.
984   ClearAccumulator();
985   while (!current->IsBreakTarget(target)) {
986     if (current->IsTryFinally()) {
987       Comment cmnt(masm(), "[ Deferred break through finally");
988       current->Exit(&context_length);
989       DCHECK_EQ(-1, context_length);
990       current->AsTryFinally()->deferred_commands()->RecordBreak(target);
991       return;
992     }
993     current = current->Exit(&context_length);
994   }
995   int stack_depth = current->GetStackDepthAtTarget();
996   int stack_drop = operand_stack_depth_ - stack_depth;
997   DCHECK_GE(stack_drop, 0);
998   __ Drop(stack_drop);
999   if (context_length > 0) {
1000     while (context_length > 0) {
1001       LoadContextField(context_register(), Context::PREVIOUS_INDEX);
1002       --context_length;
1003     }
1004     StoreToFrameField(StandardFrameConstants::kContextOffset,
1005                       context_register());
1006   }
1007 
1008   __ jmp(current->AsBreakable()->break_label());
1009 }
1010 
1011 void FullCodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
1012   Comment cmnt(masm_, "[ BreakStatement");
1013   SetStatementPosition(stmt);
1014   EmitBreak(stmt->target());
1015 }
1016 
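// Unwinds the entire nesting stack before emitting the return sequence; a
// return from inside a try-finally is likewise deferred to the finally dispatch.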
1017 void FullCodeGenerator::EmitUnwindAndReturn() {
1018   NestedStatement* current = nesting_stack_;
1019   int context_length = 0;
1020   while (current != NULL) {
1021     if (current->IsTryFinally()) {
1022       Comment cmnt(masm(), "[ Deferred return through finally");
1023       current->Exit(&context_length);
1024       DCHECK_EQ(-1, context_length);
1025       current->AsTryFinally()->deferred_commands()->RecordReturn();
1026       return;
1027     }
1028     current = current->Exit(&context_length);
1029   }
1030   EmitReturnSequence();
1031 }
1032 
1033 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1034                                        bool pretenure) {
1035   // If we're running with the --always-opt or the --prepare-always-opt
1036   // flag, we need to use the runtime function so that the new function
1037   // we are creating here gets a chance to have its code optimized and
1038   // doesn't just get a copy of the existing unoptimized code.
1039   if (!FLAG_always_opt && !FLAG_prepare_always_opt && !pretenure &&
1040       scope()->is_function_scope()) {
1041     FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1042     __ Move(stub.GetCallInterfaceDescriptor().GetRegisterParameter(0), info);
1043     __ CallStub(&stub);
1044   } else {
1045     __ Push(info);
1046     __ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
1047                              : Runtime::kNewClosure);
1048   }
1049   context()->Plug(result_register());
1050 }
1051 
1052 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1053   SetExpressionPosition(prop);
1054   Literal* key = prop->key()->AsLiteral();
1055   DCHECK(!key->value()->IsSmi());
1056   DCHECK(!prop->IsSuperAccess());
1057 
1058   __ Move(LoadDescriptor::NameRegister(), key->value());
1059   __ Move(LoadDescriptor::SlotRegister(),
1060           SmiFromSlot(prop->PropertyFeedbackSlot()));
1061   CallLoadIC();
1062 }
1063 
1064 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
1065   // Stack: receiver, home_object
1066   SetExpressionPosition(prop);
1067   Literal* key = prop->key()->AsLiteral();
1068   DCHECK(!key->value()->IsSmi());
1069   DCHECK(prop->IsSuperAccess());
1070 
1071   PushOperand(key->value());
1072   CallRuntimeWithOperands(Runtime::kLoadFromSuper);
1073 }
1074 
1075 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1076   SetExpressionPosition(prop);
1077   Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
1078   __ Move(LoadDescriptor::SlotRegister(),
1079           SmiFromSlot(prop->PropertyFeedbackSlot()));
1080   CallIC(ic);
1081 }
1082 
1083 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
1084   // Stack: receiver, home_object, key.
1085   SetExpressionPosition(prop);
1086   CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
1087 }
1088 
1089 void FullCodeGenerator::EmitPropertyKey(ObjectLiteralProperty* property,
1090                                         BailoutId bailout_id) {
1091   VisitForStackValue(property->key());
1092   CallRuntimeWithOperands(Runtime::kToName);
1093   PrepareForBailoutForId(bailout_id, BailoutState::NO_REGISTERS);
1094   PushOperand(result_register());
1095 }
1096 
1097 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorSlot slot) {
1098   DCHECK(!slot.IsInvalid());
1099   __ Move(VectorStoreICTrampolineDescriptor::SlotRegister(), SmiFromSlot(slot));
1100 }
1101 
1102 void FullCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
1103   Comment cmnt(masm_, "[ ReturnStatement");
1104   SetStatementPosition(stmt);
1105   Expression* expr = stmt->expression();
1106   VisitForAccumulatorValue(expr);
1107   EmitUnwindAndReturn();
1108 }
1109 
1110 
1111 void FullCodeGenerator::VisitWithStatement(WithStatement* stmt) {
1112   Comment cmnt(masm_, "[ WithStatement");
1113   SetStatementPosition(stmt);
1114 
1115   VisitForAccumulatorValue(stmt->expression());
1116   Callable callable = CodeFactory::ToObject(isolate());
1117   __ Move(callable.descriptor().GetRegisterParameter(0), result_register());
1118   __ Call(callable.code(), RelocInfo::CODE_TARGET);
1119   PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::NO_REGISTERS);
1120   PushOperand(result_register());
1121   PushFunctionArgumentForContextAllocation();
1122   CallRuntimeWithOperands(Runtime::kPushWithContext);
1123   StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
1124   PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
1125 
1126   Scope* saved_scope = scope();
1127   scope_ = stmt->scope();
1128   { WithOrCatch body(this);
1129     Visit(stmt->statement());
1130   }
1131   scope_ = saved_scope;
1132 
1133   // Pop context.
1134   LoadContextField(context_register(), Context::PREVIOUS_INDEX);
1135   // Update local stack frame context field.
1136   StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
1137 }
1138 
1139 
1140 void FullCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
1141   Comment cmnt(masm_, "[ DoWhileStatement");
1142   // Do not insert break location as we do that below.
1143   SetStatementPosition(stmt, SKIP_BREAK);
1144 
1145   Label body, book_keeping;
1146 
1147   Iteration loop_statement(this, stmt);
1148   increment_loop_depth();
1149 
1150   __ bind(&body);
1151   Visit(stmt->body());
1152 
1153   // Record the position of the do while condition and make sure it is
1154   // possible to break on the condition.
1155   __ bind(loop_statement.continue_label());
1156   PrepareForBailoutForId(stmt->ContinueId(), BailoutState::NO_REGISTERS);
1157 
1158   // Here is the actual 'while' keyword.
1159   SetExpressionAsStatementPosition(stmt->cond());
1160   VisitForControl(stmt->cond(),
1161                   &book_keeping,
1162                   loop_statement.break_label(),
1163                   &book_keeping);
1164 
1165   // Check stack before looping.
1166   PrepareForBailoutForId(stmt->BackEdgeId(), BailoutState::NO_REGISTERS);
1167   __ bind(&book_keeping);
1168   EmitBackEdgeBookkeeping(stmt, &body);
1169   __ jmp(&body);
1170 
1171   PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
1172   __ bind(loop_statement.break_label());
1173   decrement_loop_depth();
1174 }
1175 
1176 
1177 void FullCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
1178   Comment cmnt(masm_, "[ WhileStatement");
1179   Label loop, body;
1180 
1181   Iteration loop_statement(this, stmt);
1182   increment_loop_depth();
1183 
1184   __ bind(&loop);
1185 
1186   SetExpressionAsStatementPosition(stmt->cond());
1187   VisitForControl(stmt->cond(),
1188                   &body,
1189                   loop_statement.break_label(),
1190                   &body);
1191 
1192   PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
1193   __ bind(&body);
1194   Visit(stmt->body());
1195 
1196   __ bind(loop_statement.continue_label());
1197 
1198   // Check stack before looping.
1199   EmitBackEdgeBookkeeping(stmt, &loop);
1200   __ jmp(&loop);
1201 
1202   PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
1203   __ bind(loop_statement.break_label());
1204   decrement_loop_depth();
1205 }
1206 
1207 
1208 void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
1209   Comment cmnt(masm_, "[ ForStatement");
1210   // Do not insert break location as we do it below.
1211   SetStatementPosition(stmt, SKIP_BREAK);
1212 
1213   Label test, body;
1214 
1215   Iteration loop_statement(this, stmt);
1216 
1217   if (stmt->init() != NULL) {
1218     Visit(stmt->init());
1219   }
1220 
1221   increment_loop_depth();
1222   // Emit the test at the bottom of the loop (even if empty).
1223   __ jmp(&test);
1224 
1225   PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
1226   __ bind(&body);
1227   Visit(stmt->body());
1228 
1229   PrepareForBailoutForId(stmt->ContinueId(), BailoutState::NO_REGISTERS);
1230   __ bind(loop_statement.continue_label());
1231   if (stmt->next() != NULL) {
1232     SetStatementPosition(stmt->next());
1233     Visit(stmt->next());
1234   }
1235 
1236   // Check stack before looping.
1237   EmitBackEdgeBookkeeping(stmt, &body);
1238 
1239   __ bind(&test);
1240   if (stmt->cond() != NULL) {
1241     SetExpressionAsStatementPosition(stmt->cond());
1242     VisitForControl(stmt->cond(),
1243                     &body,
1244                     loop_statement.break_label(),
1245                     loop_statement.break_label());
1246   } else {
1247     __ jmp(&body);
1248   }
1249 
1250   PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
1251   __ bind(loop_statement.break_label());
1252   decrement_loop_depth();
1253 }
1254 
1255 
1256 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1257   Comment cmnt(masm_, "[ ForOfStatement");
1258 
1259   Iteration loop_statement(this, stmt);
1260   increment_loop_depth();
1261 
1262   // var iterator = iterable[Symbol.iterator]();
1263   SetExpressionAsStatementPosition(stmt->assign_iterator());
1264   VisitForEffect(stmt->assign_iterator());
1265 
1266   // Loop entry.
1267   __ bind(loop_statement.continue_label());
1268 
1269   // result = iterator.next()
1270   SetExpressionAsStatementPosition(stmt->next_result());
1271   VisitForEffect(stmt->next_result());
1272 
1273   // if (result.done) break;
1274   Label result_not_done;
1275   VisitForControl(stmt->result_done(), loop_statement.break_label(),
1276                   &result_not_done, &result_not_done);
1277   __ bind(&result_not_done);
1278 
1279   // each = result.value
1280   VisitForEffect(stmt->assign_each());
1281 
1282   // Generate code for the body of the loop.
1283   Visit(stmt->body());
1284 
1285   // Check stack before looping.
1286   PrepareForBailoutForId(stmt->BackEdgeId(), BailoutState::NO_REGISTERS);
1287   EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1288   __ jmp(loop_statement.continue_label());
1289 
1290   // Exit and decrement the loop depth.
1291   PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
1292   __ bind(loop_statement.break_label());
1293   decrement_loop_depth();
1294 }
1295 
1296 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
1297   LoadFromFrameField(JavaScriptFrameConstants::kFunctionOffset,
1298                      result_register());
1299   context()->Plug(result_register());
1300 }
1301 
1302 void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
1303   Comment cmnt(masm_, "[ TryCatchStatement");
1304   SetStatementPosition(stmt, SKIP_BREAK);
1305 
1306   // The try block adds a handler to the exception handler chain before
1307   // entering, and removes it again when exiting normally.  If an exception
1308   // is thrown during execution of the try block, the handler is consumed
1309   // and control is passed to the catch block with the exception in the
1310   // result register.
1311 
1312   Label try_entry, handler_entry, exit;
1313   __ jmp(&try_entry);
1314   __ bind(&handler_entry);
1315   if (stmt->clear_pending_message()) ClearPendingMessage();
1316 
1317   // Exception handler code, the exception is in the result register.
1318   // Extend the context before executing the catch block.
1319   { Comment cmnt(masm_, "[ Extend catch context");
1320     PushOperand(stmt->variable()->name());
1321     PushOperand(result_register());
1322     PushFunctionArgumentForContextAllocation();
1323     CallRuntimeWithOperands(Runtime::kPushCatchContext);
1324     StoreToFrameField(StandardFrameConstants::kContextOffset,
1325                       context_register());
1326   }
1327 
1328   Scope* saved_scope = scope();
1329   scope_ = stmt->scope();
1330   DCHECK(scope_->declarations()->is_empty());
1331   { WithOrCatch catch_body(this);
1332     Visit(stmt->catch_block());
1333   }
1334   // Restore the context.
1335   LoadContextField(context_register(), Context::PREVIOUS_INDEX);
1336   StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
1337   scope_ = saved_scope;
1338   __ jmp(&exit);
1339 
1340   // Try block code. Sets up the exception handler chain.
1341   __ bind(&try_entry);
1342 
1343   try_catch_depth_++;
1344   int handler_index = NewHandlerTableEntry();
1345   EnterTryBlock(handler_index, &handler_entry);
1346   {
1347     Comment cmnt_try(masm(), "[ Try block");
1348     Visit(stmt->try_block());
1349   }
1350   ExitTryBlock(handler_index);
1351   try_catch_depth_--;
1352   __ bind(&exit);
1353 }
1354 
1355 
1356 void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
1357   Comment cmnt(masm_, "[ TryFinallyStatement");
1358   SetStatementPosition(stmt, SKIP_BREAK);
1359 
1360   // Try finally is compiled by setting up a try-handler on the stack while
1361   // executing the try body, and removing it again afterwards.
1362   //
1363   // The try-finally construct can enter the finally block in three ways:
1364   // 1. By exiting the try-block normally. This exits the try block,
1365   //    pushes the continuation token and falls through to the finally
1366   //    block.
1367   // 2. By exiting the try-block with a function-local control flow transfer
1368   //    (break/continue/return). The site of the, e.g., break exits the
1369   //    try block, pushes the continuation token and jumps to the
1370   //    finally block. After the finally block executes, the execution
1371   //    continues based on the continuation token to a block that
1372   //    continues with the control flow transfer.
1373   // 3. By exiting the try-block with a thrown exception. In the handler,
1374   //    we push the exception and continuation token and jump to the
1375   //    finally block (which will again dispatch based on the token once
1376   //    it is finished).
1377 
1378   Label try_entry, handler_entry, finally_entry;
1379   DeferredCommands deferred(this, &finally_entry);
1380 
1381   // Jump to try-handler setup and try-block code.
1382   __ jmp(&try_entry);
1383   __ bind(&handler_entry);
1384 
1385   // Exception handler code.  This code is only executed when an exception
1386   // is thrown.  Record the continuation and jump to the finally block.
1387   {
1388     Comment cmnt_handler(masm(), "[ Finally handler");
1389     deferred.RecordThrow();
1390   }
1391 
1392   // Set up try handler.
1393   __ bind(&try_entry);
1394   int handler_index = NewHandlerTableEntry();
1395   EnterTryBlock(handler_index, &handler_entry);
1396   {
1397     Comment cmnt_try(masm(), "[ Try block");
1398     TryFinally try_body(this, &deferred);
1399     Visit(stmt->try_block());
1400   }
1401   ExitTryBlock(handler_index);
1402   // Execute the finally block on the way out.  Clobber the unpredictable
1403   // value in the result register with one that's safe for GC because the
1404   // finally block will unconditionally preserve the result register on the
1405   // stack.
1406   ClearAccumulator();
1407   deferred.EmitFallThrough();
1408   // Fall through to the finally block.
1409 
1410   // Finally block implementation.
1411   __ bind(&finally_entry);
1412   {
1413     Comment cmnt_finally(masm(), "[ Finally block");
1414     OperandStackDepthIncrement(2);  // Token and accumulator are on stack.
1415     EnterFinallyBlock();
1416     Visit(stmt->finally_block());
1417     ExitFinallyBlock();
1418     OperandStackDepthDecrement(2);  // Token and accumulator were on stack.
1419   }
1420 
1421   {
1422     Comment cmnt_deferred(masm(), "[ Post-finally dispatch");
1423     deferred.EmitCommands();  // Return to the calling code.
1424   }
1425 }
1426 
1427 
1428 void FullCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
1429   Comment cmnt(masm_, "[ DebuggerStatement");
1430   SetStatementPosition(stmt);
1431 
1432   __ DebugBreak();
1433   // Ignore the return value.
1434 
1435   PrepareForBailoutForId(stmt->DebugBreakId(), BailoutState::NO_REGISTERS);
1436 }
1437 
1438 
VisitCaseClause(CaseClause * clause)1439 void FullCodeGenerator::VisitCaseClause(CaseClause* clause) {
1440   UNREACHABLE();
1441 }
1442 
1443 
void FullCodeGenerator::VisitConditional(Conditional* expr) {
  Comment cmnt(masm_, "[ Conditional");
  Label true_case, false_case, done;
  VisitForControl(expr->condition(), &true_case, &false_case, &true_case);

  int original_stack_depth = operand_stack_depth_;
  PrepareForBailoutForId(expr->ThenId(), BailoutState::NO_REGISTERS);
  __ bind(&true_case);
  SetExpressionPosition(expr->then_expression());
  if (context()->IsTest()) {
    const TestContext* for_test = TestContext::cast(context());
    VisitForControl(expr->then_expression(),
                    for_test->true_label(),
                    for_test->false_label(),
                    NULL);
  } else {
    VisitInDuplicateContext(expr->then_expression());
    __ jmp(&done);
  }

  operand_stack_depth_ = original_stack_depth;
  PrepareForBailoutForId(expr->ElseId(), BailoutState::NO_REGISTERS);
  __ bind(&false_case);
  SetExpressionPosition(expr->else_expression());
  VisitInDuplicateContext(expr->else_expression());
  // If control flow falls through from the Visit above, merge it with the
  // true case here.
  if (!context()->IsTest()) {
    __ bind(&done);
  }
}


void FullCodeGenerator::VisitLiteral(Literal* expr) {
  Comment cmnt(masm_, "[ Literal");
  context()->Plug(expr->value());
}


void FullCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
  Comment cmnt(masm_, "[ FunctionLiteral");

  // Build the function boilerplate and instantiate it.
  Handle<SharedFunctionInfo> function_info =
      Compiler::GetSharedFunctionInfo(expr, script(), info_);
  if (function_info.is_null()) {
    SetStackOverflow();
    return;
  }
  EmitNewClosure(function_info, expr->pretenure());
}


void FullCodeGenerator::VisitClassLiteral(ClassLiteral* lit) {
  Comment cmnt(masm_, "[ ClassLiteral");

  {
    NestedClassLiteral nested_class_literal(this, lit);
    EnterBlockScopeIfNeeded block_scope_state(
        this, lit->scope(), lit->EntryId(), lit->DeclsId(), lit->ExitId());

    if (lit->extends() != NULL) {
      VisitForStackValue(lit->extends());
    } else {
      PushOperand(isolate()->factory()->the_hole_value());
    }

    VisitForStackValue(lit->constructor());

    PushOperand(Smi::FromInt(lit->start_position()));
    PushOperand(Smi::FromInt(lit->end_position()));

    CallRuntimeWithOperands(Runtime::kDefineClass);
    PrepareForBailoutForId(lit->CreateLiteralId(), BailoutState::TOS_REGISTER);
    PushOperand(result_register());

    // Load the "prototype" from the constructor.
    __ Move(LoadDescriptor::ReceiverRegister(), result_register());
    __ LoadRoot(LoadDescriptor::NameRegister(),
                Heap::kprototype_stringRootIndex);
    __ Move(LoadDescriptor::SlotRegister(), SmiFromSlot(lit->PrototypeSlot()));
    CallLoadIC();
    PrepareForBailoutForId(lit->PrototypeId(), BailoutState::TOS_REGISTER);
    PushOperand(result_register());

    EmitClassDefineProperties(lit);
    DropOperands(1);

    // Set the constructor to have fast properties.
    CallRuntimeWithOperands(Runtime::kToFastProperties);

    if (lit->class_variable_proxy() != nullptr) {
      EmitVariableAssignment(lit->class_variable_proxy()->var(), Token::INIT,
                             lit->ProxySlot());
    }
  }

  context()->Plug(result_register());
}

void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Callable callable = CodeFactory::FastCloneRegExp(isolate());
  CallInterfaceDescriptor descriptor = callable.descriptor();
  LoadFromFrameField(JavaScriptFrameConstants::kFunctionOffset,
                     descriptor.GetRegisterParameter(0));
  __ Move(descriptor.GetRegisterParameter(1),
          Smi::FromInt(expr->literal_index()));
  __ Move(descriptor.GetRegisterParameter(2), expr->pattern());
  __ Move(descriptor.GetRegisterParameter(3), Smi::FromInt(expr->flags()));
  __ Call(callable.code(), RelocInfo::CODE_TARGET);
  context()->Plug(result_register());
}

void FullCodeGenerator::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
  Comment cmnt(masm_, "[ NativeFunctionLiteral");
  Handle<SharedFunctionInfo> shared =
      Compiler::GetSharedFunctionInfoForNative(expr->extension(), expr->name());
  EmitNewClosure(shared, false);
}


void FullCodeGenerator::VisitThrow(Throw* expr) {
  Comment cmnt(masm_, "[ Throw");
  VisitForStackValue(expr->exception());
  SetExpressionPosition(expr);
  CallRuntimeWithOperands(Runtime::kThrow);
  // Never returns here.

  // Even though this expression doesn't produce a value, we need to simulate
  // plugging of the value context to ensure stack depth tracking is in sync.
  if (context()->IsStackValue()) OperandStackDepthIncrement(1);
}


void FullCodeGenerator::EnterTryBlock(int handler_index, Label* handler) {
  HandlerTableEntry* entry = &handler_table_[handler_index];
  entry->range_start = masm()->pc_offset();
  entry->handler_offset = handler->pos();
  entry->try_catch_depth = try_catch_depth_;
  entry->stack_depth = operand_stack_depth_;

  // We rely on the tracked operand stack depth; verify that it is accurate.
  EmitOperandStackDepthCheck();

  // Push context onto operand stack.
  STATIC_ASSERT(TryBlockConstant::kElementCount == 1);
  PushOperand(context_register());
}


void FullCodeGenerator::ExitTryBlock(int handler_index) {
  HandlerTableEntry* entry = &handler_table_[handler_index];
  entry->range_end = masm()->pc_offset();

  // Drop context from operand stack.
  DropOperands(TryBlockConstant::kElementCount);
}


void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function.  Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, (expr->tail_call_mode() == TailCallMode::kAllow)
                          ? "[ TailCall"
                          : "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType(isolate());

  switch (call_type) {
    case Call::POSSIBLY_EVAL_CALL:
      EmitPossiblyEvalCall(expr);
      break;
    case Call::GLOBAL_CALL:
      EmitCallWithLoadIC(expr);
      break;
    case Call::LOOKUP_SLOT_CALL:
      // Call to a lookup slot (dynamically introduced variable).
      PushCalleeAndWithBaseObject(expr);
      EmitCall(expr);
      break;
    case Call::NAMED_PROPERTY_CALL: {
      Property* property = callee->AsProperty();
      VisitForStackValue(property->obj());
      EmitCallWithLoadIC(expr);
      break;
    }
    case Call::KEYED_PROPERTY_CALL: {
      Property* property = callee->AsProperty();
      VisitForStackValue(property->obj());
      EmitKeyedCallWithLoadIC(expr, property->key());
      break;
    }
    case Call::NAMED_SUPER_PROPERTY_CALL:
      EmitSuperCallWithLoadIC(expr);
      break;
    case Call::KEYED_SUPER_PROPERTY_CALL:
      EmitKeyedSuperCallWithLoadIC(expr);
      break;
    case Call::SUPER_CALL:
      EmitSuperConstructorCall(expr);
      break;
    case Call::OTHER_CALL:
      // Call to an arbitrary expression not handled specially above.
      VisitForStackValue(callee);
      OperandStackDepthIncrement(1);
      __ PushRoot(Heap::kUndefinedValueRootIndex);
      // Emit function call.
      EmitCall(expr);
      break;
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  DCHECK(expr->return_is_recorded_);
#endif
}

void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    Comment cmnt(masm_, "[ CallRuntime");
    EmitLoadJSRuntimeFunction(expr);

    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
    EmitCallJSRuntimeFunction(expr);
    context()->DropAndPlug(1, result_register());

  } else {
    const Runtime::Function* function = expr->function();
    switch (function->function_id) {
#define CALL_INTRINSIC_GENERATOR(Name)     \
  case Runtime::kInline##Name: {           \
    Comment cmnt(masm_, "[ Inline" #Name); \
    return Emit##Name(expr);               \
  }
      FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
#undef CALL_INTRINSIC_GENERATOR
      default: {
        Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
        // Push the arguments ("left-to-right").
        for (int i = 0; i < arg_count; i++) {
          VisitForStackValue(args->at(i));
        }

        // Call the C runtime function.
        PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
        __ CallRuntime(expr->function(), arg_count);
        OperandStackDepthDecrement(arg_count);
        context()->Plug(result_register());
      }
    }
  }
}

void FullCodeGenerator::VisitSpread(Spread* expr) { UNREACHABLE(); }


void FullCodeGenerator::VisitEmptyParentheses(EmptyParentheses* expr) {
  UNREACHABLE();
}


void FullCodeGenerator::VisitRewritableExpression(RewritableExpression* expr) {
  Visit(expr->expression());
}

FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* context_length) {
  // The macros used here must preserve the result register.

  // Calculate how many operands to drop to get down to the handler block.
  int stack_drop = codegen_->operand_stack_depth_ - GetStackDepthAtTarget();
  DCHECK_GE(stack_drop, 0);

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  if (*context_length > 0) {
    __ Drop(stack_drop);  // Down to the handler block.
    // Restore the context to its dedicated register and the stack.
    STATIC_ASSERT(TryBlockConstant::kElementCount == 1);
    __ Pop(codegen_->context_register());
    codegen_->StoreToFrameField(StandardFrameConstants::kContextOffset,
                                codegen_->context_register());
  } else {
    // Down to the handler block and also drop context.
    __ Drop(stack_drop + TryBlockConstant::kElementCount);
  }

  // The caller will ignore outputs.
  *context_length = -1;
  return previous_;
}

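// The DeferredCommands helpers below record control-flow commands that must
// be re-issued after the finally block has run.  Each Record* method obtains
// (or reuses) a continuation token, remembers the pending command, pushes the
// token together with the accumulator, and jumps to the finally block;
// EmitCommands (emitted after the finally block) dispatches on that token to
// resume the deferred break, continue, return or rethrow.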
void FullCodeGenerator::DeferredCommands::RecordBreak(Statement* target) {
  TokenId token = dispenser_.GetBreakContinueToken();
  commands_.push_back({kBreak, token, target});
  EmitJumpToFinally(token);
}

void FullCodeGenerator::DeferredCommands::RecordContinue(Statement* target) {
  TokenId token = dispenser_.GetBreakContinueToken();
  commands_.push_back({kContinue, token, target});
  EmitJumpToFinally(token);
}

void FullCodeGenerator::DeferredCommands::RecordReturn() {
  if (return_token_ == TokenDispenserForFinally::kInvalidToken) {
    return_token_ = TokenDispenserForFinally::kReturnToken;
    commands_.push_back({kReturn, return_token_, nullptr});
  }
  EmitJumpToFinally(return_token_);
}

void FullCodeGenerator::DeferredCommands::RecordThrow() {
  if (throw_token_ == TokenDispenserForFinally::kInvalidToken) {
    throw_token_ = TokenDispenserForFinally::kThrowToken;
    commands_.push_back({kThrow, throw_token_, nullptr});
  }
  EmitJumpToFinally(throw_token_);
}

void FullCodeGenerator::DeferredCommands::EmitFallThrough() {
  __ Push(Smi::FromInt(TokenDispenserForFinally::kFallThroughToken));
  __ Push(result_register());
}

void FullCodeGenerator::DeferredCommands::EmitJumpToFinally(TokenId token) {
  __ Push(Smi::FromInt(token));
  __ Push(result_register());
  __ jmp(finally_entry_);
}

bool FullCodeGenerator::TryLiteralCompare(CompareOperation* expr) {
  Expression* sub_expr;
  Handle<String> check;
  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
    SetExpressionPosition(expr);
    EmitLiteralCompareTypeof(expr, sub_expr, check);
    return true;
  }

  if (expr->IsLiteralCompareUndefined(&sub_expr)) {
    SetExpressionPosition(expr);
    EmitLiteralCompareNil(expr, sub_expr, kUndefinedValue);
    return true;
  }

  if (expr->IsLiteralCompareNull(&sub_expr)) {
    SetExpressionPosition(expr);
    EmitLiteralCompareNil(expr, sub_expr, kNullValue);
    return true;
  }

  return false;
}


void BackEdgeTable::Patch(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);

  // Increment the loop nesting level by one and iterate over the back edge
  // table, patching the interrupt calls of all loops at that nesting level
  // into unconditional calls to the on-stack replacement code.
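  // For example, after two successful calls to Patch, back edges recorded
  // with loop_depth 1 and 2 call the OnStackReplacement builtin, while back
  // edges of more deeply nested loops still call the interrupt check (see
  // Verify below).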
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level() + 1;
  if (loop_nesting_level > Code::kMaxLoopNestingMarker) return;

  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (static_cast<int>(back_edges.loop_depth(i)) == loop_nesting_level) {
      DCHECK_EQ(INTERRUPT, GetBackEdgeState(isolate,
                                            unoptimized,
                                            back_edges.pc(i)));
      PatchAt(unoptimized, back_edges.pc(i), ON_STACK_REPLACEMENT, patch);
    }
  }

  unoptimized->set_allow_osr_at_loop_nesting_level(loop_nesting_level);
  DCHECK(Verify(isolate, unoptimized));
}


void BackEdgeTable::Revert(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  Code* patch = isolate->builtins()->builtin(Builtins::kInterruptCheck);

  // Iterate over the back edge table and revert the patched interrupt calls.
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();

  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (static_cast<int>(back_edges.loop_depth(i)) <= loop_nesting_level) {
      DCHECK_NE(INTERRUPT, GetBackEdgeState(isolate,
                                            unoptimized,
                                            back_edges.pc(i)));
      PatchAt(unoptimized, back_edges.pc(i), INTERRUPT, patch);
    }
  }

  unoptimized->set_allow_osr_at_loop_nesting_level(0);
  // Assert that none of the back edges are patched anymore.
  DCHECK(Verify(isolate, unoptimized));
}


#ifdef DEBUG
bool BackEdgeTable::Verify(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();
  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    uint32_t loop_depth = back_edges.loop_depth(i);
    CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker);
    // Assert that all back edges for shallower loops (and only those)
    // have already been patched.
    CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level),
             GetBackEdgeState(isolate,
                              unoptimized,
                              back_edges.pc(i)) != INTERRUPT);
  }
  return true;
}
#endif  // DEBUG


FullCodeGenerator::EnterBlockScopeIfNeeded::EnterBlockScopeIfNeeded(
    FullCodeGenerator* codegen, Scope* scope, BailoutId entry_id,
    BailoutId declarations_id, BailoutId exit_id)
    : codegen_(codegen), exit_id_(exit_id) {
  saved_scope_ = codegen_->scope();

  if (scope == NULL) {
    codegen_->PrepareForBailoutForId(entry_id, BailoutState::NO_REGISTERS);
    needs_block_context_ = false;
  } else {
    needs_block_context_ = scope->NeedsContext();
    codegen_->scope_ = scope;
    {
      if (needs_block_context_) {
        Comment cmnt(masm(), "[ Extend block context");
        codegen_->PushOperand(scope->GetScopeInfo(codegen->isolate()));
        codegen_->PushFunctionArgumentForContextAllocation();
        codegen_->CallRuntimeWithOperands(Runtime::kPushBlockContext);

        // Replace the context stored in the frame.
        codegen_->StoreToFrameField(StandardFrameConstants::kContextOffset,
                                    codegen_->context_register());
      }
      CHECK_EQ(0, scope->num_stack_slots());
      codegen_->PrepareForBailoutForId(entry_id, BailoutState::NO_REGISTERS);
    }
    {
      Comment cmnt(masm(), "[ Declarations");
      codegen_->VisitDeclarations(scope->declarations());
      codegen_->PrepareForBailoutForId(declarations_id,
                                       BailoutState::NO_REGISTERS);
    }
  }
}


FullCodeGenerator::EnterBlockScopeIfNeeded::~EnterBlockScopeIfNeeded() {
  if (needs_block_context_) {
    codegen_->LoadContextField(codegen_->context_register(),
                               Context::PREVIOUS_INDEX);
    // Update local stack frame context field.
    codegen_->StoreToFrameField(StandardFrameConstants::kContextOffset,
                                codegen_->context_register());
  }
  codegen_->PrepareForBailoutForId(exit_id_, BailoutState::NO_REGISTERS);
  codegen_->scope_ = saved_scope_;
}


bool FullCodeGenerator::NeedsHoleCheckForLoad(VariableProxy* proxy) {
  Variable* var = proxy->var();

  if (!var->binding_needs_init()) {
    return false;
  }

  // var->scope() may be NULL when the proxy is located in eval code and
  // refers to a potential outside binding. Currently those bindings are
  // always looked up dynamically, i.e. in that case
  //     var->location() == LOOKUP
  // always holds.
  DCHECK(var->scope() != NULL);
  DCHECK(var->location() == VariableLocation::PARAMETER ||
         var->location() == VariableLocation::LOCAL ||
         var->location() == VariableLocation::CONTEXT);

  // Check if the binding really needs an initialization check. The check
  // can be skipped in the following situation: we have a LET or CONST
  // binding in harmony mode, both the Variable and the VariableProxy have
  // the same declaration scope (i.e. they are both in global code, in the
  // same function or in the same eval code), the VariableProxy is
  // physically located in the source after the initializer of the variable,
  // and the initializer cannot be skipped due to a nonlinear scope.
  //
  // We cannot skip any initialization checks for CONST in non-harmony
  // mode because const variables may be declared but never initialized:
  //   if (false) { const x; }; var y = x;
  //
  // The condition on the declaration scopes is a conservative check for
  // nested functions that access a binding and are called before the
  // binding is initialized:
  //   function() { f(); let x = 1; function f() { x = 2; } }
  //
  // The check cannot be skipped on non-linear scopes, namely switch
  // scopes, to ensure tests are done in cases like the following:
  //   switch (1) { case 0: let x = 2; case 1: f(x); }
  // The scope of the variable needs to be checked, in case the use is
  // in a sub-block which may be linear.
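  //
  // As a small illustration (not from the original comment; use() is a
  // hypothetical function):
  //   let x = 1; use(x);   // use follows the initializer: check can be skipped
  //   use(x); let x = 1;   // use precedes the initializer: check is required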
  if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
    return true;
  }

  if (var->is_this()) {
    DCHECK(literal() != nullptr &&
           (literal()->kind() & kSubclassConstructor) != 0);
    // TODO(littledan): implement 'this' hole check elimination.
    return true;
  }

  // Check that we always have a valid source position.
  DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
  DCHECK(proxy->position() != RelocInfo::kNoPosition);

  return var->scope()->is_nonlinear() ||
         var->initializer_position() >= proxy->position();
}


#undef __


}  // namespace internal
}  // namespace v8