• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #if V8_TARGET_ARCH_X87
6 
7 #include "src/ast/scopes.h"
8 #include "src/code-factory.h"
9 #include "src/code-stubs.h"
10 #include "src/codegen.h"
11 #include "src/debug/debug.h"
12 #include "src/full-codegen/full-codegen.h"
13 #include "src/ic/ic.h"
14 #include "src/parsing/parser.h"
15 #include "src/x87/frames-x87.h"
16 
17 namespace v8 {
18 namespace internal {
19 
20 #define __ ACCESS_MASM(masm())
21 
// Records the location of an inlined smi-check jump so the IC machinery can
// patch it later: the emitted jc/jnc is rewritten to jz/jnz (see EmitJump).
// EmitPatchInfo encodes the byte distance back to the patch site inside a
// dummy `test eax, imm` instruction for the patcher to read.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    // If a jump was emitted, the matching patch info must have been too.
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      // The delta is stored in an 8-bit immediate, so it must fit.
      DCHECK(is_uint8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;  // Whether EmitPatchInfo has run (debug bookkeeping).
#endif
};
77 
78 
79 // Generate code for a JS function.  On entry to the function the receiver
80 // and arguments have been pushed on the stack left to right, with the
81 // return address on top of them.  The actual argument count matches the
82 // formal parameter count expected by the function.
83 //
84 // The live registers are:
85 //   o edi: the JS function object being called (i.e. ourselves)
86 //   o edx: the new target value
87 //   o esi: our context
88 //   o ebp: our caller's frame pointer
89 //   o esp: stack pointer (pointing to return address)
90 //
91 // The function builds a JS frame.  Please see JavaScriptFrameConstants in
92 // frames-x87.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  // Cell holding the remaining interrupt budget, decremented on back edges
  // and returns (see EmitProfilingCounterDecrement).
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  // Debug-only sanity check: a function that expects a JSReceiver receiver
  // must actually have one on the stack.
  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));
    __ AssertNotSmi(ecx);
    __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx);
    __ Assert(above_equal, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      if (locals_count >= 128) {
        // For large frames, verify up front that the pushes below will not
        // run past the real stack limit.
        Label ok;
        __ mov(ecx, esp);
        __ sub(ecx, Immediate(locals_count * kPointerSize));
        ExternalReference stack_limit =
            ExternalReference::address_of_real_stack_limit(isolate());
        __ cmp(ecx, Operand::StaticVariable(stack_limit));
        __ j(above_equal, &ok, Label::kNear);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      // Pushes are emitted in unrolled batches of kMaxPushes inside a loop
      // to bound code size while keeping the loop overhead small.
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(ecx, loop_iterations);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(eax);
        }
        __ dec(ecx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(eax);
      }
    }
  }

  // Tracks whether edi still holds the function object (it is clobbered by
  // the context- and arguments-allocation calls below).
  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    // Argument to NewContext is the function, which is still in edi.
    if (info->scope()->is_script_scope()) {
      __ push(edi);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(edx);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(edi);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(edx);  // Restore new target.
      }
    }
    function_in_register = false;
    // Context is returned in eax.  It replaces the context passed to us.
    // It's saved in the stack and kept live in esi.
    __ mov(esi, eax);
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      // Index -1 designates the receiver ("this"); 0..n-1 the parameters.
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(esi, context_offset, eax, ebx,
                                    kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Register holding this function and new target are both trashed in case we
  // bailout here. But since that can happen only when new target is not used
  // and we allocate a context, the value of |function_in_register| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers register again, keep it marked as such.
    }
    SetVar(this_function_var, edi, ebx, ecx);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, edx, ebx, ecx);
  }

  // Possibly allocate RestParameters
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register = false;
    SetVar(rest_param, eax, ebx, edx);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Arguments object must be allocated after the context object, in
    // case the "arguments" or ".arguments" variables are in the context.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(edi);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    ExternalReference stack_limit =
        ExternalReference::address_of_stack_limit(isolate());
    __ cmp(esp, Operand::StaticVariable(stack_limit));
    __ j(above_equal, &ok, Label::kNear);
    __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}
330 
331 
void FullCodeGenerator::ClearAccumulator() {
  // Reset the accumulator (eax) to a harmless smi zero.
  __ Move(eax, Immediate(Smi::FromInt(0)));
}
335 
336 
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  // Subtract |delta| (as a smi) from the interrupt budget stored in the
  // profiling counter cell.  Clobbers ebx.
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
}
342 
343 
EmitProfilingCounterReset()344 void FullCodeGenerator::EmitProfilingCounterReset() {
345   int reset_value = FLAG_interrupt_budget;
346   __ mov(ebx, Immediate(profiling_counter_));
347   __ mov(FieldOperand(ebx, Cell::kValueOffset),
348          Immediate(Smi::FromInt(reset_value)));
349 }
350 
351 
void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  // Weight the decrement by the size of the loop body so that large loops
  // consume the interrupt budget faster, clamped to kMaxBackEdgeWeight.
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  // If the budget is exhausted (counter went non-positive), call the
  // interrupt builtin; otherwise skip straight to |ok|.
  __ j(positive, &ok, Label::kNear);
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}
379 
void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    // Weight by the total code size generated so far, like a back edge.
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ j(positive, &ok, Label::kNear);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(eax);
  }
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(eax);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}
404 
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    // The shared return sequence was already emitted; jump to it.
    __ jmp(&return_label_);
  } else {
    // Common return label
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit);
    }
    // Update the profiling counter; this may call the InterruptCheck builtin.
    EmitProfilingCounterHandlingForReturnSequence(false);

    SetReturnPosition(literal());
    __ leave();

    // Pop the receiver and the arguments off the caller's stack on return.
    int arg_count = info_->scope()->num_parameters() + 1;
    int arguments_bytes = arg_count * kPointerSize;
    __ Ret(arguments_bytes, ecx);
  }
}
426 
void FullCodeGenerator::RestoreContext() {
  // Reload the context register (esi) from the slot saved in our frame.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
}
430 
Plug(Variable * var) const431 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
432   DCHECK(var->IsStackAllocated() || var->IsContextSlot());
433   MemOperand operand = codegen()->VarOperand(var, result_register());
434   // Memory operands can be pushed directly.
435   codegen()->PushOperand(operand);
436 }
437 
438 
// Root-list plugging is an arm64/x64-style shortcut that this port never uses.
void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}
442 
443 
// Root-list plugging is never emitted by the x87 port; see EffectContext.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}
448 
449 
// Root-list plugging is never emitted by the x87 port; see EffectContext.
void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}
454 
455 
// Root-list plugging is never emitted by the x87 port; see EffectContext.
void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}
459 
460 
// An effect context discards its value, so a literal needs no code at all.
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}
463 
464 
Plug(Handle<Object> lit) const465 void FullCodeGenerator::AccumulatorValueContext::Plug(
466     Handle<Object> lit) const {
467   if (lit->IsSmi()) {
468     __ SafeMove(result_register(), Immediate(lit));
469   } else {
470     __ Move(result_register(), Immediate(lit));
471   }
472 }
473 
474 
Plug(Handle<Object> lit) const475 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
476   codegen()->OperandStackDepthIncrement(1);
477   if (lit->IsSmi()) {
478     __ SafePush(Immediate(lit));
479   } else {
480     __ push(Immediate(lit));
481   }
482 }
483 
484 
void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  // The branches below assume the literal's truthiness is decidable at
  // compile time; undetectable objects other than null/undefined are not.
  DCHECK(lit->IsNull(isolate()) || lit->IsUndefined(isolate()) ||
         !lit->IsUndetectable());
  if (lit->IsUndefined(isolate()) || lit->IsNull(isolate()) ||
      lit->IsFalse(isolate())) {
    // Always-false literals.
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
    // Always-true literals (JS objects are truthy).
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    // Strings are falsy only when empty.
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    // Smis are falsy only when zero.
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}
515 
516 
void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  // Drop all but one of the |count| operands, then overwrite the remaining
  // top-of-stack slot with |reg|.
  if (count > 1) codegen()->DropOperands(count - 1);
  __ mov(Operand(esp, 0), reg);
}
523 
524 
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  // An effect context discards the value, so both edges must already point
  // at the same join label; just bind it.
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}
530 
531 
Plug(Label * materialize_true,Label * materialize_false) const532 void FullCodeGenerator::AccumulatorValueContext::Plug(
533     Label* materialize_true,
534     Label* materialize_false) const {
535   Label done;
536   __ bind(materialize_true);
537   __ mov(result_register(), isolate()->factory()->true_value());
538   __ jmp(&done, Label::kNear);
539   __ bind(materialize_false);
540   __ mov(result_register(), isolate()->factory()->false_value());
541   __ bind(&done);
542 }
543 
544 
Plug(Label * materialize_true,Label * materialize_false) const545 void FullCodeGenerator::StackValueContext::Plug(
546     Label* materialize_true,
547     Label* materialize_false) const {
548   codegen()->OperandStackDepthIncrement(1);
549   Label done;
550   __ bind(materialize_true);
551   __ push(Immediate(isolate()->factory()->true_value()));
552   __ jmp(&done, Label::kNear);
553   __ bind(materialize_false);
554   __ push(Immediate(isolate()->factory()->false_value()));
555   __ bind(&done);
556 }
557 
558 
void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  // In a test context the branch targets are already the context's own
  // labels; there is nothing to materialize.
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}
564 
565 
Plug(bool flag) const566 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
567   Handle<Object> value = flag
568       ? isolate()->factory()->true_value()
569       : isolate()->factory()->false_value();
570   __ mov(result_register(), value);
571 }
572 
573 
Plug(bool flag) const574 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
575   codegen()->OperandStackDepthIncrement(1);
576   Handle<Object> value = flag
577       ? isolate()->factory()->true_value()
578       : isolate()->factory()->false_value();
579   __ push(Immediate(value));
580 }
581 
582 
Plug(bool flag) const583 void FullCodeGenerator::TestContext::Plug(bool flag) const {
584   codegen()->PrepareForBailoutBeforeSplit(condition(),
585                                           true,
586                                           true_label_,
587                                           false_label_);
588   if (flag) {
589     if (true_label_ != fall_through_) __ jmp(true_label_);
590   } else {
591     if (false_label_ != fall_through_) __ jmp(false_label_);
592   }
593 }
594 
595 
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  // Convert the value in the accumulator to a boolean via the ToBoolean IC,
  // then branch on whether the result equals the true value.
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(equal, if_true, if_false, fall_through);
}
605 
606 
Split(Condition cc,Label * if_true,Label * if_false,Label * fall_through)607 void FullCodeGenerator::Split(Condition cc,
608                               Label* if_true,
609                               Label* if_false,
610                               Label* fall_through) {
611   if (if_false == fall_through) {
612     __ j(cc, if_true);
613   } else if (if_true == fall_through) {
614     __ j(NegateCondition(cc), if_false);
615   } else {
616     __ j(cc, if_true);
617     __ jmp(if_false);
618   }
619 }
620 
621 
StackOperand(Variable * var)622 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
623   DCHECK(var->IsStackAllocated());
624   // Offset is negative because higher indexes are at lower addresses.
625   int offset = -var->index() * kPointerSize;
626   // Adjust by a (parameter or local) base offset.
627   if (var->IsParameter()) {
628     offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
629   } else {
630     offset += JavaScriptFrameConstants::kLocal0Offset;
631   }
632   return Operand(ebp, offset);
633 }
634 
635 
VarOperand(Variable * var,Register scratch)636 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
637   DCHECK(var->IsContextSlot() || var->IsStackAllocated());
638   if (var->IsContextSlot()) {
639     int context_chain_length = scope()->ContextChainLength(var->scope());
640     __ LoadContext(scratch, context_chain_length);
641     return ContextOperand(scratch, var->index());
642   } else {
643     return StackOperand(var);
644   }
645 }
646 
647 
GetVar(Register dest,Variable * var)648 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
649   DCHECK(var->IsContextSlot() || var->IsStackAllocated());
650   MemOperand location = VarOperand(var, dest);
651   __ mov(dest, location);
652 }
653 
654 
void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  // All three registers must be distinct so neither the context walk nor
  // the write barrier clobbers the value being stored.
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    // esi is the live context register and must not be clobbered here.
    DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}
673 
674 
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  // The normalization code below is only reached from the bailout; normal
  // execution jumps over it.
  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    // Re-split on the boolean value left in eax after a bailout resume.
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}
693 
694 
void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context by comparing the
    // current context's map (via esi) against the known special maps.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, kDeclarationInWithContext);
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}
707 
708 
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  // LET and CONST bindings are initialized with the hole value.
  bool hole_init = mode == LET || mode == CONST;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      // Globals are recorded as name/initial-value pairs and declared in one
      // batch later (see DeclareGlobals).
      DCHECK(!variable->binding_needs_init());
      globals_->Add(variable->name(), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      // Dynamically-scoped variables must be declared through the runtime.
      Comment cmnt(masm_, "[ VariableDeclaration");
      DCHECK_EQ(VAR, mode);
      DCHECK(!hole_init);
      __ push(Immediate(variable->name()));
      __ CallRuntime(Runtime::kDeclareEvalVar);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}
757 
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      // Globals are recorded as name/SharedFunctionInfo pairs and declared
      // in one batch later (see DeclareGlobals).
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      // Evaluate the closure into the accumulator and store it in the slot.
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ mov(ContextOperand(esi, variable->index()), result_register());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(esi, Context::SlotOffset(variable->index()),
                                result_register(), ecx, kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      // Dynamically-scoped functions are declared through the runtime.
      Comment cmnt(masm_, "[ FunctionDeclaration");
      PushOperand(variable->name());
      VisitForStackValue(declaration->fun());
      CallRuntimeWithOperands(Runtime::kDeclareEvalFunction);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}
805 
806 
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Declares all global variables/functions in one runtime call.  'pairs'
  // holds the (name, value) entries accumulated in globals_ by the
  // declaration visitors.
  // Call the runtime to declare the globals.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}
814 
815 
void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Declares all modules described in 'descriptions' with a single runtime
  // call, mirroring DeclareGlobals above.
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}
822 
823 
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  // Compiles a switch statement as a chain of '===' comparisons of each
  // case label against the tag value (kept on the operand stack), followed
  // by the case bodies in source order.
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      // Fast path: if both operands are smis, compare them directly and
      // avoid the CompareIC call.  The patch site lets the IC rewrite this
      // check later.
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ jmp(&skip, Label::kNear);
    // The code below the jump is reached only when re-entering via a
    // bailout at this id, where the comparison result is materialized as a
    // boolean in eax — NOTE(review): presumed from the TOS_REGISTER state;
    // confirm against the deoptimizer's continuation convention.
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    // Normal path: the CompareIC leaves zero in eax when the operands
    // compare equal.
    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}
915 
916 
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  // Compiles a for-in loop.  The loop state lives in five operand-stack
  // slots which, once the header is done, are laid out as:
  //   esp+0: current index (smi)
  //   esp+1: length of the enum cache / fixed array (smi)
  //   esp+2: enum cache array or fixed array of names
  //   esp+3: expected map of the enumerable, or Smi(1) for the slow path
  //   esp+4: the enumerable object itself
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ push(eax);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime, use_cache, fixed_array;
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
  // The runtime returns either a map (enum cache is usable) or a fixed
  // array of names; distinguish by checking for the meta map.
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);


  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::FromInt(0)));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
  __ jmp(&loop);

  // No enumerable properties: drop the enumerable and leave the loop.
  __ bind(&no_descriptors);
  __ add(esp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register eax. Iterate through that.
  __ bind(&fixed_array);

  __ push(Immediate(Smi::FromInt(1)));  // Smi(1) indicates slow check
  __ push(eax);  // Array
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  // times_2 because the index is a smi (shifted left by one bit).
  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(edx);
  __ mov(FieldOperand(edx, FixedArray::OffsetOfElementAt(vector_index)),
         Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, loop_statement.continue_label());
  __ mov(ebx, eax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}
1074 
1075 
void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  // Sets the [[HomeObject]] (home_object_symbol property) of the object on
  // top of the operand stack to the value located 'offset' slots down the
  // stack, via a named StoreIC.
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(StoreDescriptor::NameRegister(),
         Immediate(isolate()->factory()->home_object_symbol()));
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}
1086 
1087 
void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  // Same as EmitSetHomeObject, except the receiver is taken from the
  // accumulator (eax) instead of the top of the operand stack.
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), eax);
  __ mov(StoreDescriptor::NameRegister(),
         Immediate(isolate()->factory()->home_object_symbol()));
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}
1099 
1100 
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  // Fast path for a DYNAMIC_GLOBAL lookup: walk the context chain checking
  // that no sloppy-eval extension object could shadow the global; jump to
  // 'slow' on any non-empty extension, otherwise load through the normal
  // global-load machinery.
  Register context = esi;
  Register temp = edx;

  // First walk the statically known part of the scope chain.
  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                         Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.  If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Inside an eval scope the rest of the chain is not statically known,
    // so emit a runtime loop over the remaining contexts.
    // Loop up the context chain.  There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at native context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->native_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is "the hole".
    __ JumpIfNotRoot(ContextOperand(temp, Context::EXTENSION_INDEX),
                     Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}
1152 
1153 
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  // Returns an operand for a context-allocated variable, emitting checks
  // along the way that no sloppy-eval extension object between the current
  // scope and the variable's scope could shadow it; jumps to 'slow'
  // otherwise.
  DCHECK(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                         Heap::kTheHoleValueRootIndex, slow);
      }
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
  }
  // Check that last extension is "the hole".
  __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                   Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());
}
1181 
1182 
void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    // Fast case: the variable is (almost certainly) the global of the same
    // name; checks for shadowing extensions jump to 'slow'.
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    // Fast case: a known local shadowed only potentially by eval.
    Variable* local = var->local_if_not_shadowed();
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST) {
      // let/const bindings hold the hole until initialized; loading the
      // hole means a use before initialization (temporal dead zone).
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, done);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kThrowReferenceError);
    }
    __ jmp(done);
  }
  // Other dynamic modes fall through to the caller's slow path.
}
1207 
1208 
void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  // Loads a global variable through the LoadGlobalIC, passing the proxy's
  // feedback-vector slot (as a smi) in the IC's slot register.
#ifdef DEBUG
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
#endif
  __ mov(LoadGlobalDescriptor::SlotRegister(),
         Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
  CallLoadGlobalIC(typeof_mode);
}
1220 
1221 
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Loads the value of a variable into the current expression context,
  // dispatching on the variable's allocation location.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(eax);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      // typeof of a known-allocated variable never goes through this path.
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");

      if (NeedsHoleCheckForLoad(proxy)) {
        // Let and const need a read barrier.
        Label done;
        GetVar(eax, var);
        __ cmp(eax, isolate()->factory()->the_hole_value());
        __ j(not_equal, &done, Label::kNear);
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kThrowReferenceError);
        }
        __ bind(&done);
        context()->Plug(eax);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      // Dynamically scoped variable: try the fast cases first, then fall
      // back to a full runtime lookup by name.
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ push(Immediate(var->name()));
      // Inside typeof an unresolvable name yields "undefined" rather than
      // throwing, hence the distinct runtime entry.
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(eax);
      break;
    }
  }
}
1285 
1286 
EmitAccessor(ObjectLiteralProperty * property)1287 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1288   Expression* expression = (property == NULL) ? NULL : property->value();
1289   if (expression == NULL) {
1290     PushOperand(isolate()->factory()->null_value());
1291   } else {
1292     VisitForStackValue(expression);
1293     if (NeedsHomeObject(expression)) {
1294       DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1295              property->kind() == ObjectLiteral::Property::SETTER);
1296       int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1297       EmitSetHomeObject(expression, offset, property->GetSlot());
1298     }
1299   }
1300 }
1301 
1302 
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  // Compiles an object literal in two phases: the "static" part (no
  // computed property names) whose map is known ahead of time, then the
  // "dynamic" part from the first computed name on (see the comment before
  // the second loop below).
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->ComputeFlags();
  // If any of the keys would store to the elements array, then we shouldn't
  // allow it.
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_properties));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    // Fast path: clone the boilerplate with a stub; the stub's calling
    // convention takes the closure, literal index, constant properties and
    // flags in eax/ebx/ecx/edx.
    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_properties));
    __ mov(edx, Immediate(Smi::FromInt(flags)));
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  // Accessors are collected per key so that a getter/setter pair for the
  // same key is defined with a single runtime call (see the loop after
  // this one).
  AccessorTable accessor_table(zone());
  int property_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(eax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();  // Constant properties are compile-time values.
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(eax));
            __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
            __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
            EmitLoadStoreICSlot(property->GetSlot(0));
            CallStoreIC();
            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            // The store is shadowed by a later property with the same key;
            // still evaluate the value for its side effects.
            VisitForEffect(value);
          }
          break;
        }
        // Non-internalized-string key: go through Runtime::kSetProperty.
        PushOperand(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          PushOperand(Smi::FromInt(SLOPPY));  // Language mode
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        PushOperand(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                               BailoutState::NO_REGISTERS);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    PushOperand(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);

    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);

    PushOperand(Smi::FromInt(NONE));
    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
    PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right.  All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(eax);  // Save result on the stack
      result_saved = true;
    }

    PushOperand(Operand(esp, 0));  // Duplicate receiver.

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
      PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                             BailoutState::NO_REGISTERS);
    } else {
      EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
      VisitForStackValue(value);
      if (NeedsHomeObject(value)) {
        EmitSetHomeObject(value, 2, property->GetSlot());
      }

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            PushOperand(Smi::FromInt(NONE));
            PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
            CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
            PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                                   BailoutState::NO_REGISTERS);
          } else {
            DropOperands(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();  // Handled in the branch above.
          break;

        case ObjectLiteral::Property::GETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
          break;

        case ObjectLiteral::Property::SETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
          break;
      }
    }
  }

  // Hand the result to the expression context, from wherever it ended up.
  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
1496 
1497 
// Emits code for an array literal. First materializes the boilerplate array
// (via runtime call or FastCloneShallowArrayStub), then overwrites each
// non-compile-time-constant element with a keyed store IC, and finally
// appends any post-spread elements through Runtime::kAppendElement.
VisitArrayLiteral(ArrayLiteral * expr)1498 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1499   Comment cmnt(masm_, "[ ArrayLiteral");
1500 
1501   Handle<FixedArray> constant_elements = expr->constant_elements();
1502   bool has_constant_fast_elements =
1503       IsFastObjectElementsKind(expr->constant_elements_kind());
1504 
1505   AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1506   if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
1507     // If the only customer of allocation sites is transitioning, then
1508     // we can turn it off if we don't have anywhere else to transition to.
1509     allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1510   }
1511 
       // Slow path: the runtime builds the literal from the closure, literal
       // index, constant elements and flags pushed on the stack.
1512   if (MustCreateArrayLiteralWithRuntime(expr)) {
1513     __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1514     __ push(Immediate(Smi::FromInt(expr->literal_index())));
1515     __ push(Immediate(constant_elements));
1516     __ push(Immediate(Smi::FromInt(expr->ComputeFlags())));
1517     __ CallRuntime(Runtime::kCreateArrayLiteral);
1518   } else {
       // Fast path: stub takes closure/index/elements in eax/ebx/ecx.
1519     __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1520     __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1521     __ mov(ecx, Immediate(constant_elements));
1522     FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1523     __ CallStub(&stub);
1524   }
1525   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1526 
1527   bool result_saved = false;  // Is the result saved to the stack?
1528   ZoneList<Expression*>* subexprs = expr->values();
1529   int length = subexprs->length();
1530 
1531   // Emit code to evaluate all the non-constant subexpressions and to store
1532   // them into the newly cloned array.
1533   int array_index = 0;
1534   for (; array_index < length; array_index++) {
1535     Expression* subexpr = subexprs->at(array_index);
1536     DCHECK(!subexpr->IsSpread());
1537 
1538     // If the subexpression is a literal or a simple materialized literal it
1539     // is already set in the cloned array.
1540     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1541 
1542     if (!result_saved) {
1543       PushOperand(eax);  // array literal.
1544       result_saved = true;
1545     }
1546     VisitForAccumulatorValue(subexpr);
1547 
       // Store the element: value is in eax (accumulator), key is the smi
       // index, receiver is the array literal saved on top of the stack.
1548     __ mov(StoreDescriptor::NameRegister(),
1549            Immediate(Smi::FromInt(array_index)));
1550     __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
1551     EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1552     Handle<Code> ic =
1553         CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1554     CallIC(ic);
1555     PrepareForBailoutForId(expr->GetIdForElement(array_index),
1556                            BailoutState::NO_REGISTERS);
1557   }
1558 
1559   // In case the array literal contains spread expressions it has two parts. The
1560   // first part is the "static" array which has a literal index and is handled
1561   // above. The second part is the part after the first spread expression
1562   // (inclusive) and these elements get appended to the array. Note that the
1563   // number of elements an iterable produces is unknown ahead of time.
1564   if (array_index < length && result_saved) {
1565     PopOperand(eax);
1566     result_saved = false;
1567   }
1568   for (; array_index < length; array_index++) {
1569     Expression* subexpr = subexprs->at(array_index);
1570 
1571     PushOperand(eax);
1572     DCHECK(!subexpr->IsSpread());
1573     VisitForStackValue(subexpr);
1574     CallRuntimeWithOperands(Runtime::kAppendElement);
1575 
1576     PrepareForBailoutForId(expr->GetIdForElement(array_index),
1577                            BailoutState::NO_REGISTERS);
1578   }
1579 
       // Plug the result into the surrounding expression context: from the
       // stack if it was saved there, otherwise straight from eax.
1580   if (result_saved) {
1581     context()->PlugTOS();
1582   } else {
1583     context()->Plug(eax);
1584   }
1585 }
1586 
1587 
// Emits code for an assignment expression. The LHS kind (variable, named or
// keyed property, plain or super) determines what gets evaluated onto the
// stack/registers before the RHS; for compound assignments (e.g. +=) the old
// value is loaded, combined with the RHS, and then stored back.
VisitAssignment(Assignment * expr)1588 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1589   DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1590 
1591   Comment cmnt(masm_, "[ Assignment");
1592 
1593   Property* property = expr->target()->AsProperty();
1594   LhsKind assign_type = Property::GetAssignType(property);
1595 
1596   // Evaluate LHS expression.
1597   switch (assign_type) {
1598     case VARIABLE:
1599       // Nothing to do here.
1600       break;
1601     case NAMED_SUPER_PROPERTY:
           // Stack after this case: this, home_object
           // (compound adds a duplicate pair for the load).
1602       VisitForStackValue(
1603           property->obj()->AsSuperPropertyReference()->this_var());
1604       VisitForAccumulatorValue(
1605           property->obj()->AsSuperPropertyReference()->home_object());
1606       PushOperand(result_register());
1607       if (expr->is_compound()) {
1608         PushOperand(MemOperand(esp, kPointerSize));
1609         PushOperand(result_register());
1610       }
1611       break;
1612     case NAMED_PROPERTY:
1613       if (expr->is_compound()) {
1614         // We need the receiver both on the stack and in the register.
1615         VisitForStackValue(property->obj());
1616         __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
1617       } else {
1618         VisitForStackValue(property->obj());
1619       }
1620       break;
1621     case KEYED_SUPER_PROPERTY:
           // Stack after this case: this, home_object, key
           // (compound duplicates the triple for the load).
1622       VisitForStackValue(
1623           property->obj()->AsSuperPropertyReference()->this_var());
1624       VisitForStackValue(
1625           property->obj()->AsSuperPropertyReference()->home_object());
1626       VisitForAccumulatorValue(property->key());
1627       PushOperand(result_register());
1628       if (expr->is_compound()) {
1629         PushOperand(MemOperand(esp, 2 * kPointerSize));
1630         PushOperand(MemOperand(esp, 2 * kPointerSize));
1631         PushOperand(result_register());
1632       }
1633       break;
1634     case KEYED_PROPERTY: {
1635       if (expr->is_compound()) {
           // Receiver and key both on the stack and in the load registers.
1636         VisitForStackValue(property->obj());
1637         VisitForStackValue(property->key());
1638         __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize));
1639         __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
1640       } else {
1641         VisitForStackValue(property->obj());
1642         VisitForStackValue(property->key());
1643       }
1644       break;
1645     }
1646   }
1647 
1648   // For compound assignments we need another deoptimization point after the
1649   // variable/property load.
1650   if (expr->is_compound()) {
1651     AccumulatorValueContext result_context(this);
1652     { AccumulatorValueContext left_operand_context(this);
1653       switch (assign_type) {
1654         case VARIABLE:
1655           EmitVariableLoad(expr->target()->AsVariableProxy());
1656           PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
1657           break;
1658         case NAMED_SUPER_PROPERTY:
1659           EmitNamedSuperPropertyLoad(property);
1660           PrepareForBailoutForId(property->LoadId(),
1661                                  BailoutState::TOS_REGISTER);
1662           break;
1663         case NAMED_PROPERTY:
1664           EmitNamedPropertyLoad(property);
1665           PrepareForBailoutForId(property->LoadId(),
1666                                  BailoutState::TOS_REGISTER);
1667           break;
1668         case KEYED_SUPER_PROPERTY:
1669           EmitKeyedSuperPropertyLoad(property);
1670           PrepareForBailoutForId(property->LoadId(),
1671                                  BailoutState::TOS_REGISTER);
1672           break;
1673         case KEYED_PROPERTY:
1674           EmitKeyedPropertyLoad(property);
1675           PrepareForBailoutForId(property->LoadId(),
1676                                  BailoutState::TOS_REGISTER);
1677           break;
1678       }
1679     }
1680 
1681     Token::Value op = expr->binary_op();
1682     PushOperand(eax);  // Left operand goes on the stack.
1683     VisitForAccumulatorValue(expr->value());
1684 
       // Combine old value with RHS, inlining the smi fast path if profitable.
1685     if (ShouldInlineSmiCase(op)) {
1686       EmitInlineSmiBinaryOp(expr->binary_operation(),
1687                             op,
1688                             expr->target(),
1689                             expr->value());
1690     } else {
1691       EmitBinaryOp(expr->binary_operation(), op);
1692     }
1693 
1694     // Deoptimization point in case the binary operation may have side effects.
1695     PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
1696   } else {
1697     VisitForAccumulatorValue(expr->value());
1698   }
1699 
1700   SetExpressionPosition(expr);
1701 
1702   // Store the value.
1703   switch (assign_type) {
1704     case VARIABLE:
1705       EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1706                              expr->op(), expr->AssignmentSlot());
1707       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
1708       context()->Plug(eax);
1709       break;
1710     case NAMED_PROPERTY:
1711       EmitNamedPropertyAssignment(expr);
1712       break;
1713     case NAMED_SUPER_PROPERTY:
1714       EmitNamedSuperPropertyStore(property);
1715       context()->Plug(result_register());
1716       break;
1717     case KEYED_SUPER_PROPERTY:
1718       EmitKeyedSuperPropertyStore(property);
1719       context()->Plug(result_register());
1720       break;
1721     case KEYED_PROPERTY:
1722       EmitKeyedPropertyAssignment(expr);
1723       break;
1724   }
1725 }
1726 
1727 
// Emits code for a yield expression inside a generator: records the
// continuation offset on the generator object, suspends via the runtime (or
// returns directly when the operand stack is empty), and on resumption
// dispatches on the resume mode (next/return/throw) delivered in ebx.
VisitYield(Yield * expr)1728 void FullCodeGenerator::VisitYield(Yield* expr) {
1729   Comment cmnt(masm_, "[ Yield");
1730   SetExpressionPosition(expr);
1731 
1732   // Evaluate yielded value first; the initial iterator definition depends on
1733   // this.  It stays on the stack while we update the iterator.
1734   VisitForStackValue(expr->expression());
1735 
1736   Label suspend, continuation, post_runtime, resume, exception;
1737 
1738   __ jmp(&suspend);
1739   __ bind(&continuation);
1740   // When we arrive here, eax holds the generator object.
1741   __ RecordGeneratorContinuation();
1742   __ mov(ebx, FieldOperand(eax, JSGeneratorObject::kResumeModeOffset));
1743   __ mov(eax, FieldOperand(eax, JSGeneratorObject::kInputOrDebugPosOffset));
       // The mode ordering lets a single compare against kReturn classify all
       // three resume modes: less => next, equal => return, greater => throw.
1744   STATIC_ASSERT(JSGeneratorObject::kNext < JSGeneratorObject::kReturn);
1745   STATIC_ASSERT(JSGeneratorObject::kThrow > JSGeneratorObject::kReturn);
1746   __ cmp(ebx, Immediate(Smi::FromInt(JSGeneratorObject::kReturn)));
1747   __ j(less, &resume);
1748   __ Push(result_register());
1749   __ j(greater, &exception);
       // Resume mode kReturn: wrap the input in {value, done: true} and return.
1750   EmitCreateIteratorResult(true);
1751   EmitUnwindAndReturn();
1752 
1753   __ bind(&exception);
1754   __ CallRuntime(Runtime::kThrow);
1755 
1756   __ bind(&suspend);
1757   OperandStackDepthIncrement(1);  // Not popped on this path.
1758   VisitForAccumulatorValue(expr->generator_object());
1759   DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
       // Store the bytecode/code offset of the continuation label so the
       // generator can be resumed at the code bound above.
1760   __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
1761          Immediate(Smi::FromInt(continuation.pos())));
1762   __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
1763   __ mov(ecx, esi);
1764   __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
1765                       kDontSaveFPRegs);
       // If esp is already at the expression-stack base there is nothing to
       // save; skip the runtime suspend call.
1766   __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
1767   __ cmp(esp, ebx);
1768   __ j(equal, &post_runtime);
1769   __ push(eax);  // generator object
1770   __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1771   RestoreContext();
1772   __ bind(&post_runtime);
1773   PopOperand(result_register());
1774   EmitReturnSequence();
1775 
1776   __ bind(&resume);
1777   context()->Plug(result_register());
1778 }
1779 
// Pushes a memory operand onto the machine stack while keeping the
// compiler's virtual operand-stack depth counter in sync.
PushOperand(MemOperand operand)1780 void FullCodeGenerator::PushOperand(MemOperand operand) {
1781   OperandStackDepthIncrement(1);
1782   __ Push(operand);
1783 }
1784 
// Debug-only sanity check: asserts that ebp - esp equals the fixed frame
// size plus the tracked operand stack depth. Clobbers eax (only emitted
// when FLAG_debug_code is set).
EmitOperandStackDepthCheck()1785 void FullCodeGenerator::EmitOperandStackDepthCheck() {
1786   if (FLAG_debug_code) {
1787     int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1788                         operand_stack_depth_ * kPointerSize;
1789     __ mov(eax, ebp);
1790     __ sub(eax, esp);
1791     __ cmp(eax, Immediate(expected_diff));
1792     __ Assert(equal, kUnexpectedStackDepth);
1793   }
1794 }
1795 
// Allocates a JSIteratorResult {value, done} in new space (falling back to
// the runtime on allocation failure), popping the value from the stack and
// using the compile-time constant |done|. Result object is left in eax.
EmitCreateIteratorResult(bool done)1796 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
1797   Label allocate, done_allocate;
1798 
1799   __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &allocate,
1800               NO_ALLOCATION_FLAGS);
1801   __ jmp(&done_allocate, Label::kNear);
1802 
       // Slow path: inline allocation failed, go through the runtime.
1803   __ bind(&allocate);
1804   __ Push(Smi::FromInt(JSIteratorResult::kSize));
1805   __ CallRuntime(Runtime::kAllocateInNewSpace);
1806 
       // Initialize all five fields: map, properties, elements, value, done.
1807   __ bind(&done_allocate);
1808   __ mov(ebx, NativeContextOperand());
1809   __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
1810   __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
1811   __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
1812          isolate()->factory()->empty_fixed_array());
1813   __ mov(FieldOperand(eax, JSObject::kElementsOffset),
1814          isolate()->factory()->empty_fixed_array());
1815   __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
1816   __ mov(FieldOperand(eax, JSIteratorResult::kDoneOffset),
1817          isolate()->factory()->ToBoolean(done));
1818   STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
1819   OperandStackDepthDecrement(1);
1820 }
1821 
1822 
// Emits an inlined smi fast path for a binary operation, with a patchable
// jump (JumpPatchSite) that initially always takes the smi case and is
// later patched by the IC system. Left operand comes from the operand
// stack, right operand is in eax; falls back to the BinaryOpIC stub when
// either operand is not a smi or the smi result overflows.
EmitInlineSmiBinaryOp(BinaryOperation * expr,Token::Value op,Expression * left,Expression * right)1823 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1824                                               Token::Value op,
1825                                               Expression* left,
1826                                               Expression* right) {
1827   // Do combined smi check of the operands. Left operand is on the
1828   // stack. Right operand is in eax.
1829   Label smi_case, done, stub_call;
1830   PopOperand(edx);
1831   __ mov(ecx, eax);
       // or-ing both operands leaves the tag bit clear only if both are smis,
       // so a single smi test covers both.
1832   __ or_(eax, edx);
1833   JumpPatchSite patch_site(masm_);
1834   patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
1835 
       // Stub fallback: restore the right operand into eax and call the IC.
1836   __ bind(&stub_call);
1837   __ mov(eax, ecx);
1838   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1839   CallIC(code, expr->BinaryOperationFeedbackId());
1840   patch_site.EmitPatchInfo();
1841   __ jmp(&done, Label::kNear);
1842 
1843   // Smi case.
1844   __ bind(&smi_case);
1845   __ mov(eax, edx);  // Copy left operand in case of a stub call.
1846 
1847   switch (op) {
1848     case Token::SAR:
1849       __ SmiUntag(ecx);
1850       __ sar_cl(eax);  // No checks of result necessary
           // Clear the tag bits shifted in from the left before re-tagging.
1851       __ and_(eax, Immediate(~kSmiTagMask));
1852       break;
1853     case Token::SHL: {
1854       Label result_ok;
1855       __ SmiUntag(eax);
1856       __ SmiUntag(ecx);
1857       __ shl_cl(eax);
1858       // Check that the *signed* result fits in a smi.
1859       __ cmp(eax, 0xc0000000);
1860       __ j(positive, &result_ok);
           // Overflow: re-tag the shift count and retry via the stub.
1861       __ SmiTag(ecx);
1862       __ jmp(&stub_call);
1863       __ bind(&result_ok);
1864       __ SmiTag(eax);
1865       break;
1866     }
1867     case Token::SHR: {
1868       Label result_ok;
1869       __ SmiUntag(eax);
1870       __ SmiUntag(ecx);
1871       __ shr_cl(eax);
           // Unsigned result must fit in the 30-bit smi payload.
1872       __ test(eax, Immediate(0xc0000000));
1873       __ j(zero, &result_ok);
1874       __ SmiTag(ecx);
1875       __ jmp(&stub_call);
1876       __ bind(&result_ok);
1877       __ SmiTag(eax);
1878       break;
1879     }
1880     case Token::ADD:
1881       __ add(eax, ecx);
1882       __ j(overflow, &stub_call);
1883       break;
1884     case Token::SUB:
1885       __ sub(eax, ecx);
1886       __ j(overflow, &stub_call);
1887       break;
1888     case Token::MUL: {
           // Untagging one operand keeps the product correctly tagged.
1889       __ SmiUntag(eax);
1890       __ imul(eax, ecx);
1891       __ j(overflow, &stub_call);
           // A zero result may actually be -0 if either operand was negative;
           // defer that case to the stub.
1892       __ test(eax, eax);
1893       __ j(not_zero, &done, Label::kNear);
1894       __ mov(ebx, edx);
1895       __ or_(ebx, ecx);
1896       __ j(negative, &stub_call);
1897       break;
1898     }
1899     case Token::BIT_OR:
1900       __ or_(eax, ecx);
1901       break;
1902     case Token::BIT_AND:
1903       __ and_(eax, ecx);
1904       break;
1905     case Token::BIT_XOR:
1906       __ xor_(eax, ecx);
1907       break;
1908     default:
1909       UNREACHABLE();
1910   }
1911 
1912   __ bind(&done);
1913   context()->Plug(eax);
1914 }
1915 
1916 
// Defines each class-literal property on the constructor (static members)
// or the prototype (instance members), dispatching to the appropriate
// runtime function per property kind. Expects the constructor at
// esp[kPointerSize] and the prototype at esp[0].
EmitClassDefineProperties(ClassLiteral * lit)1917 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
1918   for (int i = 0; i < lit->properties()->length(); i++) {
1919     ObjectLiteral::Property* property = lit->properties()->at(i);
1920     Expression* value = property->value();
1921 
1922     if (property->is_static()) {
1923       PushOperand(Operand(esp, kPointerSize));  // constructor
1924     } else {
1925       PushOperand(Operand(esp, 0));  // prototype
1926     }
1927     EmitPropertyKey(property, lit->GetIdForProperty(i));
1928 
1929     // The static prototype property is read only. We handle the non computed
1930     // property name case in the parser. Since this is the only case where we
1931     // need to check for an own read only property we special case this so we do
1932     // not need to do this for every property.
1933     if (property->is_static() && property->is_computed_name()) {
1934       __ CallRuntime(Runtime::kThrowIfStaticPrototype);
1935       __ push(eax);
1936     }
1937 
1938     VisitForStackValue(value);
1939     if (NeedsHomeObject(value)) {
1940       EmitSetHomeObject(value, 2, property->GetSlot());
1941     }
1942 
1943     switch (property->kind()) {
1944       case ObjectLiteral::Property::CONSTANT:
1945       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1946       case ObjectLiteral::Property::PROTOTYPE:
1947         UNREACHABLE();
           // NOTE: intentional fall-through from UNREACHABLE() to COMPUTED.
1948       case ObjectLiteral::Property::COMPUTED:
1949         PushOperand(Smi::FromInt(DONT_ENUM));
1950         PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1951         CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
1952         break;
1953 
1954       case ObjectLiteral::Property::GETTER:
1955         PushOperand(Smi::FromInt(DONT_ENUM));
1956         CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
1957         break;
1958 
1959       case ObjectLiteral::Property::SETTER:
1960         PushOperand(Smi::FromInt(DONT_ENUM));
1961         CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
1962         break;
1963     }
1964   }
1965 }
1966 
1967 
// Emits a non-inlined binary operation via the BinaryOpIC stub. Left
// operand is popped into edx, right operand is already in eax; the unbound
// patch site signals to the IC that no inlined smi code was emitted.
EmitBinaryOp(BinaryOperation * expr,Token::Value op)1968 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
1969   PopOperand(edx);
1970   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1971   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
1972   CallIC(code, expr->BinaryOperationFeedbackId());
1973   patch_site.EmitPatchInfo();
1974   context()->Plug(eax);
1975 }
1976 
1977 
// Emits a plain (Token::ASSIGN) store of the value in eax to an arbitrary
// reference expression — used e.g. for destructuring, where the value is
// produced before the target. Preserves the value across target evaluation
// by shuffling it on the operand stack, then plugs eax into the context.
EmitAssignment(Expression * expr,FeedbackVectorSlot slot)1978 void FullCodeGenerator::EmitAssignment(Expression* expr,
1979                                        FeedbackVectorSlot slot) {
1980   DCHECK(expr->IsValidReferenceExpressionOrThis());
1981 
1982   Property* prop = expr->AsProperty();
1983   LhsKind assign_type = Property::GetAssignType(prop);
1984 
1985   switch (assign_type) {
1986     case VARIABLE: {
1987       Variable* var = expr->AsVariableProxy()->var();
1988       EffectContext context(this);
1989       EmitVariableAssignment(var, Token::ASSIGN, slot);
1990       break;
1991     }
1992     case NAMED_PROPERTY: {
1993       PushOperand(eax);  // Preserve value.
1994       VisitForAccumulatorValue(prop->obj());
1995       __ Move(StoreDescriptor::ReceiverRegister(), eax);
1996       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
1997       __ mov(StoreDescriptor::NameRegister(),
1998              prop->key()->AsLiteral()->value());
1999       EmitLoadStoreICSlot(slot);
2000       CallStoreIC();
2001       break;
2002     }
2003     case NAMED_SUPER_PROPERTY: {
2004       PushOperand(eax);
2005       VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2006       VisitForAccumulatorValue(
2007           prop->obj()->AsSuperPropertyReference()->home_object());
2008       // stack: value, this; eax: home_object
2009       Register scratch = ecx;
2010       Register scratch2 = edx;
           // Rearrange so the store helper sees [this, home_object] on the
           // stack with the value back in eax.
2011       __ mov(scratch, result_register());               // home_object
2012       __ mov(eax, MemOperand(esp, kPointerSize));       // value
2013       __ mov(scratch2, MemOperand(esp, 0));             // this
2014       __ mov(MemOperand(esp, kPointerSize), scratch2);  // this
2015       __ mov(MemOperand(esp, 0), scratch);              // home_object
2016       // stack: this, home_object. eax: value
2017       EmitNamedSuperPropertyStore(prop);
2018       break;
2019     }
2020     case KEYED_SUPER_PROPERTY: {
2021       PushOperand(eax);
2022       VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2023       VisitForStackValue(
2024           prop->obj()->AsSuperPropertyReference()->home_object());
2025       VisitForAccumulatorValue(prop->key());
2026       Register scratch = ecx;
2027       Register scratch2 = edx;
2028       __ mov(scratch2, MemOperand(esp, 2 * kPointerSize));  // value
2029       // stack: value, this, home_object; eax: key, edx: value
2030       __ mov(scratch, MemOperand(esp, kPointerSize));  // this
2031       __ mov(MemOperand(esp, 2 * kPointerSize), scratch);
2032       __ mov(scratch, MemOperand(esp, 0));  // home_object
2033       __ mov(MemOperand(esp, kPointerSize), scratch);
2034       __ mov(MemOperand(esp, 0), eax);
2035       __ mov(eax, scratch2);
2036       // stack: this, home_object, key; eax: value.
2037       EmitKeyedSuperPropertyStore(prop);
2038       break;
2039     }
2040     case KEYED_PROPERTY: {
2041       PushOperand(eax);  // Preserve value.
2042       VisitForStackValue(prop->obj());
2043       VisitForAccumulatorValue(prop->key());
2044       __ Move(StoreDescriptor::NameRegister(), eax);
2045       PopOperand(StoreDescriptor::ReceiverRegister());  // Receiver.
2046       PopOperand(StoreDescriptor::ValueRegister());     // Restore value.
2047       EmitLoadStoreICSlot(slot);
2048       Handle<Code> ic =
2049           CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2050       CallIC(ic);
2051       break;
2052     }
2053   }
2054   context()->Plug(eax);
2055 }
2056 
2057 
// Stores eax into a stack-allocated local or a context slot; for context
// slots also emits the write barrier. NOTE(review): the barrier uses ecx as
// the object register — presumably VarOperand left the context in ecx for
// context slots; verify against VarOperand's definition.
EmitStoreToStackLocalOrContextSlot(Variable * var,MemOperand location)2058 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2059     Variable* var, MemOperand location) {
2060   __ mov(location, eax);
2061   if (var->IsContextSlot()) {
2062     __ mov(edx, eax);
2063     int offset = Context::SlotOffset(var->index());
2064     __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
2065   }
2066 }
2067 
2068 
// Stores the value in eax into |var|, selecting the strategy from the
// variable's allocation (global/stack/context/lookup slot), its mode
// (VAR/LET/CONST/CONST_LEGACY) and whether this is an initializing store.
// Non-initializing stores to LET/CONST hole-check and may throw.
EmitVariableAssignment(Variable * var,Token::Value op,FeedbackVectorSlot slot)2069 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2070                                                FeedbackVectorSlot slot) {
2071   if (var->IsUnallocated()) {
2072     // Global var, const, or let.
2073     __ mov(StoreDescriptor::NameRegister(), var->name());
       // Receiver is the global object, loaded from the native context.
2074     __ mov(StoreDescriptor::ReceiverRegister(), NativeContextOperand());
2075     __ mov(StoreDescriptor::ReceiverRegister(),
2076            ContextOperand(StoreDescriptor::ReceiverRegister(),
2077                           Context::EXTENSION_INDEX));
2078     EmitLoadStoreICSlot(slot);
2079     CallStoreIC();
2080 
2081   } else if (var->mode() == LET && op != Token::INIT) {
2082     // Non-initializing assignment to let variable needs a write barrier.
2083     DCHECK(!var->IsLookupSlot());
2084     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2085     Label assign;
2086     MemOperand location = VarOperand(var, ecx);
       // TDZ check: the hole marks an uninitialized binding.
2087     __ mov(edx, location);
2088     __ cmp(edx, isolate()->factory()->the_hole_value());
2089     __ j(not_equal, &assign, Label::kNear);
2090     __ push(Immediate(var->name()));
2091     __ CallRuntime(Runtime::kThrowReferenceError);
2092     __ bind(&assign);
2093     EmitStoreToStackLocalOrContextSlot(var, location);
2094 
2095   } else if (var->mode() == CONST && op != Token::INIT) {
2096     // Assignment to const variable needs a write barrier.
2097     DCHECK(!var->IsLookupSlot());
2098     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2099     Label const_error;
2100     MemOperand location = VarOperand(var, ecx);
       // ReferenceError if still uninitialized (TDZ), otherwise a const
       // assignment error — both paths throw.
2101     __ mov(edx, location);
2102     __ cmp(edx, isolate()->factory()->the_hole_value());
2103     __ j(not_equal, &const_error, Label::kNear);
2104     __ push(Immediate(var->name()));
2105     __ CallRuntime(Runtime::kThrowReferenceError);
2106     __ bind(&const_error);
2107     __ CallRuntime(Runtime::kThrowConstAssignError);
2108 
2109   } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2110     // Initializing assignment to const {this} needs a write barrier.
2111     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2112     Label uninitialized_this;
2113     MemOperand location = VarOperand(var, ecx);
       // 'this' may only be initialized once (derived-constructor rule).
2114     __ mov(edx, location);
2115     __ cmp(edx, isolate()->factory()->the_hole_value());
2116     __ j(equal, &uninitialized_this);
2117     __ push(Immediate(var->name()));
2118     __ CallRuntime(Runtime::kThrowReferenceError);
2119     __ bind(&uninitialized_this);
2120     EmitStoreToStackLocalOrContextSlot(var, location);
2121 
2122   } else if (!var->is_const_mode() || op == Token::INIT) {
2123     if (var->IsLookupSlot()) {
2124       // Assignment to var.
2125       __ Push(Immediate(var->name()));
2126       __ Push(eax);
2127       __ CallRuntime(is_strict(language_mode())
2128                          ? Runtime::kStoreLookupSlot_Strict
2129                          : Runtime::kStoreLookupSlot_Sloppy);
2130     } else {
2131       // Assignment to var or initializing assignment to let/const in harmony
2132       // mode.
2133       DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2134       MemOperand location = VarOperand(var, ecx);
2135       if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
2136         // Check for an uninitialized let binding.
2137         __ mov(edx, location);
2138         __ cmp(edx, isolate()->factory()->the_hole_value());
2139         __ Check(equal, kLetBindingReInitialization);
2140       }
2141       EmitStoreToStackLocalOrContextSlot(var, location);
2142     }
2143 
2144   } else {
2145     DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2146     if (is_strict(language_mode())) {
2147       __ CallRuntime(Runtime::kThrowConstAssignError);
2148     }
2149     // Silently ignore store in sloppy mode.
2150   }
2151 }
2152 
2153 
// Stores eax into a named property using the store IC, popping the receiver
// from the operand stack. Leaves the stored value in eax for the context.
EmitNamedPropertyAssignment(Assignment * expr)2154 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2155   // Assignment to a property, using a named store IC.
2156   // eax    : value
2157   // esp[0] : receiver
2158   Property* prop = expr->target()->AsProperty();
2159   DCHECK(prop != NULL);
2160   DCHECK(prop->key()->IsLiteral());
2161 
2162   __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
2163   PopOperand(StoreDescriptor::ReceiverRegister());
2164   EmitLoadStoreICSlot(expr->AssignmentSlot());
2165   CallStoreIC();
2166   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2167   context()->Plug(eax);
2168 }
2169 
2170 
// Stores eax into a named super property via the runtime. Expects the
// receiver ('this') and home_object on the operand stack; pushes the
// literal key and value before the call.
EmitNamedSuperPropertyStore(Property * prop)2171 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2172   // Assignment to named property of super.
2173   // eax : value
2174   // stack : receiver ('this'), home_object
2175   DCHECK(prop != NULL);
2176   Literal* key = prop->key()->AsLiteral();
2177   DCHECK(key != NULL);
2178 
2179   PushOperand(key->value());
2180   PushOperand(eax);
2181   CallRuntimeWithOperands(is_strict(language_mode())
2182                               ? Runtime::kStoreToSuper_Strict
2183                               : Runtime::kStoreToSuper_Sloppy);
2184 }
2185 
2186 
// Stores eax into a keyed super property via the runtime. Expects the
// receiver ('this'), home_object and key already on the operand stack.
EmitKeyedSuperPropertyStore(Property * prop)2187 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2188   // Assignment to named property of super.
2189   // eax : value
2190   // stack : receiver ('this'), home_object, key
2191 
2192   PushOperand(eax);
2193   CallRuntimeWithOperands(is_strict(language_mode())
2194                               ? Runtime::kStoreKeyedToSuper_Strict
2195                               : Runtime::kStoreKeyedToSuper_Sloppy);
2196 }
2197 
2198 
// Stores eax into a keyed property using the keyed store IC, popping key
// and receiver from the operand stack. Leaves the value in eax.
EmitKeyedPropertyAssignment(Assignment * expr)2199 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2200   // Assignment to a property, using a keyed store IC.
2201   // eax               : value
2202   // esp[0]            : key
2203   // esp[kPointerSize] : receiver
2204 
2205   PopOperand(StoreDescriptor::NameRegister());  // Key.
2206   PopOperand(StoreDescriptor::ReceiverRegister());
2207   DCHECK(StoreDescriptor::ValueRegister().is(eax));
2208   Handle<Code> ic =
2209       CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2210   EmitLoadStoreICSlot(expr->AssignmentSlot());
2211   CallIC(ic);
2212   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2213   context()->Plug(eax);
2214 }
2215 
2216 
// Emits a call to an IC stub, tagging the call site with the AST id for
// type feedback and bumping the per-function IC counter.
CallIC(Handle<Code> code,TypeFeedbackId ast_id)2217 void FullCodeGenerator::CallIC(Handle<Code> code,
2218                                TypeFeedbackId ast_id) {
2219   ic_total_count_++;
2220   __ call(code, RelocInfo::CODE_TARGET, ast_id);
2221 }
2222 
2223 
2224 // Code common for calls using the IC.
// Emits a call where the callee is a variable or a (non-super) named
// property. Arranges [function, receiver] on the stack and records the
// receiver-conversion mode before delegating to EmitCall.
EmitCallWithLoadIC(Call * expr)2225 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2226   Expression* callee = expr->expression();
2227 
2228   // Get the target function.
2229   ConvertReceiverMode convert_mode;
2230   if (callee->IsVariableProxy()) {
2231     { StackValueContext context(this);
2232       EmitVariableLoad(callee->AsVariableProxy());
2233       PrepareForBailout(callee, BailoutState::NO_REGISTERS);
2234     }
2235     // Push undefined as receiver. This is patched in the method prologue if it
2236     // is a sloppy mode method.
2237     PushOperand(isolate()->factory()->undefined_value());
2238     convert_mode = ConvertReceiverMode::kNullOrUndefined;
2239   } else {
2240     // Load the function from the receiver.
2241     DCHECK(callee->IsProperty());
2242     DCHECK(!callee->AsProperty()->IsSuperAccess());
2243     __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2244     EmitNamedPropertyLoad(callee->AsProperty());
2245     PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2246                            BailoutState::TOS_REGISTER);
2247     // Push the target function under the receiver.
2248     PushOperand(Operand(esp, 0));
2249     __ mov(Operand(esp, kPointerSize), eax);
2250     convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2251   }
2252 
2253   EmitCall(expr, convert_mode);
2254 }
2255 
2256 
// Emits a call to a named super property (super.foo(...)). Loads the target
// via Runtime::kLoadFromSuper, then leaves [function, this] on the stack
// for EmitCall.
EmitSuperCallWithLoadIC(Call * expr)2257 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2258   SetExpressionPosition(expr);
2259   Expression* callee = expr->expression();
2260   DCHECK(callee->IsProperty());
2261   Property* prop = callee->AsProperty();
2262   DCHECK(prop->IsSuperAccess());
2263 
2264   Literal* key = prop->key()->AsLiteral();
2265   DCHECK(!key->value()->IsSmi());
2266   // Load the function from the receiver.
2267   SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2268   VisitForStackValue(super_ref->home_object());
2269   VisitForAccumulatorValue(super_ref->this_var());
       // Duplicate 'this' (receiver for the eventual call plus the copy the
       // runtime load consumes) and re-push home_object above it.
2270   PushOperand(eax);
2271   PushOperand(eax);
2272   PushOperand(Operand(esp, kPointerSize * 2));
2273   PushOperand(key->value());
2274   // Stack here:
2275   //  - home_object
2276   //  - this (receiver)
2277   //  - this (receiver) <-- LoadFromSuper will pop here and below.
2278   //  - home_object
2279   //  - key
2280   CallRuntimeWithOperands(Runtime::kLoadFromSuper);
2281   PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2282 
2283   // Replace home_object with target function.
2284   __ mov(Operand(esp, kPointerSize), eax);
2285 
2286   // Stack here:
2287   // - target function
2288   // - this (receiver)
2289   EmitCall(expr);
2290 }
2291 
2292 
2293 // Code common for calls using the IC.
// Emits a call where the callee is a keyed (non-super) property
// (obj[key](...)). Loads the target with the keyed load IC, then arranges
// [function, receiver] on the stack for EmitCall.
EmitKeyedCallWithLoadIC(Call * expr,Expression * key)2294 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2295                                                 Expression* key) {
2296   // Load the key.
2297   VisitForAccumulatorValue(key);
2298 
2299   Expression* callee = expr->expression();
2300 
2301   // Load the function from the receiver.
2302   DCHECK(callee->IsProperty());
2303   __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
2304   __ mov(LoadDescriptor::NameRegister(), eax);
2305   EmitKeyedPropertyLoad(callee->AsProperty());
2306   PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2307                          BailoutState::TOS_REGISTER);
2308 
2309   // Push the target function under the receiver.
2310   PushOperand(Operand(esp, 0));
2311   __ mov(Operand(esp, kPointerSize), eax);
2312 
2313   EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2314 }
2315 
2316 
// Emits a call to a keyed super property (super[key](...)). Loads the
// target via Runtime::kLoadKeyedFromSuper, then leaves [function, this] on
// the stack for EmitCall.
EmitKeyedSuperCallWithLoadIC(Call * expr)2317 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2318   Expression* callee = expr->expression();
2319   DCHECK(callee->IsProperty());
2320   Property* prop = callee->AsProperty();
2321   DCHECK(prop->IsSuperAccess());
2322 
2323   SetExpressionPosition(prop);
2324   // Load the function from the receiver.
2325   SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2326   VisitForStackValue(super_ref->home_object());
2327   VisitForAccumulatorValue(super_ref->this_var());
       // Same stack setup as EmitSuperCallWithLoadIC, but the key is an
       // arbitrary expression evaluated onto the stack.
2328   PushOperand(eax);
2329   PushOperand(eax);
2330   PushOperand(Operand(esp, kPointerSize * 2));
2331   VisitForStackValue(prop->key());
2332   // Stack here:
2333   //  - home_object
2334   //  - this (receiver)
2335   //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2336   //  - home_object
2337   //  - key
2338   CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
2339   PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2340 
2341   // Replace home_object with target function.
2342   __ mov(Operand(esp, kPointerSize), eax);
2343 
2344   // Stack here:
2345   // - target function
2346   // - this (receiver)
2347   EmitCall(expr);
2348 }
2349 
2350 
// Emits the arguments and the actual call for a JS call expression. Expects
// the target function and the receiver to already be on the operand stack
// (pushed by the caller-specific Emit*Call* helper). Uses the CallIC so that
// type feedback is collected in the feedback vector slot for this call site.
void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  SetCallPosition(expr, expr->tail_call_mode());
  if (expr->tail_call_mode() == TailCallMode::kAllow) {
    if (FLAG_trace) {
      __ CallRuntime(Runtime::kTraceTailCall);
    }
    // Update profiling counters before the tail call since we will
    // not return to this function.
    EmitProfilingCounterHandlingForReturnSequence(true);
  }
  Handle<Code> ic =
      CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
          .code();
  // edx holds the feedback vector slot; edi holds the target function, which
  // sits below the receiver and the arguments on the stack.
  __ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot())));
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);
  // The call consumed the arguments plus the receiver.
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  // Drop the target function still on the stack and plug the result (eax).
  context()->DropAndPlug(1, eax);
}
2383 
// Pushes the five arguments of Runtime::kResolvePossiblyDirectEval and calls
// it: (first argument or undefined, enclosing function, language mode,
// scope start position, eval call position). The resolved function is
// returned in eax.
void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
  int arg_count = expr->arguments()->length();
  // Push copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ push(Operand(esp, arg_count * kPointerSize));
  } else {
    __ push(Immediate(isolate()->factory()->undefined_value()));
  }

  // Push the enclosing function.
  __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));

  // Push the language mode.
  __ push(Immediate(Smi::FromInt(language_mode())));

  // Push the start position of the scope the calls resides in.
  __ push(Immediate(Smi::FromInt(scope()->start_position())));

  // Push the source position of the eval call.
  __ push(Immediate(Smi::FromInt(expr->position())));

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
}
2408 
2409 
2410 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
// Pushes the callee and an appropriate receiver ("with" base object or
// undefined) for a call to a possibly-shadowed variable. For lookup slots a
// fast path may be generated; the slow path asks the runtime, which returns
// the function in eax and the receiver in edx.
void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
  VariableProxy* callee = expr->expression()->AsVariableProxy();
  if (callee->var()->IsLookupSlot()) {
    Label slow, done;
    SetExpressionPosition(callee);
    // Generate code for loading from variables potentially shadowed by
    // eval-introduced variables.
    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in eax) and
    // the object holding it (returned in edx).
    __ Push(callee->name());
    __ CallRuntime(Runtime::kLoadLookupSlotForCall);
    PushOperand(eax);  // Function.
    PushOperand(edx);  // Receiver.
    PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);

    // If fast case code has been generated, emit code to push the function
    // and receiver and have the slow path jump around this code.
    if (done.is_linked()) {
      Label call;
      __ jmp(&call, Label::kNear);
      __ bind(&done);
      // Push function.
      __ push(eax);
      // The receiver is implicitly the global receiver. Indicate this by
      // passing undefined to the call function stub.
      __ push(Immediate(isolate()->factory()->undefined_value()));
      __ bind(&call);
    }
  } else {
    VisitForStackValue(callee);
    // refEnv.WithBaseObject()
    PushOperand(isolate()->factory()->undefined_value());
  }
}
2448 
2449 
// Emits a call that may be a direct eval. The callee is resolved at runtime
// via Runtime_ResolvePossiblyDirectEval, then the resolved function is
// invoked with the already-pushed arguments.
void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
  // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval
  // to resolve the function we need to call.  Then we call the resolved
  // function using the given arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  PushCalleeAndWithBaseObject(expr);

  // Push the arguments.
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Push a copy of the function (found below the arguments) and
  // resolve eval.
  __ push(Operand(esp, (arg_count + 1) * kPointerSize));
  EmitResolvePossiblyDirectEval(expr);

  // Touch up the stack with the resolved function.
  __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);

  PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);

  SetCallPosition(expr);
  // edi = target function, eax = argument count, as the Call builtin expects.
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Set(eax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                      expr->tail_call_mode()),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RecordJSReturnSite(expr);
  RestoreContext();
  // Drop the function left on the stack and plug the result.
  context()->DropAndPlug(1, eax);
}
2485 
2486 
// Emits code for a 'new' expression: evaluates the constructor and the
// arguments, records call-site type feedback, and invokes the
// CallConstructStub. The result object ends up in eax.
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  // Record call targets in unoptimized code: ebx holds the feedback vector,
  // edx the slot for this construct site.
  __ EmitLoadTypeFeedbackVector(ebx);
  __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate());
  __ call(stub.GetCode(), RelocInfo::CODE_TARGET);
  // The stub consumed the constructor and the arguments.
  OperandStackDepthDecrement(arg_count + 1);
  PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
  RestoreContext();
  context()->Plug(eax);
}
2525 
2526 
// Emits a super(...) constructor call: pushes the super constructor
// (the prototype of this function's map), the arguments, loads new.target
// into edx, and invokes the Construct builtin.
void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
  SuperCallReference* super_call_ref =
      expr->expression()->AsSuperCallReference();
  DCHECK_NOT_NULL(super_call_ref);

  // Push the super constructor target on the stack (may be null,
  // but the Construct builtin can deal with that properly).
  VisitForAccumulatorValue(super_call_ref->this_function_var());
  __ AssertFunction(result_register());
  // The super constructor is the prototype of the current function's map.
  __ mov(result_register(),
         FieldOperand(result_register(), HeapObject::kMapOffset));
  PushOperand(FieldOperand(result_register(), Map::kPrototypeOffset));

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load new target into edx.
  VisitForAccumulatorValue(super_call_ref->new_target_var());
  __ mov(edx, result_register());

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  // The builtin consumed the target and the arguments.
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->Plug(eax);
}
2566 
2567 
EmitIsSmi(CallRuntime * expr)2568 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2569   ZoneList<Expression*>* args = expr->arguments();
2570   DCHECK(args->length() == 1);
2571 
2572   VisitForAccumulatorValue(args->at(0));
2573 
2574   Label materialize_true, materialize_false;
2575   Label* if_true = NULL;
2576   Label* if_false = NULL;
2577   Label* fall_through = NULL;
2578   context()->PrepareTest(&materialize_true, &materialize_false,
2579                          &if_true, &if_false, &fall_through);
2580 
2581   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2582   __ test(eax, Immediate(kSmiTagMask));
2583   Split(zero, if_true, if_false, fall_through);
2584 
2585   context()->Plug(if_true, if_false);
2586 }
2587 
2588 
EmitIsJSReceiver(CallRuntime * expr)2589 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2590   ZoneList<Expression*>* args = expr->arguments();
2591   DCHECK(args->length() == 1);
2592 
2593   VisitForAccumulatorValue(args->at(0));
2594 
2595   Label materialize_true, materialize_false;
2596   Label* if_true = NULL;
2597   Label* if_false = NULL;
2598   Label* fall_through = NULL;
2599   context()->PrepareTest(&materialize_true, &materialize_false,
2600                          &if_true, &if_false, &fall_through);
2601 
2602   __ JumpIfSmi(eax, if_false);
2603   __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ebx);
2604   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2605   Split(above_equal, if_true, if_false, fall_through);
2606 
2607   context()->Plug(if_true, if_false);
2608 }
2609 
2610 
EmitIsArray(CallRuntime * expr)2611 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2612   ZoneList<Expression*>* args = expr->arguments();
2613   DCHECK(args->length() == 1);
2614 
2615   VisitForAccumulatorValue(args->at(0));
2616 
2617   Label materialize_true, materialize_false;
2618   Label* if_true = NULL;
2619   Label* if_false = NULL;
2620   Label* fall_through = NULL;
2621   context()->PrepareTest(&materialize_true, &materialize_false,
2622                          &if_true, &if_false, &fall_through);
2623 
2624   __ JumpIfSmi(eax, if_false);
2625   __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
2626   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2627   Split(equal, if_true, if_false, fall_through);
2628 
2629   context()->Plug(if_true, if_false);
2630 }
2631 
2632 
EmitIsTypedArray(CallRuntime * expr)2633 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2634   ZoneList<Expression*>* args = expr->arguments();
2635   DCHECK(args->length() == 1);
2636 
2637   VisitForAccumulatorValue(args->at(0));
2638 
2639   Label materialize_true, materialize_false;
2640   Label* if_true = NULL;
2641   Label* if_false = NULL;
2642   Label* fall_through = NULL;
2643   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2644                          &if_false, &fall_through);
2645 
2646   __ JumpIfSmi(eax, if_false);
2647   __ CmpObjectType(eax, JS_TYPED_ARRAY_TYPE, ebx);
2648   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2649   Split(equal, if_true, if_false, fall_through);
2650 
2651   context()->Plug(if_true, if_false);
2652 }
2653 
2654 
EmitIsRegExp(CallRuntime * expr)2655 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2656   ZoneList<Expression*>* args = expr->arguments();
2657   DCHECK(args->length() == 1);
2658 
2659   VisitForAccumulatorValue(args->at(0));
2660 
2661   Label materialize_true, materialize_false;
2662   Label* if_true = NULL;
2663   Label* if_false = NULL;
2664   Label* fall_through = NULL;
2665   context()->PrepareTest(&materialize_true, &materialize_false,
2666                          &if_true, &if_false, &fall_through);
2667 
2668   __ JumpIfSmi(eax, if_false);
2669   __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
2670   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2671   Split(equal, if_true, if_false, fall_through);
2672 
2673   context()->Plug(if_true, if_false);
2674 }
2675 
2676 
EmitIsJSProxy(CallRuntime * expr)2677 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2678   ZoneList<Expression*>* args = expr->arguments();
2679   DCHECK(args->length() == 1);
2680 
2681   VisitForAccumulatorValue(args->at(0));
2682 
2683   Label materialize_true, materialize_false;
2684   Label* if_true = NULL;
2685   Label* if_false = NULL;
2686   Label* fall_through = NULL;
2687   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2688                          &if_false, &fall_through);
2689 
2690   __ JumpIfSmi(eax, if_false);
2691   __ CmpObjectType(eax, JS_PROXY_TYPE, ebx);
2692   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2693   Split(equal, if_true, if_false, fall_through);
2694 
2695   context()->Plug(if_true, if_false);
2696 }
2697 
2698 
// %_ClassOf(x): computes the class name of the accumulator value:
// null for non-receivers, "Function" for function kinds, the constructor's
// instance class name when the map's constructor is a JSFunction, and
// "Object" otherwise.
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(eax, &null, Label::kNear);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, eax);  // Map now in eax.
  __ j(below, &null, Label::kNear);

  // Return 'Function' for JSFunction and JSBoundFunction objects.
  __ CmpInstanceType(eax, FIRST_FUNCTION_TYPE);
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  __ j(above_equal, &function, Label::kNear);

  // Check if the constructor in the map is a JS function.
  __ GetMapConstructor(eax, eax, ebx);
  __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
  __ j(not_equal, &non_function_constructor, Label::kNear);

  // eax now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done, Label::kNear);

  // Non-JS objects have class null.
  __ bind(&null);
  __ mov(eax, isolate()->factory()->null_value());
  __ jmp(&done, Label::kNear);

  // Functions have class 'Function'.
  __ bind(&function);
  __ mov(eax, isolate()->factory()->Function_string());
  __ jmp(&done, Label::kNear);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ mov(eax, isolate()->factory()->Object_string());

  // All done.
  __ bind(&done);

  context()->Plug(eax);
}
2747 
2748 
// %_ValueOf(x): unwraps a JSValue wrapper object to its boxed primitive;
// any other value (including Smis) is returned unchanged.
void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(eax, &done, Label::kNear);
  // If the object is not a value type, return the object.
  __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
  __ j(not_equal, &done, Label::kNear);
  // Unwrap the JSValue.
  __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(eax);
}
2766 
2767 
// %_StringCharFromCode(code): converts a character code in eax into a
// one-character string in ebx, with a slow path for non-trivial codes.
void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  // Input code in eax, result string in ebx.
  StringCharFromCodeGenerator generator(eax, ebx);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(ebx);
}
2785 
2786 
// %_StringCharCodeAt(string, index): loads the character code at |index| of
// |string| into edx. Returns NaN for out-of-range indices and undefined when
// the receiver/index needs conversion (triggering conversion in the caller).
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // String is on the stack, index in the accumulator.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register result = edx;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
                                      &need_conversion, &index_out_of_range);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ Move(result, Immediate(isolate()->factory()->nan_value()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ Move(result, Immediate(isolate()->factory()->undefined_value()));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
2826 
2827 
// %_Call(target, receiver, ...args): invokes |target| with the given receiver
// and arguments via the Call builtin. The argument list contains the target
// and receiver, so the builtin's argc excludes those two.
void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  // Move target to edi.
  int const argc = args->length() - 2;
  __ mov(edi, Operand(esp, (argc + 1) * kPointerSize));
  // Call the target.
  __ mov(eax, Immediate(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  // The builtin consumed the receiver and the arguments.
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, eax);
}
2847 
2848 
EmitHasCachedArrayIndex(CallRuntime * expr)2849 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
2850   ZoneList<Expression*>* args = expr->arguments();
2851   DCHECK(args->length() == 1);
2852 
2853   VisitForAccumulatorValue(args->at(0));
2854 
2855   __ AssertString(eax);
2856 
2857   Label materialize_true, materialize_false;
2858   Label* if_true = NULL;
2859   Label* if_false = NULL;
2860   Label* fall_through = NULL;
2861   context()->PrepareTest(&materialize_true, &materialize_false,
2862                          &if_true, &if_false, &fall_through);
2863 
2864   __ test(FieldOperand(eax, String::kHashFieldOffset),
2865           Immediate(String::kContainsCachedArrayIndexMask));
2866   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2867   Split(zero, if_true, if_false, fall_through);
2868 
2869   context()->Plug(if_true, if_false);
2870 }
2871 
2872 
// %_GetCachedArrayIndex(str): extracts the array index cached in the
// string's hash field (caller must have checked it exists) as a Smi in eax.
void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
  __ IndexFromHash(eax, eax);

  context()->Plug(eax);
}
2885 
2886 
// %_GetSuperConstructor(fn): loads the prototype of the function's map,
// i.e. the [[Prototype]] of the constructor, into eax.
void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(eax);
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ mov(eax, FieldOperand(eax, Map::kPrototypeOffset));
  context()->Plug(eax);
}
2896 
// %_DebugIsActive(): loads the isolate's debug-is-active byte flag and
// returns it as a Smi in eax.
void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
  __ SmiTag(eax);
  context()->Plug(eax);
}
2905 
2906 
// %_CreateIterResultObject(value, done): allocates a JSIteratorResult
// {value, done} inline, falling back to the runtime when young-space
// allocation fails.
void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  // Push value first, then done (popped in reverse order below).
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &runtime,
              NO_ALLOCATION_FLAGS);
  // Initialize map and the (empty) properties/elements backing stores.
  __ mov(ebx, NativeContextOperand());
  __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  // Pop 'done' (on top) and then 'value' straight into the object's fields.
  __ pop(FieldOperand(eax, JSIteratorResult::kDoneOffset));
  __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done, Label::kNear);

  // Slow path: let the runtime allocate and initialize the result object,
  // consuming the two operands still on the stack.
  __ bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(eax);
}
2935 
2936 
// Pushes the JS runtime function for this call's context index and an
// undefined receiver, preparing the stack for EmitCallJSRuntimeFunction.
void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadGlobalFunction(expr->context_index(), eax);
  PushOperand(eax);

  // Push undefined as receiver.
  PushOperand(isolate()->factory()->undefined_value());
}
2945 
2946 
// Calls a JS runtime function whose target and receiver were pushed by
// EmitLoadJSRuntimeFunction, followed by the already-pushed arguments.
void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  // edi = target function (below receiver and args), eax = argument count.
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Set(eax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  // The builtin consumed the receiver and the arguments.
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}
2959 
2960 
// Emits code for unary operators: delete, void, logical not, and typeof.
// Increment/decrement are handled separately in VisitCountOperation.
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        // delete obj.key / obj[key]: evaluate both and call the runtime,
        // respecting the current language mode.
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          // Global variable: delete from the global extension object.
          __ mov(eax, NativeContextOperand());
          __ push(ContextOperand(eax, Context::EXTENSION_INDEX));
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(eax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false.  'this' is
          // not really a variable, though we implement it as one.  The
          // subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(eax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      // Evaluate the operand for side effects and produce undefined.
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        // Labels are swapped: the operand's "false" materializes true.
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ Push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ Push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      // TypeofStub takes its operand in ebx and returns the string in eax.
      __ mov(ebx, eax);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(eax);
      break;
    }

    default:
      UNREACHABLE();
  }
}
3080 
3081 
// Emit code for ++/-- (prefix and postfix) on a variable or property.
// Strategy: load the old value into eax, try an inlined smi add/sub guarded
// by a patchable JumpPatchSite, fall back to ToNumber + BinaryOpIC, then
// store the result back via a '='-style assignment.  For postfix operations
// in a value context the old value is kept in a reserved stack slot that
// sits below the receiver/key currently on the operand stack.
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  // prop is NULL for plain variables; GetAssignType maps that to VARIABLE.
  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      PushOperand(Smi::FromInt(0));
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        // Stack after this case: this, home_object, home_object (the load
        // consumes the top two; the bottom pair stays for the store).
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        PushOperand(result_register());
        PushOperand(MemOperand(esp, kPointerSize));
        PushOperand(result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        // Duplicate this/home_object/key so both the load (which consumes
        // its operands) and the later store have a copy.
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForStackValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        VisitForAccumulatorValue(prop->key());
        PushOperand(result_register());
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(MemOperand(esp, 2 * kPointerSize));
        PushOperand(result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ mov(LoadDescriptor::ReceiverRegister(),
               Operand(esp, kPointerSize));                       // Object.
        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));  // Key.
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
  } else {
    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.  The slot offsets mirror the stack layouts built in
        // the load switch above.
        switch (assign_type) {
          case VARIABLE:
            __ push(eax);
            break;
          case NAMED_PROPERTY:
            __ mov(Operand(esp, kPointerSize), eax);
            break;
          case NAMED_SUPER_PROPERTY:
            __ mov(Operand(esp, 2 * kPointerSize), eax);
            break;
          case KEYED_PROPERTY:
            __ mov(Operand(esp, 2 * kPointerSize), eax);
            break;
          case KEYED_SUPER_PROPERTY:
            __ mov(Operand(esp, 3 * kPointerSize), eax);
            break;
        }
      }
    }

    // Smi arithmetic directly on the tagged value; an overflow means the
    // result no longer fits in a smi and must go through the stub.
    if (expr->op() == Token::INC) {
      __ add(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    }
    __ j(no_overflow, &done, Label::kNear);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ add(eax, Immediate(Smi::FromInt(1)));
    }
    __ jmp(&stub_call, Label::kNear);
    __ bind(&slow);
  }

  // Convert old value into a number.
  __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(eax);
          break;
        case NAMED_PROPERTY:
          __ mov(Operand(esp, kPointerSize), eax);
          break;
        case NAMED_SUPER_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
        case KEYED_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
        case KEYED_SUPER_PROPERTY:
          __ mov(Operand(esp, 3 * kPointerSize), eax);
          break;
      }
    }
  }

  SetExpressionPosition(expr);

  // Call stub for +1/-1.  The old value goes to edx and the constant smi 1
  // to eax before invoking the BinaryOpIC.
  __ bind(&stub_call);
  __ mov(edx, eax);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), expr->binary_op()).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in eax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(),
                                 BailoutState::TOS_REGISTER);
          context.Plug(eax);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(eax);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::NameRegister(),
             prop->key()->AsLiteral()->value());
      PopOperand(StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(expr->CountSlot());
      CallStoreIC();
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      PopOperand(StoreDescriptor::NameRegister());
      PopOperand(StoreDescriptor::ReceiverRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      EmitLoadStoreICSlot(expr->CountSlot());
      CallIC(ic);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        // Result is on the stack
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
  }
}
3331 
3332 
// Emit an inlined comparison of 'typeof sub_expr' against the literal
// string |check|.  The typeof value is produced into eax and then
// dispatched on the literal type name; a type name that matches none of
// the known strings always falls through to if_false.
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    // "number": smi or heap number.
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    Split(below, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, SYMBOL_TYPE, edx);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    // "boolean": exactly the true or false singletons.
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    // "undefined": not null, not a smi, but undetectable.
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_false);
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(eax, if_false);
    // Check for callable and not undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ and_(ecx, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
    __ cmp(ecx, 1 << Map::kIsCallable);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(eax, if_false);
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, edx);
    __ j(below, if_false);
    // Check for callable or undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(zero, if_true, if_false, fall_through);
// clang-format off
// One branch per SIMD type: compare the value's map against that type's map.
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)   \
  } else if (String::Equals(check, factory->type##_string())) { \
    __ JumpIfSmi(eax, if_false);                                \
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),           \
           isolate()->factory()->type##_map());                 \
    Split(equal, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
    // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}
3410 
3411 
// Emit code for a comparison expression.  Literal comparisons (handled by
// TryLiteralCompare) are inlined entirely; 'in' and 'instanceof' go through
// dedicated stubs; every other operator uses the CompareIC, with an optional
// inlined smi fast path guarded by a patchable JumpPatchSite.
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      // The helper returns a boolean object; compare against the true
      // singleton to derive the branch condition.
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      PopOperand(edx);
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;
    }

    default: {
      // Generic path: left operand in edx, right operand in eax.
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cc = CompareIC::ComputeCondition(op);
      PopOperand(edx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        // If both operands are smis, compare them directly without the IC.
        __ mov(ecx, edx);
        __ or_(ecx, eax);
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}
3484 
3485 
// Emit an inlined comparison of sub_expr against null or undefined.
// '===' compares identity with the requested singleton; '==' instead
// accepts any undetectable object (the Map::kIsUndetectable bit).
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Handle<Object> nil_value = nil == kNullValue
      ? isolate()->factory()->null_value()
      : isolate()->factory()->undefined_value();
  if (expr->op() == Token::EQ_STRICT) {
    __ cmp(eax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    // Non-strict: smis are never nullish; otherwise test the map's
    // undetectable bit (eax is clobbered with the map here).
    __ JumpIfSmi(eax, if_false);
    __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
    __ test_b(FieldOperand(eax, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}
3514 
3515 
result_register()3516 Register FullCodeGenerator::result_register() {
3517   return eax;
3518 }
3519 
3520 
context_register()3521 Register FullCodeGenerator::context_register() {
3522   return esi;
3523 }
3524 
// Load |value| from the frame slot at |frame_offset|, relative to ebp.
void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  // Frame slots hold tagged pointers, so the offset must be pointer-aligned.
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(value, Operand(ebp, frame_offset));
}
3529 
// Store |value| into the frame slot at |frame_offset|, relative to ebp.
void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  // Frame slots hold tagged pointers, so the offset must be pointer-aligned.
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}
3534 
3535 
// Load slot |context_index| of the current context (esi) into |dst|.
void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}
3539 
3540 
// Push the closure that a newly allocated context should record as its
// owner.  Which closure that is depends on the kind of scope that encloses
// the code being compiled.
void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ mov(eax, NativeContextOperand());
    PushOperand(ContextOperand(eax, Context::CLOSURE_INDEX));
  } else if (closure_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    PushOperand(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    // Ordinary function scope: the closure is the function of the current
    // JavaScript frame.
    DCHECK(closure_scope->is_function_scope());
    PushOperand(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}
3560 
3561 
3562 // ----------------------------------------------------------------------------
3563 // Non-local control flow support.
3564 
// On entry to a finally block: preserve the isolate's pending message
// object on the operand stack, then clear the isolate-level slot so the
// finally body runs with a clean message state.
void FullCodeGenerator::EnterFinallyBlock() {
  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_obj));
  PushOperand(edx);

  ClearPendingMessage();
}
3574 
3575 
// On exit from a finally block: restore the pending message object that
// EnterFinallyBlock saved on the operand stack.
void FullCodeGenerator::ExitFinallyBlock() {
  // edx is used as scratch; it must not alias the accumulator.
  DCHECK(!result_register().is(edx));
  // Restore pending message from stack.
  PopOperand(edx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(Operand::StaticVariable(pending_message_obj), edx);
}
3584 
3585 
// Reset the isolate's pending-message slot to the hole value.
void FullCodeGenerator::ClearPendingMessage() {
  // edx is used as scratch; it must not alias the accumulator.
  DCHECK(!result_register().is(edx));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(Operand::StaticVariable(pending_message_obj), edx);
}
3593 
3594 
// Replay completions (return/throw/break/continue) that were deferred while
// unwinding through finally blocks.  The token popped into edx identifies
// which deferred command, if any, should now take effect; each recorded
// command is compared against it in turn.
void FullCodeGenerator::DeferredCommands::EmitCommands() {
  // edx holds the token and must not alias the accumulator.
  DCHECK(!result_register().is(edx));
  __ Pop(result_register());  // Restore the accumulator.
  __ Pop(edx);                // Get the token.
  for (DeferredCommand cmd : commands_) {
    Label skip;
    __ cmp(edx, Immediate(Smi::FromInt(cmd.token)));
    __ j(not_equal, &skip);
    switch (cmd.command) {
      case kReturn:
        codegen_->EmitUnwindAndReturn();
        break;
      case kThrow:
        __ Push(result_register());
        __ CallRuntime(Runtime::kReThrow);
        break;
      case kContinue:
        codegen_->EmitContinue(cmd.target);
        break;
      case kBreak:
        codegen_->EmitBreak(cmd.target);
        break;
    }
    __ bind(&skip);
  }
}
3621 
3622 #undef __
3623 
3624 
// Byte patterns used by BackEdgeTable::PatchAt/GetBackEdgeState below to
// flip the back-edge interrupt check between "jns ok" (check enabled) and
// a two-byte nop (fall through into the on-stack-replacement call).
static const byte kJnsInstruction = 0x79;   // Opcode of 'jns rel8'.
static const byte kJnsOffset = 0x11;        // rel8 displacement to 'ok'.
static const byte kNopByteOne = 0x66;       // 0x66 0x90 together form a
static const byte kNopByteTwo = 0x90;       // two-byte nop.
#ifdef DEBUG
static const byte kCallInstruction = 0xe8;  // Opcode of 'call rel32'.
#endif
3632 
3633 
// Patch a back-edge site in unoptimized code to the given state.  The site
// layout is: sub <counter>, <delta>; jns ok; call <stub>; ok: -- the two
// bytes before the call's rel32 operand are rewritten to either re-enable
// the 'jns' skip (INTERRUPT) or nop it out (ON_STACK_REPLACEMENT), and the
// call target is redirected to |replacement_code|.
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  // pc points just past the call; back up over the rel32 operand and the
  // two-byte jns (or nop) in front of the call opcode.
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

  switch (target_state) {
    case INTERRUPT:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     jns ok
      //     call <interrupt stub>
      //   ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     nop
      //     nop
      //     call <on-stack replacement>
      //   ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  // Redirect the call and tell the incremental marker about the new target.
  Assembler::set_target_address_at(unoptimized_code->GetIsolate(),
                                   call_target_address, unoptimized_code,
                                   replacement_code->entry());
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}
3668 
3669 
// Classify a back-edge site by inspecting the bytes in front of its call:
// an intact 'jns' means the interrupt check is active (INTERRUPT); the
// two-byte nop means it was patched for on-stack replacement.
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  // The byte before the rel32 operand must be the call opcode.
  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
    // In the INTERRUPT state the call must target the interrupt-check stub.
    DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  DCHECK_EQ(kNopByteOne, *jns_instr_address);
  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));

  DCHECK_EQ(
      isolate->builtins()->OnStackReplacement()->entry(),
      Assembler::target_address_at(call_target_address, unoptimized_code));
  return ON_STACK_REPLACEMENT;
}
3694 
3695 
3696 }  // namespace internal
3697 }  // namespace v8
3698 
3699 #endif  // V8_TARGET_ARCH_X87
3700