// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

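// A patch site is a location in the generated code that the IC machinery
// later rewrites: the smi-check jump emitted below starts out with a fixed
// outcome and is patched to a real smi test once type feedback is available.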
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

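  // testb against kSmiTagMask clears CF and sets ZF from the smi tag bit.
  // Before patching, the jc/jnc below therefore have fixed outcomes; once the
  // IC rewrites them to jz/jnz they branch on the actual smi check.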
  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance near_jump = Label::kFar) {
    __ testb(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, near_jump);   // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance near_jump = Label::kFar) {
    __ testb(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, near_jump);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      DCHECK(is_uint8(delta_to_patch_site));
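      // Encode the delta in the immediate of a dummy test instruction, where
      // the IC patching code can read it back to locate the patch site.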
      __ testl(rax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, near_jump);
  }

  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them.  The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o rdi: the JS function object being called (i.e. ourselves)
//   o rdx: the new target value
//   o rsi: our context
//   o rbp: our caller's frame pointer
//   o rsp: stack pointer (pointing to return address)
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-x64.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    StackArgumentsAccessor args(rsp, info->scope()->num_parameters());
    __ movp(rcx, args.GetReceiverOperand());
    __ AssertNotSmi(rcx);
    __ CmpObjectType(rcx, FIRST_JS_RECEIVER_TYPE, rcx);
    __ Assert(above_equal, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count == 1) {
      __ PushRoot(Heap::kUndefinedValueRootIndex);
    } else if (locals_count > 1) {
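      // For frames with many locals, check that they actually fit on the
      // stack; smaller counts are covered by the stack-guard gap.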
      if (locals_count >= 128) {
        Label ok;
        __ movp(rcx, rsp);
        __ subp(rcx, Immediate(locals_count * kPointerSize));
        __ CompareRoot(rcx, Heap::kRealStackLimitRootIndex);
        __ j(above_equal, &ok, Label::kNear);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ movp(rcx, Immediate(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ Push(rax);
        }
        // Continue loop if not done.
        __ decp(rcx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ Push(rax);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    // Argument to NewContext is the function, which is still in rdi.
    if (info->scope()->is_script_scope()) {
      __ Push(rdi);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ Push(rdx);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ Push(rdi);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ Pop(rdx);  // Restore new target.
      }
    }
    function_in_register = false;
    // Context is returned in rax.  It replaces the context passed to us.
    // It's saved in the stack and kept live in rsi.
    __ movp(rsi, rax);
    __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rax);

    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ movp(rax, Operand(rbp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ movp(Operand(rsi, context_offset), rax);
        // Update the write barrier.  This clobbers rax and rbx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(
              rsi, context_offset, rax, rbx, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(rsi, rax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Registers holding this function and new target are both trashed in case
  // we bail out here. But since that can happen only when new target is not
  // used and we allocate a context, the value of |function_in_register| is
  // correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register) {
      __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers register again, keep it marked as such.
    }
    SetVar(this_function_var, rdi, rbx, rcx);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, rdx, rbx, rcx);
  }

  // Possibly allocate RestParameters
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register) {
      __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register = false;
    SetVar(rest_param, rax, rbx, rdx);
  }

  // Possibly allocate an arguments object.
  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Arguments object must be allocated after the context object, in
    // case the "arguments" or ".arguments" variables are in the context.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register) {
      __ movp(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(rdi);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, rax, rbx, rdx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
    __ j(above_equal, &ok, Label::kNear);
    __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
    EmitReturnSequence();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ Set(rax, 0);
}

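// The profiling counter is a heap Cell holding a Smi budget. It is
// decremented on back edges and returns; when it goes negative, the interrupt
// check is taken (possibly triggering optimization) and the counter is reset.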
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
  __ SmiAddConstant(FieldOperand(rbx, Cell::kValueOffset),
                    Smi::FromInt(-delta));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ Move(rbx, profiling_counter_, RelocInfo::EMBEDDED_OBJECT);
  __ Move(kScratchRegister, Smi::FromInt(reset_value));
  __ movp(FieldOperand(rbx, Cell::kValueOffset), kScratchRegister);
}

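// Byte displacement of the patchable jns in the back-edge sequence below: the
// size of the interrupt-check call plus counter reset that it jumps over,
// which differs between 64-bit and x32 pointer sizes.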
static const byte kJnsOffset = kPointerSize == kInt64Size ? 0x1d : 0x14;


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
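  // Weight the decrement by the code distance of the back edge, clamped to
  // [1, kMaxBackEdgeWeight], so larger loop bodies drain the budget faster.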
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);

  __ j(positive, &ok, Label::kNear);
  {
    PredictableCodeSizeScope predictible_code_size_scope(masm_, kJnsOffset);
    DontEmitDebugCodeScope dont_emit_debug_code_scope(masm_);
    __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

    // Record a mapping of this PC offset to the OSR id.  This is used to find
    // the AST id from the unoptimized code in order to use it as a key into
    // the deoptimization input data found in the optimized code.
    RecordBackEdge(stmt->OsrEntryId());

    EmitProfilingCounterReset();
  }
  __ bind(&ok);

  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ j(positive, &ok, Label::kNear);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ Push(rax);
  }
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ Pop(rax);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ Push(rax);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    SetReturnPosition(literal());
    __ leave();

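    // The + 1 accounts for the receiver, which is popped along with the
    // arguments.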
    int arg_count = info_->scope()->num_parameters() + 1;
    int arguments_bytes = arg_count * kPointerSize;
    __ Ret(arguments_bytes, rcx);
  }
}

void FullCodeGenerator::RestoreContext() {
  __ movp(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
}

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  codegen()->PushOperand(operand);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  codegen()->OperandStackDepthIncrement(1);
  __ PushRoot(index);
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeMove(result_register(), Smi::cast(*lit));
  } else {
    __ Move(result_register(), lit);
  }
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  codegen()->OperandStackDepthIncrement(1);
  if (lit->IsSmi()) {
    __ SafePush(Smi::cast(*lit));
  } else {
    __ Push(lit);
  }
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull(isolate()) || lit->IsUndefined(isolate()) ||
         !lit->IsUndetectable());
  if (lit->IsUndefined(isolate()) || lit->IsNull(isolate()) ||
      lit->IsFalse(isolate())) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Move(result_register(), lit);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ movp(Operand(rsp, 0), reg);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ Move(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Move(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  codegen()->OperandStackDepthIncrement(1);
  Label done;
  __ bind(materialize_true);
  __ Push(isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ Push(isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  codegen()->OperandStackDepthIncrement(1);
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ PushRoot(value_root_index);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}

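// Branch on the boolean value of a condition: the ToBoolean IC leaves the
// result in the accumulator, which is then compared against the true value.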
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(equal, if_true, if_false, fall_through);
}

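// Emit the cheapest conditional branch arrangement given which target, if
// any, is the fall-through block.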
void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += kFPOnStackSize + kPCOnStackSize +
              (info_->scope()->num_parameters() - 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(rbp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ movp(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ movp(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ movp(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
    __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
    __ Check(not_equal, kDeclarationInWithContext);
    __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST;
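  // let and const bindings start out holding the hole so that a use before
  // initialization (the temporal dead zone) can be detected at run time.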
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      DCHECK(!variable->binding_needs_init());
      globals_->Add(variable->name(), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
        __ movp(StackOperand(variable), kScratchRegister);
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
        __ movp(ContextOperand(rsi, variable->index()), kScratchRegister);
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      DCHECK_EQ(VAR, mode);
      DCHECK(!hole_init);
      __ Push(variable->name());
      __ CallRuntime(Runtime::kDeclareEvalVar);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ movp(StackOperand(variable), result_register());
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ movp(ContextOperand(rsi, variable->index()), result_register());
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(rsi,
                                offset,
                                result_register(),
                                rcx,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      PushOperand(variable->name());
      VisitForStackValue(declaration->fun());
      CallRuntimeWithOperands(Runtime::kDeclareEvalFunction);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ movp(rdx, Operand(rsp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ movp(rcx, rdx);
      __ orp(rcx, rax);
      patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);

      __ cmpp(rdx, rax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

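    // The code between the jump and the skip label is only reached when a
    // deopt of the comparison resumes there with the boolean result in rax.
    // The normal path falls through to test the CompareIC's raw result,
    // where zero means equal.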
    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ testp(rax, rax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);
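  // The loop below keeps five values on the operand stack: the enumerable
  // object, the expected map (or Smi(1) on the slow path), the cache array,
  // its length, and the current index.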

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(rax, &convert, Label::kNear);
  __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ CompareRoot(rax, Heap::kNullValueRootIndex);
  __ j(equal, &exit);
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(equal, &exit);
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ Push(rax);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ Push(rax);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
                 Heap::kMetaMapRootIndex);
  __ j(not_equal, &fixed_array);

  // We got a map in register rax. Get the enumeration cache from it.
  __ bind(&use_cache);

  Label no_descriptors;

  __ EnumLength(rdx, rax);
  __ Cmp(rdx, Smi::FromInt(0));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(rax, rcx);
  __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheOffset));
  __ movp(rcx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ Push(rax);  // Map.
  __ Push(rcx);  // Enumeration cache.
  __ Push(rdx);  // Number of valid entries for the map in the enum cache.
  __ Push(Smi::FromInt(0));  // Initial index.
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ addp(rsp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register rax. Iterate through that.
  __ bind(&fixed_array);

  __ movp(rcx, Operand(rsp, 0 * kPointerSize));  // Get enumerated object
  __ Push(Smi::FromInt(1));                      // Smi(1) indicates slow check
  __ Push(rax);  // Array
  __ movp(rax, FieldOperand(rax, FixedArray::kLengthOffset));
  __ Push(rax);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ Push(Smi::FromInt(0));  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  __ movp(rax, Operand(rsp, 0 * kPointerSize));  // Get the current index.
  __ cmpp(rax, Operand(rsp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register rbx.
  __ movp(rbx, Operand(rsp, 2 * kPointerSize));
  SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
  __ movp(rbx, FieldOperand(rbx,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register rdx.
  __ movp(rdx, Operand(rsp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ movp(rcx, Operand(rsp, 4 * kPointerSize));
  __ cmpp(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(rdx);
  __ Move(FieldOperand(rdx, FixedArray::OffsetOfElementAt(vector_index)),
          TypeFeedbackVector::MegamorphicSentinel(isolate()));

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ Push(rcx);  // Enumerable.
  __ Push(rbx);  // Current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
  __ j(equal, loop_statement.continue_label());
  __ movp(rbx, rax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register rbx.
  __ bind(&update_each);
  __ movp(result_register(), rbx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
  __ Move(StoreDescriptor::NameRegister(),
          isolate()->factory()->home_object_symbol());
  __ movp(StoreDescriptor::ValueRegister(),
          Operand(rsp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ movp(StoreDescriptor::ReceiverRegister(), rax);
  __ Move(StoreDescriptor::NameRegister(),
          isolate()->factory()->home_object_symbol());
  __ movp(StoreDescriptor::ValueRegister(),
          Operand(rsp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register context = rsi;
  Register temp = rdx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                         Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering rsi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.  If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain.  There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ movp(temp, context);
    }
    // Load map for comparison into register, outside loop.
    __ LoadRoot(kScratchRegister, Heap::kNativeContextMapRootIndex);
    __ bind(&next);
    // Terminate at native context.
    __ cmpp(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is "the hole".
    __ JumpIfNotRoot(ContextOperand(temp, Context::EXTENSION_INDEX),
                     Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ movp(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use the normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = rsi;
  Register temp = rbx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                         Heap::kTheHoleValueRootIndex, slow);
      }
      __ movp(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering rsi.
      context = temp;
    }
  }
  // Check that last extension is "the hole".
  __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                   Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an rsi-based operand (the write barrier cannot be allowed to
  // destroy the rsi register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ movp(rax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST) {
      __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
      __ j(not_equal, done);
      __ Push(var->name());
      __ CallRuntime(Runtime::kThrowReferenceError);
    }
    __ jmp(done);
  }
}


void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
#ifdef DEBUG
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
#endif
  __ Move(LoadGlobalDescriptor::SlotRegister(),
          SmiFromSlot(proxy->VariableFeedbackSlot()));
  CallLoadGlobalIC(typeof_mode);
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(rax);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context slot"
                                               : "[ Stack slot");
      if (NeedsHoleCheckForLoad(proxy)) {
        // Let and const need a read barrier.
        Label done;
        GetVar(rax, var);
        __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
        __ j(not_equal, &done, Label::kNear);
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          __ Push(var->name());
          __ CallRuntime(Runtime::kThrowReferenceError);
        }
        __ bind(&done);
        context()->Plug(rax);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup slot");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ Push(var->name());
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(rax);
      break;
    }
  }
}


void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    OperandStackDepthIncrement(1);
    __ PushRoot(Heap::kNullValueRootIndex);
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->ComputeFlags();
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ Push(Smi::FromInt(expr->literal_index()));
    __ Push(constant_properties);
    __ Push(Smi::FromInt(flags));
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
    __ Move(rbx, Smi::FromInt(expr->literal_index()));
    __ Move(rcx, constant_properties);
    __ Move(rdx, Smi::FromInt(flags));
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in rax.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(rax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(rax));
            __ Move(StoreDescriptor::NameRegister(), key->value());
            __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
            EmitLoadStoreICSlot(property->GetSlot(0));
            CallStoreIC();
            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        PushOperand(Operand(rsp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          PushOperand(Smi::FromInt(SLOPPY));  // Language mode
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        PushOperand(Operand(rsp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        DCHECK(property->emit_store());
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                               BailoutState::NO_REGISTERS);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    PushOperand(Operand(rsp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    PushOperand(Smi::FromInt(NONE));
    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
1457     PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
1458   }
1459 
1460   // Object literals have two parts. The "static" part on the left contains no
1461   // computed property names, and so we can compute its map ahead of time; see
1462   // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1463   // starts with the first computed property name, and continues with all
1464   // properties to its right.  All the code from above initializes the static
1465   // component of the object literal, and arranges for the map of the result to
1466   // reflect the static order in which the keys appear. For the dynamic
1467   // properties, we compile them into a series of "SetOwnProperty" runtime
1468   // calls. This will preserve insertion order.
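  // For example, in a literal like (hypothetical source)
  //   var o = { a: 1, b: 2, ["c" + "d"]: 3, e: 4 };
  // the properties a and b form the static part and are reflected in the
  // boilerplate map, while ["cd"] and everything after it (including e)
  // belong to the dynamic part and are defined one by one below.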
1469   for (; property_index < expr->properties()->length(); property_index++) {
1470     ObjectLiteral::Property* property = expr->properties()->at(property_index);
1471 
1472     Expression* value = property->value();
1473     if (!result_saved) {
1474       PushOperand(rax);  // Save result on the stack
1475       result_saved = true;
1476     }
1477 
1478     PushOperand(Operand(rsp, 0));  // Duplicate receiver.
1479 
1480     if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1481       DCHECK(!property->is_computed_name());
1482       VisitForStackValue(value);
1483       DCHECK(property->emit_store());
1484       CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1485       PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1486                              BailoutState::NO_REGISTERS);
1487     } else {
1488       EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1489       VisitForStackValue(value);
1490       if (NeedsHomeObject(value)) {
1491         EmitSetHomeObject(value, 2, property->GetSlot());
1492       }
1493 
1494       switch (property->kind()) {
1495         case ObjectLiteral::Property::CONSTANT:
1496         case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1497         case ObjectLiteral::Property::COMPUTED:
1498           if (property->emit_store()) {
1499             PushOperand(Smi::FromInt(NONE));
1500             PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1501             CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
1502             PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1503                                    BailoutState::NO_REGISTERS);
1504           } else {
1505             DropOperands(3);
1506           }
1507           break;
1508 
1509         case ObjectLiteral::Property::PROTOTYPE:
1510           UNREACHABLE();
1511           break;
1512 
1513         case ObjectLiteral::Property::GETTER:
1514           PushOperand(Smi::FromInt(NONE));
1515           CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
1516           break;
1517 
1518         case ObjectLiteral::Property::SETTER:
1519           PushOperand(Smi::FromInt(NONE));
1520           CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
1521           break;
1522       }
1523     }
1524   }
1525 
1526   if (result_saved) {
1527     context()->PlugTOS();
1528   } else {
1529     context()->Plug(rax);
1530   }
1531 }
1532 
1533 
1534 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1535   Comment cmnt(masm_, "[ ArrayLiteral");
1536 
1537   Handle<FixedArray> constant_elements = expr->constant_elements();
1538   bool has_constant_fast_elements =
1539       IsFastObjectElementsKind(expr->constant_elements_kind());
1540 
1541   AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1542   if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
1543     // If transitioning is the only customer of allocation sites (pretenuring
1544     // is disabled) and there is nowhere left to transition to, turn tracking off.
1545     allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1546   }
1547 
1548   if (MustCreateArrayLiteralWithRuntime(expr)) {
1549     __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1550     __ Push(Smi::FromInt(expr->literal_index()));
1551     __ Push(constant_elements);
1552     __ Push(Smi::FromInt(expr->ComputeFlags()));
1553     __ CallRuntime(Runtime::kCreateArrayLiteral);
1554   } else {
1555     __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1556     __ Move(rbx, Smi::FromInt(expr->literal_index()));
1557     __ Move(rcx, constant_elements);
1558     FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1559     __ CallStub(&stub);
1560   }
1561   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1562 
1563   bool result_saved = false;  // Is the result saved to the stack?
1564   ZoneList<Expression*>* subexprs = expr->values();
1565   int length = subexprs->length();
1566 
1567   // Emit code to evaluate all the non-constant subexpressions and to store
1568   // them into the newly cloned array.
1569   int array_index = 0;
1570   for (; array_index < length; array_index++) {
1571     Expression* subexpr = subexprs->at(array_index);
1572     DCHECK(!subexpr->IsSpread());
1573 
1574     // If the subexpression is a literal or a simple materialized literal it
1575     // is already set in the cloned array.
1576     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1577 
1578     if (!result_saved) {
1579       PushOperand(rax);  // array literal
1580       result_saved = true;
1581     }
1582     VisitForAccumulatorValue(subexpr);
1583 
1584     __ Move(StoreDescriptor::NameRegister(), Smi::FromInt(array_index));
1585     __ movp(StoreDescriptor::ReceiverRegister(), Operand(rsp, 0));
1586     EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1587     Handle<Code> ic =
1588         CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1589     CallIC(ic);
1590 
1591     PrepareForBailoutForId(expr->GetIdForElement(array_index),
1592                            BailoutState::NO_REGISTERS);
1593   }
1594 
1595   // In case the array literal contains spread expressions it has two parts.
1596   // The first part is the "static" array, which has a literal index and is
1597   // handled above. The second part is the part after the first spread
1598   // expression (inclusive), and these elements get appended to the array. Note
1599   // that the number of elements an iterable produces is unknown ahead of time.
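  // For example, for (hypothetical source)
  //   var a = [1, 2, ...it, 3];
  // the elements 1 and 2 are stored through keyed store ICs above, while the
  // elements produced by ...it and the trailing 3 are appended one at a time
  // via Runtime::kAppendElement below.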
1600   if (array_index < length && result_saved) {
1601     PopOperand(rax);
1602     result_saved = false;
1603   }
1604   for (; array_index < length; array_index++) {
1605     Expression* subexpr = subexprs->at(array_index);
1606 
1607     PushOperand(rax);
1608     DCHECK(!subexpr->IsSpread());
1609     VisitForStackValue(subexpr);
1610     CallRuntimeWithOperands(Runtime::kAppendElement);
1611 
1612     PrepareForBailoutForId(expr->GetIdForElement(array_index),
1613                            BailoutState::NO_REGISTERS);
1614   }
1615 
1616   if (result_saved) {
1617     context()->PlugTOS();
1618   } else {
1619     context()->Plug(rax);
1620   }
1621 }
1622 
1623 
1624 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1625   DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1626 
1627   Comment cmnt(masm_, "[ Assignment");
1628 
1629   Property* property = expr->target()->AsProperty();
1630   LhsKind assign_type = Property::GetAssignType(property);
1631 
1632   // Evaluate LHS expression.
1633   switch (assign_type) {
1634     case VARIABLE:
1635       // Nothing to do here.
1636       break;
1637     case NAMED_PROPERTY:
1638       if (expr->is_compound()) {
1639         // We need the receiver both on the stack and in the register.
1640         VisitForStackValue(property->obj());
1641         __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
1642       } else {
1643         VisitForStackValue(property->obj());
1644       }
1645       break;
1646     case NAMED_SUPER_PROPERTY:
1647       VisitForStackValue(
1648           property->obj()->AsSuperPropertyReference()->this_var());
1649       VisitForAccumulatorValue(
1650           property->obj()->AsSuperPropertyReference()->home_object());
1651       PushOperand(result_register());
1652       if (expr->is_compound()) {
1653         PushOperand(MemOperand(rsp, kPointerSize));
1654         PushOperand(result_register());
1655       }
1656       break;
1657     case KEYED_SUPER_PROPERTY:
1658       VisitForStackValue(
1659           property->obj()->AsSuperPropertyReference()->this_var());
1660       VisitForStackValue(
1661           property->obj()->AsSuperPropertyReference()->home_object());
1662       VisitForAccumulatorValue(property->key());
1663       PushOperand(result_register());
1664       if (expr->is_compound()) {
1665         PushOperand(MemOperand(rsp, 2 * kPointerSize));
1666         PushOperand(MemOperand(rsp, 2 * kPointerSize));
1667         PushOperand(result_register());
1668       }
1669       break;
1670     case KEYED_PROPERTY: {
1671       if (expr->is_compound()) {
1672         VisitForStackValue(property->obj());
1673         VisitForStackValue(property->key());
1674         __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
1675         __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
1676       } else {
1677         VisitForStackValue(property->obj());
1678         VisitForStackValue(property->key());
1679       }
1680       break;
1681     }
1682   }
1683 
1684   // For compound assignments we need another deoptimization point after the
1685   // variable/property load.
1686   if (expr->is_compound()) {
1687     { AccumulatorValueContext context(this);
1688       switch (assign_type) {
1689         case VARIABLE:
1690           EmitVariableLoad(expr->target()->AsVariableProxy());
1691           PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
1692           break;
1693         case NAMED_PROPERTY:
1694           EmitNamedPropertyLoad(property);
1695           PrepareForBailoutForId(property->LoadId(),
1696                                  BailoutState::TOS_REGISTER);
1697           break;
1698         case NAMED_SUPER_PROPERTY:
1699           EmitNamedSuperPropertyLoad(property);
1700           PrepareForBailoutForId(property->LoadId(),
1701                                  BailoutState::TOS_REGISTER);
1702           break;
1703         case KEYED_SUPER_PROPERTY:
1704           EmitKeyedSuperPropertyLoad(property);
1705           PrepareForBailoutForId(property->LoadId(),
1706                                  BailoutState::TOS_REGISTER);
1707           break;
1708         case KEYED_PROPERTY:
1709           EmitKeyedPropertyLoad(property);
1710           PrepareForBailoutForId(property->LoadId(),
1711                                  BailoutState::TOS_REGISTER);
1712           break;
1713       }
1714     }
1715 
1716     Token::Value op = expr->binary_op();
1717     PushOperand(rax);  // Left operand goes on the stack.
1718     VisitForAccumulatorValue(expr->value());
1719 
1720     AccumulatorValueContext context(this);
1721     if (ShouldInlineSmiCase(op)) {
1722       EmitInlineSmiBinaryOp(expr->binary_operation(),
1723                             op,
1724                             expr->target(),
1725                             expr->value());
1726     } else {
1727       EmitBinaryOp(expr->binary_operation(), op);
1728     }
1729     // Deoptimization point in case the binary operation may have side effects.
1730     PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
1731   } else {
1732     VisitForAccumulatorValue(expr->value());
1733   }
1734 
1735   SetExpressionPosition(expr);
1736 
1737   // Store the value.
1738   switch (assign_type) {
1739     case VARIABLE:
1740       EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1741                              expr->op(), expr->AssignmentSlot());
1742       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
1743       context()->Plug(rax);
1744       break;
1745     case NAMED_PROPERTY:
1746       EmitNamedPropertyAssignment(expr);
1747       break;
1748     case NAMED_SUPER_PROPERTY:
1749       EmitNamedSuperPropertyStore(property);
1750       context()->Plug(rax);
1751       break;
1752     case KEYED_SUPER_PROPERTY:
1753       EmitKeyedSuperPropertyStore(property);
1754       context()->Plug(rax);
1755       break;
1756     case KEYED_PROPERTY:
1757       EmitKeyedPropertyAssignment(expr);
1758       break;
1759   }
1760 }
1761 
1762 
1763 void FullCodeGenerator::VisitYield(Yield* expr) {
1764   Comment cmnt(masm_, "[ Yield");
1765   SetExpressionPosition(expr);
1766 
1767   // Evaluate yielded value first; the initial iterator definition depends on
1768   // this.  It stays on the stack while we update the iterator.
1769   VisitForStackValue(expr->expression());
1770 
1771   Label suspend, continuation, post_runtime, resume, exception;
1772 
1773   __ jmp(&suspend);
1774   __ bind(&continuation);
1775   // When we arrive here, rax holds the generator object.
1776   __ RecordGeneratorContinuation();
1777   __ movp(rbx, FieldOperand(rax, JSGeneratorObject::kResumeModeOffset));
1778   __ movp(rax, FieldOperand(rax, JSGeneratorObject::kInputOrDebugPosOffset));
1779   STATIC_ASSERT(JSGeneratorObject::kNext < JSGeneratorObject::kReturn);
1780   STATIC_ASSERT(JSGeneratorObject::kThrow > JSGeneratorObject::kReturn);
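  // Given the ordering asserted above, a single comparison against kReturn
  // yields a three-way dispatch on the resume mode: less means kNext (plain
  // resume), equal means kReturn (wrap the input in an iterator result and
  // return), and greater means kThrow (rethrow the input as an exception).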
1781   __ SmiCompare(rbx, Smi::FromInt(JSGeneratorObject::kReturn));
1782   __ j(less, &resume);
1783   __ Push(result_register());
1784   __ j(greater, &exception);
1785   EmitCreateIteratorResult(true);
1786   EmitUnwindAndReturn();
1787 
1788   __ bind(&exception);
1789   __ CallRuntime(Runtime::kThrow);
1790 
1791   __ bind(&suspend);
1792   OperandStackDepthIncrement(1);  // Not popped on this path.
1793   VisitForAccumulatorValue(expr->generator_object());
1794   DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1795   __ Move(FieldOperand(rax, JSGeneratorObject::kContinuationOffset),
1796           Smi::FromInt(continuation.pos()));
1797   __ movp(FieldOperand(rax, JSGeneratorObject::kContextOffset), rsi);
1798   __ movp(rcx, rsi);
1799   __ RecordWriteField(rax, JSGeneratorObject::kContextOffset, rcx, rdx,
1800                       kDontSaveFPRegs);
1801   __ leap(rbx, Operand(rbp, StandardFrameConstants::kExpressionsOffset));
1802   __ cmpp(rsp, rbx);
1803   __ j(equal, &post_runtime);
1804   __ Push(rax);  // generator object
1805   __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1806   RestoreContext();
1807   __ bind(&post_runtime);
1808 
1809   PopOperand(result_register());
1810   EmitReturnSequence();
1811 
1812   __ bind(&resume);
1813   context()->Plug(result_register());
1814 }
1815 
1816 void FullCodeGenerator::PushOperand(MemOperand operand) {
1817   OperandStackDepthIncrement(1);
1818   __ Push(operand);
1819 }
1820 
1821 void FullCodeGenerator::EmitOperandStackDepthCheck() {
1822   if (FLAG_debug_code) {
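    // Check that rbp - rsp equals the fixed frame slots below fp plus the
    // statically tracked operand stack depth, i.e. that pushes and pops have
    // stayed balanced with the bookkeeping.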
1823     int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1824                         operand_stack_depth_ * kPointerSize;
1825     __ movp(rax, rbp);
1826     __ subp(rax, rsp);
1827     __ cmpp(rax, Immediate(expected_diff));
1828     __ Assert(equal, kUnexpectedStackDepth);
1829   }
1830 }
1831 
1832 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
1833   Label allocate, done_allocate;
1834 
1835   __ Allocate(JSIteratorResult::kSize, rax, rcx, rdx, &allocate,
1836               NO_ALLOCATION_FLAGS);
1837   __ jmp(&done_allocate, Label::kNear);
1838 
1839   __ bind(&allocate);
1840   __ Push(Smi::FromInt(JSIteratorResult::kSize));
1841   __ CallRuntime(Runtime::kAllocateInNewSpace);
1842 
1843   __ bind(&done_allocate);
1844   __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, rbx);
1845   __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx);
1846   __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
1847   __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
1848   __ movp(FieldOperand(rax, JSObject::kElementsOffset), rbx);
1849   __ Pop(FieldOperand(rax, JSIteratorResult::kValueOffset));
1850   __ LoadRoot(FieldOperand(rax, JSIteratorResult::kDoneOffset),
1851               done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1852   STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
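  // The five words asserted above are map, properties, elements, value and
  // done -- exactly the fields the stores above initialized.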
1853   OperandStackDepthDecrement(1);
1854 }
1855 
1856 
1857 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1858                                               Token::Value op,
1859                                               Expression* left,
1860                                               Expression* right) {
1861   // Do combined smi check of the operands. Left operand is on the
1862   // stack (popped into rdx). Right operand is in rax but moved into
1863   // rcx to make the shifts easier.
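  // Both operands are smis iff their bitwise OR still has the smi tag bit
  // clear, so a single tag check on rax | rdx covers both operands at once.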
1864   Label done, stub_call, smi_case;
1865   PopOperand(rdx);
1866   __ movp(rcx, rax);
1867   __ orp(rax, rdx);
1868   JumpPatchSite patch_site(masm_);
1869   patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
1870 
1871   __ bind(&stub_call);
1872   __ movp(rax, rcx);
1873   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1874   CallIC(code, expr->BinaryOperationFeedbackId());
1875   patch_site.EmitPatchInfo();
1876   __ jmp(&done, Label::kNear);
1877 
1878   __ bind(&smi_case);
1879   switch (op) {
1880     case Token::SAR:
1881       __ SmiShiftArithmeticRight(rax, rdx, rcx);
1882       break;
1883     case Token::SHL:
1884       __ SmiShiftLeft(rax, rdx, rcx, &stub_call);
1885       break;
1886     case Token::SHR:
1887       __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
1888       break;
1889     case Token::ADD:
1890       __ SmiAdd(rax, rdx, rcx, &stub_call);
1891       break;
1892     case Token::SUB:
1893       __ SmiSub(rax, rdx, rcx, &stub_call);
1894       break;
1895     case Token::MUL:
1896       __ SmiMul(rax, rdx, rcx, &stub_call);
1897       break;
1898     case Token::BIT_OR:
1899       __ SmiOr(rax, rdx, rcx);
1900       break;
1901     case Token::BIT_AND:
1902       __ SmiAnd(rax, rdx, rcx);
1903       break;
1904     case Token::BIT_XOR:
1905       __ SmiXor(rax, rdx, rcx);
1906       break;
1907     default:
1908       UNREACHABLE();
1909       break;
1910   }
1911 
1912   __ bind(&done);
1913   context()->Plug(rax);
1914 }
1915 
1916 
1917 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
1918   for (int i = 0; i < lit->properties()->length(); i++) {
1919     ObjectLiteral::Property* property = lit->properties()->at(i);
1920     Expression* value = property->value();
1921 
1922     if (property->is_static()) {
1923       PushOperand(Operand(rsp, kPointerSize));  // constructor
1924     } else {
1925       PushOperand(Operand(rsp, 0));  // prototype
1926     }
1927     EmitPropertyKey(property, lit->GetIdForProperty(i));
1928 
1929     // The static "prototype" property is read-only. The parser already handles
1930     // the non-computed property name case, so a static property with a computed
1931     // name is the only case where we must check for an own read-only property;
1932     // we special-case it here so we do not pay for the check on every property.
1933     if (property->is_static() && property->is_computed_name()) {
1934       __ CallRuntime(Runtime::kThrowIfStaticPrototype);
1935       __ Push(rax);
1936     }
1937 
1938     VisitForStackValue(value);
1939     if (NeedsHomeObject(value)) {
1940       EmitSetHomeObject(value, 2, property->GetSlot());
1941     }
1942 
1943     switch (property->kind()) {
1944       case ObjectLiteral::Property::CONSTANT:
1945       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1946       case ObjectLiteral::Property::PROTOTYPE:
1947         UNREACHABLE();
1948       case ObjectLiteral::Property::COMPUTED:
1949         PushOperand(Smi::FromInt(DONT_ENUM));
1950         PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1951         CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
1952         break;
1953 
1954       case ObjectLiteral::Property::GETTER:
1955         PushOperand(Smi::FromInt(DONT_ENUM));
1956         CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
1957         break;
1958 
1959       case ObjectLiteral::Property::SETTER:
1960         PushOperand(Smi::FromInt(DONT_ENUM));
1961         CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
1962         break;
1963 
1964       default:
1965         UNREACHABLE();
1966     }
1967   }
1968 }
1969 
1970 
1971 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
1972   PopOperand(rdx);
1973   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1974   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
1975   CallIC(code, expr->BinaryOperationFeedbackId());
1976   patch_site.EmitPatchInfo();
1977   context()->Plug(rax);
1978 }
1979 
1980 
1981 void FullCodeGenerator::EmitAssignment(Expression* expr,
1982                                        FeedbackVectorSlot slot) {
1983   DCHECK(expr->IsValidReferenceExpressionOrThis());
1984 
1985   Property* prop = expr->AsProperty();
1986   LhsKind assign_type = Property::GetAssignType(prop);
1987 
1988   switch (assign_type) {
1989     case VARIABLE: {
1990       Variable* var = expr->AsVariableProxy()->var();
1991       EffectContext context(this);
1992       EmitVariableAssignment(var, Token::ASSIGN, slot);
1993       break;
1994     }
1995     case NAMED_PROPERTY: {
1996       PushOperand(rax);  // Preserve value.
1997       VisitForAccumulatorValue(prop->obj());
1998       __ Move(StoreDescriptor::ReceiverRegister(), rax);
1999       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
2000       __ Move(StoreDescriptor::NameRegister(),
2001               prop->key()->AsLiteral()->value());
2002       EmitLoadStoreICSlot(slot);
2003       CallStoreIC();
2004       break;
2005     }
2006     case NAMED_SUPER_PROPERTY: {
2007       PushOperand(rax);
2008       VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2009       VisitForAccumulatorValue(
2010           prop->obj()->AsSuperPropertyReference()->home_object());
2011       // stack: value, this; rax: home_object
2012       Register scratch = rcx;
2013       Register scratch2 = rdx;
2014       __ Move(scratch, result_register());               // home_object
2015       __ movp(rax, MemOperand(rsp, kPointerSize));       // value
2016       __ movp(scratch2, MemOperand(rsp, 0));             // this
2017       __ movp(MemOperand(rsp, kPointerSize), scratch2);  // this
2018       __ movp(MemOperand(rsp, 0), scratch);              // home_object
2019       // stack: this, home_object; rax: value
2020       EmitNamedSuperPropertyStore(prop);
2021       break;
2022     }
2023     case KEYED_SUPER_PROPERTY: {
2024       PushOperand(rax);
2025       VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2026       VisitForStackValue(
2027           prop->obj()->AsSuperPropertyReference()->home_object());
2028       VisitForAccumulatorValue(prop->key());
2029       Register scratch = rcx;
2030       Register scratch2 = rdx;
2031       __ movp(scratch2, MemOperand(rsp, 2 * kPointerSize));  // value
2032       // stack: value, this, home_object; rax: key, rdx: value
2033       __ movp(scratch, MemOperand(rsp, kPointerSize));  // this
2034       __ movp(MemOperand(rsp, 2 * kPointerSize), scratch);
2035       __ movp(scratch, MemOperand(rsp, 0));  // home_object
2036       __ movp(MemOperand(rsp, kPointerSize), scratch);
2037       __ movp(MemOperand(rsp, 0), rax);
2038       __ Move(rax, scratch2);
2039       // stack: this, home_object, key; rax: value.
2040       EmitKeyedSuperPropertyStore(prop);
2041       break;
2042     }
2043     case KEYED_PROPERTY: {
2044       PushOperand(rax);  // Preserve value.
2045       VisitForStackValue(prop->obj());
2046       VisitForAccumulatorValue(prop->key());
2047       __ Move(StoreDescriptor::NameRegister(), rax);
2048       PopOperand(StoreDescriptor::ReceiverRegister());
2049       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
2050       EmitLoadStoreICSlot(slot);
2051       Handle<Code> ic =
2052           CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2053       CallIC(ic);
2054       break;
2055     }
2056   }
2057   context()->Plug(rax);
2058 }
2059 
2060 
2061 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2062     Variable* var, MemOperand location) {
2063   __ movp(location, rax);
2064   if (var->IsContextSlot()) {
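    // Context objects live on the heap, so a pointer store into one must be
    // recorded for the write barrier; stack slots need no barrier because
    // the stack is scanned exactly.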
2065     __ movp(rdx, rax);
2066     __ RecordWriteContextSlot(
2067         rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
2068   }
2069 }
2070 
2071 
2072 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2073                                                FeedbackVectorSlot slot) {
2074   if (var->IsUnallocated()) {
2075     // Global var, const, or let.
2076     __ Move(StoreDescriptor::NameRegister(), var->name());
2077     __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2078     EmitLoadStoreICSlot(slot);
2079     CallStoreIC();
2080 
2081   } else if (var->mode() == LET && op != Token::INIT) {
2082     // Non-initializing assignment to a let variable needs a hole check and, for context slots, a write barrier.
2083     DCHECK(!var->IsLookupSlot());
2084     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2085     Label assign;
2086     MemOperand location = VarOperand(var, rcx);
2087     __ movp(rdx, location);
2088     __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2089     __ j(not_equal, &assign, Label::kNear);
2090     __ Push(var->name());
2091     __ CallRuntime(Runtime::kThrowReferenceError);
2092     __ bind(&assign);
2093     EmitStoreToStackLocalOrContextSlot(var, location);
2094 
2095   } else if (var->mode() == CONST && op != Token::INIT) {
2096     // Non-initializing assignment to a const variable always throws; the hole check makes uses inside the TDZ throw a ReferenceError instead.
2097     DCHECK(!var->IsLookupSlot());
2098     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2099     Label const_error;
2100     MemOperand location = VarOperand(var, rcx);
2101     __ movp(rdx, location);
2102     __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2103     __ j(not_equal, &const_error, Label::kNear);
2104     __ Push(var->name());
2105     __ CallRuntime(Runtime::kThrowReferenceError);
2106     __ bind(&const_error);
2107     __ CallRuntime(Runtime::kThrowConstAssignError);
2108 
2109   } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2110     // Initializing assignment to const {this} needs a write barrier.
2111     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2112     Label uninitialized_this;
2113     MemOperand location = VarOperand(var, rcx);
2114     __ movp(rdx, location);
2115     __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2116     __ j(equal, &uninitialized_this);
2117     __ Push(var->name());
2118     __ CallRuntime(Runtime::kThrowReferenceError);
2119     __ bind(&uninitialized_this);
2120     EmitStoreToStackLocalOrContextSlot(var, location);
2121 
2122   } else if (!var->is_const_mode() || op == Token::INIT) {
2123     if (var->IsLookupSlot()) {
2124       // Assignment to var.
2125       __ Push(var->name());
2126       __ Push(rax);
2127       __ CallRuntime(is_strict(language_mode())
2128                          ? Runtime::kStoreLookupSlot_Strict
2129                          : Runtime::kStoreLookupSlot_Sloppy);
2130     } else {
2131       // Assignment to var or initializing assignment to let/const in harmony
2132       // mode.
2133       DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2134       MemOperand location = VarOperand(var, rcx);
2135       if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
2136         // Check for an uninitialized let binding.
2137         __ movp(rdx, location);
2138         __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
2139         __ Check(equal, kLetBindingReInitialization);
2140       }
2141       EmitStoreToStackLocalOrContextSlot(var, location);
2142     }
2143 
2144   } else {
2145     DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2146     if (is_strict(language_mode())) {
2147       __ CallRuntime(Runtime::kThrowConstAssignError);
2148     }
2149     // Silently ignore store in sloppy mode.
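    // For example (hypothetical sloppy-mode source):
    //   const x = 1; x = 2;  // no-op under legacy const: x stays 1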
2150   }
2151 }
2152 
2153 
2154 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2155   // Assignment to a property, using a named store IC.
2156   Property* prop = expr->target()->AsProperty();
2157   DCHECK(prop != NULL);
2158   DCHECK(prop->key()->IsLiteral());
2159 
2160   __ Move(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
2161   PopOperand(StoreDescriptor::ReceiverRegister());
2162   EmitLoadStoreICSlot(expr->AssignmentSlot());
2163   CallStoreIC();
2164 
2165   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2166   context()->Plug(rax);
2167 }
2168 
2169 
2170 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2171   // Assignment to named property of super.
2172   // rax : value
2173   // stack : receiver ('this'), home_object
2174   DCHECK(prop != NULL);
2175   Literal* key = prop->key()->AsLiteral();
2176   DCHECK(key != NULL);
2177 
2178   PushOperand(key->value());
2179   PushOperand(rax);
2180   CallRuntimeWithOperands(is_strict(language_mode())
2181                               ? Runtime::kStoreToSuper_Strict
2182                               : Runtime::kStoreToSuper_Sloppy);
2183 }
2184 
2185 
2186 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2187   // Assignment to keyed property of super.
2188   // rax : value
2189   // stack : receiver ('this'), home_object, key
2190   DCHECK(prop != NULL);
2191 
2192   PushOperand(rax);
2193   CallRuntimeWithOperands(is_strict(language_mode())
2194                               ? Runtime::kStoreKeyedToSuper_Strict
2195                               : Runtime::kStoreKeyedToSuper_Sloppy);
2196 }
2197 
2198 
2199 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2200   // Assignment to a property, using a keyed store IC.
2201   PopOperand(StoreDescriptor::NameRegister());  // Key.
2202   PopOperand(StoreDescriptor::ReceiverRegister());
2203   DCHECK(StoreDescriptor::ValueRegister().is(rax));
2204   Handle<Code> ic =
2205       CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2206   EmitLoadStoreICSlot(expr->AssignmentSlot());
2207   CallIC(ic);
2208 
2209   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2210   context()->Plug(rax);
2211 }
2212 
2213 
2214 void FullCodeGenerator::CallIC(Handle<Code> code,
2215                                TypeFeedbackId ast_id) {
2216   ic_total_count_++;
2217   __ call(code, RelocInfo::CODE_TARGET, ast_id);
2218 }
2219 
2220 
2221 // Code common for calls using the IC.
2222 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2223   Expression* callee = expr->expression();
2224 
2225   // Get the target function.
2226   ConvertReceiverMode convert_mode;
2227   if (callee->IsVariableProxy()) {
2228     { StackValueContext context(this);
2229       EmitVariableLoad(callee->AsVariableProxy());
2230       PrepareForBailout(callee, BailoutState::NO_REGISTERS);
2231     }
2232     // Push undefined as receiver. This is patched in the Call builtin if it
2233     // is a sloppy mode method.
2234     PushOperand(isolate()->factory()->undefined_value());
2235     convert_mode = ConvertReceiverMode::kNullOrUndefined;
2236   } else {
2237     // Load the function from the receiver.
2238     DCHECK(callee->IsProperty());
2239     DCHECK(!callee->AsProperty()->IsSuperAccess());
2240     __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
2241     EmitNamedPropertyLoad(callee->AsProperty());
2242     PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2243                            BailoutState::TOS_REGISTER);
2244     // Push the target function under the receiver.
2245     PushOperand(Operand(rsp, 0));
2246     __ movp(Operand(rsp, kPointerSize), rax);
2247     convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2248   }
2249 
2250   EmitCall(expr, convert_mode);
2251 }
2252 
2253 
2254 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2255   Expression* callee = expr->expression();
2256   DCHECK(callee->IsProperty());
2257   Property* prop = callee->AsProperty();
2258   DCHECK(prop->IsSuperAccess());
2259   SetExpressionPosition(prop);
2260 
2261   Literal* key = prop->key()->AsLiteral();
2262   DCHECK(!key->value()->IsSmi());
2263   // Load the function from the receiver.
2264   SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2265   VisitForStackValue(super_ref->home_object());
2266   VisitForAccumulatorValue(super_ref->this_var());
2267   PushOperand(rax);
2268   PushOperand(rax);
2269   PushOperand(Operand(rsp, kPointerSize * 2));
2270   PushOperand(key->value());
2271 
2272   // Stack here:
2273   //  - home_object
2274   //  - this (receiver)
2275   //  - this (receiver) <-- LoadFromSuper will pop here and below.
2276   //  - home_object
2277   //  - key
2278   CallRuntimeWithOperands(Runtime::kLoadFromSuper);
2279   PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2280 
2281   // Replace home_object with target function.
2282   __ movp(Operand(rsp, kPointerSize), rax);
2283 
2284   // Stack here:
2285   // - target function
2286   // - this (receiver)
2287   EmitCall(expr);
2288 }
2289 
2290 
2291 // Common code for calls using the IC.
2292 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2293                                                 Expression* key) {
2294   // Load the key.
2295   VisitForAccumulatorValue(key);
2296 
2297   Expression* callee = expr->expression();
2298 
2299   // Load the function from the receiver.
2300   DCHECK(callee->IsProperty());
2301   __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
2302   __ Move(LoadDescriptor::NameRegister(), rax);
2303   EmitKeyedPropertyLoad(callee->AsProperty());
2304   PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2305                          BailoutState::TOS_REGISTER);
2306 
2307   // Push the target function under the receiver.
2308   PushOperand(Operand(rsp, 0));
2309   __ movp(Operand(rsp, kPointerSize), rax);
2310 
2311   EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2312 }
2313 
2314 
2315 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2316   Expression* callee = expr->expression();
2317   DCHECK(callee->IsProperty());
2318   Property* prop = callee->AsProperty();
2319   DCHECK(prop->IsSuperAccess());
2320 
2321   SetExpressionPosition(prop);
2322   // Load the function from the receiver.
2323   SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2324   VisitForStackValue(super_ref->home_object());
2325   VisitForAccumulatorValue(super_ref->this_var());
2326   PushOperand(rax);
2327   PushOperand(rax);
2328   PushOperand(Operand(rsp, kPointerSize * 2));
2329   VisitForStackValue(prop->key());
2330 
2331   // Stack here:
2332   //  - home_object
2333   //  - this (receiver)
2334   //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2335   //  - home_object
2336   //  - key
2337   CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
2338   PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2339 
2340   // Replace home_object with target function.
2341   __ movp(Operand(rsp, kPointerSize), rax);
2342 
2343   // Stack here:
2344   // - target function
2345   // - this (receiver)
2346   EmitCall(expr);
2347 }
2348 
2349 
2350 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2351   // Load the arguments.
2352   ZoneList<Expression*>* args = expr->arguments();
2353   int arg_count = args->length();
2354   for (int i = 0; i < arg_count; i++) {
2355     VisitForStackValue(args->at(i));
2356   }
2357 
2358   PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
2359   SetCallPosition(expr, expr->tail_call_mode());
2360   if (expr->tail_call_mode() == TailCallMode::kAllow) {
2361     if (FLAG_trace) {
2362       __ CallRuntime(Runtime::kTraceTailCall);
2363     }
2364     // Update profiling counters before the tail call since we will
2365     // not return to this function.
2366     EmitProfilingCounterHandlingForReturnSequence(true);
2367   }
2368   Handle<Code> ic =
2369       CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
2370           .code();
2371   __ Move(rdx, SmiFromSlot(expr->CallFeedbackICSlot()));
2372   __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2373   // Don't assign a type feedback id to the IC, since type feedback is provided
2374   // by the vector above.
2375   CallIC(ic);
2376   OperandStackDepthDecrement(arg_count + 1);
2377 
2378   RecordJSReturnSite(expr);
2379   RestoreContext();
2380   // Discard the function left on TOS.
2381   context()->DropAndPlug(1, rax);
2382 }
2383 
2384 void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
2385   int arg_count = expr->arguments()->length();
2386   // Push copy of the first argument or undefined if it doesn't exist.
2387   if (arg_count > 0) {
2388     __ Push(Operand(rsp, arg_count * kPointerSize));
2389   } else {
2390     __ PushRoot(Heap::kUndefinedValueRootIndex);
2391   }
2392 
2393   // Push the enclosing function.
2394   __ Push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
2395 
2396   // Push the language mode.
2397   __ Push(Smi::FromInt(language_mode()));
2398 
2399   // Push the start position of the scope the call resides in.
2400   __ Push(Smi::FromInt(scope()->start_position()));
2401 
2402   // Push the source position of the eval call.
2403   __ Push(Smi::FromInt(expr->position()));
2404 
2405   // Do the runtime call.
2406   __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2407 }
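// Note that this resolution only matters for *direct* calls to eval, e.g.
// eval("x + 1"), which may see the caller's scope; an indirect call such as
// (0, eval)("x + 1") takes the ordinary call path and evaluates in global
// scope.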
2408 
2409 
2410 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2411 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2412   VariableProxy* callee = expr->expression()->AsVariableProxy();
2413   if (callee->var()->IsLookupSlot()) {
2414     Label slow, done;
2415     SetExpressionPosition(callee);
2416     // Generate code for loading from variables potentially shadowed by
2417     // eval-introduced variables.
2418     EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2419     __ bind(&slow);
2420     // Call the runtime to find the function to call (returned in rax) and
2421     // the object holding it (returned in rdx).
2422     __ Push(callee->name());
2423     __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2424     PushOperand(rax);  // Function.
2425     PushOperand(rdx);  // Receiver.
2426     PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);
2427 
2428     // If fast case code has been generated, emit code to push the function
2429     // and receiver and have the slow path jump around this code.
2430     if (done.is_linked()) {
2431       Label call;
2432       __ jmp(&call, Label::kNear);
2433       __ bind(&done);
2434       // Push function.
2435       __ Push(rax);
2436       // Pass undefined as the receiver, which is the WithBaseObject of a
2437       // non-object environment record.  If the callee is sloppy, it will patch
2438       // it up to be the global receiver.
2439       __ PushRoot(Heap::kUndefinedValueRootIndex);
2440       __ bind(&call);
2441     }
2442   } else {
2443     VisitForStackValue(callee);
2444     // refEnv.WithBaseObject()
2445     OperandStackDepthIncrement(1);
2446     __ PushRoot(Heap::kUndefinedValueRootIndex);
2447   }
2448 }
2449 
2450 
2451 void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
2452   // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval
2453   // to resolve the function we need to call.  Then we call the resolved
2454   // function using the given arguments.
2455   ZoneList<Expression*>* args = expr->arguments();
2456   int arg_count = args->length();
2457   PushCalleeAndWithBaseObject(expr);
2458 
2459   // Push the arguments.
2460   for (int i = 0; i < arg_count; i++) {
2461     VisitForStackValue(args->at(i));
2462   }
2463 
2464   // Push a copy of the function (found below the arguments) and resolve
2465   // eval.
2466   __ Push(Operand(rsp, (arg_count + 1) * kPointerSize));
2467   EmitResolvePossiblyDirectEval(expr);
2468 
2469   // Touch up the callee.
2470   __ movp(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
2471 
2472   PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);
2473 
2474   SetCallPosition(expr);
2475   __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2476   __ Set(rax, arg_count);
2477   __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2478                                       expr->tail_call_mode()),
2479           RelocInfo::CODE_TARGET);
2480   OperandStackDepthDecrement(arg_count + 1);
2481   RecordJSReturnSite(expr);
2482   RestoreContext();
2483   context()->DropAndPlug(1, rax);
2484 }
2485 
2486 
2487 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2488   Comment cmnt(masm_, "[ CallNew");
2489   // According to ECMA-262, section 11.2.2, page 44, the function
2490   // expression in new calls must be evaluated before the
2491   // arguments.
2492 
2493   // Push constructor on the stack.  If it's not a function it's used as
2494   // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2495   // ignored.
2496   DCHECK(!expr->expression()->IsSuperPropertyReference());
2497   VisitForStackValue(expr->expression());
2498 
2499   // Push the arguments ("left-to-right") on the stack.
2500   ZoneList<Expression*>* args = expr->arguments();
2501   int arg_count = args->length();
2502   for (int i = 0; i < arg_count; i++) {
2503     VisitForStackValue(args->at(i));
2504   }
2505 
2506   // Call the construct call builtin that handles allocation and
2507   // constructor invocation.
2508   SetConstructCallPosition(expr);
2509 
2510   // Load function and argument count into rdi and rax.
2511   __ Set(rax, arg_count);
2512   __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
2513 
2514   // Record call targets in unoptimized code, but not in the snapshot.
2515   __ EmitLoadTypeFeedbackVector(rbx);
2516   __ Move(rdx, SmiFromSlot(expr->CallNewFeedbackSlot()));
2517 
2518   CallConstructStub stub(isolate());
2519   __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
2520   OperandStackDepthDecrement(arg_count + 1);
2521   PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
2522   RestoreContext();
2523   context()->Plug(rax);
2524 }
2525 
2526 
2527 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2528   SuperCallReference* super_call_ref =
2529       expr->expression()->AsSuperCallReference();
2530   DCHECK_NOT_NULL(super_call_ref);
2531 
2532   // Push the super constructor target on the stack (may be null,
2533   // but the Construct builtin can deal with that properly).
2534   VisitForAccumulatorValue(super_call_ref->this_function_var());
2535   __ AssertFunction(result_register());
2536   __ movp(result_register(),
2537           FieldOperand(result_register(), HeapObject::kMapOffset));
2538   PushOperand(FieldOperand(result_register(), Map::kPrototypeOffset));
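  // The prototype slot of the active function's map holds the parent
  // constructor: for class B extends A { ... }, B.__proto__ === A, which is
  // exactly the construct target that super(...) must invoke.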
2539 
2540   // Push the arguments ("left-to-right") on the stack.
2541   ZoneList<Expression*>* args = expr->arguments();
2542   int arg_count = args->length();
2543   for (int i = 0; i < arg_count; i++) {
2544     VisitForStackValue(args->at(i));
2545   }
2546 
2547   // Call the construct call builtin that handles allocation and
2548   // constructor invocation.
2549   SetConstructCallPosition(expr);
2550 
2551   // Load new target into rdx.
2552   VisitForAccumulatorValue(super_call_ref->new_target_var());
2553   __ movp(rdx, result_register());
2554 
2555   // Load function and argument count into rdi and rax.
2556   __ Set(rax, arg_count);
2557   __ movp(rdi, Operand(rsp, arg_count * kPointerSize));
2558 
2559   __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2560   OperandStackDepthDecrement(arg_count + 1);
2561 
2562   RecordJSReturnSite(expr);
2563   RestoreContext();
2564   context()->Plug(rax);
2565 }
2566 
2567 
2568 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2569   ZoneList<Expression*>* args = expr->arguments();
2570   DCHECK(args->length() == 1);
2571 
2572   VisitForAccumulatorValue(args->at(0));
2573 
2574   Label materialize_true, materialize_false;
2575   Label* if_true = NULL;
2576   Label* if_false = NULL;
2577   Label* fall_through = NULL;
2578   context()->PrepareTest(&materialize_true, &materialize_false,
2579                          &if_true, &if_false, &fall_through);
2580 
2581   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2582   __ JumpIfSmi(rax, if_true);
2583   __ jmp(if_false);
2584 
2585   context()->Plug(if_true, if_false);
2586 }
2587 
2588 
2589 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2590   ZoneList<Expression*>* args = expr->arguments();
2591   DCHECK(args->length() == 1);
2592 
2593   VisitForAccumulatorValue(args->at(0));
2594 
2595   Label materialize_true, materialize_false;
2596   Label* if_true = NULL;
2597   Label* if_false = NULL;
2598   Label* fall_through = NULL;
2599   context()->PrepareTest(&materialize_true, &materialize_false,
2600                          &if_true, &if_false, &fall_through);
2601 
2602   __ JumpIfSmi(rax, if_false);
2603   __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rbx);
2604   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2605   Split(above_equal, if_true, if_false, fall_through);
2606 
2607   context()->Plug(if_true, if_false);
2608 }
2609 
2610 
2611 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2612   ZoneList<Expression*>* args = expr->arguments();
2613   DCHECK(args->length() == 1);
2614 
2615   VisitForAccumulatorValue(args->at(0));
2616 
2617   Label materialize_true, materialize_false;
2618   Label* if_true = NULL;
2619   Label* if_false = NULL;
2620   Label* fall_through = NULL;
2621   context()->PrepareTest(&materialize_true, &materialize_false,
2622                          &if_true, &if_false, &fall_through);
2623 
2624   __ JumpIfSmi(rax, if_false);
2625   __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
2626   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2627   Split(equal, if_true, if_false, fall_through);
2628 
2629   context()->Plug(if_true, if_false);
2630 }
2631 
2632 
2633 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2634   ZoneList<Expression*>* args = expr->arguments();
2635   DCHECK(args->length() == 1);
2636 
2637   VisitForAccumulatorValue(args->at(0));
2638 
2639   Label materialize_true, materialize_false;
2640   Label* if_true = NULL;
2641   Label* if_false = NULL;
2642   Label* fall_through = NULL;
2643   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2644                          &if_false, &fall_through);
2645 
2646   __ JumpIfSmi(rax, if_false);
2647   __ CmpObjectType(rax, JS_TYPED_ARRAY_TYPE, rbx);
2648   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2649   Split(equal, if_true, if_false, fall_through);
2650 
2651   context()->Plug(if_true, if_false);
2652 }
2653 
2654 
2655 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2656   ZoneList<Expression*>* args = expr->arguments();
2657   DCHECK(args->length() == 1);
2658 
2659   VisitForAccumulatorValue(args->at(0));
2660 
2661   Label materialize_true, materialize_false;
2662   Label* if_true = NULL;
2663   Label* if_false = NULL;
2664   Label* fall_through = NULL;
2665   context()->PrepareTest(&materialize_true, &materialize_false,
2666                          &if_true, &if_false, &fall_through);
2667 
2668   __ JumpIfSmi(rax, if_false);
2669   __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
2670   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2671   Split(equal, if_true, if_false, fall_through);
2672 
2673   context()->Plug(if_true, if_false);
2674 }
2675 
2676 
2677 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2678   ZoneList<Expression*>* args = expr->arguments();
2679   DCHECK(args->length() == 1);
2680 
2681   VisitForAccumulatorValue(args->at(0));
2682 
2683   Label materialize_true, materialize_false;
2684   Label* if_true = NULL;
2685   Label* if_false = NULL;
2686   Label* fall_through = NULL;
2687   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2688                          &if_false, &fall_through);
2689 
2690 
2691   __ JumpIfSmi(rax, if_false);
2692   __ CmpObjectType(rax, JS_PROXY_TYPE, rbx);
2693   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2694   Split(equal, if_true, if_false, fall_through);
2695 
2696   context()->Plug(if_true, if_false);
2697 }
2698 
2699 
2700 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2701   ZoneList<Expression*>* args = expr->arguments();
2702   DCHECK(args->length() == 1);
2703   Label done, null, function, non_function_constructor;
2704 
2705   VisitForAccumulatorValue(args->at(0));
2706 
2707   // If the object is not a JSReceiver, we return null.
2708   __ JumpIfSmi(rax, &null, Label::kNear);
2709   STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2710   __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rax);
2711   __ j(below, &null, Label::kNear);
2712 
2713   // Return 'Function' for JSFunction and JSBoundFunction objects.
2714   __ CmpInstanceType(rax, FIRST_FUNCTION_TYPE);
2715   STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
2716   __ j(above_equal, &function, Label::kNear);
2717 
2718   // Check if the constructor in the map is a JS function.
2719   __ GetMapConstructor(rax, rax, rbx);
2720   __ CmpInstanceType(rbx, JS_FUNCTION_TYPE);
2721   __ j(not_equal, &non_function_constructor, Label::kNear);
2722 
2723   // rax now contains the constructor function. Grab the
2724   // instance class name from there.
2725   __ movp(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
2726   __ movp(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
2727   __ jmp(&done, Label::kNear);
2728 
2729   // Non-JS objects have class null.
2730   __ bind(&null);
2731   __ LoadRoot(rax, Heap::kNullValueRootIndex);
2732   __ jmp(&done, Label::kNear);
2733 
2734   // Functions have class 'Function'.
2735   __ bind(&function);
2736   __ LoadRoot(rax, Heap::kFunction_stringRootIndex);
2737   __ jmp(&done, Label::kNear);
2738 
2739   // Objects with a non-function constructor have class 'Object'.
2740   __ bind(&non_function_constructor);
2741   __ LoadRoot(rax, Heap::kObject_stringRootIndex);
2742 
2743   // All done.
2744   __ bind(&done);
2745 
2746   context()->Plug(rax);
2747 }
2748 
2749 
2750 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
2751   ZoneList<Expression*>* args = expr->arguments();
2752   DCHECK(args->length() == 1);
2753 
2754   VisitForAccumulatorValue(args->at(0));  // Load the object.
2755 
2756   Label done;
2757   // If the object is a smi return the object.
2758   __ JumpIfSmi(rax, &done);
2759   // If the object is not a value type, return the object.
2760   __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
2761   __ j(not_equal, &done);
2762   __ movp(rax, FieldOperand(rax, JSValue::kValueOffset));
2763 
2764   __ bind(&done);
2765   context()->Plug(rax);
2766 }
2767 
2768 
2769 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
2770   ZoneList<Expression*>* args = expr->arguments();
2771   DCHECK(args->length() == 1);
2772 
2773   VisitForAccumulatorValue(args->at(0));
2774 
2775   Label done;
2776   StringCharFromCodeGenerator generator(rax, rbx);
2777   generator.GenerateFast(masm_);
2778   __ jmp(&done);
2779 
2780   NopRuntimeCallHelper call_helper;
2781   generator.GenerateSlow(masm_, call_helper);
2782 
2783   __ bind(&done);
2784   context()->Plug(rbx);
2785 }
2786 
2787 
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = rbx;
  Register index = rax;
  Register result = rdx;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
                                      &need_conversion, &index_out_of_range);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
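  // For example, 'abc'.charCodeAt(42) evaluates to NaN.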
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
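  // The stack now holds, from the top down: the arguments (last on top),
  // the receiver, then the target, so the target sits argc + 1 slots up
  // from rsp.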
  // Move target to rdi.
  int const argc = args->length() - 2;
  __ movp(rdi, Operand(rsp, (argc + 1) * kPointerSize));
  // Call the target.
  __ Set(rax, argc);
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, rax);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

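  // A string's hash field caches a numeric array index; the
  // kContainsCachedArrayIndexMask bits are clear exactly when such an
  // index is cached, hence the jump on zero below.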
  __ testl(FieldOperand(rax, String::kHashFieldOffset),
           Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ j(zero, if_true);
  __ jmp(if_false);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(rax);

  __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
  DCHECK(String::kHashShift >= kSmiTagSize);
  __ IndexFromHash(rax, rax);

  context()->Plug(rax);
}


void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(rax);
  __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
  __ movp(rax, FieldOperand(rax, Map::kPrototypeOffset));
  context()->Plug(rax);
}

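// %_DebugIsActive: reads the isolate's byte-sized debug_is_active flag
// and returns it to JavaScript as a smi.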
void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ Move(kScratchRegister, debug_is_active);
  __ movzxbp(rax, Operand(kScratchRegister, 0));
  __ Integer32ToSmi(rax, rax);
  context()->Plug(rax);
}


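// Builds the { value, done } object returned from iterators, e.g. the
// { value: 1, done: false } results produced while stepping through an
// array; falls back to the runtime if inline allocation fails.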
void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  __ Allocate(JSIteratorResult::kSize, rax, rcx, rdx, &runtime,
              NO_ALLOCATION_FLAGS);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, rbx);
  __ movp(FieldOperand(rax, HeapObject::kMapOffset), rbx);
  __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), rbx);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rbx);
  __ Pop(FieldOperand(rax, JSIteratorResult::kDoneOffset));
  __ Pop(FieldOperand(rax, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done, Label::kNear);

  __ bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(rax);
}


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadNativeContextSlot(expr->context_index(), rax);
  PushOperand(rax);

  // Push undefined as receiver.
  OperandStackDepthIncrement(1);
  __ PushRoot(Heap::kUndefinedValueRootIndex);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  __ movp(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
  __ Set(rax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(rax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          __ movp(rax, NativeContextOperand());
          __ Push(ContextOperand(rax, Context::EXTENSION_INDEX));
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(rax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false.  'this' is
          // not really a variable, though we implement it as one.  The
          // subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(rax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ LoadRoot(rax, Heap::kTrueValueRootIndex);
        } else {
          __ PushRoot(Heap::kTrueValueRootIndex);
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ LoadRoot(rax, Heap::kFalseValueRootIndex);
        } else {
          __ PushRoot(Heap::kFalseValueRootIndex);
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ movp(rbx, rax);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(rax);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      PushOperand(Smi::FromInt(0));
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        VisitForStackValue(prop->obj());
        __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        PushOperand(result_register());
        PushOperand(MemOperand(rsp, kPointerSize));
        PushOperand(result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForStackValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        VisitForAccumulatorValue(prop->key());
        PushOperand(result_register());
        PushOperand(MemOperand(rsp, 2 * kPointerSize));
        PushOperand(MemOperand(rsp, 2 * kPointerSize));
        PushOperand(result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        // Leave the receiver on the stack.
        __ movp(LoadDescriptor::ReceiverRegister(), Operand(rsp, kPointerSize));
        // Copy of key, needed for later store.
        __ movp(LoadDescriptor::NameRegister(), Operand(rsp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load has a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
  } else {
    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(rax, &slow, Label::kNear);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ Push(rax);
            break;
          case NAMED_PROPERTY:
            __ movp(Operand(rsp, kPointerSize), rax);
            break;
          case NAMED_SUPER_PROPERTY:
            __ movp(Operand(rsp, 2 * kPointerSize), rax);
            break;
          case KEYED_PROPERTY:
            __ movp(Operand(rsp, 2 * kPointerSize), rax);
            break;
          case KEYED_SUPER_PROPERTY:
            __ movp(Operand(rsp, 3 * kPointerSize), rax);
            break;
        }
      }
    }

    SmiOperationConstraints constraints =
        SmiOperationConstraint::kPreserveSourceRegister |
        SmiOperationConstraint::kBailoutOnNoOverflow;
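    // With kBailoutOnNoOverflow the smi operation jumps straight to 'done'
    // on success and falls through on overflow, where the BinaryOpIC stub
    // below redoes the operation on the preserved input.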
    if (expr->op() == Token::INC) {
      __ SmiAddConstant(rax, rax, Smi::FromInt(1), constraints, &done,
                        Label::kNear);
    } else {
      __ SmiSubConstant(rax, rax, Smi::FromInt(1), constraints, &done,
                        Label::kNear);
    }
    __ jmp(&stub_call, Label::kNear);
    __ bind(&slow);
  }

  // Convert old value into a number.
  __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(rax);
          break;
        case NAMED_PROPERTY:
          __ movp(Operand(rsp, kPointerSize), rax);
          break;
        case NAMED_SUPER_PROPERTY:
          __ movp(Operand(rsp, 2 * kPointerSize), rax);
          break;
        case KEYED_PROPERTY:
          __ movp(Operand(rsp, 2 * kPointerSize), rax);
          break;
        case KEYED_SUPER_PROPERTY:
          __ movp(Operand(rsp, 3 * kPointerSize), rax);
          break;
      }
    }
  }

  SetExpressionPosition(expr);

  // Call stub for +1/-1.
  __ bind(&stub_call);
  __ movp(rdx, rax);
  __ Move(rax, Smi::FromInt(1));
  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), expr->binary_op()).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in rax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(),
                                 BailoutState::TOS_REGISTER);
          context.Plug(rax);
        }
        // For all contexts except kEffect: We have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(rax);
      }
      break;
    case NAMED_PROPERTY: {
      __ Move(StoreDescriptor::NameRegister(),
              prop->key()->AsLiteral()->value());
      PopOperand(StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(expr->CountSlot());
      CallStoreIC();
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      PopOperand(StoreDescriptor::NameRegister());
      PopOperand(StoreDescriptor::ReceiverRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      EmitLoadStoreICSlot(expr->CountSlot());
      CallIC(ic);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(rax);
      }
      break;
    }
  }
}


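// Emits the fast path for comparisons of the form typeof x == 'literal',
// e.g. typeof v === 'number', without materializing the typeof string.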
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(rax, if_true);
    __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
    __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(rax, if_false);
    __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
    Split(below, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(rax, if_false);
    __ CmpObjectType(rax, SYMBOL_TYPE, rdx);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ CompareRoot(rax, Heap::kTrueValueRootIndex);
    __ j(equal, if_true);
    __ CompareRoot(rax, Heap::kFalseValueRootIndex);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ CompareRoot(rax, Heap::kNullValueRootIndex);
    __ j(equal, if_false);
    __ JumpIfSmi(rax, if_false);
    // Check for undetectable objects => true.
    __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(rax, if_false);
    // Check for callable and not undetectable objects => true.
    __ movp(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ movzxbl(rdx, FieldOperand(rdx, Map::kBitFieldOffset));
    __ andb(rdx,
            Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    __ cmpb(rdx, Immediate(1 << Map::kIsCallable));
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(rax, if_false);
    __ CompareRoot(rax, Heap::kNullValueRootIndex);
    __ j(equal, if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rdx);
    __ j(below, if_false);
    // Check for callable or undetectable objects => false.
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(zero, if_true, if_false, fall_through);
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)   \
  } else if (String::Equals(check, factory->type##_string())) { \
    __ JumpIfSmi(rax, if_false);                                \
    __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));    \
    __ CompareRoot(rax, Heap::k##Type##MapRootIndex);           \
    Split(equal, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
    // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(rax, Heap::kTrueValueRootIndex);
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      PopOperand(rdx);
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(rax, Heap::kTrueValueRootIndex);
      Split(equal, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cc = CompareIC::ComputeCondition(op);
      PopOperand(rdx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ movp(rcx, rdx);
        __ orp(rcx, rax);
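        // The smi tag is zero, so or-ing the operands leaves the tag bit
        // clear only if both rdx and rax are smis.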
        patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
        __ cmpp(rdx, rax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ testp(rax, rax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ CompareRoot(rax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    __ JumpIfSmi(rax, if_false);
    __ movp(rax, FieldOperand(rax, HeapObject::kMapOffset));
    __ testb(FieldOperand(rax, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


Register FullCodeGenerator::result_register() {
  return rax;
}


Register FullCodeGenerator::context_register() {
  return rsi;
}

void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK(IsAligned(frame_offset, kPointerSize));
  __ movp(value, Operand(rbp, frame_offset));
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK(IsAligned(frame_offset, kPointerSize));
  __ movp(Operand(rbp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ movp(dst, ContextOperand(rsi, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ movp(rax, NativeContextOperand());
    PushOperand(ContextOperand(rax, Context::CLOSURE_INDEX));
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    PushOperand(ContextOperand(rsi, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    PushOperand(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }
}


// ----------------------------------------------------------------------------
// Non-local control flow support.


void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(rdx));

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Load(rdx, pending_message_obj);
  PushOperand(rdx);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(rdx));
  // Restore pending message from stack.
  PopOperand(rdx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Store(pending_message_obj, rdx);
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(rdx));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
  __ Store(pending_message_obj, rdx);
}


void FullCodeGenerator::DeferredCommands::EmitCommands() {
  __ Pop(result_register());  // Restore the accumulator.
  __ Pop(rdx);                // Get the token.
  for (DeferredCommand cmd : commands_) {
    Label skip;
    __ SmiCompare(rdx, Smi::FromInt(cmd.token));
    __ j(not_equal, &skip);
    switch (cmd.command) {
      case kReturn:
        codegen_->EmitUnwindAndReturn();
        break;
      case kThrow:
        __ Push(result_register());
        __ CallRuntime(Runtime::kReThrow);
        break;
      case kContinue:
        codegen_->EmitContinue(cmd.target);
        break;
      case kBreak:
        codegen_->EmitBreak(cmd.target);
        break;
    }
    __ bind(&skip);
  }
}

#undef __


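// 0x79 is the opcode of the short (rel8) jns jump; 0x66 0x90 is the
// two-byte operand-size-prefixed nop that overwrites it when a back
// edge is patched for on-stack replacement.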
static const byte kJnsInstruction = 0x79;
static const byte kNopByteOne = 0x66;
static const byte kNopByteTwo = 0x90;
#ifdef DEBUG
static const byte kCallInstruction = 0xe8;
#endif


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

  switch (target_state) {
    case INTERRUPT:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     jns ok
      //     call <interrupt stub>
      //   ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     nop
      //     nop
      //     call <on-stack replacement>
      //   ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  Assembler::set_target_address_at(unoptimized_code->GetIsolate(),
                                   call_target_address, unoptimized_code,
                                   replacement_code->entry());
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
    DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  DCHECK_EQ(kNopByteOne, *jns_instr_address);
  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));

  DCHECK_EQ(
      isolate->builtins()->OnStackReplacement()->entry(),
      Assembler::target_address_at(call_target_address, unoptimized_code));
  return ON_STACK_REPLACEMENT;
}

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64