1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 //     * Redistributions of source code must retain the above copyright
7 //       notice, this list of conditions and the following disclaimer.
8 //     * Redistributions in binary form must reproduce the above
9 //       copyright notice, this list of conditions and the following
10 //       disclaimer in the documentation and/or other materials provided
11 //       with the distribution.
12 //     * Neither the name of Google Inc. nor the names of its
13 //       contributors may be used to endorse or promote products derived
14 //       from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_X64)
31 
32 #include "code-stubs.h"
33 #include "codegen.h"
34 #include "compiler.h"
35 #include "debug.h"
36 #include "full-codegen.h"
37 #include "parser.h"
38 #include "scopes.h"
39 #include "stub-cache.h"
40 
41 namespace v8 {
42 namespace internal {
43 
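// Note: '__' is shorthand for ACCESS_MASM(masm_); '__ movq(...)' emits the instruction
// through this generator's MacroAssembler.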
44 #define __ ACCESS_MASM(masm_)
45 
46 
47 class JumpPatchSite BASE_EMBEDDED {
48  public:
49   explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
50 #ifdef DEBUG
51     info_emitted_ = false;
52 #endif
53   }
54 
55   ~JumpPatchSite() {
56     ASSERT(patch_site_.is_bound() == info_emitted_);
57   }
58 
59   void EmitJumpIfNotSmi(Register reg,
60                         Label* target,
61                         Label::Distance near_jump = Label::kFar) {
62     __ testb(reg, Immediate(kSmiTagMask));
63     EmitJump(not_carry, target, near_jump);   // Always taken before patched.
64   }
65 
66   void EmitJumpIfSmi(Register reg,
67                      Label* target,
68                      Label::Distance near_jump = Label::kFar) {
69     __ testb(reg, Immediate(kSmiTagMask));
70     EmitJump(carry, target, near_jump);  // Never taken before patched.
71   }
72 
73   void EmitPatchInfo() {
74     if (patch_site_.is_bound()) {
75       int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
76       ASSERT(is_int8(delta_to_patch_site));
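      // The delta to the patch site is encoded in this test instruction so the
      // IC patching code can locate the inlined smi-check jump emitted above.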
77       __ testl(rax, Immediate(delta_to_patch_site));
78 #ifdef DEBUG
79       info_emitted_ = true;
80 #endif
81     } else {
82       __ nop();  // Signals no inlined code.
83     }
84   }
85 
86  private:
87   // jc will be patched with jz, jnc will become jnz.
88   void EmitJump(Condition cc, Label* target, Label::Distance near_jump) {
89     ASSERT(!patch_site_.is_bound() && !info_emitted_);
90     ASSERT(cc == carry || cc == not_carry);
91     __ bind(&patch_site_);
92     __ j(cc, target, near_jump);
93   }
94 
95   MacroAssembler* masm_;
96   Label patch_site_;
97 #ifdef DEBUG
98   bool info_emitted_;
99 #endif
100 };
101 
102 
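// Size in bytes of the self-optimization counter sequence emitted at the top of
// Generate(); the ASSERT there checks that the emitted code matches this value.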
103 int FullCodeGenerator::self_optimization_header_size() {
104   return 20;
105 }
106 
107 
108 // Generate code for a JS function.  On entry to the function the receiver
109 // and arguments have been pushed on the stack left to right, with the
110 // return address on top of them.  The actual argument count matches the
111 // formal parameter count expected by the function.
112 //
113 // The live registers are:
114 //   o rdi: the JS function object being called (i.e. ourselves)
115 //   o rsi: our context
116 //   o rbp: our caller's frame pointer
117 //   o rsp: stack pointer (pointing to return address)
118 //
119 // The function builds a JS frame.  Please see JavaScriptFrameConstants in
120 // frames-x64.h for its layout.
121 void FullCodeGenerator::Generate() {
122   CompilationInfo* info = info_;
123   handler_table_ =
124       isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
125   SetFunctionPosition(function());
126   Comment cmnt(masm_, "[ function compiled by full code generator");
127 
128   // We can optionally optimize based on counters rather than statistical
129   // sampling.
130   if (info->ShouldSelfOptimize()) {
131     if (FLAG_trace_opt_verbose) {
132       PrintF("[adding self-optimization header to %s]\n",
133              *info->function()->debug_name()->ToCString());
134     }
135     has_self_optimization_header_ = true;
136     MaybeObject* maybe_cell = isolate()->heap()->AllocateJSGlobalPropertyCell(
137         Smi::FromInt(Compiler::kCallsUntilPrimitiveOpt));
138     JSGlobalPropertyCell* cell;
139     if (maybe_cell->To(&cell)) {
140       __ movq(rax, Handle<JSGlobalPropertyCell>(cell),
141               RelocInfo::EMBEDDED_OBJECT);
142       __ SmiAddConstant(FieldOperand(rax, JSGlobalPropertyCell::kValueOffset),
143                         Smi::FromInt(-1));
144       Handle<Code> compile_stub(
145           isolate()->builtins()->builtin(Builtins::kLazyRecompile));
146       __ j(zero, compile_stub, RelocInfo::CODE_TARGET);
147       ASSERT(masm_->pc_offset() == self_optimization_header_size());
148     }
149   }
150 
151 #ifdef DEBUG
152   if (strlen(FLAG_stop_at) > 0 &&
153       info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
154     __ int3();
155   }
156 #endif
157 
158   // Strict mode functions and builtins need to replace the receiver
159   // with undefined when called as functions (without an explicit
160   // receiver object). rcx is zero for method calls and non-zero for
161   // function calls.
162   if (!info->is_classic_mode() || info->is_native()) {
163     Label ok;
164     __ testq(rcx, rcx);
165     __ j(zero, &ok, Label::kNear);
166     // +1 for return address.
167     int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
168     __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
169     __ movq(Operand(rsp, receiver_offset), kScratchRegister);
170     __ bind(&ok);
171   }
172 
173   // Open a frame scope to indicate that there is a frame on the stack.  The
174   // MANUAL indicates that the scope shouldn't actually generate code to set up
175   // the frame (that is done below).
176   FrameScope frame_scope(masm_, StackFrame::MANUAL);
177 
178   __ push(rbp);  // Caller's frame pointer.
179   __ movq(rbp, rsp);
180   __ push(rsi);  // Callee's context.
181   __ push(rdi);  // Callee's JS Function.
182 
183   { Comment cmnt(masm_, "[ Allocate locals");
184     int locals_count = info->scope()->num_stack_slots();
185     if (locals_count == 1) {
186       __ PushRoot(Heap::kUndefinedValueRootIndex);
187     } else if (locals_count > 1) {
188       __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
189       for (int i = 0; i < locals_count; i++) {
190         __ push(rdx);
191       }
192     }
193   }
194 
195   bool function_in_register = true;
196 
197   // Possibly allocate a local context.
198   int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
199   if (heap_slots > 0) {
200     Comment cmnt(masm_, "[ Allocate local context");
201     // Argument to NewContext is the function, which is still in rdi.
202     __ push(rdi);
203     if (heap_slots <= FastNewContextStub::kMaximumSlots) {
204       FastNewContextStub stub(heap_slots);
205       __ CallStub(&stub);
206     } else {
207       __ CallRuntime(Runtime::kNewFunctionContext, 1);
208     }
209     function_in_register = false;
210     // Context is returned in both rax and rsi.  It replaces the context
211     // passed to us.  It's saved in the stack and kept live in rsi.
212     __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
213 
214     // Copy any necessary parameters into the context.
215     int num_parameters = info->scope()->num_parameters();
216     for (int i = 0; i < num_parameters; i++) {
217       Variable* var = scope()->parameter(i);
218       if (var->IsContextSlot()) {
219         int parameter_offset = StandardFrameConstants::kCallerSPOffset +
220             (num_parameters - 1 - i) * kPointerSize;
221         // Load parameter from stack.
222         __ movq(rax, Operand(rbp, parameter_offset));
223         // Store it in the context.
224         int context_offset = Context::SlotOffset(var->index());
225         __ movq(Operand(rsi, context_offset), rax);
226         // Update the write barrier.  This clobbers rax and rbx.
227         __ RecordWriteContextSlot(
228             rsi, context_offset, rax, rbx, kDontSaveFPRegs);
229       }
230     }
231   }
232 
233   // Possibly allocate an arguments object.
234   Variable* arguments = scope()->arguments();
235   if (arguments != NULL) {
236     // Arguments object must be allocated after the context object, in
237     // case the "arguments" or ".arguments" variables are in the context.
238     Comment cmnt(masm_, "[ Allocate arguments object");
239     if (function_in_register) {
240       __ push(rdi);
241     } else {
242       __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
243     }
244     // The receiver is just before the parameters on the caller's stack.
245     int num_parameters = info->scope()->num_parameters();
246     int offset = num_parameters * kPointerSize;
247     __ lea(rdx,
248            Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
249     __ push(rdx);
250     __ Push(Smi::FromInt(num_parameters));
251     // Arguments to ArgumentsAccessStub:
252     //   function, receiver address, parameter count.
253     // The stub will rewrite receiver and parameter count if the previous
254     // stack frame was an arguments adapter frame.
255     ArgumentsAccessStub::Type type;
256     if (!is_classic_mode()) {
257       type = ArgumentsAccessStub::NEW_STRICT;
258     } else if (function()->has_duplicate_parameters()) {
259       type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
260     } else {
261       type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
262     }
263     ArgumentsAccessStub stub(type);
264     __ CallStub(&stub);
265 
266     SetVar(arguments, rax, rbx, rdx);
267   }
268 
269   if (FLAG_trace) {
270     __ CallRuntime(Runtime::kTraceEnter, 0);
271   }
272 
273   // Visit the declarations and body unless there is an illegal
274   // redeclaration.
275   if (scope()->HasIllegalRedeclaration()) {
276     Comment cmnt(masm_, "[ Declarations");
277     scope()->VisitIllegalRedeclaration(this);
278 
279   } else {
280     PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
281     { Comment cmnt(masm_, "[ Declarations");
282       // For named function expressions, declare the function name as a
283       // constant.
284       if (scope()->is_function_scope() && scope()->function() != NULL) {
285         VariableProxy* proxy = scope()->function();
286         ASSERT(proxy->var()->mode() == CONST ||
287                proxy->var()->mode() == CONST_HARMONY);
288         ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
289         EmitDeclaration(proxy, proxy->var()->mode(), NULL);
290       }
291       VisitDeclarations(scope()->declarations());
292     }
293 
294     { Comment cmnt(masm_, "[ Stack check");
295       PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
296       Label ok;
297       __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
298       __ j(above_equal, &ok, Label::kNear);
299       StackCheckStub stub;
300       __ CallStub(&stub);
301       __ bind(&ok);
302     }
303 
304     { Comment cmnt(masm_, "[ Body");
305       ASSERT(loop_depth() == 0);
306       VisitStatements(function()->body());
307       ASSERT(loop_depth() == 0);
308     }
309   }
310 
311   // Always emit a 'return undefined' in case control fell off the end of
312   // the body.
313   { Comment cmnt(masm_, "[ return <undefined>;");
314     __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
315     EmitReturnSequence();
316   }
317 }
318 
319 
320 void FullCodeGenerator::ClearAccumulator() {
321   __ Set(rax, 0);
322 }
323 
324 
325 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
326                                        Label* back_edge_target) {
327   Comment cmnt(masm_, "[ Stack check");
328   Label ok;
329   __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
330   __ j(above_equal, &ok, Label::kNear);
331   StackCheckStub stub;
332   __ CallStub(&stub);
333   // Record a mapping of this PC offset to the OSR id.  This is used to find
334   // the AST id from the unoptimized code in order to use it as a key into
335   // the deoptimization input data found in the optimized code.
336   RecordStackCheck(stmt->OsrEntryId());
337 
338   // Loop stack checks can be patched to perform on-stack replacement. In
339   // order to decide whether or not to perform OSR we embed the loop depth
340   // in a test instruction after the call so we can extract it from the OSR
341   // builtin.
342   ASSERT(loop_depth() > 0);
343   __ testl(rax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
344 
345   __ bind(&ok);
346   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
347   // Record a mapping of the OSR id to this PC.  This is used if the OSR
348   // entry becomes the target of a bailout.  We don't expect it to be, but
349   // we want it to work if it is.
350   PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
351 }
352 
353 
354 void FullCodeGenerator::EmitReturnSequence() {
355   Comment cmnt(masm_, "[ Return sequence");
356   if (return_label_.is_bound()) {
357     __ jmp(&return_label_);
358   } else {
359     __ bind(&return_label_);
360     if (FLAG_trace) {
361       __ push(rax);
362       __ CallRuntime(Runtime::kTraceExit, 1);
363     }
364 #ifdef DEBUG
365     // Add a label for checking the size of the code used for returning.
366     Label check_exit_codesize;
367     masm_->bind(&check_exit_codesize);
368 #endif
369     CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
370     __ RecordJSReturn();
371     // Do not use the leave instruction here because it is too short to
372     // patch with the code required by the debugger.
373     __ movq(rsp, rbp);
374     __ pop(rbp);
375 
376     int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
377     __ Ret(arguments_bytes, rcx);
378 
379 #ifdef ENABLE_DEBUGGER_SUPPORT
380     // Add padding that will be overwritten by a debugger breakpoint.  We
381     // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k"
382     // (3 + 1 + 3).
383     const int kPadding = Assembler::kJSReturnSequenceLength - 7;
384     for (int i = 0; i < kPadding; ++i) {
385       masm_->int3();
386     }
387     // Check that the size of the code used for returning is large enough
388     // for the debugger's requirements.
389     ASSERT(Assembler::kJSReturnSequenceLength <=
390            masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
391 #endif
392   }
393 }
394 
395 
396 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
397   ASSERT(var->IsStackAllocated() || var->IsContextSlot());
398 }
399 
400 
401 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
402   ASSERT(var->IsStackAllocated() || var->IsContextSlot());
403   codegen()->GetVar(result_register(), var);
404 }
405 
406 
407 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
408   ASSERT(var->IsStackAllocated() || var->IsContextSlot());
409   MemOperand operand = codegen()->VarOperand(var, result_register());
410   __ push(operand);
411 }
412 
413 
414 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
415   codegen()->GetVar(result_register(), var);
416   codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
417   codegen()->DoTest(this);
418 }
419 
420 
421 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
422 }
423 
424 
425 void FullCodeGenerator::AccumulatorValueContext::Plug(
426     Heap::RootListIndex index) const {
427   __ LoadRoot(result_register(), index);
428 }
429 
430 
431 void FullCodeGenerator::StackValueContext::Plug(
432     Heap::RootListIndex index) const {
433   __ PushRoot(index);
434 }
435 
436 
437 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
438   codegen()->PrepareForBailoutBeforeSplit(condition(),
439                                           true,
440                                           true_label_,
441                                           false_label_);
442   if (index == Heap::kUndefinedValueRootIndex ||
443       index == Heap::kNullValueRootIndex ||
444       index == Heap::kFalseValueRootIndex) {
445     if (false_label_ != fall_through_) __ jmp(false_label_);
446   } else if (index == Heap::kTrueValueRootIndex) {
447     if (true_label_ != fall_through_) __ jmp(true_label_);
448   } else {
449     __ LoadRoot(result_register(), index);
450     codegen()->DoTest(this);
451   }
452 }
453 
454 
455 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
456 }
457 
458 
459 void FullCodeGenerator::AccumulatorValueContext::Plug(
460     Handle<Object> lit) const {
461   __ Move(result_register(), lit);
462 }
463 
464 
465 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
466   __ Push(lit);
467 }
468 
469 
470 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
471   codegen()->PrepareForBailoutBeforeSplit(condition(),
472                                           true,
473                                           true_label_,
474                                           false_label_);
475   ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
476   if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
477     if (false_label_ != fall_through_) __ jmp(false_label_);
478   } else if (lit->IsTrue() || lit->IsJSObject()) {
479     if (true_label_ != fall_through_) __ jmp(true_label_);
480   } else if (lit->IsString()) {
481     if (String::cast(*lit)->length() == 0) {
482       if (false_label_ != fall_through_) __ jmp(false_label_);
483     } else {
484       if (true_label_ != fall_through_) __ jmp(true_label_);
485     }
486   } else if (lit->IsSmi()) {
487     if (Smi::cast(*lit)->value() == 0) {
488       if (false_label_ != fall_through_) __ jmp(false_label_);
489     } else {
490       if (true_label_ != fall_through_) __ jmp(true_label_);
491     }
492   } else {
493     // For simplicity we always test the accumulator register.
494     __ Move(result_register(), lit);
495     codegen()->DoTest(this);
496   }
497 }
498 
499 
500 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
501                                                    Register reg) const {
502   ASSERT(count > 0);
503   __ Drop(count);
504 }
505 
506 
507 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
508     int count,
509     Register reg) const {
510   ASSERT(count > 0);
511   __ Drop(count);
512   __ Move(result_register(), reg);
513 }
514 
515 
516 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
517                                                        Register reg) const {
518   ASSERT(count > 0);
519   if (count > 1) __ Drop(count - 1);
520   __ movq(Operand(rsp, 0), reg);
521 }
522 
523 
524 void FullCodeGenerator::TestContext::DropAndPlug(int count,
525                                                  Register reg) const {
526   ASSERT(count > 0);
527   // For simplicity we always test the accumulator register.
528   __ Drop(count);
529   __ Move(result_register(), reg);
530   codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
531   codegen()->DoTest(this);
532 }
533 
534 
535 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
536                                             Label* materialize_false) const {
537   ASSERT(materialize_true == materialize_false);
538   __ bind(materialize_true);
539 }
540 
541 
542 void FullCodeGenerator::AccumulatorValueContext::Plug(
543     Label* materialize_true,
544     Label* materialize_false) const {
545   Label done;
546   __ bind(materialize_true);
547   __ Move(result_register(), isolate()->factory()->true_value());
548   __ jmp(&done, Label::kNear);
549   __ bind(materialize_false);
550   __ Move(result_register(), isolate()->factory()->false_value());
551   __ bind(&done);
552 }
553 
554 
555 void FullCodeGenerator::StackValueContext::Plug(
556     Label* materialize_true,
557     Label* materialize_false) const {
558   Label done;
559   __ bind(materialize_true);
560   __ Push(isolate()->factory()->true_value());
561   __ jmp(&done, Label::kNear);
562   __ bind(materialize_false);
563   __ Push(isolate()->factory()->false_value());
564   __ bind(&done);
565 }
566 
567 
568 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
569                                           Label* materialize_false) const {
570   ASSERT(materialize_true == true_label_);
571   ASSERT(materialize_false == false_label_);
572 }
573 
574 
575 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
576 }
577 
578 
579 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
580   Heap::RootListIndex value_root_index =
581       flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
582   __ LoadRoot(result_register(), value_root_index);
583 }
584 
585 
586 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
587   Heap::RootListIndex value_root_index =
588       flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
589   __ PushRoot(value_root_index);
590 }
591 
592 
593 void FullCodeGenerator::TestContext::Plug(bool flag) const {
594   codegen()->PrepareForBailoutBeforeSplit(condition(),
595                                           true,
596                                           true_label_,
597                                           false_label_);
598   if (flag) {
599     if (true_label_ != fall_through_) __ jmp(true_label_);
600   } else {
601     if (false_label_ != fall_through_) __ jmp(false_label_);
602   }
603 }
604 
605 
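// Materialize the test: call the ToBoolean stub on the value in the result register
// and split control flow on its answer.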
606 void FullCodeGenerator::DoTest(Expression* condition,
607                                Label* if_true,
608                                Label* if_false,
609                                Label* fall_through) {
610   ToBooleanStub stub(result_register());
611   __ push(result_register());
612   __ CallStub(&stub);
613   __ testq(result_register(), result_register());
614   // The stub returns nonzero for true.
615   Split(not_zero, if_true, if_false, fall_through);
616 }
617 
618 
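// Emit a branch on cc to if_true and a jump to if_false, omitting whichever target
// is the fall-through block.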
619 void FullCodeGenerator::Split(Condition cc,
620                               Label* if_true,
621                               Label* if_false,
622                               Label* fall_through) {
623   if (if_false == fall_through) {
624     __ j(cc, if_true);
625   } else if (if_true == fall_through) {
626     __ j(NegateCondition(cc), if_false);
627   } else {
628     __ j(cc, if_true);
629     __ jmp(if_false);
630   }
631 }
632 
633 
634 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
635   ASSERT(var->IsStackAllocated());
636   // Offset is negative because higher indexes are at lower addresses.
637   int offset = -var->index() * kPointerSize;
638   // Adjust by a (parameter or local) base offset.
639   if (var->IsParameter()) {
640     offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
641   } else {
642     offset += JavaScriptFrameConstants::kLocal0Offset;
643   }
644   return Operand(rbp, offset);
645 }
646 
647 
648 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
649   ASSERT(var->IsContextSlot() || var->IsStackAllocated());
650   if (var->IsContextSlot()) {
651     int context_chain_length = scope()->ContextChainLength(var->scope());
652     __ LoadContext(scratch, context_chain_length);
653     return ContextOperand(scratch, var->index());
654   } else {
655     return StackOperand(var);
656   }
657 }
658 
659 
660 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
661   ASSERT(var->IsContextSlot() || var->IsStackAllocated());
662   MemOperand location = VarOperand(var, dest);
663   __ movq(dest, location);
664 }
665 
666 
667 void FullCodeGenerator::SetVar(Variable* var,
668                                Register src,
669                                Register scratch0,
670                                Register scratch1) {
671   ASSERT(var->IsContextSlot() || var->IsStackAllocated());
672   ASSERT(!scratch0.is(src));
673   ASSERT(!scratch0.is(scratch1));
674   ASSERT(!scratch1.is(src));
675   MemOperand location = VarOperand(var, scratch0);
676   __ movq(location, src);
677 
678   // Emit the write barrier code if the location is in the heap.
679   if (var->IsContextSlot()) {
680     int offset = Context::SlotOffset(var->index());
681     __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
682   }
683 }
684 
685 
686 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
687                                                      bool should_normalize,
688                                                      Label* if_true,
689                                                      Label* if_false) {
690   // Only prepare for bailouts before splits if we're in a test
691   // context. Otherwise, we let the Visit function deal with the
692   // preparation to avoid preparing with the same AST id twice.
693   if (!context()->IsTest() || !info_->IsOptimizable()) return;
694 
695   Label skip;
696   if (should_normalize) __ jmp(&skip, Label::kNear);
697   PrepareForBailout(expr, TOS_REG);
698   if (should_normalize) {
699     __ CompareRoot(rax, Heap::kTrueValueRootIndex);
700     Split(equal, if_true, if_false, NULL);
701     __ bind(&skip);
702   }
703 }
704 
705 
706 void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
707                                         VariableMode mode,
708                                         FunctionLiteral* function) {
709   // If it was not possible to allocate the variable at compile time, we
710   // need to "declare" it at runtime to make sure it actually exists in the
711   // local context.
712   Variable* variable = proxy->var();
713   bool binding_needs_init = (function == NULL) &&
714       (mode == CONST || mode == CONST_HARMONY || mode == LET);
715   switch (variable->location()) {
716     case Variable::UNALLOCATED:
717       ++global_count_;
718       break;
719 
720     case Variable::PARAMETER:
721     case Variable::LOCAL:
722       if (function != NULL) {
723         Comment cmnt(masm_, "[ Declaration");
724         VisitForAccumulatorValue(function);
725         __ movq(StackOperand(variable), result_register());
726       } else if (binding_needs_init) {
727         Comment cmnt(masm_, "[ Declaration");
728         __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
729         __ movq(StackOperand(variable), kScratchRegister);
730       }
731       break;
732 
733     case Variable::CONTEXT:
734       // The variable in the decl always resides in the current function
735       // context.
736       ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
737       if (FLAG_debug_code) {
738         // Check that we're not inside a with or catch context.
739         __ movq(rbx, FieldOperand(rsi, HeapObject::kMapOffset));
740         __ CompareRoot(rbx, Heap::kWithContextMapRootIndex);
741         __ Check(not_equal, "Declaration in with context.");
742         __ CompareRoot(rbx, Heap::kCatchContextMapRootIndex);
743         __ Check(not_equal, "Declaration in catch context.");
744       }
745       if (function != NULL) {
746         Comment cmnt(masm_, "[ Declaration");
747         VisitForAccumulatorValue(function);
748         __ movq(ContextOperand(rsi, variable->index()), result_register());
749         int offset = Context::SlotOffset(variable->index());
750         // We know that we have written a function, which is not a smi.
751         __ RecordWriteContextSlot(rsi,
752                                   offset,
753                                   result_register(),
754                                   rcx,
755                                   kDontSaveFPRegs,
756                                   EMIT_REMEMBERED_SET,
757                                   OMIT_SMI_CHECK);
758         PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
759       } else if (binding_needs_init) {
760         Comment cmnt(masm_, "[ Declaration");
761         __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
762         __ movq(ContextOperand(rsi, variable->index()), kScratchRegister);
763         // No write barrier since the hole value is in old space.
764         PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
765       }
766       break;
767 
768     case Variable::LOOKUP: {
769       Comment cmnt(masm_, "[ Declaration");
770       __ push(rsi);
771       __ Push(variable->name());
772       // Declaration nodes are always introduced in one of four modes.
773       ASSERT(mode == VAR ||
774              mode == CONST ||
775              mode == CONST_HARMONY ||
776              mode == LET);
777       PropertyAttributes attr =
778           (mode == CONST || mode == CONST_HARMONY) ? READ_ONLY : NONE;
779       __ Push(Smi::FromInt(attr));
780       // Push initial value, if any.
781       // Note: For variables we must not push an initial value (such as
782       // 'undefined') because we may have a (legal) redeclaration and we
783       // must not destroy the current value.
784       if (function != NULL) {
785         VisitForStackValue(function);
786       } else if (binding_needs_init) {
787         __ PushRoot(Heap::kTheHoleValueRootIndex);
788       } else {
789         __ Push(Smi::FromInt(0));  // Indicates no initial value.
790       }
791       __ CallRuntime(Runtime::kDeclareContextSlot, 4);
792       break;
793     }
794   }
795 }
796 
797 
798 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
799   // Call the runtime to declare the globals.
800   __ push(rsi);  // The context is the first argument.
801   __ Push(pairs);
802   __ Push(Smi::FromInt(DeclareGlobalsFlags()));
803   __ CallRuntime(Runtime::kDeclareGlobals, 3);
804   // Return value is ignored.
805 }
806 
807 
808 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
809   Comment cmnt(masm_, "[ SwitchStatement");
810   Breakable nested_statement(this, stmt);
811   SetStatementPosition(stmt);
812 
813   // Keep the switch value on the stack until a case matches.
814   VisitForStackValue(stmt->tag());
815   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
816 
817   ZoneList<CaseClause*>* clauses = stmt->cases();
818   CaseClause* default_clause = NULL;  // Can occur anywhere in the list.
819 
820   Label next_test;  // Recycled for each test.
821   // Compile all the tests with branches to their bodies.
822   for (int i = 0; i < clauses->length(); i++) {
823     CaseClause* clause = clauses->at(i);
824     clause->body_target()->Unuse();
825 
826     // The default is not a test, but remember it as final fall through.
827     if (clause->is_default()) {
828       default_clause = clause;
829       continue;
830     }
831 
832     Comment cmnt(masm_, "[ Case comparison");
833     __ bind(&next_test);
834     next_test.Unuse();
835 
836     // Compile the label expression.
837     VisitForAccumulatorValue(clause->label());
838 
839     // Perform the comparison as if via '==='.
840     __ movq(rdx, Operand(rsp, 0));  // Switch value.
841     bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
842     JumpPatchSite patch_site(masm_);
843     if (inline_smi_code) {
844       Label slow_case;
845       __ movq(rcx, rdx);
846       __ or_(rcx, rax);
847       patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
848 
849       __ cmpq(rdx, rax);
850       __ j(not_equal, &next_test);
851       __ Drop(1);  // Switch value is no longer needed.
852       __ jmp(clause->body_target());
853       __ bind(&slow_case);
854     }
855 
856     // Record position before stub call for type feedback.
857     SetSourcePosition(clause->position());
858     Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
859     __ call(ic, RelocInfo::CODE_TARGET, clause->CompareId());
860     patch_site.EmitPatchInfo();
861 
862     __ testq(rax, rax);
863     __ j(not_equal, &next_test);
864     __ Drop(1);  // Switch value is no longer needed.
865     __ jmp(clause->body_target());
866   }
867 
868   // Discard the test value and jump to the default if present, otherwise to
869   // the end of the statement.
870   __ bind(&next_test);
871   __ Drop(1);  // Switch value is no longer needed.
872   if (default_clause == NULL) {
873     __ jmp(nested_statement.break_label());
874   } else {
875     __ jmp(default_clause->body_target());
876   }
877 
878   // Compile all the case bodies.
879   for (int i = 0; i < clauses->length(); i++) {
880     Comment cmnt(masm_, "[ Case body");
881     CaseClause* clause = clauses->at(i);
882     __ bind(clause->body_target());
883     PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
884     VisitStatements(clause->statements());
885   }
886 
887   __ bind(nested_statement.break_label());
888   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
889 }
890 
891 
892 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
893   Comment cmnt(masm_, "[ ForInStatement");
894   SetStatementPosition(stmt);
895 
896   Label loop, exit;
897   ForIn loop_statement(this, stmt);
898   increment_loop_depth();
899 
900   // Get the object to enumerate over. Both SpiderMonkey and JSC
901   // ignore null and undefined in contrast to the specification; see
902   // ECMA-262 section 12.6.4.
903   VisitForAccumulatorValue(stmt->enumerable());
904   __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
905   __ j(equal, &exit);
906   Register null_value = rdi;
907   __ LoadRoot(null_value, Heap::kNullValueRootIndex);
908   __ cmpq(rax, null_value);
909   __ j(equal, &exit);
910 
911   PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
912 
913   // Convert the object to a JS object.
914   Label convert, done_convert;
915   __ JumpIfSmi(rax, &convert);
916   __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
917   __ j(above_equal, &done_convert);
918   __ bind(&convert);
919   __ push(rax);
920   __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
921   __ bind(&done_convert);
922   __ push(rax);
923 
924   // Check for proxies.
925   Label call_runtime;
926   STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
927   __ CmpObjectType(rax, LAST_JS_PROXY_TYPE, rcx);
928   __ j(below_equal, &call_runtime);
929 
930   // Check cache validity in generated code. This is a fast case for
931   // the JSObject::IsSimpleEnum cache validity checks. If we cannot
932   // guarantee cache validity, call the runtime system to check cache
933   // validity or get the property names in a fixed array.
934   __ CheckEnumCache(null_value, &call_runtime);
935 
936   // The enum cache is valid.  Load the map of the object being
937   // iterated over and use the cache for the iteration.
938   Label use_cache;
939   __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
940   __ jmp(&use_cache, Label::kNear);
941 
942   // Get the set of properties to enumerate.
943   __ bind(&call_runtime);
944   __ push(rax);  // Duplicate the enumerable object on the stack.
945   __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
946 
947   // If we got a map from the runtime call, we can do a fast
948   // modification check. Otherwise, we got a fixed array, and we have
949   // to do a slow check.
950   Label fixed_array;
951   __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
952                  Heap::kMetaMapRootIndex);
953   __ j(not_equal, &fixed_array, Label::kNear);
954 
955   // We got a map in register rax. Get the enumeration cache from it.
956   __ bind(&use_cache);
957   __ LoadInstanceDescriptors(rax, rcx);
958   __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset));
959   __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
960 
961   // Set up the four remaining stack slots.
962   __ push(rax);  // Map.
963   __ push(rdx);  // Enumeration cache.
964   __ movq(rax, FieldOperand(rdx, FixedArray::kLengthOffset));
965   __ push(rax);  // Enumeration cache length (as smi).
966   __ Push(Smi::FromInt(0));  // Initial index.
967   __ jmp(&loop);
968 
969   // We got a fixed array in register rax. Iterate through that.
970   Label non_proxy;
971   __ bind(&fixed_array);
972 
973   Handle<JSGlobalPropertyCell> cell =
974       isolate()->factory()->NewJSGlobalPropertyCell(
975           Handle<Object>(
976               Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
977   RecordTypeFeedbackCell(stmt->PrepareId(), cell);
978   __ LoadHeapObject(rbx, cell);
979   __ Move(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
980           Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker));
981 
982   __ Move(rbx, Smi::FromInt(1));  // Smi indicates slow check
983   __ movq(rcx, Operand(rsp, 0 * kPointerSize));  // Get enumerated object
984   STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
985   __ CmpObjectType(rcx, LAST_JS_PROXY_TYPE, rcx);
986   __ j(above, &non_proxy);
987   __ Move(rbx, Smi::FromInt(0));  // Zero indicates proxy
988   __ bind(&non_proxy);
989   __ push(rbx);  // Smi
990   __ push(rax);  // Array
991   __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset));
992   __ push(rax);  // Fixed array length (as smi).
993   __ Push(Smi::FromInt(0));  // Initial index.
994 
995   // Generate code for doing the condition check.
996   PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
997   __ bind(&loop);
998   __ movq(rax, Operand(rsp, 0 * kPointerSize));  // Get the current index.
999   __ cmpq(rax, Operand(rsp, 1 * kPointerSize));  // Compare to the array length.
1000   __ j(above_equal, loop_statement.break_label());
1001 
1002   // Get the current entry of the array into register rbx.
1003   __ movq(rbx, Operand(rsp, 2 * kPointerSize));
1004   SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
1005   __ movq(rbx, FieldOperand(rbx,
1006                             index.reg,
1007                             index.scale,
1008                             FixedArray::kHeaderSize));
1009 
1010   // Get the expected map from the stack or a smi in the
1011   // permanent slow case into register rdx.
1012   __ movq(rdx, Operand(rsp, 3 * kPointerSize));
1013 
1014   // Check if the expected map still matches that of the enumerable.
1015   // If not, we may have to filter the key.
1016   Label update_each;
1017   __ movq(rcx, Operand(rsp, 4 * kPointerSize));
1018   __ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
1019   __ j(equal, &update_each, Label::kNear);
1020 
1021   // For proxies, no filtering is done.
1022   // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1023   __ Cmp(rdx, Smi::FromInt(0));
1024   __ j(equal, &update_each, Label::kNear);
1025 
1026   // Convert the entry to a string or null if it isn't a property
1027   // anymore. If the property has been removed while iterating, we
1028   // just skip it.
1029   __ push(rcx);  // Enumerable.
1030   __ push(rbx);  // Current entry.
1031   __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1032   __ Cmp(rax, Smi::FromInt(0));
1033   __ j(equal, loop_statement.continue_label());
1034   __ movq(rbx, rax);
1035 
1036   // Update the 'each' property or variable from the possibly filtered
1037   // entry in register rbx.
1038   __ bind(&update_each);
1039   __ movq(result_register(), rbx);
1040   // Perform the assignment as if via '='.
1041   { EffectContext context(this);
1042     EmitAssignment(stmt->each());
1043   }
1044 
1045   // Generate code for the body of the loop.
1046   Visit(stmt->body());
1047 
1048   // Generate code for going to the next element by incrementing the
1049   // index (smi) stored on top of the stack.
1050   __ bind(loop_statement.continue_label());
1051   __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
1052 
1053   EmitStackCheck(stmt, &loop);
1054   __ jmp(&loop);
1055 
1056   // Remove the pointers stored on the stack.
1057   __ bind(loop_statement.break_label());
1058   __ addq(rsp, Immediate(5 * kPointerSize));
1059 
1060   // Exit and decrement the loop depth.
1061   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1062   __ bind(&exit);
1063   decrement_loop_depth();
1064 }
1065 
1066 
1067 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1068                                        bool pretenure) {
1069   // Use the fast case closure allocation code that allocates in new
1070   // space for nested functions that don't need literals cloning. If
1071   // we're running with the --always-opt or the --prepare-always-opt
1072   // flag, we need to use the runtime function so that the new function
1073   // we are creating here gets a chance to have its code optimized and
1074   // doesn't just get a copy of the existing unoptimized code.
1075   if (!FLAG_always_opt &&
1076       !FLAG_prepare_always_opt &&
1077       !pretenure &&
1078       scope()->is_function_scope() &&
1079       info->num_literals() == 0) {
1080     FastNewClosureStub stub(info->language_mode());
1081     __ Push(info);
1082     __ CallStub(&stub);
1083   } else {
1084     __ push(rsi);
1085     __ Push(info);
1086     __ Push(pretenure
1087             ? isolate()->factory()->true_value()
1088             : isolate()->factory()->false_value());
1089     __ CallRuntime(Runtime::kNewClosure, 3);
1090   }
1091   context()->Plug(rax);
1092 }
1093 
1094 
1095 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1096   Comment cmnt(masm_, "[ VariableProxy");
1097   EmitVariableLoad(expr);
1098 }
1099 
1100 
1101 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1102                                                       TypeofState typeof_state,
1103                                                       Label* slow) {
1104   Register context = rsi;
1105   Register temp = rdx;
1106 
1107   Scope* s = scope();
1108   while (s != NULL) {
1109     if (s->num_heap_slots() > 0) {
1110       if (s->calls_non_strict_eval()) {
1111         // Check that extension is NULL.
1112         __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
1113                 Immediate(0));
1114         __ j(not_equal, slow);
1115       }
1116       // Load next context in chain.
1117       __ movq(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1118       // Walk the rest of the chain without clobbering rsi.
1119       context = temp;
1120     }
1121     // If no outer scope calls eval, we do not need to check more
1122     // context extensions.  If we have reached an eval scope, we check
1123     // all extensions from this point.
1124     if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
1125     s = s->outer_scope();
1126   }
1127 
1128   if (s != NULL && s->is_eval_scope()) {
1129     // Loop up the context chain.  There is no frame effect so it is
1130     // safe to use raw labels here.
1131     Label next, fast;
1132     if (!context.is(temp)) {
1133       __ movq(temp, context);
1134     }
1135     // Load map for comparison into register, outside loop.
1136     __ LoadRoot(kScratchRegister, Heap::kGlobalContextMapRootIndex);
1137     __ bind(&next);
1138     // Terminate at global context.
1139     __ cmpq(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
1140     __ j(equal, &fast, Label::kNear);
1141     // Check that extension is NULL.
1142     __ cmpq(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1143     __ j(not_equal, slow);
1144     // Load next context in chain.
1145     __ movq(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
1146     __ jmp(&next);
1147     __ bind(&fast);
1148   }
1149 
1150   // All extension objects were empty and it is safe to use a global
1151   // load IC call.
1152   __ movq(rax, GlobalObjectOperand());
1153   __ Move(rcx, var->name());
1154   Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1155   RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1156       ? RelocInfo::CODE_TARGET
1157       : RelocInfo::CODE_TARGET_CONTEXT;
1158   __ call(ic, mode);
1159 }
1160 
1161 
1162 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1163                                                                 Label* slow) {
1164   ASSERT(var->IsContextSlot());
1165   Register context = rsi;
1166   Register temp = rbx;
1167 
1168   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1169     if (s->num_heap_slots() > 0) {
1170       if (s->calls_non_strict_eval()) {
1171         // Check that extension is NULL.
1172         __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
1173                 Immediate(0));
1174         __ j(not_equal, slow);
1175       }
1176       __ movq(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
1177       // Walk the rest of the chain without clobbering rsi.
1178       context = temp;
1179     }
1180   }
1181   // Check that last extension is NULL.
1182   __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1183   __ j(not_equal, slow);
1184 
1185   // This function is used only for loads, not stores, so it's safe to
1186   // return an rsi-based operand (the write barrier cannot be allowed to
1187   // destroy the rsi register).
1188   return ContextOperand(context, var->index());
1189 }
1190 
1191 
1192 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1193                                                   TypeofState typeof_state,
1194                                                   Label* slow,
1195                                                   Label* done) {
1196   // Generate fast-case code for variables that might be shadowed by
1197   // eval-introduced variables.  Eval is used a lot without
1198   // introducing variables.  In those cases, we do not want to
1199   // perform a runtime call for all variables in the scope
1200   // containing the eval.
1201   if (var->mode() == DYNAMIC_GLOBAL) {
1202     EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1203     __ jmp(done);
1204   } else if (var->mode() == DYNAMIC_LOCAL) {
1205     Variable* local = var->local_if_not_shadowed();
1206     __ movq(rax, ContextSlotOperandCheckExtensions(local, slow));
1207     if (local->mode() == CONST ||
1208         local->mode() == CONST_HARMONY ||
1209         local->mode() == LET) {
1210       __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1211       __ j(not_equal, done);
1212       if (local->mode() == CONST) {
1213         __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1214       } else {  // LET || CONST_HARMONY
1215         __ Push(var->name());
1216         __ CallRuntime(Runtime::kThrowReferenceError, 1);
1217       }
1218     }
1219     __ jmp(done);
1220   }
1221 }
1222 
1223 
1224 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1225   // Record position before possible IC call.
1226   SetSourcePosition(proxy->position());
1227   Variable* var = proxy->var();
1228 
1229   // Three cases: global variables, lookup variables, and all other types of
1230   // variables.
1231   switch (var->location()) {
1232     case Variable::UNALLOCATED: {
1233       Comment cmnt(masm_, "Global variable");
1234       // Use inline caching. Variable name is passed in rcx and the global
1235       // object on the stack.
1236       __ Move(rcx, var->name());
1237       __ movq(rax, GlobalObjectOperand());
1238       Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1239       __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
1240       context()->Plug(rax);
1241       break;
1242     }
1243 
1244     case Variable::PARAMETER:
1245     case Variable::LOCAL:
1246     case Variable::CONTEXT: {
1247       Comment cmnt(masm_, var->IsContextSlot() ? "Context slot" : "Stack slot");
1248       if (var->binding_needs_init()) {
1249         // var->scope() may be NULL when the proxy is located in eval code and
1250         // refers to a potential outside binding. Currently those bindings are
1251         // always looked up dynamically, i.e. in that case
1252         //     var->location() == LOOKUP.
1253         // always holds.
1254         ASSERT(var->scope() != NULL);
1255 
1256         // Check if the binding really needs an initialization check. The check
1257         // can be skipped in the following situation: we have a LET or CONST
1258         // binding in harmony mode, both the Variable and the VariableProxy have
1259         // the same declaration scope (i.e. they are both in global code, in the
1260         // same function or in the same eval code) and the VariableProxy is in
1261         // the source physically located after the initializer of the variable.
1262         //
1263         // We cannot skip any initialization checks for CONST in non-harmony
1264         // mode because const variables may be declared but never initialized:
1265         //   if (false) { const x; }; var y = x;
1266         //
1267         // The condition on the declaration scopes is a conservative check for
1268         // nested functions that access a binding and are called before the
1269         // binding is initialized:
1270         //   function() { f(); let x = 1; function f() { x = 2; } }
1271         //
1272         bool skip_init_check;
1273         if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1274           skip_init_check = false;
1275         } else {
1276           // Check that we always have valid source position.
1277           ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1278           ASSERT(proxy->position() != RelocInfo::kNoPosition);
1279           skip_init_check = var->mode() != CONST &&
1280               var->initializer_position() < proxy->position();
1281         }
1282 
1283         if (!skip_init_check) {
1284           // Let and const need a read barrier.
1285           Label done;
1286           GetVar(rax, var);
1287           __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1288           __ j(not_equal, &done, Label::kNear);
1289           if (var->mode() == LET || var->mode() == CONST_HARMONY) {
1290             // Throw a reference error when using an uninitialized let/const
1291             // binding in harmony mode.
1292             __ Push(var->name());
1293             __ CallRuntime(Runtime::kThrowReferenceError, 1);
1294           } else {
1295             // Uninitialized const bindings outside of harmony mode are unholed.
1296             ASSERT(var->mode() == CONST);
1297             __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1298           }
1299           __ bind(&done);
1300           context()->Plug(rax);
1301           break;
1302         }
1303       }
1304       context()->Plug(var);
1305       break;
1306     }
1307 
1308     case Variable::LOOKUP: {
1309       Label done, slow;
1310       // Generate code for loading from variables potentially shadowed
1311       // by eval-introduced variables.
1312       EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1313       __ bind(&slow);
1314       Comment cmnt(masm_, "Lookup slot");
1315       __ push(rsi);  // Context.
1316       __ Push(var->name());
1317       __ CallRuntime(Runtime::kLoadContextSlot, 2);
1318       __ bind(&done);
1319       context()->Plug(rax);
1320       break;
1321     }
1322   }
1323 }
1324 
1325 
1326 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1327   Comment cmnt(masm_, "[ RegExpLiteral");
1328   Label materialized;
1329   // Registers will be used as follows:
1330   // rdi = JS function.
1331   // rcx = literals array.
1332   // rbx = regexp literal.
1333   // rax = regexp literal clone.
1334   __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1335   __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1336   int literal_offset =
1337       FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1338   __ movq(rbx, FieldOperand(rcx, literal_offset));
1339   __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
1340   __ j(not_equal, &materialized, Label::kNear);
1341 
1342   // Create regexp literal using runtime function
1343   // Result will be in rax.
1344   __ push(rcx);
1345   __ Push(Smi::FromInt(expr->literal_index()));
1346   __ Push(expr->pattern());
1347   __ Push(expr->flags());
1348   __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1349   __ movq(rbx, rax);
1350 
1351   __ bind(&materialized);
1352   int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1353   Label allocated, runtime_allocate;
1354   __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
1355   __ jmp(&allocated);
1356 
1357   __ bind(&runtime_allocate);
1358   __ push(rbx);
1359   __ Push(Smi::FromInt(size));
1360   __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1361   __ pop(rbx);
1362 
1363   __ bind(&allocated);
1364   // Copy the content into the newly allocated memory.
1365   // (Unroll copy loop once for better throughput).
1366   for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
1367     __ movq(rdx, FieldOperand(rbx, i));
1368     __ movq(rcx, FieldOperand(rbx, i + kPointerSize));
1369     __ movq(FieldOperand(rax, i), rdx);
1370     __ movq(FieldOperand(rax, i + kPointerSize), rcx);
1371   }
1372   if ((size % (2 * kPointerSize)) != 0) {
1373     __ movq(rdx, FieldOperand(rbx, size - kPointerSize));
1374     __ movq(FieldOperand(rax, size - kPointerSize), rdx);
1375   }
1376   context()->Plug(rax);
1377 }
1378 
1379 
1380 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1381   if (expression == NULL) {
1382     __ PushRoot(Heap::kNullValueRootIndex);
1383   } else {
1384     VisitForStackValue(expression);
1385   }
1386 }
1387 
1388 
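// An object literal, e.g.
//   var o = { x: 1, get y() { return 2; }, set y(v) {} };
// is compiled by cloning a boilerplate object (via a stub or the runtime,
// depending on depth and property count) and then emitting stores for the
// non-compile-time-constant properties. Getter/setter pairs are collected in
// an accessor table and defined with one runtime call per key.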
1389 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1390   Comment cmnt(masm_, "[ ObjectLiteral");
1391   Handle<FixedArray> constant_properties = expr->constant_properties();
1392   __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1393   __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
1394   __ Push(Smi::FromInt(expr->literal_index()));
1395   __ Push(constant_properties);
1396   int flags = expr->fast_elements()
1397       ? ObjectLiteral::kFastElements
1398       : ObjectLiteral::kNoFlags;
1399   flags |= expr->has_function()
1400       ? ObjectLiteral::kHasFunction
1401       : ObjectLiteral::kNoFlags;
1402   __ Push(Smi::FromInt(flags));
1403   int properties_count = constant_properties->length() / 2;
1404   if (expr->depth() > 1) {
1405     __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1406   } else if (flags != ObjectLiteral::kFastElements ||
1407       properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1408     __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
1409   } else {
1410     FastCloneShallowObjectStub stub(properties_count);
1411     __ CallStub(&stub);
1412   }
1413 
1414   // If result_saved is true the result is on top of the stack.  If
1415   // result_saved is false the result is in rax.
1416   bool result_saved = false;
1417 
1418   // Mark all computed expressions that are bound to a key that
1419   // is shadowed by a later occurrence of the same key. For the
1420   // marked expressions, no store code is emitted.
1421   expr->CalculateEmitStore();
1422 
1423   AccessorTable accessor_table(isolate()->zone());
1424   for (int i = 0; i < expr->properties()->length(); i++) {
1425     ObjectLiteral::Property* property = expr->properties()->at(i);
1426     if (property->IsCompileTimeValue()) continue;
1427 
1428     Literal* key = property->key();
1429     Expression* value = property->value();
1430     if (!result_saved) {
1431       __ push(rax);  // Save result on the stack
1432       result_saved = true;
1433     }
1434     switch (property->kind()) {
1435       case ObjectLiteral::Property::CONSTANT:
1436         UNREACHABLE();
1437       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1438         ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
1439         // Fall through.
1440       case ObjectLiteral::Property::COMPUTED:
1441         if (key->handle()->IsSymbol()) {
1442           if (property->emit_store()) {
1443             VisitForAccumulatorValue(value);
1444             __ Move(rcx, key->handle());
1445             __ movq(rdx, Operand(rsp, 0));
1446             Handle<Code> ic = is_classic_mode()
1447                 ? isolate()->builtins()->StoreIC_Initialize()
1448                 : isolate()->builtins()->StoreIC_Initialize_Strict();
1449             __ call(ic, RelocInfo::CODE_TARGET, key->id());
1450             PrepareForBailoutForId(key->id(), NO_REGISTERS);
1451           } else {
1452             VisitForEffect(value);
1453           }
1454           break;
1455         }
1456         // Fall through.
1457       case ObjectLiteral::Property::PROTOTYPE:
1458         __ push(Operand(rsp, 0));  // Duplicate receiver.
1459         VisitForStackValue(key);
1460         VisitForStackValue(value);
1461         if (property->emit_store()) {
1462           __ Push(Smi::FromInt(NONE));    // PropertyAttributes
1463           __ CallRuntime(Runtime::kSetProperty, 4);
1464         } else {
1465           __ Drop(3);
1466         }
1467         break;
1468       case ObjectLiteral::Property::GETTER:
1469         accessor_table.lookup(key)->second->getter = value;
1470         break;
1471       case ObjectLiteral::Property::SETTER:
1472         accessor_table.lookup(key)->second->setter = value;
1473         break;
1474     }
1475   }
1476 
1477   // Emit code to define accessors, using only a single call to the runtime for
1478   // each pair of corresponding getters and setters.
1479   for (AccessorTable::Iterator it = accessor_table.begin();
1480        it != accessor_table.end();
1481        ++it) {
1482     __ push(Operand(rsp, 0));  // Duplicate receiver.
1483     VisitForStackValue(it->first);
1484     EmitAccessor(it->second->getter);
1485     EmitAccessor(it->second->setter);
1486     __ Push(Smi::FromInt(NONE));
1487     __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1488   }
1489 
1490   if (expr->has_function()) {
1491     ASSERT(result_saved);
1492     __ push(Operand(rsp, 0));
1493     __ CallRuntime(Runtime::kToFastProperties, 1);
1494   }
1495 
1496   if (result_saved) {
1497     context()->PlugTOS();
1498   } else {
1499     context()->Plug(rax);
1500   }
1501 }
1502 
1503 
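// An array literal, e.g.
//   var a = [1, x, 3];
// is compiled by cloning the constant elements boilerplate (copy-on-write,
// shallow, or via the runtime) and then storing each non-constant
// subexpression, such as 'x' above, into the cloned elements array.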
1504 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1505   Comment cmnt(masm_, "[ ArrayLiteral");
1506 
1507   ZoneList<Expression*>* subexprs = expr->values();
1508   int length = subexprs->length();
1509   Handle<FixedArray> constant_elements = expr->constant_elements();
1510   ASSERT_EQ(2, constant_elements->length());
1511   ElementsKind constant_elements_kind =
1512       static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1513   bool has_constant_fast_elements = constant_elements_kind == FAST_ELEMENTS;
1514   Handle<FixedArrayBase> constant_elements_values(
1515       FixedArrayBase::cast(constant_elements->get(1)));
1516 
1517   __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1518   __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
1519   __ Push(Smi::FromInt(expr->literal_index()));
1520   __ Push(constant_elements);
1521   Heap* heap = isolate()->heap();
1522   if (has_constant_fast_elements &&
1523       constant_elements_values->map() == heap->fixed_cow_array_map()) {
1524     // If the elements are already FAST_ELEMENTS, the boilerplate cannot
1525     // change, so it's possible to specialize the stub in advance.
1526     __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
1527     FastCloneShallowArrayStub stub(
1528         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
1529         length);
1530     __ CallStub(&stub);
1531   } else if (expr->depth() > 1) {
1532     __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1533   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1534     __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1535   } else {
1536     ASSERT(constant_elements_kind == FAST_ELEMENTS ||
1537            constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
1538            FLAG_smi_only_arrays);
1539     // If the elements are already FAST_ELEMENTS, the boilerplate cannot
1540     // change, so it's possible to specialize the stub in advance.
1541     FastCloneShallowArrayStub::Mode mode = has_constant_fast_elements
1542         ? FastCloneShallowArrayStub::CLONE_ELEMENTS
1543         : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
1544     FastCloneShallowArrayStub stub(mode, length);
1545     __ CallStub(&stub);
1546   }
1547 
1548   bool result_saved = false;  // Is the result saved to the stack?
1549 
1550   // Emit code to evaluate all the non-constant subexpressions and to store
1551   // them into the newly cloned array.
1552   for (int i = 0; i < length; i++) {
1553     Expression* subexpr = subexprs->at(i);
1554     // If the subexpression is a literal or a simple materialized literal it
1555     // is already set in the cloned array.
1556     if (subexpr->AsLiteral() != NULL ||
1557         CompileTimeValue::IsCompileTimeValue(subexpr)) {
1558       continue;
1559     }
1560 
1561     if (!result_saved) {
1562       __ push(rax);
1563       result_saved = true;
1564     }
1565     VisitForAccumulatorValue(subexpr);
1566 
1567     if (constant_elements_kind == FAST_ELEMENTS) {
1568       // Fast-case array literals with ElementsKind of FAST_ELEMENTS cannot
1569       // transition, so we don't need to call the runtime stub.
1570       int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1571       __ movq(rbx, Operand(rsp, 0));  // Copy of array literal.
1572       __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
1573       // Store the subexpression value in the array's elements.
1574       __ movq(FieldOperand(rbx, offset), result_register());
1575       // Update the write barrier for the array store.
1576       __ RecordWriteField(rbx, offset, result_register(), rcx,
1577                           kDontSaveFPRegs,
1578                           EMIT_REMEMBERED_SET,
1579                           INLINE_SMI_CHECK);
1580     } else {
1581       // Store the subexpression value in the array's elements.
1582       __ movq(rbx, Operand(rsp, 0));  // Copy of array literal.
1583       __ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset));
1584       __ Move(rcx, Smi::FromInt(i));
1585       __ Move(rdx, Smi::FromInt(expr->literal_index()));
1586       StoreArrayLiteralElementStub stub;
1587       __ CallStub(&stub);
1588     }
1589 
1590     PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1591   }
1592 
1593   if (result_saved) {
1594     context()->PlugTOS();
1595   } else {
1596     context()->Plug(rax);
1597   }
1598 }
1599 
1600 
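// Handles plain and compound assignments, e.g.
//   x = v;        // VARIABLE
//   o.name = v;   // NAMED_PROPERTY
//   o[key] = v;   // KEYED_PROPERTY
//   x += v;       // compound: load, apply the binary op, then store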
1601 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1602   Comment cmnt(masm_, "[ Assignment");
1603   // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1604   // on the left-hand side.
1605   if (!expr->target()->IsValidLeftHandSide()) {
1606     VisitForEffect(expr->target());
1607     return;
1608   }
1609 
1610   // Left-hand side can only be a property, a global or a (parameter or local)
1611   // slot.
1612   enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1613   LhsKind assign_type = VARIABLE;
1614   Property* property = expr->target()->AsProperty();
1615   if (property != NULL) {
1616     assign_type = (property->key()->IsPropertyName())
1617         ? NAMED_PROPERTY
1618         : KEYED_PROPERTY;
1619   }
1620 
1621   // Evaluate LHS expression.
1622   switch (assign_type) {
1623     case VARIABLE:
1624       // Nothing to do here.
1625       break;
1626     case NAMED_PROPERTY:
1627       if (expr->is_compound()) {
1628         // We need the receiver both on the stack and in the accumulator.
1629         VisitForAccumulatorValue(property->obj());
1630         __ push(result_register());
1631       } else {
1632         VisitForStackValue(property->obj());
1633       }
1634       break;
1635     case KEYED_PROPERTY: {
1636       if (expr->is_compound()) {
1637         VisitForStackValue(property->obj());
1638         VisitForAccumulatorValue(property->key());
1639         __ movq(rdx, Operand(rsp, 0));
1640         __ push(rax);
1641       } else {
1642         VisitForStackValue(property->obj());
1643         VisitForStackValue(property->key());
1644       }
1645       break;
1646     }
1647   }
1648 
1649   // For compound assignments we need another deoptimization point after the
1650   // variable/property load.
1651   if (expr->is_compound()) {
1652     { AccumulatorValueContext context(this);
1653       switch (assign_type) {
1654         case VARIABLE:
1655           EmitVariableLoad(expr->target()->AsVariableProxy());
1656           PrepareForBailout(expr->target(), TOS_REG);
1657           break;
1658         case NAMED_PROPERTY:
1659           EmitNamedPropertyLoad(property);
1660           PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1661           break;
1662         case KEYED_PROPERTY:
1663           EmitKeyedPropertyLoad(property);
1664           PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1665           break;
1666       }
1667     }
1668 
1669     Token::Value op = expr->binary_op();
1670     __ push(rax);  // Left operand goes on the stack.
1671     VisitForAccumulatorValue(expr->value());
1672 
1673     OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1674         ? OVERWRITE_RIGHT
1675         : NO_OVERWRITE;
1676     SetSourcePosition(expr->position() + 1);
1677     AccumulatorValueContext context(this);
1678     if (ShouldInlineSmiCase(op)) {
1679       EmitInlineSmiBinaryOp(expr->binary_operation(),
1680                             op,
1681                             mode,
1682                             expr->target(),
1683                             expr->value());
1684     } else {
1685       EmitBinaryOp(expr->binary_operation(), op, mode);
1686     }
1687     // Deoptimization point in case the binary operation may have side effects.
1688     PrepareForBailout(expr->binary_operation(), TOS_REG);
1689   } else {
1690     VisitForAccumulatorValue(expr->value());
1691   }
1692 
1693   // Record source position before possible IC call.
1694   SetSourcePosition(expr->position());
1695 
1696   // Store the value.
1697   switch (assign_type) {
1698     case VARIABLE:
1699       EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1700                              expr->op());
1701       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1702       context()->Plug(rax);
1703       break;
1704     case NAMED_PROPERTY:
1705       EmitNamedPropertyAssignment(expr);
1706       break;
1707     case KEYED_PROPERTY:
1708       EmitKeyedPropertyAssignment(expr);
1709       break;
1710   }
1711 }
1712 
1713 
1714 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1715   SetSourcePosition(prop->position());
1716   Literal* key = prop->key()->AsLiteral();
1717   __ Move(rcx, key->handle());
1718   Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1719   __ call(ic, RelocInfo::CODE_TARGET, prop->id());
1720 }
1721 
1722 
1723 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1724   SetSourcePosition(prop->position());
1725   Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1726   __ call(ic, RelocInfo::CODE_TARGET, prop->id());
1727 }
1728 
1729 
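// Inline smi fast path for a binary operation, e.g. 'a + b' where both
// operands are smis. The combined smi check is emitted through a
// JumpPatchSite so the binary-op IC can later patch it based on collected
// type feedback; operands that are not both smis take the BinaryOpStub call
// instead.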
1730 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1731                                               Token::Value op,
1732                                               OverwriteMode mode,
1733                                               Expression* left,
1734                                               Expression* right) {
1735   // Do combined smi check of the operands. Left operand is on the
1736   // stack (popped into rdx). Right operand is in rax but moved into
1737   // rcx to make the shifts easier.
1738   Label done, stub_call, smi_case;
1739   __ pop(rdx);
1740   __ movq(rcx, rax);
1741   __ or_(rax, rdx);
1742   JumpPatchSite patch_site(masm_);
1743   patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear);
1744 
1745   __ bind(&stub_call);
1746   __ movq(rax, rcx);
1747   BinaryOpStub stub(op, mode);
1748   __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1749   patch_site.EmitPatchInfo();
1750   __ jmp(&done, Label::kNear);
1751 
1752   __ bind(&smi_case);
1753   switch (op) {
1754     case Token::SAR:
1755       __ SmiShiftArithmeticRight(rax, rdx, rcx);
1756       break;
1757     case Token::SHL:
1758       __ SmiShiftLeft(rax, rdx, rcx);
1759       break;
1760     case Token::SHR:
1761       __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
1762       break;
1763     case Token::ADD:
1764       __ SmiAdd(rax, rdx, rcx, &stub_call);
1765       break;
1766     case Token::SUB:
1767       __ SmiSub(rax, rdx, rcx, &stub_call);
1768       break;
1769     case Token::MUL:
1770       __ SmiMul(rax, rdx, rcx, &stub_call);
1771       break;
1772     case Token::BIT_OR:
1773       __ SmiOr(rax, rdx, rcx);
1774       break;
1775     case Token::BIT_AND:
1776       __ SmiAnd(rax, rdx, rcx);
1777       break;
1778     case Token::BIT_XOR:
1779       __ SmiXor(rax, rdx, rcx);
1780       break;
1781     default:
1782       UNREACHABLE();
1783       break;
1784   }
1785 
1786   __ bind(&done);
1787   context()->Plug(rax);
1788 }
1789 
1790 
1791 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
1792                                      Token::Value op,
1793                                      OverwriteMode mode) {
1794   __ pop(rdx);
1795   BinaryOpStub stub(op, mode);
1796   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
1797   __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1798   patch_site.EmitPatchInfo();
1799   context()->Plug(rax);
1800 }
1801 
1802 
1803 void FullCodeGenerator::EmitAssignment(Expression* expr) {
1804   // Invalid left-hand sides are rewritten to have a 'throw
1805   // ReferenceError' on the left-hand side.
1806   if (!expr->IsValidLeftHandSide()) {
1807     VisitForEffect(expr);
1808     return;
1809   }
1810 
1811   // Left-hand side can only be a property, a global or a (parameter or local)
1812   // slot.
1813   enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1814   LhsKind assign_type = VARIABLE;
1815   Property* prop = expr->AsProperty();
1816   if (prop != NULL) {
1817     assign_type = (prop->key()->IsPropertyName())
1818         ? NAMED_PROPERTY
1819         : KEYED_PROPERTY;
1820   }
1821 
1822   switch (assign_type) {
1823     case VARIABLE: {
1824       Variable* var = expr->AsVariableProxy()->var();
1825       EffectContext context(this);
1826       EmitVariableAssignment(var, Token::ASSIGN);
1827       break;
1828     }
1829     case NAMED_PROPERTY: {
1830       __ push(rax);  // Preserve value.
1831       VisitForAccumulatorValue(prop->obj());
1832       __ movq(rdx, rax);
1833       __ pop(rax);  // Restore value.
1834       __ Move(rcx, prop->key()->AsLiteral()->handle());
1835       Handle<Code> ic = is_classic_mode()
1836           ? isolate()->builtins()->StoreIC_Initialize()
1837           : isolate()->builtins()->StoreIC_Initialize_Strict();
1838       __ call(ic);
1839       break;
1840     }
1841     case KEYED_PROPERTY: {
1842       __ push(rax);  // Preserve value.
1843       VisitForStackValue(prop->obj());
1844       VisitForAccumulatorValue(prop->key());
1845       __ movq(rcx, rax);
1846       __ pop(rdx);
1847       __ pop(rax);  // Restore value.
1848       Handle<Code> ic = is_classic_mode()
1849           ? isolate()->builtins()->KeyedStoreIC_Initialize()
1850           : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
1851       __ call(ic);
1852       break;
1853     }
1854   }
1855   context()->Plug(rax);
1856 }
1857 
1858 
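// Stores the value in rax into 'var'. Roughly, the cases below cover:
//   x = v;        // unallocated (global): StoreIC with the global as receiver
//   const c = v;  // Token::INIT_CONST: write only if the slot still holds the hole
//   l = v;        // LET, non-initializing: throw ReferenceError if uninitialized
//   y = v;        // stack/context slot: plain store, plus write barrier for contexts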
1859 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1860                                                Token::Value op) {
1861   if (var->IsUnallocated()) {
1862     // Global var, const, or let.
1863     __ Move(rcx, var->name());
1864     __ movq(rdx, GlobalObjectOperand());
1865     Handle<Code> ic = is_classic_mode()
1866         ? isolate()->builtins()->StoreIC_Initialize()
1867         : isolate()->builtins()->StoreIC_Initialize_Strict();
1868     __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
1869   } else if (op == Token::INIT_CONST) {
1870     // Const initializers need a write barrier.
1871     ASSERT(!var->IsParameter());  // No const parameters.
1872     if (var->IsStackLocal()) {
1873       Label skip;
1874       __ movq(rdx, StackOperand(var));
1875       __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1876       __ j(not_equal, &skip);
1877       __ movq(StackOperand(var), rax);
1878       __ bind(&skip);
1879     } else {
1880       ASSERT(var->IsContextSlot() || var->IsLookupSlot());
1881       // Like var declarations, const declarations are hoisted to function
1882       // scope.  However, unlike var initializers, const initializers are
1883       // able to drill a hole to that function context, even from inside a
1884       // 'with' context.  We thus bypass the normal static scope lookup for
1885       // var->IsContextSlot().
1886       __ push(rax);
1887       __ push(rsi);
1888       __ Push(var->name());
1889       __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1890     }
1891 
1892   } else if (var->mode() == LET && op != Token::INIT_LET) {
1893     // Non-initializing assignment to let variable needs a write barrier.
1894     if (var->IsLookupSlot()) {
1895       __ push(rax);  // Value.
1896       __ push(rsi);  // Context.
1897       __ Push(var->name());
1898       __ Push(Smi::FromInt(language_mode()));
1899       __ CallRuntime(Runtime::kStoreContextSlot, 4);
1900     } else {
1901       ASSERT(var->IsStackAllocated() || var->IsContextSlot());
1902       Label assign;
1903       MemOperand location = VarOperand(var, rcx);
1904       __ movq(rdx, location);
1905       __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1906       __ j(not_equal, &assign, Label::kNear);
1907       __ Push(var->name());
1908       __ CallRuntime(Runtime::kThrowReferenceError, 1);
1909       __ bind(&assign);
1910       __ movq(location, rax);
1911       if (var->IsContextSlot()) {
1912         __ movq(rdx, rax);
1913         __ RecordWriteContextSlot(
1914             rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
1915       }
1916     }
1917 
1918   } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
1919     // Assignment to var or initializing assignment to let/const
1920     // in harmony mode.
1921     if (var->IsStackAllocated() || var->IsContextSlot()) {
1922       MemOperand location = VarOperand(var, rcx);
1923       if (FLAG_debug_code && op == Token::INIT_LET) {
1924         // Check for an uninitialized let binding.
1925         __ movq(rdx, location);
1926         __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1927         __ Check(equal, "Let binding re-initialization.");
1928       }
1929       // Perform the assignment.
1930       __ movq(location, rax);
1931       if (var->IsContextSlot()) {
1932         __ movq(rdx, rax);
1933         __ RecordWriteContextSlot(
1934             rcx, Context::SlotOffset(var->index()), rdx, rbx, kDontSaveFPRegs);
1935       }
1936     } else {
1937       ASSERT(var->IsLookupSlot());
1938       __ push(rax);  // Value.
1939       __ push(rsi);  // Context.
1940       __ Push(var->name());
1941       __ Push(Smi::FromInt(language_mode()));
1942       __ CallRuntime(Runtime::kStoreContextSlot, 4);
1943     }
1944   }
1945   // Non-initializing assignments to consts are ignored.
1946 }
1947 
1948 
1949 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
1950   // Assignment to a property, using a named store IC.
1951   Property* prop = expr->target()->AsProperty();
1952   ASSERT(prop != NULL);
1953   ASSERT(prop->key()->AsLiteral() != NULL);
1954 
1955   // If the assignment starts a block of assignments to the same object,
1956   // change to slow case to avoid the quadratic behavior of repeatedly
1957   // adding fast properties.
1958   if (expr->starts_initialization_block()) {
1959     __ push(result_register());
1960     __ push(Operand(rsp, kPointerSize));  // Receiver is now under value.
1961     __ CallRuntime(Runtime::kToSlowProperties, 1);
1962     __ pop(result_register());
1963   }
1964 
1965   // Record source code position before IC call.
1966   SetSourcePosition(expr->position());
1967   __ Move(rcx, prop->key()->AsLiteral()->handle());
1968   if (expr->ends_initialization_block()) {
1969     __ movq(rdx, Operand(rsp, 0));
1970   } else {
1971     __ pop(rdx);
1972   }
1973   Handle<Code> ic = is_classic_mode()
1974       ? isolate()->builtins()->StoreIC_Initialize()
1975       : isolate()->builtins()->StoreIC_Initialize_Strict();
1976   __ call(ic, RelocInfo::CODE_TARGET, expr->id());
1977 
1978   // If the assignment ends an initialization block, revert to fast case.
1979   if (expr->ends_initialization_block()) {
1980     __ push(rax);  // Result of assignment, saved even if not needed.
1981     __ push(Operand(rsp, kPointerSize));  // Receiver is under value.
1982     __ CallRuntime(Runtime::kToFastProperties, 1);
1983     __ pop(rax);
1984     __ Drop(1);
1985   }
1986   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1987   context()->Plug(rax);
1988 }
1989 
1990 
1991 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
1992   // Assignment to a property, using a keyed store IC.
1993 
1994   // If the assignment starts a block of assignments to the same object,
1995   // change to slow case to avoid the quadratic behavior of repeatedly
1996   // adding fast properties.
1997   if (expr->starts_initialization_block()) {
1998     __ push(result_register());
1999     // Receiver is now under the key and value.
2000     __ push(Operand(rsp, 2 * kPointerSize));
2001     __ CallRuntime(Runtime::kToSlowProperties, 1);
2002     __ pop(result_register());
2003   }
2004 
2005   __ pop(rcx);
2006   if (expr->ends_initialization_block()) {
2007     __ movq(rdx, Operand(rsp, 0));  // Leave receiver on the stack for later.
2008   } else {
2009     __ pop(rdx);
2010   }
2011   // Record source code position before IC call.
2012   SetSourcePosition(expr->position());
2013   Handle<Code> ic = is_classic_mode()
2014       ? isolate()->builtins()->KeyedStoreIC_Initialize()
2015       : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2016   __ call(ic, RelocInfo::CODE_TARGET, expr->id());
2017 
2018   // If the assignment ends an initialization block, revert to fast case.
2019   if (expr->ends_initialization_block()) {
2020     __ pop(rdx);
2021     __ push(rax);  // Result of assignment, saved even if not needed.
2022     __ push(rdx);
2023     __ CallRuntime(Runtime::kToFastProperties, 1);
2024     __ pop(rax);
2025   }
2026 
2027   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2028   context()->Plug(rax);
2029 }
2030 
2031 
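// Property loads, e.g.
//   o.name    // named load through the load IC (receiver in rax)
//   o[key]    // keyed load through the keyed load IC (receiver in rdx, key in rax)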
2032 void FullCodeGenerator::VisitProperty(Property* expr) {
2033   Comment cmnt(masm_, "[ Property");
2034   Expression* key = expr->key();
2035 
2036   if (key->IsPropertyName()) {
2037     VisitForAccumulatorValue(expr->obj());
2038     EmitNamedPropertyLoad(expr);
2039     context()->Plug(rax);
2040   } else {
2041     VisitForStackValue(expr->obj());
2042     VisitForAccumulatorValue(expr->key());
2043     __ pop(rdx);
2044     EmitKeyedPropertyLoad(expr);
2045     context()->Plug(rax);
2046   }
2047 }
2048 
2049 
2050 void FullCodeGenerator::EmitCallWithIC(Call* expr,
2051                                        Handle<Object> name,
2052                                        RelocInfo::Mode mode) {
2053   // Code common for calls using the IC.
2054   ZoneList<Expression*>* args = expr->arguments();
2055   int arg_count = args->length();
2056   { PreservePositionScope scope(masm()->positions_recorder());
2057     for (int i = 0; i < arg_count; i++) {
2058       VisitForStackValue(args->at(i));
2059     }
2060     __ Move(rcx, name);
2061   }
2062   // Record source position for debugger.
2063   SetSourcePosition(expr->position());
2064   // Call the IC initialization code.
2065   Handle<Code> ic =
2066       isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
2067   __ call(ic, mode, expr->id());
2068   RecordJSReturnSite(expr);
2069   // Restore context register.
2070   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2071   context()->Plug(rax);
2072 }
2073 
2074 
2075 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2076                                             Expression* key) {
2077   // Load the key.
2078   VisitForAccumulatorValue(key);
2079 
2080   // Swap the name of the function and the receiver on the stack to follow
2081   // the calling convention for call ICs.
2082   __ pop(rcx);
2083   __ push(rax);
2084   __ push(rcx);
2085 
2086   // Load the arguments.
2087   ZoneList<Expression*>* args = expr->arguments();
2088   int arg_count = args->length();
2089   { PreservePositionScope scope(masm()->positions_recorder());
2090     for (int i = 0; i < arg_count; i++) {
2091       VisitForStackValue(args->at(i));
2092     }
2093   }
2094   // Record source position for debugger.
2095   SetSourcePosition(expr->position());
2096   // Call the IC initialization code.
2097   Handle<Code> ic =
2098       isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
2099   __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize));  // Key.
2100   __ call(ic, RelocInfo::CODE_TARGET, expr->id());
2101   RecordJSReturnSite(expr);
2102   // Restore context register.
2103   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2104   context()->DropAndPlug(1, rax);  // Drop the key still on the stack.
2105 }
2106 
2107 
2108 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
2109   // Code common for calls using the call stub.
2110   ZoneList<Expression*>* args = expr->arguments();
2111   int arg_count = args->length();
2112   { PreservePositionScope scope(masm()->positions_recorder());
2113     for (int i = 0; i < arg_count; i++) {
2114       VisitForStackValue(args->at(i));
2115     }
2116   }
2117   // Record source position for debugger.
2118   SetSourcePosition(expr->position());
2119   CallFunctionStub stub(arg_count, flags);
2120   __ movq(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2121   __ CallStub(&stub);
2122   RecordJSReturnSite(expr);
2123   // Restore context register.
2124   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2125   // Discard the function left on TOS.
2126   context()->DropAndPlug(1, rax);
2127 }
2128 
2129 
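// Prepares the remaining arguments for %ResolvePossiblyDirectEval: a copy of
// the first argument (or undefined), the receiver of the enclosing function,
// the current language mode, and the start position of the calling scope.
// The copy of the function itself has already been pushed by the caller (see
// VisitCall), for a total of five runtime-call arguments.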
2130 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2131   // Push copy of the first argument or undefined if it doesn't exist.
2132   if (arg_count > 0) {
2133     __ push(Operand(rsp, arg_count * kPointerSize));
2134   } else {
2135     __ PushRoot(Heap::kUndefinedValueRootIndex);
2136   }
2137 
2138   // Push the receiver of the enclosing function and do runtime call.
2139   __ push(Operand(rbp, (2 + info_->scope()->num_parameters()) * kPointerSize));
2140 
2141   // Push the language mode.
2142   __ Push(Smi::FromInt(language_mode()));
2143 
2144   // Push the start position of the scope the call resides in.
2145   __ Push(Smi::FromInt(scope()->start_position()));
2146 
2147   // Do the runtime call.
2148   __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2149 }
2150 
2151 
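// Call expressions are dispatched on the shape of the callee, e.g.
//   eval(s)      // possibly-direct eval: resolve through the runtime first
//   foo(1, 2)    // unallocated global: call IC with the global as receiver
//   f()          // lookup slot (eval/'with' scope): dynamic load, then stub call
//   o.m(x)       // named property: call IC
//   o[k](x)      // keyed property: keyed call IC
//   (expr)(x)    // anything else: CallFunctionStub with the global receiver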
2152 void FullCodeGenerator::VisitCall(Call* expr) {
2153 #ifdef DEBUG
2154   // We want to verify that RecordJSReturnSite gets called on all paths
2155   // through this function.  Avoid early returns.
2156   expr->return_is_recorded_ = false;
2157 #endif
2158 
2159   Comment cmnt(masm_, "[ Call");
2160   Expression* callee = expr->expression();
2161   VariableProxy* proxy = callee->AsVariableProxy();
2162   Property* property = callee->AsProperty();
2163 
2164   if (proxy != NULL && proxy->var()->is_possibly_eval()) {
2165     // In a call to eval, we first call %ResolvePossiblyDirectEval to
2166     // resolve the function we need to call and the receiver of the call.
2167     // Then we call the resolved function using the given arguments.
2168     ZoneList<Expression*>* args = expr->arguments();
2169     int arg_count = args->length();
2170     { PreservePositionScope pos_scope(masm()->positions_recorder());
2171       VisitForStackValue(callee);
2172       __ PushRoot(Heap::kUndefinedValueRootIndex);  // Reserved receiver slot.
2173 
2174       // Push the arguments.
2175       for (int i = 0; i < arg_count; i++) {
2176         VisitForStackValue(args->at(i));
2177       }
2178 
2179       // Push a copy of the function (found below the arguments) and resolve
2180       // eval.
2181       __ push(Operand(rsp, (arg_count + 1) * kPointerSize));
2182       EmitResolvePossiblyDirectEval(arg_count);
2183 
2184       // The runtime call returns a pair of values in rax (function) and
2185       // rdx (receiver). Touch up the stack with the right values.
2186       __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
2187       __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
2188     }
2189     // Record source position for debugger.
2190     SetSourcePosition(expr->position());
2191     CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
2192     __ movq(rdi, Operand(rsp, (arg_count + 1) * kPointerSize));
2193     __ CallStub(&stub);
2194     RecordJSReturnSite(expr);
2195     // Restore context register.
2196     __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2197     context()->DropAndPlug(1, rax);
2198   } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
2199     // Call to a global variable.  Push global object as receiver for the
2200     // call IC lookup.
2201     __ push(GlobalObjectOperand());
2202     EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
2203   } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
2204     // Call to a lookup slot (dynamically introduced variable).
2205     Label slow, done;
2206 
2207     { PreservePositionScope scope(masm()->positions_recorder());
2208       // Generate code for loading from variables potentially shadowed by
2209       // eval-introduced variables.
2210       EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2211     }
2212     __ bind(&slow);
2213     // Call the runtime to find the function to call (returned in rax) and
2214     // the object holding it (returned in rdx).
2215     __ push(context_register());
2216     __ Push(proxy->name());
2217     __ CallRuntime(Runtime::kLoadContextSlot, 2);
2218     __ push(rax);  // Function.
2219     __ push(rdx);  // Receiver.
2220 
2221     // If fast case code has been generated, emit code to push the function
2222     // and receiver and have the slow path jump around this code.
2223     if (done.is_linked()) {
2224       Label call;
2225       __ jmp(&call, Label::kNear);
2226       __ bind(&done);
2227       // Push function.
2228       __ push(rax);
2229       // The receiver is implicitly the global receiver. Indicate this by
2230       // passing the hole to the call function stub.
2231       __ PushRoot(Heap::kTheHoleValueRootIndex);
2232       __ bind(&call);
2233     }
2234 
2235     // The receiver is either the global receiver or an object found by
2236     // LoadContextSlot. That object could be the hole if the receiver is
2237     // implicitly the global object.
2238     EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
2239   } else if (property != NULL) {
2240     { PreservePositionScope scope(masm()->positions_recorder());
2241       VisitForStackValue(property->obj());
2242     }
2243     if (property->key()->IsPropertyName()) {
2244       EmitCallWithIC(expr,
2245                      property->key()->AsLiteral()->handle(),
2246                      RelocInfo::CODE_TARGET);
2247     } else {
2248       EmitKeyedCallWithIC(expr, property->key());
2249     }
2250   } else {
2251     // Call to an arbitrary expression not handled specially above.
2252     { PreservePositionScope scope(masm()->positions_recorder());
2253       VisitForStackValue(callee);
2254     }
2255     // Load global receiver object.
2256     __ movq(rbx, GlobalObjectOperand());
2257     __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
2258     // Emit function call.
2259     EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2260   }
2261 
2262 #ifdef DEBUG
2263   // RecordJSReturnSite should have been called.
2264   ASSERT(expr->return_is_recorded_);
2265 #endif
2266 }
2267 
2268 
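// A 'new' expression, e.g.
//   var p = new Point(1, 2);
// evaluates the constructor and the arguments left-to-right, then invokes the
// CallConstructStub, recording a type feedback cell for the call target when
// not generating the snapshot.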
2269 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2270   Comment cmnt(masm_, "[ CallNew");
2271   // According to ECMA-262, section 11.2.2, page 44, the function
2272   // expression in new calls must be evaluated before the
2273   // arguments.
2274 
2275   // Push constructor on the stack.  If it's not a function it's used as
2276   // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2277   // ignored.
2278   VisitForStackValue(expr->expression());
2279 
2280   // Push the arguments ("left-to-right") on the stack.
2281   ZoneList<Expression*>* args = expr->arguments();
2282   int arg_count = args->length();
2283   for (int i = 0; i < arg_count; i++) {
2284     VisitForStackValue(args->at(i));
2285   }
2286 
2287   // Call the construct call builtin that handles allocation and
2288   // constructor invocation.
2289   SetSourcePosition(expr->position());
2290 
2291   // Load function and argument count into rdi and rax.
2292   __ Set(rax, arg_count);
2293   __ movq(rdi, Operand(rsp, arg_count * kPointerSize));
2294 
2295   // Record call targets in unoptimized code, but not in the snapshot.
2296   CallFunctionFlags flags;
2297   if (!Serializer::enabled()) {
2298     flags = RECORD_CALL_TARGET;
2299     Handle<Object> uninitialized =
2300         TypeFeedbackCells::UninitializedSentinel(isolate());
2301     Handle<JSGlobalPropertyCell> cell =
2302         isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2303     RecordTypeFeedbackCell(expr->id(), cell);
2304     __ Move(rbx, cell);
2305   } else {
2306     flags = NO_CALL_FUNCTION_FLAGS;
2307   }
2308 
2309   CallConstructStub stub(flags);
2310   __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2311   PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2312   context()->Plug(rax);
2313 }
2314 
2315 
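// The Emit* helpers below handle inline runtime calls such as %_IsSmi(x):
// each one evaluates its argument(s) and splits control flow directly into
// the surrounding test context instead of materializing a boolean.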
2316 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2317   ZoneList<Expression*>* args = expr->arguments();
2318   ASSERT(args->length() == 1);
2319 
2320   VisitForAccumulatorValue(args->at(0));
2321 
2322   Label materialize_true, materialize_false;
2323   Label* if_true = NULL;
2324   Label* if_false = NULL;
2325   Label* fall_through = NULL;
2326   context()->PrepareTest(&materialize_true, &materialize_false,
2327                          &if_true, &if_false, &fall_through);
2328 
2329   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2330   __ JumpIfSmi(rax, if_true);
2331   __ jmp(if_false);
2332 
2333   context()->Plug(if_true, if_false);
2334 }
2335 
2336 
2337 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2338   ZoneList<Expression*>* args = expr->arguments();
2339   ASSERT(args->length() == 1);
2340 
2341   VisitForAccumulatorValue(args->at(0));
2342 
2343   Label materialize_true, materialize_false;
2344   Label* if_true = NULL;
2345   Label* if_false = NULL;
2346   Label* fall_through = NULL;
2347   context()->PrepareTest(&materialize_true, &materialize_false,
2348                          &if_true, &if_false, &fall_through);
2349 
2350   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2351   Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
2352   Split(non_negative_smi, if_true, if_false, fall_through);
2353 
2354   context()->Plug(if_true, if_false);
2355 }
2356 
2357 
2358 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2359   ZoneList<Expression*>* args = expr->arguments();
2360   ASSERT(args->length() == 1);
2361 
2362   VisitForAccumulatorValue(args->at(0));
2363 
2364   Label materialize_true, materialize_false;
2365   Label* if_true = NULL;
2366   Label* if_false = NULL;
2367   Label* fall_through = NULL;
2368   context()->PrepareTest(&materialize_true, &materialize_false,
2369                          &if_true, &if_false, &fall_through);
2370 
2371   __ JumpIfSmi(rax, if_false);
2372   __ CompareRoot(rax, Heap::kNullValueRootIndex);
2373   __ j(equal, if_true);
2374   __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2375   // Undetectable objects behave like undefined when tested with typeof.
2376   __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2377            Immediate(1 << Map::kIsUndetectable));
2378   __ j(not_zero, if_false);
2379   __ movzxbq(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
2380   __ cmpq(rbx, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2381   __ j(below, if_false);
2382   __ cmpq(rbx, Immediate(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2383   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2384   Split(below_equal, if_true, if_false, fall_through);
2385 
2386   context()->Plug(if_true, if_false);
2387 }
2388 
2389 
2390 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2391   ZoneList<Expression*>* args = expr->arguments();
2392   ASSERT(args->length() == 1);
2393 
2394   VisitForAccumulatorValue(args->at(0));
2395 
2396   Label materialize_true, materialize_false;
2397   Label* if_true = NULL;
2398   Label* if_false = NULL;
2399   Label* fall_through = NULL;
2400   context()->PrepareTest(&materialize_true, &materialize_false,
2401                          &if_true, &if_false, &fall_through);
2402 
2403   __ JumpIfSmi(rax, if_false);
2404   __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
2405   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2406   Split(above_equal, if_true, if_false, fall_through);
2407 
2408   context()->Plug(if_true, if_false);
2409 }
2410 
2411 
2412 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2413   ZoneList<Expression*>* args = expr->arguments();
2414   ASSERT(args->length() == 1);
2415 
2416   VisitForAccumulatorValue(args->at(0));
2417 
2418   Label materialize_true, materialize_false;
2419   Label* if_true = NULL;
2420   Label* if_false = NULL;
2421   Label* fall_through = NULL;
2422   context()->PrepareTest(&materialize_true, &materialize_false,
2423                          &if_true, &if_false, &fall_through);
2424 
2425   __ JumpIfSmi(rax, if_false);
2426   __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2427   __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2428            Immediate(1 << Map::kIsUndetectable));
2429   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2430   Split(not_zero, if_true, if_false, fall_through);
2431 
2432   context()->Plug(if_true, if_false);
2433 }
2434 
2435 
2436 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2437     CallRuntime* expr) {
2438   ZoneList<Expression*>* args = expr->arguments();
2439   ASSERT(args->length() == 1);
2440 
2441   VisitForAccumulatorValue(args->at(0));
2442 
2443   Label materialize_true, materialize_false;
2444   Label* if_true = NULL;
2445   Label* if_false = NULL;
2446   Label* fall_through = NULL;
2447   context()->PrepareTest(&materialize_true, &materialize_false,
2448                          &if_true, &if_false, &fall_through);
2449 
2450   if (FLAG_debug_code) __ AbortIfSmi(rax);
2451 
2452   // Check whether this map has already been checked to be safe for default
2453   // valueOf.
2454   __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2455   __ testb(FieldOperand(rbx, Map::kBitField2Offset),
2456            Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2457   __ j(not_zero, if_true);
2458 
2459   // Check for fast case object. Generate false result for slow case object.
2460   __ movq(rcx, FieldOperand(rax, JSObject::kPropertiesOffset));
2461   __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2462   __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
2463   __ j(equal, if_false);
2464 
2465   // Look for valueOf symbol in the descriptor array, and indicate false if
2466   // found. The type is not checked, so if it is a transition it is a false
2467   // negative.
2468   __ LoadInstanceDescriptors(rbx, rbx);
2469   __ movq(rcx, FieldOperand(rbx, FixedArray::kLengthOffset));
2470   // rbx: descriptor array
2471   // rcx: length of descriptor array
2472   // Calculate the end of the descriptor array.
2473   SmiIndex index = masm_->SmiToIndex(rdx, rcx, kPointerSizeLog2);
2474   __ lea(rcx,
2475          Operand(
2476              rbx, index.reg, index.scale, FixedArray::kHeaderSize));
2477   // Calculate location of the first key name.
2478   __ addq(rbx,
2479           Immediate(FixedArray::kHeaderSize +
2480                     DescriptorArray::kFirstIndex * kPointerSize));
2481   // Loop through all the keys in the descriptor array. If one of these is the
2482   // symbol valueOf the result is false.
2483   Label entry, loop;
2484   __ jmp(&entry);
2485   __ bind(&loop);
2486   __ movq(rdx, FieldOperand(rbx, 0));
2487   __ Cmp(rdx, FACTORY->value_of_symbol());
2488   __ j(equal, if_false);
2489   __ addq(rbx, Immediate(kPointerSize));
2490   __ bind(&entry);
2491   __ cmpq(rbx, rcx);
2492   __ j(not_equal, &loop);
2493 
2494   // Reload map as register rbx was used as temporary above.
2495   __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2496 
2497   // If a valueOf property is not found on the object, check that its
2498   // prototype is the unmodified String prototype. If not, the result is false.
2499   __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
2500   __ testq(rcx, Immediate(kSmiTagMask));
2501   __ j(zero, if_false);
2502   __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2503   __ movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2504   __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset));
2505   __ cmpq(rcx,
2506           ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2507   __ j(not_equal, if_false);
2508   // Set the bit in the map to indicate that it has been checked safe for
2509   // default valueOf and set true result.
2510   __ or_(FieldOperand(rbx, Map::kBitField2Offset),
2511          Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2512   __ jmp(if_true);
2513 
2514   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2515   context()->Plug(if_true, if_false);
2516 }
2517 
2518 
2519 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2520   ZoneList<Expression*>* args = expr->arguments();
2521   ASSERT(args->length() == 1);
2522 
2523   VisitForAccumulatorValue(args->at(0));
2524 
2525   Label materialize_true, materialize_false;
2526   Label* if_true = NULL;
2527   Label* if_false = NULL;
2528   Label* fall_through = NULL;
2529   context()->PrepareTest(&materialize_true, &materialize_false,
2530                          &if_true, &if_false, &fall_through);
2531 
2532   __ JumpIfSmi(rax, if_false);
2533   __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
2534   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2535   Split(equal, if_true, if_false, fall_through);
2536 
2537   context()->Plug(if_true, if_false);
2538 }
2539 
2540 
2541 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2542   ZoneList<Expression*>* args = expr->arguments();
2543   ASSERT(args->length() == 1);
2544 
2545   VisitForAccumulatorValue(args->at(0));
2546 
2547   Label materialize_true, materialize_false;
2548   Label* if_true = NULL;
2549   Label* if_false = NULL;
2550   Label* fall_through = NULL;
2551   context()->PrepareTest(&materialize_true, &materialize_false,
2552                          &if_true, &if_false, &fall_through);
2553 
2554   __ JumpIfSmi(rax, if_false);
2555   __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
2556   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2557   Split(equal, if_true, if_false, fall_through);
2558 
2559   context()->Plug(if_true, if_false);
2560 }
2561 
2562 
2563 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2564   ZoneList<Expression*>* args = expr->arguments();
2565   ASSERT(args->length() == 1);
2566 
2567   VisitForAccumulatorValue(args->at(0));
2568 
2569   Label materialize_true, materialize_false;
2570   Label* if_true = NULL;
2571   Label* if_false = NULL;
2572   Label* fall_through = NULL;
2573   context()->PrepareTest(&materialize_true, &materialize_false,
2574                          &if_true, &if_false, &fall_through);
2575 
2576   __ JumpIfSmi(rax, if_false);
2577   __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
2578   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2579   Split(equal, if_true, if_false, fall_through);
2580 
2581   context()->Plug(if_true, if_false);
2582 }
2583 
2584 
2585 
2586 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
2587   ASSERT(expr->arguments()->length() == 0);
2588 
2589   Label materialize_true, materialize_false;
2590   Label* if_true = NULL;
2591   Label* if_false = NULL;
2592   Label* fall_through = NULL;
2593   context()->PrepareTest(&materialize_true, &materialize_false,
2594                          &if_true, &if_false, &fall_through);
2595 
2596   // Get the frame pointer for the calling frame.
2597   __ movq(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2598 
2599   // Skip the arguments adaptor frame if it exists.
2600   Label check_frame_marker;
2601   __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
2602          Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2603   __ j(not_equal, &check_frame_marker);
2604   __ movq(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
2605 
2606   // Check the marker in the calling frame.
2607   __ bind(&check_frame_marker);
2608   __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
2609          Smi::FromInt(StackFrame::CONSTRUCT));
2610   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2611   Split(equal, if_true, if_false, fall_through);
2612 
2613   context()->Plug(if_true, if_false);
2614 }
2615 
2616 
2617 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
2618   ZoneList<Expression*>* args = expr->arguments();
2619   ASSERT(args->length() == 2);
2620 
2621   // Load the two objects into registers and perform the comparison.
2622   VisitForStackValue(args->at(0));
2623   VisitForAccumulatorValue(args->at(1));
2624 
2625   Label materialize_true, materialize_false;
2626   Label* if_true = NULL;
2627   Label* if_false = NULL;
2628   Label* fall_through = NULL;
2629   context()->PrepareTest(&materialize_true, &materialize_false,
2630                          &if_true, &if_false, &fall_through);
2631 
2632   __ pop(rbx);
2633   __ cmpq(rax, rbx);
2634   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2635   Split(equal, if_true, if_false, fall_through);
2636 
2637   context()->Plug(if_true, if_false);
2638 }
2639 
2640 
2641 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
2642   ZoneList<Expression*>* args = expr->arguments();
2643   ASSERT(args->length() == 1);
2644 
2645   // ArgumentsAccessStub expects the key in rdx and the formal
2646   // parameter count in rax.
2647   VisitForAccumulatorValue(args->at(0));
2648   __ movq(rdx, rax);
2649   __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
2650   ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2651   __ CallStub(&stub);
2652   context()->Plug(rax);
2653 }
2654 
2655 
2656 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
2657   ASSERT(expr->arguments()->length() == 0);
2658 
2659   Label exit;
2660   // Get the number of formal parameters.
2661   __ Move(rax, Smi::FromInt(info_->scope()->num_parameters()));
2662 
2663   // Check if the calling frame is an arguments adaptor frame.
2664   __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2665   __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
2666          Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2667   __ j(not_equal, &exit, Label::kNear);
2668 
2669   // Arguments adaptor case: Read the arguments length from the
2670   // adaptor frame.
2671   __ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2672 
2673   __ bind(&exit);
2674   if (FLAG_debug_code) __ AbortIfNotSmi(rax);
2675   context()->Plug(rax);
2676 }
2677 
2678 
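// %_ClassOf: returns the class name of an object, e.g. "Function" for JS
// functions, "Object" for objects with a non-function constructor, and null
// for non-JS objects such as smis.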
2679 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2680   ZoneList<Expression*>* args = expr->arguments();
2681   ASSERT(args->length() == 1);
2682   Label done, null, function, non_function_constructor;
2683 
2684   VisitForAccumulatorValue(args->at(0));
2685 
2686   // If the object is a smi, we return null.
2687   __ JumpIfSmi(rax, &null);
2688 
2689   // Check that the object is a JS object but take special care of JS
2690   // functions to make sure they have 'Function' as their class.
2691   // Assume that there are only two callable types, and one of them is at
2692   // either end of the type range for JS object types. Saves extra comparisons.
2693   STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
2694   __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
2695   // Map is now in rax.
2696   __ j(below, &null);
2697   STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2698                 FIRST_SPEC_OBJECT_TYPE + 1);
2699   __ j(equal, &function);
2700 
2701   __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
2702   STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2703                 LAST_SPEC_OBJECT_TYPE - 1);
2704   __ j(equal, &function);
2705   // Assume that there is no larger type.
2706   STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
2707 
2708   // Check if the constructor in the map is a JS function.
2709   __ movq(rax, FieldOperand(rax, Map::kConstructorOffset));
2710   __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
2711   __ j(not_equal, &non_function_constructor);
2712 
2713   // rax now contains the constructor function. Grab the
2714   // instance class name from there.
2715   __ movq(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
2716   __ movq(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
2717   __ jmp(&done);
2718 
2719   // Functions have class 'Function'.
2720   __ bind(&function);
2721   __ Move(rax, isolate()->factory()->function_class_symbol());
2722   __ jmp(&done);
2723 
2724   // Objects with a non-function constructor have class 'Object'.
2725   __ bind(&non_function_constructor);
2726   __ Move(rax, isolate()->factory()->Object_symbol());
2727   __ jmp(&done);
2728 
2729   // Non-JS objects have class null.
2730   __ bind(&null);
2731   __ LoadRoot(rax, Heap::kNullValueRootIndex);
2732 
2733   // All done.
2734   __ bind(&done);
2735 
2736   context()->Plug(rax);
2737 }
2738 
2739 
2740 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
2741   // Conditionally generate a log call.
2742   // Args:
2743   //   0 (literal string): The type of logging (corresponds to the flags).
2744   //     This is used to determine whether or not to generate the log call.
2745   //   1 (string): Format string.  Access the string at argument index 2
2746   //     with '%2s' (see Logger::LogRuntime for all the formats).
2747   //   2 (array): Arguments to the format string.
2748   ZoneList<Expression*>* args = expr->arguments();
2749   ASSERT_EQ(args->length(), 3);
2750   if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
2751     VisitForStackValue(args->at(1));
2752     VisitForStackValue(args->at(2));
2753     __ CallRuntime(Runtime::kLog, 2);
2754   }
2755   // Finally, we're expected to leave a value on the top of the stack.
2756   __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2757   context()->Plug(rax);
2758 }
2759 
2760 
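// Returns a fresh HeapNumber holding a random value in [0, 1). For example,
// 32 random bits r produce r * 2^-32: the bits are spliced into the low half
// of the mantissa of 1.0 x 2^20 and the 2^20 bias is subtracted again below.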
2761 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
2762   ASSERT(expr->arguments()->length() == 0);
2763 
2764   Label slow_allocate_heapnumber;
2765   Label heapnumber_allocated;
2766 
2767   __ AllocateHeapNumber(rbx, rcx, &slow_allocate_heapnumber);
2768   __ jmp(&heapnumber_allocated);
2769 
2770   __ bind(&slow_allocate_heapnumber);
2771   // Allocate a heap number.
2772   __ CallRuntime(Runtime::kNumberAlloc, 0);
2773   __ movq(rbx, rax);
2774 
2775   __ bind(&heapnumber_allocated);
2776 
2777   // Return a random uint32 number in rax.
2778   // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs.
2779   __ PrepareCallCFunction(1);
2780 #ifdef _WIN64
2781   __ movq(rcx, ContextOperand(context_register(), Context::GLOBAL_INDEX));
2782   __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset));
2783 
2784 #else
2785   __ movq(rdi, ContextOperand(context_register(), Context::GLOBAL_INDEX));
2786   __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset));
2787 #endif
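  // The two branches above differ only in the argument register: the Windows
  // x64 calling convention passes the first integer argument in rcx, while
  // the System V AMD64 convention uses rdi.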
2788   __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
2789 
2790   // Convert 32 random bits in rax to 0.(32 random bits) in a double
2791   // by computing:
2792   // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
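  // Worked out with concrete bit patterns (assuming the usual IEEE-754
  // layouts): 0x49800000 is 2^20 as a single, so after the cvtss2sd below
  // xmm1 holds the double 0x4130000000000000; the movd puts the 32 random
  // bits in the low word of xmm0, the xorps therefore produces the double
  // 2^20 + bits/2^32, and the subsd leaves bits/2^32, i.e. a value in [0, 1).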
2793   __ movl(rcx, Immediate(0x49800000));  // 1.0 x 2^20 as single.
2794   __ movd(xmm1, rcx);
2795   __ movd(xmm0, rax);
2796   __ cvtss2sd(xmm1, xmm1);
2797   __ xorps(xmm0, xmm1);
2798   __ subsd(xmm0, xmm1);
2799   __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0);
2800 
2801   __ movq(rax, rbx);
2802   context()->Plug(rax);
2803 }
2804 
2805 
2806 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
2807   // Load the arguments on the stack and call the stub.
2808   SubStringStub stub;
2809   ZoneList<Expression*>* args = expr->arguments();
2810   ASSERT(args->length() == 3);
2811   VisitForStackValue(args->at(0));
2812   VisitForStackValue(args->at(1));
2813   VisitForStackValue(args->at(2));
2814   __ CallStub(&stub);
2815   context()->Plug(rax);
2816 }
2817 
2818 
2819 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
2820   // Load the arguments on the stack and call the stub.
2821   RegExpExecStub stub;
2822   ZoneList<Expression*>* args = expr->arguments();
2823   ASSERT(args->length() == 4);
2824   VisitForStackValue(args->at(0));
2825   VisitForStackValue(args->at(1));
2826   VisitForStackValue(args->at(2));
2827   VisitForStackValue(args->at(3));
2828   __ CallStub(&stub);
2829   context()->Plug(rax);
2830 }
2831 
2832 
2833 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
2834   ZoneList<Expression*>* args = expr->arguments();
2835   ASSERT(args->length() == 1);
2836 
2837   VisitForAccumulatorValue(args->at(0));  // Load the object.
2838 
2839   Label done;
2840   // If the object is a smi, return the object.
2841   __ JumpIfSmi(rax, &done);
2842   // If the object is not a value type, return the object.
2843   __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
2844   __ j(not_equal, &done);
2845   __ movq(rax, FieldOperand(rax, JSValue::kValueOffset));
2846 
2847   __ bind(&done);
2848   context()->Plug(rax);
2849 }
2850 
2851 
2852 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
2853   ZoneList<Expression*>* args = expr->arguments();
2854   ASSERT(args->length() == 2);
2855   ASSERT_NE(NULL, args->at(1)->AsLiteral());
2856   Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
2857 
2858   VisitForAccumulatorValue(args->at(0));  // Load the object.
2859 
2860   Label runtime, done;
2861   Register object = rax;
2862   Register result = rax;
2863   Register scratch = rcx;
2864 
2865 #ifdef DEBUG
2866   __ AbortIfSmi(object);
2867   __ CmpObjectType(object, JS_DATE_TYPE, scratch);
2868   __ Assert(equal, "Trying to get date field from non-date.");
2869 #endif
2870 
2871   if (index->value() == 0) {
2872     __ movq(result, FieldOperand(object, JSDate::kValueOffset));
2873   } else {
2874     if (index->value() < JSDate::kFirstUncachedField) {
2875       ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
2876       __ movq(scratch, stamp);
2877       __ cmpq(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
2878       __ j(not_equal, &runtime, Label::kNear);
2879       __ movq(result, FieldOperand(object, JSDate::kValueOffset +
2880                                            kPointerSize * index->value()));
2881       __ jmp(&done);
2882     }
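    // Fast path note: fields below kFirstUncachedField are cached directly in
    // the JSDate object and are only trusted while the object's cache stamp
    // matches the isolate's date cache stamp checked above; otherwise we fall
    // through to the C call below to recompute the field.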
2883     __ bind(&runtime);
2884     __ PrepareCallCFunction(2);
2885 #ifdef _WIN64
2886     __ movq(rcx, object);
2887     __ movq(rdx, index, RelocInfo::NONE);
2888 #else
2889     __ movq(rdi, object);
2890     __ movq(rsi, index, RelocInfo::NONE);
2891 #endif
2892     __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
2893     __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2894     __ bind(&done);
2895   }
2896   context()->Plug(rax);
2897 }
2898 
2899 
2900 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
2901   // Load the arguments on the stack and call the runtime function.
2902   ZoneList<Expression*>* args = expr->arguments();
2903   ASSERT(args->length() == 2);
2904   VisitForStackValue(args->at(0));
2905   VisitForStackValue(args->at(1));
2906   MathPowStub stub(MathPowStub::ON_STACK);
2907   __ CallStub(&stub);
2908   context()->Plug(rax);
2909 }
2910 
2911 
2912 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
2913   ZoneList<Expression*>* args = expr->arguments();
2914   ASSERT(args->length() == 2);
2915 
2916   VisitForStackValue(args->at(0));  // Load the object.
2917   VisitForAccumulatorValue(args->at(1));  // Load the value.
2918   __ pop(rbx);  // rax = value. rbx = object.
2919 
2920   Label done;
2921   // If the object is a smi, return the value.
2922   __ JumpIfSmi(rbx, &done);
2923 
2924   // If the object is not a value type, return the value.
2925   __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
2926   __ j(not_equal, &done);
2927 
2928   // Store the value.
2929   __ movq(FieldOperand(rbx, JSValue::kValueOffset), rax);
2930   // Update the write barrier.  Save the value as it will be
2931   // overwritten by the write barrier code and is needed afterward.
2932   __ movq(rdx, rax);
2933   __ RecordWriteField(rbx, JSValue::kValueOffset, rdx, rcx, kDontSaveFPRegs);
2934 
2935   __ bind(&done);
2936   context()->Plug(rax);
2937 }
2938 
2939 
2940 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
2941   ZoneList<Expression*>* args = expr->arguments();
2942   ASSERT_EQ(args->length(), 1);
2943 
2944   // Load the argument on the stack and call the stub.
2945   VisitForStackValue(args->at(0));
2946 
2947   NumberToStringStub stub;
2948   __ CallStub(&stub);
2949   context()->Plug(rax);
2950 }
2951 
2952 
2953 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
2954   ZoneList<Expression*>* args = expr->arguments();
2955   ASSERT(args->length() == 1);
2956 
2957   VisitForAccumulatorValue(args->at(0));
2958 
2959   Label done;
2960   StringCharFromCodeGenerator generator(rax, rbx);
2961   generator.GenerateFast(masm_);
2962   __ jmp(&done);
2963 
2964   NopRuntimeCallHelper call_helper;
2965   generator.GenerateSlow(masm_, call_helper);
2966 
2967   __ bind(&done);
2968   context()->Plug(rbx);
2969 }
2970 
2971 
2972 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
2973   ZoneList<Expression*>* args = expr->arguments();
2974   ASSERT(args->length() == 2);
2975 
2976   VisitForStackValue(args->at(0));
2977   VisitForAccumulatorValue(args->at(1));
2978 
2979   Register object = rbx;
2980   Register index = rax;
2981   Register result = rdx;
2982 
2983   __ pop(object);
2984 
2985   Label need_conversion;
2986   Label index_out_of_range;
2987   Label done;
2988   StringCharCodeAtGenerator generator(object,
2989                                       index,
2990                                       result,
2991                                       &need_conversion,
2992                                       &need_conversion,
2993                                       &index_out_of_range,
2994                                       STRING_INDEX_IS_NUMBER);
2995   generator.GenerateFast(masm_);
2996   __ jmp(&done);
2997 
2998   __ bind(&index_out_of_range);
2999   // When the index is out of range, the spec requires us to return
3000   // NaN.
3001   __ LoadRoot(result, Heap::kNanValueRootIndex);
3002   __ jmp(&done);
3003 
3004   __ bind(&need_conversion);
3005   // Move the undefined value into the result register, which will
3006   // trigger conversion.
3007   __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3008   __ jmp(&done);
3009 
3010   NopRuntimeCallHelper call_helper;
3011   generator.GenerateSlow(masm_, call_helper);
3012 
3013   __ bind(&done);
3014   context()->Plug(result);
3015 }
3016 
3017 
3018 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3019   ZoneList<Expression*>* args = expr->arguments();
3020   ASSERT(args->length() == 2);
3021 
3022   VisitForStackValue(args->at(0));
3023   VisitForAccumulatorValue(args->at(1));
3024 
3025   Register object = rbx;
3026   Register index = rax;
3027   Register scratch = rdx;
3028   Register result = rax;
3029 
3030   __ pop(object);
3031 
3032   Label need_conversion;
3033   Label index_out_of_range;
3034   Label done;
3035   StringCharAtGenerator generator(object,
3036                                   index,
3037                                   scratch,
3038                                   result,
3039                                   &need_conversion,
3040                                   &need_conversion,
3041                                   &index_out_of_range,
3042                                   STRING_INDEX_IS_NUMBER);
3043   generator.GenerateFast(masm_);
3044   __ jmp(&done);
3045 
3046   __ bind(&index_out_of_range);
3047   // When the index is out of range, the spec requires us to return
3048   // the empty string.
3049   __ LoadRoot(result, Heap::kEmptyStringRootIndex);
3050   __ jmp(&done);
3051 
3052   __ bind(&need_conversion);
3053   // Move smi zero into the result register, which will trigger
3054   // conversion.
3055   __ Move(result, Smi::FromInt(0));
3056   __ jmp(&done);
3057 
3058   NopRuntimeCallHelper call_helper;
3059   generator.GenerateSlow(masm_, call_helper);
3060 
3061   __ bind(&done);
3062   context()->Plug(result);
3063 }
3064 
3065 
3066 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3067   ZoneList<Expression*>* args = expr->arguments();
3068   ASSERT_EQ(2, args->length());
3069 
3070   VisitForStackValue(args->at(0));
3071   VisitForStackValue(args->at(1));
3072 
3073   StringAddStub stub(NO_STRING_ADD_FLAGS);
3074   __ CallStub(&stub);
3075   context()->Plug(rax);
3076 }
3077 
3078 
3079 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3080   ZoneList<Expression*>* args = expr->arguments();
3081   ASSERT_EQ(2, args->length());
3082 
3083   VisitForStackValue(args->at(0));
3084   VisitForStackValue(args->at(1));
3085 
3086   StringCompareStub stub;
3087   __ CallStub(&stub);
3088   context()->Plug(rax);
3089 }
3090 
3091 
3092 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
3093   // Load the argument on the stack and call the stub.
3094   TranscendentalCacheStub stub(TranscendentalCache::SIN,
3095                                TranscendentalCacheStub::TAGGED);
3096   ZoneList<Expression*>* args = expr->arguments();
3097   ASSERT(args->length() == 1);
3098   VisitForStackValue(args->at(0));
3099   __ CallStub(&stub);
3100   context()->Plug(rax);
3101 }
3102 
3103 
3104 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
3105   // Load the argument on the stack and call the stub.
3106   TranscendentalCacheStub stub(TranscendentalCache::COS,
3107                                TranscendentalCacheStub::TAGGED);
3108   ZoneList<Expression*>* args = expr->arguments();
3109   ASSERT(args->length() == 1);
3110   VisitForStackValue(args->at(0));
3111   __ CallStub(&stub);
3112   context()->Plug(rax);
3113 }
3114 
3115 
3116 void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
3117   // Load the argument on the stack and call the stub.
3118   TranscendentalCacheStub stub(TranscendentalCache::TAN,
3119                                TranscendentalCacheStub::TAGGED);
3120   ZoneList<Expression*>* args = expr->arguments();
3121   ASSERT(args->length() == 1);
3122   VisitForStackValue(args->at(0));
3123   __ CallStub(&stub);
3124   context()->Plug(rax);
3125 }
3126 
3127 
3128 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3129   // Load the argument on the stack and call the stub.
3130   TranscendentalCacheStub stub(TranscendentalCache::LOG,
3131                                TranscendentalCacheStub::TAGGED);
3132   ZoneList<Expression*>* args = expr->arguments();
3133   ASSERT(args->length() == 1);
3134   VisitForStackValue(args->at(0));
3135   __ CallStub(&stub);
3136   context()->Plug(rax);
3137 }
3138 
3139 
3140 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3141   // Load the argument on the stack and call the runtime function.
3142   ZoneList<Expression*>* args = expr->arguments();
3143   ASSERT(args->length() == 1);
3144   VisitForStackValue(args->at(0));
3145   __ CallRuntime(Runtime::kMath_sqrt, 1);
3146   context()->Plug(rax);
3147 }
3148 
3149 
3150 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3151   ZoneList<Expression*>* args = expr->arguments();
3152   ASSERT(args->length() >= 2);
3153 
3154   int arg_count = args->length() - 2;  // 2 ~ receiver and function.
3155   for (int i = 0; i < arg_count + 1; i++) {
3156     VisitForStackValue(args->at(i));
3157   }
3158   VisitForAccumulatorValue(args->last());  // Function.
3159 
3160   // Check for proxy.
3161   Label proxy, done;
3162   __ CmpObjectType(rax, JS_FUNCTION_PROXY_TYPE, rbx);
3163   __ j(equal, &proxy);
3164 
3165   // InvokeFunction requires the function in rdi. Move it in there.
3166   __ movq(rdi, result_register());
3167   ParameterCount count(arg_count);
3168   __ InvokeFunction(rdi, count, CALL_FUNCTION,
3169                     NullCallWrapper(), CALL_AS_METHOD);
3170   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3171   __ jmp(&done);
3172 
3173   __ bind(&proxy);
3174   __ push(rax);
3175   __ CallRuntime(Runtime::kCall, args->length());
3176   __ bind(&done);
3177 
3178   context()->Plug(rax);
3179 }
3180 
3181 
3182 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3183   RegExpConstructResultStub stub;
3184   ZoneList<Expression*>* args = expr->arguments();
3185   ASSERT(args->length() == 3);
3186   VisitForStackValue(args->at(0));
3187   VisitForStackValue(args->at(1));
3188   VisitForStackValue(args->at(2));
3189   __ CallStub(&stub);
3190   context()->Plug(rax);
3191 }
3192 
3193 
3194 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3195   ZoneList<Expression*>* args = expr->arguments();
3196   ASSERT_EQ(2, args->length());
3197 
3198   ASSERT_NE(NULL, args->at(0)->AsLiteral());
3199   int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
3200 
3201   Handle<FixedArray> jsfunction_result_caches(
3202       isolate()->global_context()->jsfunction_result_caches());
3203   if (jsfunction_result_caches->length() <= cache_id) {
3204     __ Abort("Attempt to use undefined cache.");
3205     __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3206     context()->Plug(rax);
3207     return;
3208   }
3209 
3210   VisitForAccumulatorValue(args->at(1));
3211 
3212   Register key = rax;
3213   Register cache = rbx;
3214   Register tmp = rcx;
3215   __ movq(cache, ContextOperand(rsi, Context::GLOBAL_INDEX));
3216   __ movq(cache,
3217           FieldOperand(cache, GlobalObject::kGlobalContextOffset));
3218   __ movq(cache,
3219           ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3220   __ movq(cache,
3221           FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3222 
3223   Label done, not_found;
3224   // Load the cache finger offset (a smi) into tmp.
3225   STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
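  // Layout assumed by the fast path below: the cache stores (key, value)
  // pairs in adjacent elements of the fixed array, and the finger is the
  // element index of the most recently hit key, so only that one entry is
  // checked before falling back to the runtime.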
3226   __ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
3227   SmiIndex index =
3228       __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
3229   __ cmpq(key, FieldOperand(cache,
3230                             index.reg,
3231                             index.scale,
3232                             FixedArray::kHeaderSize));
3233   __ j(not_equal, &not_found, Label::kNear);
3234   __ movq(rax, FieldOperand(cache,
3235                             index.reg,
3236                             index.scale,
3237                             FixedArray::kHeaderSize + kPointerSize));
3238   __ jmp(&done, Label::kNear);
3239 
3240   __ bind(&not_found);
3241   // Call runtime to perform the lookup.
3242   __ push(cache);
3243   __ push(key);
3244   __ CallRuntime(Runtime::kGetFromCache, 2);
3245 
3246   __ bind(&done);
3247   context()->Plug(rax);
3248 }
3249 
3250 
3251 void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
3252   ZoneList<Expression*>* args = expr->arguments();
3253   ASSERT_EQ(2, args->length());
3254 
3255   Register right = rax;
3256   Register left = rbx;
3257   Register tmp = rcx;
3258 
3259   VisitForStackValue(args->at(0));
3260   VisitForAccumulatorValue(args->at(1));
3261   __ pop(left);
3262 
3263   Label done, fail, ok;
3264   __ cmpq(left, right);
3265   __ j(equal, &ok, Label::kNear);
3266   // Fail if either is a non-HeapObject.
3267   Condition either_smi = masm()->CheckEitherSmi(left, right, tmp);
3268   __ j(either_smi, &fail, Label::kNear);
3269   __ j(zero, &fail, Label::kNear);
3270   __ movq(tmp, FieldOperand(left, HeapObject::kMapOffset));
3271   __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset),
3272           Immediate(JS_REGEXP_TYPE));
3273   __ j(not_equal, &fail, Label::kNear);
3274   __ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset));
3275   __ j(not_equal, &fail, Label::kNear);
3276   __ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset));
3277   __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset));
3278   __ j(equal, &ok, Label::kNear);
3279   __ bind(&fail);
3280   __ Move(rax, isolate()->factory()->false_value());
3281   __ jmp(&done, Label::kNear);
3282   __ bind(&ok);
3283   __ Move(rax, isolate()->factory()->true_value());
3284   __ bind(&done);
3285 
3286   context()->Plug(rax);
3287 }
3288 
3289 
3290 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3291   ZoneList<Expression*>* args = expr->arguments();
3292   ASSERT(args->length() == 1);
3293 
3294   VisitForAccumulatorValue(args->at(0));
3295 
3296   Label materialize_true, materialize_false;
3297   Label* if_true = NULL;
3298   Label* if_false = NULL;
3299   Label* fall_through = NULL;
3300   context()->PrepareTest(&materialize_true, &materialize_false,
3301                          &if_true, &if_false, &fall_through);
3302 
3303   __ testl(FieldOperand(rax, String::kHashFieldOffset),
3304            Immediate(String::kContainsCachedArrayIndexMask));
3305   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3306   __ j(zero, if_true);
3307   __ jmp(if_false);
3308 
3309   context()->Plug(if_true, if_false);
3310 }
3311 
3312 
3313 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3314   ZoneList<Expression*>* args = expr->arguments();
3315   ASSERT(args->length() == 1);
3316   VisitForAccumulatorValue(args->at(0));
3317 
3318   if (FLAG_debug_code) {
3319     __ AbortIfNotString(rax);
3320   }
3321 
3322   __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
3323   ASSERT(String::kHashShift >= kSmiTagSize);
3324   __ IndexFromHash(rax, rax);
3325 
3326   context()->Plug(rax);
3327 }
3328 
3329 
3330 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3331   Label bailout, return_result, done, one_char_separator, long_separator,
3332       non_trivial_array, not_size_one_array, loop,
3333       loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3334   ZoneList<Expression*>* args = expr->arguments();
3335   ASSERT(args->length() == 2);
3336   // We will leave the separator on the stack until the end of the function.
3337   VisitForStackValue(args->at(1));
3338   // Load the array into rax.
3339   VisitForAccumulatorValue(args->at(0));
3340   // All aliases of the same register have disjoint lifetimes.
3341   Register array = rax;
3342   Register elements = no_reg;  // Will be rax.
3343 
3344   Register index = rdx;
3345 
3346   Register string_length = rcx;
3347 
3348   Register string = rsi;
3349 
3350   Register scratch = rbx;
3351 
3352   Register array_length = rdi;
3353   Register result_pos = no_reg;  // Will be rdi.
3354 
3355   Operand separator_operand =    Operand(rsp, 2 * kPointerSize);
3356   Operand result_operand =       Operand(rsp, 1 * kPointerSize);
3357   Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
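  // Stack layout used throughout this function (matching the operands above):
  //   rsp[0x10]: the separator string (pushed above),
  //   rsp[0x08]: the result string, once it has been allocated,
  //   rsp[0x00]: the array length as an untagged int32.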
3358   // Separator operand is already pushed. Make room for the two
3359   // other stack fields, and clear the direction flag in anticipation
3360   // of calling CopyBytes.
3361   __ subq(rsp, Immediate(2 * kPointerSize));
3362   __ cld();
3363   // Check that the array is a JSArray
3364   __ JumpIfSmi(array, &bailout);
3365   __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3366   __ j(not_equal, &bailout);
3367 
3368   // Check that the array has fast elements.
3369   __ CheckFastElements(scratch, &bailout);
3370 
3371   // Array has fast elements, so its length must be a smi.
3372   // If the array has length zero, return the empty string.
3373   __ movq(array_length, FieldOperand(array, JSArray::kLengthOffset));
3374   __ SmiCompare(array_length, Smi::FromInt(0));
3375   __ j(not_zero, &non_trivial_array);
3376   __ LoadRoot(rax, Heap::kEmptyStringRootIndex);
3377   __ jmp(&return_result);
3378 
3379   // Save the array length on the stack.
3380   __ bind(&non_trivial_array);
3381   __ SmiToInteger32(array_length, array_length);
3382   __ movl(array_length_operand, array_length);
3383 
3384   // Save the FixedArray containing array's elements.
3385   // End of array's live range.
3386   elements = array;
3387   __ movq(elements, FieldOperand(array, JSArray::kElementsOffset));
3388   array = no_reg;
3389 
3390 
3391   // Check that all array elements are sequential ASCII strings, and
3392   // accumulate the sum of their lengths, as a smi-encoded value.
3393   __ Set(index, 0);
3394   __ Set(string_length, 0);
3395   // Loop condition: while (index < array_length).
3396   // Live loop registers: index(int32), array_length(int32), string(String*),
3397   //                      scratch, string_length(int32), elements(FixedArray*).
3398   if (FLAG_debug_code) {
3399     __ cmpq(index, array_length);
3400     __ Assert(below, "No empty arrays here in EmitFastAsciiArrayJoin");
3401   }
3402   __ bind(&loop);
3403   __ movq(string, FieldOperand(elements,
3404                                index,
3405                                times_pointer_size,
3406                                FixedArray::kHeaderSize));
3407   __ JumpIfSmi(string, &bailout);
3408   __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
3409   __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3410   __ andb(scratch, Immediate(
3411       kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3412   __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
3413   __ j(not_equal, &bailout);
3414   __ AddSmiField(string_length,
3415                  FieldOperand(string, SeqAsciiString::kLengthOffset));
3416   __ j(overflow, &bailout);
3417   __ incl(index);
3418   __ cmpl(index, array_length);
3419   __ j(less, &loop);
3420 
3421   // Live registers:
3422   // string_length: Sum of string lengths.
3423   // elements: FixedArray of strings.
3424   // index: Array length.
3425   // array_length: Array length.
3426 
3427   // If array_length is 1, return elements[0], a string.
3428   __ cmpl(array_length, Immediate(1));
3429   __ j(not_equal, &not_size_one_array);
3430   __ movq(rax, FieldOperand(elements, FixedArray::kHeaderSize));
3431   __ jmp(&return_result);
3432 
3433   __ bind(&not_size_one_array);
3434 
3435   // End of array_length live range.
3436   result_pos = array_length;
3437   array_length = no_reg;
3438 
3439   // Live registers:
3440   // string_length: Sum of string lengths.
3441   // elements: FixedArray of strings.
3442   // index: Array length.
3443 
3444   // Check that the separator is a sequential ASCII string.
3445   __ movq(string, separator_operand);
3446   __ JumpIfSmi(string, &bailout);
3447   __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
3448   __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3449   __ andb(scratch, Immediate(
3450       kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3451   __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
3452   __ j(not_equal, &bailout);
3453 
3454   // Live registers:
3455   // string_length: Sum of string lengths.
3456   // elements: FixedArray of strings.
3457   // index: Array length.
3458   // string: Separator string.
3459 
3460   // Add (separator length times (array_length - 1)) to string_length.
3461   __ SmiToInteger32(scratch,
3462                     FieldOperand(string, SeqAsciiString::kLengthOffset));
3463   __ decl(index);
3464   __ imull(scratch, index);
3465   __ j(overflow, &bailout);
3466   __ addl(string_length, scratch);
3467   __ j(overflow, &bailout);
3468 
3469   // Live registers and stack values:
3470   //   string_length: Total length of result string.
3471   //   elements: FixedArray of strings.
3472   __ AllocateAsciiString(result_pos, string_length, scratch,
3473                          index, string, &bailout);
3474   __ movq(result_operand, result_pos);
3475   __ lea(result_pos, FieldOperand(result_pos, SeqAsciiString::kHeaderSize));
3476 
3477   __ movq(string, separator_operand);
3478   __ SmiCompare(FieldOperand(string, SeqAsciiString::kLengthOffset),
3479                 Smi::FromInt(1));
3480   __ j(equal, &one_char_separator);
3481   __ j(greater, &long_separator);
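  // Three specialized copy loops follow: an empty separator just concatenates
  // the elements, a one-character separator is emitted with a single byte
  // store per element, and a longer separator is copied with CopyBytes.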
3482 
3483 
3484   // Empty separator case:
3485   __ Set(index, 0);
3486   __ movl(scratch, array_length_operand);
3487   __ jmp(&loop_1_condition);
3488   // Loop condition: while (index < array_length).
3489   __ bind(&loop_1);
3490   // Each iteration of the loop concatenates one string to the result.
3491   // Live values in registers:
3492   //   index: which element of the elements array we are adding to the result.
3493   //   result_pos: the position to which we are currently copying characters.
3494   //   elements: the FixedArray of strings we are joining.
3495   //   scratch: array length.
3496 
3497   // Get string = array[index].
3498   __ movq(string, FieldOperand(elements, index,
3499                                times_pointer_size,
3500                                FixedArray::kHeaderSize));
3501   __ SmiToInteger32(string_length,
3502                     FieldOperand(string, String::kLengthOffset));
3503   __ lea(string,
3504          FieldOperand(string, SeqAsciiString::kHeaderSize));
3505   __ CopyBytes(result_pos, string, string_length);
3506   __ incl(index);
3507   __ bind(&loop_1_condition);
3508   __ cmpl(index, scratch);
3509   __ j(less, &loop_1);  // Loop while (index < array_length).
3510   __ jmp(&done);
3511 
3512   // Generic bailout code used from several places.
3513   __ bind(&bailout);
3514   __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3515   __ jmp(&return_result);
3516 
3517 
3518   // One-character separator case
3519   __ bind(&one_char_separator);
3520   // Get the separator ASCII character value.
3521   // Register "string" holds the separator.
3522   __ movzxbl(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
3523   __ Set(index, 0);
3524   // Jump into the loop after the code that copies the separator, so the first
3525   // element is not preceded by a separator
3526   __ jmp(&loop_2_entry);
3527   // Loop condition: while (index < length).
3528   __ bind(&loop_2);
3529   // Each iteration of the loop concatenates one string to the result.
3530   // Live values in registers:
3531   //   elements: The FixedArray of strings we are joining.
3532   //   index: which element of the elements array we are adding to the result.
3533   //   result_pos: the position to which we are currently copying characters.
3534   //   scratch: Separator character.
3535 
3536   // Copy the separator character to the result.
3537   __ movb(Operand(result_pos, 0), scratch);
3538   __ incq(result_pos);
3539 
3540   __ bind(&loop_2_entry);
3541   // Get string = array[index].
3542   __ movq(string, FieldOperand(elements, index,
3543                                times_pointer_size,
3544                                FixedArray::kHeaderSize));
3545   __ SmiToInteger32(string_length,
3546                     FieldOperand(string, String::kLengthOffset));
3547   __ lea(string,
3548          FieldOperand(string, SeqAsciiString::kHeaderSize));
3549   __ CopyBytes(result_pos, string, string_length);
3550   __ incl(index);
3551   __ cmpl(index, array_length_operand);
3552   __ j(less, &loop_2);  // Loop while (index < length).
3553   __ jmp(&done);
3554 
3555 
3556   // Long separator case (separator is more than one character).
3557   __ bind(&long_separator);
3558 
3559   // Make elements point to end of elements array, and index
3560   // count from -array_length to zero, so we don't need to maintain
3561   // a loop limit.
3562   __ movl(index, array_length_operand);
3563   __ lea(elements, FieldOperand(elements, index, times_pointer_size,
3564                                 FixedArray::kHeaderSize));
3565   __ neg(index);
3566 
3567   // Replace separator string with pointer to its first character, and
3568   // make scratch be its length.
3569   __ movq(string, separator_operand);
3570   __ SmiToInteger32(scratch,
3571                     FieldOperand(string, String::kLengthOffset));
3572   __ lea(string,
3573          FieldOperand(string, SeqAsciiString::kHeaderSize));
3574   __ movq(separator_operand, string);
3575 
3576   // Jump into the loop after the code that copies the separator, so the first
3577   // element is not preceded by a separator
3578   __ jmp(&loop_3_entry);
3579   // Loop condition: while (index < length).
3580   __ bind(&loop_3);
3581   // Each iteration of the loop concatenates one string to the result.
3582   // Live values in registers:
3583   //   index: which element of the elements array we are adding to the result.
3584   //   result_pos: the position to which we are currently copying characters.
3585   //   scratch: Separator length.
3586   //   separator_operand (rsp[0x10]): Address of first char of separator.
3587 
3588   // Copy the separator to the result.
3589   __ movq(string, separator_operand);
3590   __ movl(string_length, scratch);
3591   __ CopyBytes(result_pos, string, string_length, 2);
3592 
3593   __ bind(&loop_3_entry);
3594   // Get string = array[index].
3595   __ movq(string, Operand(elements, index, times_pointer_size, 0));
3596   __ SmiToInteger32(string_length,
3597                     FieldOperand(string, String::kLengthOffset));
3598   __ lea(string,
3599          FieldOperand(string, SeqAsciiString::kHeaderSize));
3600   __ CopyBytes(result_pos, string, string_length);
3601   __ incq(index);
3602   __ j(not_equal, &loop_3);  // Loop while (index < 0).
3603 
3604   __ bind(&done);
3605   __ movq(rax, result_operand);
3606 
3607   __ bind(&return_result);
3608   // Drop temp values from the stack, and restore context register.
3609   __ addq(rsp, Immediate(3 * kPointerSize));
3610   __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3611   context()->Plug(rax);
3612 }
3613 
3614 
3615 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3616   Handle<String> name = expr->name();
3617   if (name->length() > 0 && name->Get(0) == '_') {
3618     Comment cmnt(masm_, "[ InlineRuntimeCall");
3619     EmitInlineRuntimeCall(expr);
3620     return;
3621   }
3622 
3623   Comment cmnt(masm_, "[ CallRuntime");
3624   ZoneList<Expression*>* args = expr->arguments();
3625 
3626   if (expr->is_jsruntime()) {
3627     // Prepare for calling JS runtime function.
3628     __ movq(rax, GlobalObjectOperand());
3629     __ push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
3630   }
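  // Dispatch note: names starting with '_' were already handled above as
  // inline runtime calls; is_jsruntime() calls go through a call IC against
  // the builtins object pushed above; everything else ends up in the C++
  // runtime via CallRuntime below.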
3631 
3632   // Push the arguments ("left-to-right").
3633   int arg_count = args->length();
3634   for (int i = 0; i < arg_count; i++) {
3635     VisitForStackValue(args->at(i));
3636   }
3637 
3638   if (expr->is_jsruntime()) {
3639     // Call the JS runtime function using a call IC.
3640     __ Move(rcx, expr->name());
3641     RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3642     Handle<Code> ic =
3643         isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
3644     __ call(ic, mode, expr->id());
3645     // Restore context register.
3646     __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3647   } else {
3648     __ CallRuntime(expr->function(), arg_count);
3649   }
3650   context()->Plug(rax);
3651 }
3652 
3653 
3654 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3655   switch (expr->op()) {
3656     case Token::DELETE: {
3657       Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3658       Property* property = expr->expression()->AsProperty();
3659       VariableProxy* proxy = expr->expression()->AsVariableProxy();
3660 
3661       if (property != NULL) {
3662         VisitForStackValue(property->obj());
3663         VisitForStackValue(property->key());
3664         StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
3665             ? kNonStrictMode : kStrictMode;
3666         __ Push(Smi::FromInt(strict_mode_flag));
3667         __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3668         context()->Plug(rax);
3669       } else if (proxy != NULL) {
3670         Variable* var = proxy->var();
3671         // Delete of an unqualified identifier is disallowed in strict mode
3672         // but "delete this" is allowed.
3673         ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
3674         if (var->IsUnallocated()) {
3675           __ push(GlobalObjectOperand());
3676           __ Push(var->name());
3677           __ Push(Smi::FromInt(kNonStrictMode));
3678           __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3679           context()->Plug(rax);
3680         } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3681           // Result of deleting non-global variables is false.  'this' is
3682           // not really a variable, though we implement it as one.  The
3683           // subexpression does not have side effects.
3684           context()->Plug(var->is_this());
3685         } else {
3686           // Non-global variable.  Call the runtime to try to delete from the
3687           // context where the variable was introduced.
3688           __ push(context_register());
3689           __ Push(var->name());
3690           __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3691           context()->Plug(rax);
3692         }
3693       } else {
3694         // Result of deleting non-property, non-variable reference is true.
3695         // The subexpression may have side effects.
3696         VisitForEffect(expr->expression());
3697         context()->Plug(true);
3698       }
3699       break;
3700     }
3701 
3702     case Token::VOID: {
3703       Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3704       VisitForEffect(expr->expression());
3705       context()->Plug(Heap::kUndefinedValueRootIndex);
3706       break;
3707     }
3708 
3709     case Token::NOT: {
3710       Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3711       if (context()->IsEffect()) {
3712         // Unary NOT has no side effects so it's only necessary to visit the
3713         // subexpression.  Match the optimizing compiler by not branching.
3714         VisitForEffect(expr->expression());
3715       } else if (context()->IsTest()) {
3716         const TestContext* test = TestContext::cast(context());
3717         // The labels are swapped for the recursive call.
3718         VisitForControl(expr->expression(),
3719                         test->false_label(),
3720                         test->true_label(),
3721                         test->fall_through());
3722         context()->Plug(test->true_label(), test->false_label());
3723       } else {
3724         // We handle value contexts explicitly rather than simply visiting
3725         // for control and plugging the control flow into the context,
3726         // because we need to prepare a pair of extra administrative AST ids
3727         // for the optimizing compiler.
3728         ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
3729         Label materialize_true, materialize_false, done;
3730         VisitForControl(expr->expression(),
3731                         &materialize_false,
3732                         &materialize_true,
3733                         &materialize_true);
3734         __ bind(&materialize_true);
3735         PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3736         if (context()->IsAccumulatorValue()) {
3737           __ LoadRoot(rax, Heap::kTrueValueRootIndex);
3738         } else {
3739           __ PushRoot(Heap::kTrueValueRootIndex);
3740         }
3741         __ jmp(&done, Label::kNear);
3742         __ bind(&materialize_false);
3743         PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3744         if (context()->IsAccumulatorValue()) {
3745           __ LoadRoot(rax, Heap::kFalseValueRootIndex);
3746         } else {
3747           __ PushRoot(Heap::kFalseValueRootIndex);
3748         }
3749         __ bind(&done);
3750       }
3751       break;
3752     }
3753 
3754     case Token::TYPEOF: {
3755       Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3756       { StackValueContext context(this);
3757         VisitForTypeofValue(expr->expression());
3758       }
3759       __ CallRuntime(Runtime::kTypeof, 1);
3760       context()->Plug(rax);
3761       break;
3762     }
3763 
3764     case Token::ADD: {
3765       Comment cmt(masm_, "[ UnaryOperation (ADD)");
3766       VisitForAccumulatorValue(expr->expression());
3767       Label no_conversion;
3768       __ JumpIfSmi(result_register(), &no_conversion);
3769       ToNumberStub convert_stub;
3770       __ CallStub(&convert_stub);
3771       __ bind(&no_conversion);
3772       context()->Plug(result_register());
3773       break;
3774     }
3775 
3776     case Token::SUB:
3777       EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
3778       break;
3779 
3780     case Token::BIT_NOT:
3781       EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
3782       break;
3783 
3784     default:
3785       UNREACHABLE();
3786   }
3787 }
3788 
3789 
3790 void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
3791                                            const char* comment) {
3792   // TODO(svenpanne): Allowing format strings in Comment would be nice here...
3793   Comment cmt(masm_, comment);
3794   bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
3795   UnaryOverwriteMode overwrite =
3796       can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
3797   UnaryOpStub stub(expr->op(), overwrite);
3798   // UnaryOpStub expects the argument to be in the
3799   // accumulator register rax.
3800   VisitForAccumulatorValue(expr->expression());
3801   SetSourcePosition(expr->position());
3802   __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
3803   context()->Plug(rax);
3804 }
3805 
3806 
3807 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3808   Comment cmnt(masm_, "[ CountOperation");
3809   SetSourcePosition(expr->position());
3810 
3811   // Invalid left-hand-sides are rewritten to have a 'throw
3812   // ReferenceError' as the left-hand side.
3813   if (!expr->expression()->IsValidLeftHandSide()) {
3814     VisitForEffect(expr->expression());
3815     return;
3816   }
3817 
3818   // Expression can only be a property, a global or a (parameter or local)
3819   // slot.
3820   enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
3821   LhsKind assign_type = VARIABLE;
3822   Property* prop = expr->expression()->AsProperty();
3823   // In case of a property we use the uninitialized expression context
3824   // of the key to detect a named property.
3825   if (prop != NULL) {
3826     assign_type =
3827         (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
3828   }
3829 
3830   // Evaluate expression and get value.
3831   if (assign_type == VARIABLE) {
3832     ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
3833     AccumulatorValueContext context(this);
3834     EmitVariableLoad(expr->expression()->AsVariableProxy());
3835   } else {
3836     // Reserve space for result of postfix operation.
3837     if (expr->is_postfix() && !context()->IsEffect()) {
3838       __ Push(Smi::FromInt(0));
3839     }
3840     if (assign_type == NAMED_PROPERTY) {
3841       VisitForAccumulatorValue(prop->obj());
3842       __ push(rax);  // Copy of receiver, needed for later store.
3843       EmitNamedPropertyLoad(prop);
3844     } else {
3845       VisitForStackValue(prop->obj());
3846       VisitForAccumulatorValue(prop->key());
3847       __ movq(rdx, Operand(rsp, 0));  // Leave receiver on stack
3848       __ push(rax);  // Copy of key, needed for later store.
3849       EmitKeyedPropertyLoad(prop);
3850     }
3851   }
3852 
3853   // We need a second deoptimization point after loading the value
3854   // in case evaluating the property load may have a side effect.
3855   if (assign_type == VARIABLE) {
3856     PrepareForBailout(expr->expression(), TOS_REG);
3857   } else {
3858     PrepareForBailoutForId(expr->CountId(), TOS_REG);
3859   }
3860 
3861   // Call ToNumber only if operand is not a smi.
3862   Label no_conversion;
3863   __ JumpIfSmi(rax, &no_conversion, Label::kNear);
3864   ToNumberStub convert_stub;
3865   __ CallStub(&convert_stub);
3866   __ bind(&no_conversion);
3867 
3868   // Save result for postfix expressions.
3869   if (expr->is_postfix()) {
3870     if (!context()->IsEffect()) {
3871       // Save the result on the stack. If we have a named or keyed property
3872       // we store the result under the receiver that is currently on top
3873       // of the stack.
3874       switch (assign_type) {
3875         case VARIABLE:
3876           __ push(rax);
3877           break;
3878         case NAMED_PROPERTY:
3879           __ movq(Operand(rsp, kPointerSize), rax);
3880           break;
3881         case KEYED_PROPERTY:
3882           __ movq(Operand(rsp, 2 * kPointerSize), rax);
3883           break;
3884       }
3885     }
3886   }
3887 
3888   // Inline smi case if we are in a loop.
3889   Label done, stub_call;
3890   JumpPatchSite patch_site(masm_);
3891 
3892   if (ShouldInlineSmiCase(expr->op())) {
3893     if (expr->op() == Token::INC) {
3894       __ SmiAddConstant(rax, rax, Smi::FromInt(1));
3895     } else {
3896       __ SmiSubConstant(rax, rax, Smi::FromInt(1));
3897     }
3898     __ j(overflow, &stub_call, Label::kNear);
3899     // We could eliminate this smi check if we split the code at
3900     // the first smi check before calling ToNumber.
3901     patch_site.EmitJumpIfSmi(rax, &done, Label::kNear);
3902 
3903     __ bind(&stub_call);
3904     // Call stub. Undo operation first.
3905     if (expr->op() == Token::INC) {
3906       __ SmiSubConstant(rax, rax, Smi::FromInt(1));
3907     } else {
3908       __ SmiAddConstant(rax, rax, Smi::FromInt(1));
3909     }
3910   }
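  // Whether or not the inlined smi path was taken, rax holds the original
  // (post-ToNumber) operand again at this point, since the speculative
  // add/sub above is undone before reaching the stub call.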
3911 
3912   // Record position before stub call.
3913   SetSourcePosition(expr->position());
3914 
3915   // Call stub for +1/-1.
3916   BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
3917   if (expr->op() == Token::INC) {
3918     __ Move(rdx, Smi::FromInt(1));
3919   } else {
3920     __ movq(rdx, rax);
3921     __ Move(rax, Smi::FromInt(1));
3922   }
3923   __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
3924   patch_site.EmitPatchInfo();
3925   __ bind(&done);
3926 
3927   // Store the value returned in rax.
3928   switch (assign_type) {
3929     case VARIABLE:
3930       if (expr->is_postfix()) {
3931         // Perform the assignment as if via '='.
3932         { EffectContext context(this);
3933           EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3934                                  Token::ASSIGN);
3935           PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3936           context.Plug(rax);
3937         }
3938         // For all contexts except kEffect: We have the result on
3939         // top of the stack.
3940         if (!context()->IsEffect()) {
3941           context()->PlugTOS();
3942         }
3943       } else {
3944         // Perform the assignment as if via '='.
3945         EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3946                                Token::ASSIGN);
3947         PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3948         context()->Plug(rax);
3949       }
3950       break;
3951     case NAMED_PROPERTY: {
3952       __ Move(rcx, prop->key()->AsLiteral()->handle());
3953       __ pop(rdx);
3954       Handle<Code> ic = is_classic_mode()
3955           ? isolate()->builtins()->StoreIC_Initialize()
3956           : isolate()->builtins()->StoreIC_Initialize_Strict();
3957       __ call(ic, RelocInfo::CODE_TARGET, expr->id());
3958       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3959       if (expr->is_postfix()) {
3960         if (!context()->IsEffect()) {
3961           context()->PlugTOS();
3962         }
3963       } else {
3964         context()->Plug(rax);
3965       }
3966       break;
3967     }
3968     case KEYED_PROPERTY: {
3969       __ pop(rcx);
3970       __ pop(rdx);
3971       Handle<Code> ic = is_classic_mode()
3972           ? isolate()->builtins()->KeyedStoreIC_Initialize()
3973           : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
3974       __ call(ic, RelocInfo::CODE_TARGET, expr->id());
3975       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3976       if (expr->is_postfix()) {
3977         if (!context()->IsEffect()) {
3978           context()->PlugTOS();
3979         }
3980       } else {
3981         context()->Plug(rax);
3982       }
3983       break;
3984     }
3985   }
3986 }
3987 
3988 
3989 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
3990   VariableProxy* proxy = expr->AsVariableProxy();
3991   ASSERT(!context()->IsEffect());
3992   ASSERT(!context()->IsTest());
3993 
3994   if (proxy != NULL && proxy->var()->IsUnallocated()) {
3995     Comment cmnt(masm_, "Global variable");
3996     __ Move(rcx, proxy->name());
3997     __ movq(rax, GlobalObjectOperand());
3998     Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
3999     // Use a regular load, not a contextual load, to avoid a reference
4000     // error.
4001     __ call(ic);
4002     PrepareForBailout(expr, TOS_REG);
4003     context()->Plug(rax);
4004   } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4005     Label done, slow;
4006 
4007     // Generate code for loading from variables potentially shadowed
4008     // by eval-introduced variables.
4009     EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4010 
4011     __ bind(&slow);
4012     __ push(rsi);
4013     __ Push(proxy->name());
4014     __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4015     PrepareForBailout(expr, TOS_REG);
4016     __ bind(&done);
4017 
4018     context()->Plug(rax);
4019   } else {
4020     // This expression cannot throw a reference error at the top level.
4021     VisitInDuplicateContext(expr);
4022   }
4023 }
4024 
4025 
4026 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4027                                                  Expression* sub_expr,
4028                                                  Handle<String> check) {
4029   Label materialize_true, materialize_false;
4030   Label* if_true = NULL;
4031   Label* if_false = NULL;
4032   Label* fall_through = NULL;
4033   context()->PrepareTest(&materialize_true, &materialize_false,
4034                          &if_true, &if_false, &fall_through);
4035 
4036   { AccumulatorValueContext context(this);
4037     VisitForTypeofValue(sub_expr);
4038   }
4039   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
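  // Each branch below specializes the check for one possible typeof string;
  // for example, for `typeof x == "number"` both smis and heap numbers must
  // answer true, which is exactly what the first branch tests.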
4040 
4041   if (check->Equals(isolate()->heap()->number_symbol())) {
4042     __ JumpIfSmi(rax, if_true);
4043     __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
4044     __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
4045     Split(equal, if_true, if_false, fall_through);
4046   } else if (check->Equals(isolate()->heap()->string_symbol())) {
4047     __ JumpIfSmi(rax, if_false);
4048     // Check for undetectable objects => false.
4049     __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
4050     __ j(above_equal, if_false);
4051     __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4052              Immediate(1 << Map::kIsUndetectable));
4053     Split(zero, if_true, if_false, fall_through);
4054   } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
4055     __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4056     __ j(equal, if_true);
4057     __ CompareRoot(rax, Heap::kFalseValueRootIndex);
4058     Split(equal, if_true, if_false, fall_through);
4059   } else if (FLAG_harmony_typeof &&
4060              check->Equals(isolate()->heap()->null_symbol())) {
4061     __ CompareRoot(rax, Heap::kNullValueRootIndex);
4062     Split(equal, if_true, if_false, fall_through);
4063   } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
4064     __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
4065     __ j(equal, if_true);
4066     __ JumpIfSmi(rax, if_false);
4067     // Check for undetectable objects => true.
4068     __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
4069     __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4070              Immediate(1 << Map::kIsUndetectable));
4071     Split(not_zero, if_true, if_false, fall_through);
4072   } else if (check->Equals(isolate()->heap()->function_symbol())) {
4073     __ JumpIfSmi(rax, if_false);
4074     STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4075     __ CmpObjectType(rax, JS_FUNCTION_TYPE, rdx);
4076     __ j(equal, if_true);
4077     __ CmpInstanceType(rdx, JS_FUNCTION_PROXY_TYPE);
4078     Split(equal, if_true, if_false, fall_through);
4079   } else if (check->Equals(isolate()->heap()->object_symbol())) {
4080     __ JumpIfSmi(rax, if_false);
4081     if (!FLAG_harmony_typeof) {
4082       __ CompareRoot(rax, Heap::kNullValueRootIndex);
4083       __ j(equal, if_true);
4084     }
4085     __ CmpObjectType(rax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, rdx);
4086     __ j(below, if_false);
4087     __ CmpInstanceType(rdx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4088     __ j(above, if_false);
4089     // Check for undetectable objects => false.
4090     __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4091              Immediate(1 << Map::kIsUndetectable));
4092     Split(zero, if_true, if_false, fall_through);
4093   } else {
4094     if (if_false != fall_through) __ jmp(if_false);
4095   }
4096   context()->Plug(if_true, if_false);
4097 }
4098 
4099 
4100 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4101   Comment cmnt(masm_, "[ CompareOperation");
4102   SetSourcePosition(expr->position());
4103 
4104   // First we try a fast inlined version of the compare when one of
4105   // the operands is a literal.
4106   if (TryLiteralCompare(expr)) return;
4107 
4108   // Always perform the comparison for its control flow.  Pack the result
4109   // into the expression's context after the comparison is performed.
4110   Label materialize_true, materialize_false;
4111   Label* if_true = NULL;
4112   Label* if_false = NULL;
4113   Label* fall_through = NULL;
4114   context()->PrepareTest(&materialize_true, &materialize_false,
4115                          &if_true, &if_false, &fall_through);
4116 
4117   Token::Value op = expr->op();
4118   VisitForStackValue(expr->left());
4119   switch (op) {
4120     case Token::IN:
4121       VisitForStackValue(expr->right());
4122       __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4123       PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4124       __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4125       Split(equal, if_true, if_false, fall_through);
4126       break;
4127 
4128     case Token::INSTANCEOF: {
4129       VisitForStackValue(expr->right());
4130       InstanceofStub stub(InstanceofStub::kNoFlags);
4131       __ CallStub(&stub);
4132       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4133       __ testq(rax, rax);
4134       // The stub returns 0 for true.
4135       Split(zero, if_true, if_false, fall_through);
4136       break;
4137     }
4138 
4139     default: {
4140       VisitForAccumulatorValue(expr->right());
4141       Condition cc = no_condition;
4142       switch (op) {
4143         case Token::EQ_STRICT:
4144         case Token::EQ:
4145           cc = equal;
4146           break;
4147         case Token::LT:
4148           cc = less;
4149           break;
4150         case Token::GT:
4151           cc = greater;
4152           break;
4153         case Token::LTE:
4154           cc = less_equal;
4155           break;
4156         case Token::GTE:
4157           cc = greater_equal;
4158           break;
4159         case Token::IN:
4160         case Token::INSTANCEOF:
4161         default:
4162           UNREACHABLE();
4163       }
4164       __ pop(rdx);
4165 
4166       bool inline_smi_code = ShouldInlineSmiCase(op);
4167       JumpPatchSite patch_site(masm_);
4168       if (inline_smi_code) {
4169         Label slow_case;
4170         __ movq(rcx, rdx);
4171         __ or_(rcx, rax);
4172         patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
4173         __ cmpq(rdx, rax);
4174         Split(cc, if_true, if_false, NULL);
4175         __ bind(&slow_case);
4176       }
4177 
4178       // Record position and call the compare IC.
4179       SetSourcePosition(expr->position());
4180       Handle<Code> ic = CompareIC::GetUninitialized(op);
4181       __ call(ic, RelocInfo::CODE_TARGET, expr->id());
4182       patch_site.EmitPatchInfo();
4183 
4184       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4185       __ testq(rax, rax);
4186       Split(cc, if_true, if_false, fall_through);
4187     }
4188   }
4189 
4190   // Convert the result of the comparison into one expected for this
4191   // expression's context.
4192   context()->Plug(if_true, if_false);
4193 }
4194 
4195 
4196 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4197                                               Expression* sub_expr,
4198                                               NilValue nil) {
4199   Label materialize_true, materialize_false;
4200   Label* if_true = NULL;
4201   Label* if_false = NULL;
4202   Label* fall_through = NULL;
4203   context()->PrepareTest(&materialize_true, &materialize_false,
4204                          &if_true, &if_false, &fall_through);
4205 
4206   VisitForAccumulatorValue(sub_expr);
4207   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4208   Heap::RootListIndex nil_value = nil == kNullValue ?
4209       Heap::kNullValueRootIndex :
4210       Heap::kUndefinedValueRootIndex;
4211   __ CompareRoot(rax, nil_value);
4212   if (expr->op() == Token::EQ_STRICT) {
4213     Split(equal, if_true, if_false, fall_through);
4214   } else {
4215     Heap::RootListIndex other_nil_value = nil == kNullValue ?
4216         Heap::kUndefinedValueRootIndex :
4217         Heap::kNullValueRootIndex;
4218     __ j(equal, if_true);
4219     __ CompareRoot(rax, other_nil_value);
4220     __ j(equal, if_true);
4221     __ JumpIfSmi(rax, if_false);
4222     // It can be an undetectable object.
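    // Undetectable objects (document.all is the classic embedder example)
    // compare equal to null and undefined under ==, so also check the
    // undetectable bit in the object's map.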
    __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
    __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
             Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(rax);
}


Register FullCodeGenerator::result_register() {
  return rax;
}


Register FullCodeGenerator::context_register() {
  return rsi;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT(IsAligned(frame_offset, kPointerSize));
  __ movq(Operand(rbp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ movq(dst, ContextOperand(rsi, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope()) {
    // Contexts nested in the global context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.  Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ Push(Smi::FromInt(0));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    __ push(ContextOperand(rsi, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  }
}


// ----------------------------------------------------------------------------
// Non-local control flow support.


void FullCodeGenerator::EnterFinallyBlock() {
  ASSERT(!result_register().is(rdx));
  ASSERT(!result_register().is(rcx));
  // Cook return address on top of stack (smi encoded Code* delta)
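  // Storing the return address as a smi-encoded offset from the code
  // object, rather than as a raw pointer, keeps the stack slot holding a
  // smi; the value remains valid even if the GC moves the code object
  // while the finally block runs.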
  __ pop(rdx);
  __ Move(rcx, masm_->CodeObject());
  __ subq(rdx, rcx);
  __ Integer32ToSmi(rdx, rdx);
  __ push(rdx);
  // Store result register while executing finally block.
  __ push(result_register());
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(rdx));
  ASSERT(!result_register().is(rcx));
  __ pop(result_register());
  // Uncook return address.
  __ pop(rdx);
  __ SmiToInteger32(rdx, rdx);
  __ Move(rcx, masm_->CodeObject());
  __ addq(rdx, rcx);
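  // The original return address is back in rdx; jump to it directly, since
  // it has already been popped off the stack above.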
  __ jmp(rdx);
}


#undef __

#define __ ACCESS_MASM(masm())

FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ movq(rsi, Operand(rsp, StackHandlerConstants::kContextOffset));
    __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
  }
  __ PopTryHandler();
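  // The code at finally_entry_ presumably begins with EnterFinallyBlock()
  // and ends with ExitFinallyBlock() (see above), which together preserve
  // the result register and the cooked return address across this call.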
  __ call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64