• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #if V8_TARGET_ARCH_X87
6 
7 #include "src/ast/scopes.h"
8 #include "src/code-factory.h"
9 #include "src/code-stubs.h"
10 #include "src/codegen.h"
11 #include "src/debug/debug.h"
12 #include "src/full-codegen/full-codegen.h"
13 #include "src/ic/ic.h"
14 #include "src/parsing/parser.h"
15 #include "src/x87/frames-x87.h"
16 
17 namespace v8 {
18 namespace internal {
19 
20 #define __ ACCESS_MASM(masm_)
21 
22 
// Helper that records a single patchable smi-check jump site.  The IC system
// later rewrites the emitted jcc (jc/jnc) into jz/jnz in place, so the exact
// byte sequence emitted here is part of the patching contract.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    // A bound patch site must have had its patch info emitted, and vice versa.
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // Emits a smi test on |reg| and a patchable jump taken when |reg| is
  // NOT a smi (before patching).
  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  // Emits a smi test on |reg| and a patchable jump taken when |reg| IS
  // a smi (before patching).
  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

  // Emits the delta from here back to the patch site as a one-byte payload
  // inside a "test eax, imm8" so the patcher can locate the jump; emits a
  // single nop when no patch site was bound.
  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      // The delta must fit in the imm8 of the test instruction.
      DCHECK(is_uint8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    // Each JumpPatchSite records at most one jump.
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;  // Tracks that EmitPatchInfo() ran for a bound site.
#endif
};
77 
78 
79 // Generate code for a JS function.  On entry to the function the receiver
80 // and arguments have been pushed on the stack left to right, with the
81 // return address on top of them.  The actual argument count matches the
82 // formal parameter count expected by the function.
83 //
84 // The live registers are:
85 //   o edi: the JS function object being called (i.e. ourselves)
86 //   o edx: the new target value
87 //   o esi: our context
88 //   o ebp: our caller's frame pointer
89 //   o esp: stack pointer (pointing to return address)
90 //
91 // The function builds a JS frame.  Please see JavaScriptFrameConstants in
92 // frames-x87.h for its layout.
// Emits the complete function prologue and body: frame setup, local-slot
// initialization, optional context/rest/arguments allocation, declarations,
// stack check, the statement body, and a fallback 'return undefined'.
// Register assumptions on entry are documented in the comment above.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  // Fresh interrupt budget cell for this code object's profiling counter.
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  // Break into the debugger when --stop-at names this function.
  if (strlen(FLAG_stop_at) > 0 &&
      literal()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Debug-mode sanity check: a function that expects a JSReceiver receiver
  // must actually get one on the stack.
  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));
    __ AssertNotSmi(ecx);
    __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx);
    __ Assert(above_equal, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(literal()->kind()) || locals_count == 0);
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      if (locals_count >= 128) {
        // Many locals: check against the real stack limit before pushing,
        // and throw a stack overflow if the frame would not fit.
        Label ok;
        __ mov(ecx, esp);
        __ sub(ecx, Immediate(locals_count * kPointerSize));
        ExternalReference stack_limit =
            ExternalReference::address_of_real_stack_limit(isolate());
        __ cmp(ecx, Operand::StaticVariable(stack_limit));
        __ j(above_equal, &ok, Label::kNear);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      // Pushes are emitted in batches of kMaxPushes inside a counted loop to
      // bound the unrolled code size for large frames.
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(ecx, loop_iterations);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(eax);
        }
        __ dec(ecx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i  = 0; i < remaining; i++) {
        __ push(eax);
      }
    }
  }

  // Tracks whether edi still holds the function object below.
  bool function_in_register = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    // Argument to NewContext is the function, which is still in edi.
    if (info->scope()->is_script_scope()) {
      __ push(edi);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(edx);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(edi);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(edx);  // Restore new target.
      }
    }
    function_in_register = false;
    // Context is returned in eax.  It replaces the context passed to us.
    // It's saved in the stack and kept live in esi.
    __ mov(esi, eax);
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    // Index -1 denotes the receiver ('this') when it has a declaration.
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(esi, context_offset, eax, ebx,
                                    kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Register holding this function and new target are both trashed in case we
  // bailout here. But since that can happen only when new target is not used
  // and we allocate a context, the value of |function_in_register| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers register again, keep it marked as such.
    }
    SetVar(this_function_var, edi, ebx, ecx);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, edx, ebx, ecx);
  }

  // Possibly allocate RestParameters
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");

    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;

    // Set up the stub's registers as described by RestParamAccessDescriptor.
    __ mov(RestParamAccessDescriptor::parameter_count(),
           Immediate(Smi::FromInt(num_parameters)));
    __ lea(RestParamAccessDescriptor::parameter_pointer(),
           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
    __ mov(RestParamAccessDescriptor::rest_parameter_index(),
           Immediate(Smi::FromInt(rest_index)));
    function_in_register = false;

    RestParamAccessStub stub(isolate());
    __ CallStub(&stub);
    SetVar(rest_param, eax, ebx, edx);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    DCHECK(edi.is(ArgumentsAccessNewDescriptor::function()));
    if (!function_in_register) {
      __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ mov(ArgumentsAccessNewDescriptor::parameter_count(),
           Immediate(Smi::FromInt(num_parameters)));
    __ lea(ArgumentsAccessNewDescriptor::parameter_pointer(),
           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));

    // Arguments to ArgumentsAccessStub:
    //   function, parameter pointer, parameter count.
    // The stub will rewrite parameter pointer and parameter count if the
    // previous stack frame was an arguments adapter frame.
    bool is_unmapped = is_strict(language_mode()) || !has_simple_parameters();
    ArgumentsAccessStub::Type type = ArgumentsAccessStub::ComputeType(
        is_unmapped, literal()->has_duplicate_parameters());
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    VisitForEffect(scope()->GetIllegalRedeclaration());

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope()->declarations());
    }

    // Assert that the declarations do not use ICs. Otherwise the debugger
    // won't be able to redirect a PC at an IC to the correct IC in newly
    // recompiled code.
    DCHECK_EQ(0, ic_total_count_);

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      ExternalReference stack_limit
          = ExternalReference::address_of_stack_limit(isolate());
      __ cmp(esp, Operand::StaticVariable(stack_limit));
      __ j(above_equal, &ok, Label::kNear);
      __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(literal()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}
351 
352 
// Resets the accumulator (eax) to Smi zero.
void FullCodeGenerator::ClearAccumulator() {
  __ Move(eax, Immediate(Smi::FromInt(0)));
}
356 
357 
// Emits code that subtracts |delta| (as a Smi) from the profiling counter
// cell's value.  Clobbers ebx and leaves the flags set by the subtraction,
// which callers branch on.
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
}
363 
364 
EmitProfilingCounterReset()365 void FullCodeGenerator::EmitProfilingCounterReset() {
366   int reset_value = FLAG_interrupt_budget;
367   __ mov(ebx, Immediate(profiling_counter_));
368   __ mov(FieldOperand(ebx, Cell::kValueOffset),
369          Immediate(Smi::FromInt(reset_value)));
370 }
371 
372 
// Emits the per-back-edge interrupt check: decrements the profiling counter
// by a weight proportional to the loop body size, and calls the interrupt
// builtin when the budget is exhausted.  Also records the bailout/OSR id
// mappings needed by the optimizing compiler.
void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  // Weight the decrement by the code size of the loop body so bigger loops
  // consume the budget faster.
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  // Flags are still set from the subtraction in the decrement above.
  __ j(positive, &ok, Label::kNear);
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
400 
401 
// Emits (or jumps to) the single shared return sequence: optional trace
// call, profiling-counter decrement with interrupt check, frame teardown,
// and a return that pops the arguments.  The return value is in eax.
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    // The sequence was already emitted; jump to it.
    __ jmp(&return_label_);
  } else {
    // Common return label
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ j(positive, &ok, Label::kNear);
    // Preserve the return value around the interrupt call.
    __ push(eax);
    __ call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(eax);
    EmitProfilingCounterReset();
    __ bind(&ok);

    SetReturnPosition(literal());
    __ leave();

    // Pop receiver plus parameters off the caller's stack on return.
    int arg_count = info_->scope()->num_parameters() + 1;
    int arguments_bytes = arg_count * kPointerSize;
    __ Ret(arguments_bytes, ecx);
  }
}
440 
441 
Plug(Variable * var) const442 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
443   DCHECK(var->IsStackAllocated() || var->IsContextSlot());
444   MemOperand operand = codegen()->VarOperand(var, result_register());
445   // Memory operands can be pushed directly.
446   __ push(operand);
447 }
448 
449 
// Root-list constants are never plugged on ia32/x87; materialized elsewhere.
void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}
453 
454 
// Root-list constants are never plugged on ia32/x87; materialized elsewhere.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}
459 
460 
// Root-list constants are never plugged on ia32/x87; materialized elsewhere.
void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}
465 
466 
// Root-list constants are never plugged on ia32/x87; materialized elsewhere.
void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on X87.
}
470 
471 
// A literal in effect context has no observable result; emit nothing.
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}
474 
475 
Plug(Handle<Object> lit) const476 void FullCodeGenerator::AccumulatorValueContext::Plug(
477     Handle<Object> lit) const {
478   if (lit->IsSmi()) {
479     __ SafeMove(result_register(), Immediate(lit));
480   } else {
481     __ Move(result_register(), Immediate(lit));
482   }
483 }
484 
485 
Plug(Handle<Object> lit) const486 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
487   if (lit->IsSmi()) {
488     __ SafePush(Immediate(lit));
489   } else {
490     __ push(Immediate(lit));
491   }
492 }
493 
494 
// Plugs a literal into a test context: the truthiness of most literal kinds
// is known statically, so a direct jump is emitted instead of a runtime
// test.  Only literals whose kind is not handled here fall back to DoTest.
void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    // Statically falsy.
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    // Statically truthy.
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    // Strings are falsy only when empty.
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    // Smis are falsy only when zero.
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}
523 
524 
// Drops |count| stack values; in effect context the value in |reg| is
// simply discarded.
void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}
530 
531 
// Drops |count| stack values and moves |reg| into the accumulator.
void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}
539 
540 
// Replaces the top |count| stack values with the value in |reg|: drops all
// but one slot, then overwrites the remaining top-of-stack slot in place.
void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ mov(Operand(esp, 0), reg);
}
547 
548 
// Drops |count| stack values, moves |reg| to the accumulator, and emits the
// boolean test branching to this context's true/false labels.
void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
558 
559 
// In effect context both outcomes are equivalent; bind the single shared
// label and materialize nothing.
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}
565 
566 
// Materializes a boolean into the accumulator: control arriving at
// |materialize_true| loads true, at |materialize_false| loads false.
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}
578 
579 
// Materializes a boolean onto the expression stack: control arriving at
// |materialize_true| pushes true, at |materialize_false| pushes false.
void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));
  __ bind(&done);
}
591 
592 
// In test context no materialization is needed: the labels must already be
// this context's branch targets, so there is nothing to emit.
void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}
598 
599 
Plug(bool flag) const600 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
601   Handle<Object> value = flag
602       ? isolate()->factory()->true_value()
603       : isolate()->factory()->false_value();
604   __ mov(result_register(), value);
605 }
606 
607 
Plug(bool flag) const608 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
609   Handle<Object> value = flag
610       ? isolate()->factory()->true_value()
611       : isolate()->factory()->false_value();
612   __ push(Immediate(value));
613 }
614 
615 
Plug(bool flag) const616 void FullCodeGenerator::TestContext::Plug(bool flag) const {
617   codegen()->PrepareForBailoutBeforeSplit(condition(),
618                                           true,
619                                           true_label_,
620                                           false_label_);
621   if (flag) {
622     if (true_label_ != fall_through_) __ jmp(true_label_);
623   } else {
624     if (false_label_ != fall_through_) __ jmp(false_label_);
625   }
626 }
627 
628 
DoTest(Expression * condition,Label * if_true,Label * if_false,Label * fall_through)629 void FullCodeGenerator::DoTest(Expression* condition,
630                                Label* if_true,
631                                Label* if_false,
632                                Label* fall_through) {
633   Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
634   CallIC(ic, condition->test_id());
635   __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
636   Split(equal, if_true, if_false, fall_through);
637 }
638 
639 
Split(Condition cc,Label * if_true,Label * if_false,Label * fall_through)640 void FullCodeGenerator::Split(Condition cc,
641                               Label* if_true,
642                               Label* if_false,
643                               Label* fall_through) {
644   if (if_false == fall_through) {
645     __ j(cc, if_true);
646   } else if (if_true == fall_through) {
647     __ j(NegateCondition(cc), if_false);
648   } else {
649     __ j(cc, if_true);
650     __ jmp(if_false);
651   }
652 }
653 
654 
StackOperand(Variable * var)655 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
656   DCHECK(var->IsStackAllocated());
657   // Offset is negative because higher indexes are at lower addresses.
658   int offset = -var->index() * kPointerSize;
659   // Adjust by a (parameter or local) base offset.
660   if (var->IsParameter()) {
661     offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
662   } else {
663     offset += JavaScriptFrameConstants::kLocal0Offset;
664   }
665   return Operand(ebp, offset);
666 }
667 
668 
VarOperand(Variable * var,Register scratch)669 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
670   DCHECK(var->IsContextSlot() || var->IsStackAllocated());
671   if (var->IsContextSlot()) {
672     int context_chain_length = scope()->ContextChainLength(var->scope());
673     __ LoadContext(scratch, context_chain_length);
674     return ContextOperand(scratch, var->index());
675   } else {
676     return StackOperand(var);
677   }
678 }
679 
680 
GetVar(Register dest,Variable * var)681 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
682   DCHECK(var->IsContextSlot() || var->IsStackAllocated());
683   MemOperand location = VarOperand(var, dest);
684   __ mov(dest, location);
685 }
686 
687 
// Stores |src| into |var|, emitting the GC write barrier when the slot
// lives in a heap-allocated context.  Both scratch registers may be
// clobbered; all three registers must be distinct, and none may be esi
// for context slots (esi holds the current context).
void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}
706 
707 
// Prepares a bailout point just before a test split.  When normalizing,
// the bailout path re-tests the accumulator against the true oddball so a
// deopt landing here produces the same branch decision; the straight-line
// path skips that code.
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}
726 
727 
// Debug-only check that a declaration is being emitted in its own context:
// esi must not currently hold a with or catch context.  Clobbers ebx.
void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, kDeclarationInWithContext);
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}
740 
741 
// Emits code (or records globals) for a variable declaration, dispatching
// on where the variable was allocated.  Hole-initialization applies to
// let/const bindings so the TDZ can be checked at use sites.
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      // Globals are declared in one batch later; record name and initial
      // value (the hole for bindings that need initialization).
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(), zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      // Dynamic lookup slot: declare it through the runtime.
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ push(Immediate(variable->name()));
      // VariableDeclaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ push(Immediate(isolate()->factory()->the_hole_value()));
      } else {
        __ push(Immediate(Smi::FromInt(0)));  // Indicates no initial value.
      }
      __ push(
          Immediate(Smi::FromInt(variable->DeclarationPropertyAttributes())));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}
801 
// Emits code for a function declaration. Unlike plain variable
// declarations, the binding is initialized eagerly with the closure
// compiled from declaration->fun(); the emission strategy depends on
// where the variable lives.
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      // Globals are declared in one batch later (see DeclareGlobals):
      // record the name/SharedFunctionInfo pair in globals_.
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      // Materialize the closure into the accumulator, then store it
      // into the variable's stack slot.
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ mov(ContextOperand(esi, variable->index()), result_register());
      // We know that we have written a function, which is not a smi.
      // Hence OMIT_SMI_CHECK is safe, but a write barrier is needed
      // because the closure may be in new space.
      __ RecordWriteContextSlot(esi, Context::SlotOffset(variable->index()),
                                result_register(), ecx, kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      // Slow path: push name, closure, and property attributes, then
      // let the runtime create the lookup slot.
      __ push(Immediate(variable->name()));
      VisitForStackValue(declaration->fun());
      __ push(
          Immediate(Smi::FromInt(variable->DeclarationPropertyAttributes())));
      __ CallRuntime(Runtime::kDeclareLookupSlot);
      break;
    }
  }
}
850 
851 
// Declares all recorded global variables/functions in a single runtime
// call. 'pairs' holds the interleaved name/value pairs accumulated in
// globals_ by the Visit*Declaration methods.
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}
859 
860 
// Declares all modules described by 'descriptions' via a single
// runtime call.
void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}
867 
868 
// Emits code for a switch statement: first a chain of strict-equality
// comparisons against each non-default clause label (with an inline smi
// fast path feeding a patchable CompareIC), then the clause bodies in
// source order. The switch value stays on the stack until a clause
// matches, at which point it is dropped.
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    // Bind the fall-through target of the previous test, then recycle
    // the label for this clause's own fall-through.
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      // Fast path: if both operands are smis, compare them directly
      // without calling the CompareIC.
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    SetExpressionPosition(clause);
    Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
                                             strength(language_mode())).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    // The bailout re-enters here with the IC result in eax (TOS_REG);
    // normal execution skips this boolean check.
    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, TOS_REG);
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    // CompareIC result: zero means equal.
    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
960 
961 
// Emits code for a for-in statement. The loop keeps five values on the
// stack (from esp+4*kPointerSize down to esp+0):
//   [4] the enumerable object
//   [3] the expected map, or Smi(1) in the slow (fixed-array) case
//   [2] the enum cache / fixed array of property names
//   [1] the number of entries (as smi)
//   [0] the current index (as smi)
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
  __ push(eax);

  // Check for proxies.  Proxies always take the runtime path.
  Label call_runtime, use_cache, fixed_array;
  __ CmpObjectType(eax, JS_PROXY_TYPE, ecx);
  __ j(equal, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kGetPropertyNamesFast);
  PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
  // The runtime returns either a map (enum cache usable) or a fixed
  // array of names; distinguish by checking for the meta map.
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);


  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::FromInt(0)));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
  __ jmp(&loop);

  // Nothing to enumerate: drop the pushed enumerable and exit.
  __ bind(&no_descriptors);
  __ add(esp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register eax. Iterate through that.
  __ bind(&fixed_array);

  // No need for a write barrier, we are storing a Smi in the feedback vector.
  __ EmitLoadTypeFeedbackVector(ebx);
  int vector_index = SmiFromSlot(slot)->value();
  __ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(vector_index)),
         Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ push(Immediate(Smi::FromInt(1)));  // Smi(1) indicates slow check.
  __ push(eax);  // Array
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, loop_statement.continue_label());
  __ mov(ebx, eax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ add(esp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}
1122 
1123 
// Allocates a closure for the given SharedFunctionInfo and leaves the
// result in eax (plugged into the current expression context). Chooses
// between the FastNewClosureStub and a runtime call.
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
    __ mov(ebx, Immediate(info));
    __ CallStub(&stub);
  } else {
    // Slow path: allocate via the runtime, tenured if requested.
    __ push(Immediate(info));
    __ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
                             : Runtime::kNewClosure);
  }
  context()->Plug(eax);
}
1147 
1148 
// Stores the [[HomeObject]] on the value at the given stack offset,
// using the object currently on top of the stack as the home object.
// Expects the receiver at esp+0 and the target value at
// esp + offset * kPointerSize.
void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(StoreDescriptor::NameRegister(),
         Immediate(isolate()->factory()->home_object_symbol()));
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}
1159 
1160 
// Same as EmitSetHomeObject, but the home object (receiver) is taken
// from the accumulator (eax) instead of the top of the stack.
void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ mov(StoreDescriptor::ReceiverRegister(), eax);
  __ mov(StoreDescriptor::NameRegister(),
         Immediate(isolate()->factory()->home_object_symbol()));
  __ mov(StoreDescriptor::ValueRegister(), Operand(esp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}
1172 
1173 
// Loads a global variable after verifying that no context-extension
// object (introduced by sloppy eval) along the context chain could
// shadow it. Jumps to 'slow' if any extension slot is not "the hole".
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register context = esi;
  Register temp = edx;

  // First walk the statically-known scope chain, checking extensions
  // only for scopes that call sloppy eval.
  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                         Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.  If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Inside eval the rest of the chain is unknown statically, so emit
    // a runtime loop that checks every remaining context's extension.
    // Loop up the context chain.  There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at native context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->native_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is "the hole".
    __ JumpIfNotRoot(ContextOperand(temp, Context::EXTENSION_INDEX),
                     Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}
1225 
1226 
// Returns a memory operand for the context slot of 'var', emitting
// checks along the way that no sloppy-eval context extension shadows
// it. Jumps to 'slow' if any extension slot is not "the hole".
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;

  // Walk from the current scope out to the scope that owns the slot.
  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                         Heap::kTheHoleValueRootIndex, slow);
      }
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
  }
  // Check that last extension is "the hole".
  __ JumpIfNotRoot(ContextOperand(context, Context::EXTENSION_INDEX),
                   Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());
}
1254 
1255 
// Emits the fast path for loading a dynamically-bound variable
// (DYNAMIC_GLOBAL or DYNAMIC_LOCAL). On success the value is in eax and
// control jumps to 'done'; otherwise control falls through or jumps to
// 'slow' for a full runtime lookup.
void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      // Hole check: an uninitialized binding holds "the hole".
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, done);
      if (local->mode() == CONST_LEGACY) {
        // Uninitialized legacy const reads as undefined.
        __ mov(eax, isolate()->factory()->undefined_value());
      } else {  // LET || CONST
        // Uninitialized let/const is a reference error (TDZ).
        __ push(Immediate(var->name()));
        __ CallRuntime(Runtime::kThrowReferenceError);
      }
    }
    __ jmp(done);
  }
}
1285 
1286 
// Loads a global variable through the LoadIC, using the native
// context's global extension object as the receiver. The result is
// left in the IC's result register.
void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
  // Receiver = native_context[EXTENSION_INDEX].
  __ mov(LoadDescriptor::ReceiverRegister(), NativeContextOperand());
  __ mov(LoadDescriptor::ReceiverRegister(),
         ContextOperand(LoadDescriptor::ReceiverRegister(),
                        Context::EXTENSION_INDEX));
  __ mov(LoadDescriptor::NameRegister(), var->name());
  __ mov(LoadDescriptor::SlotRegister(),
         Immediate(SmiFromSlot(proxy->VariableFeedbackSlot())));
  CallLoadIC(typeof_mode);
}
1301 
1302 
// Emits code to load the value of a variable reference and plug it
// into the current expression context, dispatching on the variable's
// storage location.
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(eax);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");

      if (NeedsHoleCheckForLoad(proxy)) {
        // Let and const need a read barrier: reading "the hole" means
        // the binding is still uninitialized.
        Label done;
        GetVar(eax, var);
        __ cmp(eax, isolate()->factory()->the_hole_value());
        __ j(not_equal, &done, Label::kNear);
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kThrowReferenceError);
        } else {
          // Uninitialized legacy const bindings are unholed.
          DCHECK(var->mode() == CONST_LEGACY);
          __ mov(eax, isolate()->factory()->undefined_value());
        }
        __ bind(&done);
        context()->Plug(eax);
        break;
      }
      // No hole check needed: let the context materialize the slot.
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      // Inside typeof, an unresolvable reference must yield undefined
      // rather than throw, hence the NoReferenceError variant.
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotNoReferenceError;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(eax);
      break;
    }
  }
}
1371 
1372 
// Emits code for a regexp literal: clones the boilerplate regexp via
// FastCloneRegExpStub and plugs the result (eax) into the context.
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  // Stub inputs: closure (edi), literal index (eax), pattern (ecx),
  // flags (edx).
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ Move(eax, Immediate(Smi::FromInt(expr->literal_index())));
  __ Move(ecx, Immediate(expr->pattern()));
  __ Move(edx, Immediate(Smi::FromInt(expr->flags())));
  FastCloneRegExpStub stub(isolate());
  __ CallStub(&stub);
  context()->Plug(eax);
}
1383 
1384 
// Pushes an accessor function (getter or setter) onto the stack for a
// later kDefineAccessorPropertyUnchecked call, or null if the property
// (or its value) is absent. Also wires up the home object if needed.
void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
  Expression* expression = (property == NULL) ? NULL : property->value();
  if (expression == NULL) {
    // Missing half of a getter/setter pair: push null as placeholder.
    __ push(Immediate(isolate()->factory()->null_value()));
  } else {
    VisitForStackValue(expression);
    if (NeedsHomeObject(expression)) {
      DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
             property->kind() == ObjectLiteral::Property::SETTER);
      // Stack at this point: receiver, key, getter, [setter]; the home
      // object (receiver) sits 2 or 3 slots down depending on which
      // accessor this is.
      int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
      EmitSetHomeObject(expression, offset, property->GetSlot());
    }
  }
}
1399 
1400 
VisitObjectLiteral(ObjectLiteral * expr)1401 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1402   Comment cmnt(masm_, "[ ObjectLiteral");
1403 
1404   Handle<FixedArray> constant_properties = expr->constant_properties();
1405   int flags = expr->ComputeFlags();
1406   // If any of the keys would store to the elements array, then we shouldn't
1407   // allow it.
1408   if (MustCreateObjectLiteralWithRuntime(expr)) {
1409     __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1410     __ push(Immediate(Smi::FromInt(expr->literal_index())));
1411     __ push(Immediate(constant_properties));
1412     __ push(Immediate(Smi::FromInt(flags)));
1413     __ CallRuntime(Runtime::kCreateObjectLiteral);
1414   } else {
1415     __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1416     __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1417     __ mov(ecx, Immediate(constant_properties));
1418     __ mov(edx, Immediate(Smi::FromInt(flags)));
1419     FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1420     __ CallStub(&stub);
1421   }
1422   PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1423 
1424   // If result_saved is true the result is on top of the stack.  If
1425   // result_saved is false the result is in eax.
1426   bool result_saved = false;
1427 
1428   AccessorTable accessor_table(zone());
1429   int property_index = 0;
1430   for (; property_index < expr->properties()->length(); property_index++) {
1431     ObjectLiteral::Property* property = expr->properties()->at(property_index);
1432     if (property->is_computed_name()) break;
1433     if (property->IsCompileTimeValue()) continue;
1434 
1435     Literal* key = property->key()->AsLiteral();
1436     Expression* value = property->value();
1437     if (!result_saved) {
1438       __ push(eax);  // Save result on the stack
1439       result_saved = true;
1440     }
1441     switch (property->kind()) {
1442       case ObjectLiteral::Property::CONSTANT:
1443         UNREACHABLE();
1444       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1445         DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
1446         // Fall through.
1447       case ObjectLiteral::Property::COMPUTED:
1448         // It is safe to use [[Put]] here because the boilerplate already
1449         // contains computed properties with an uninitialized value.
1450         if (key->value()->IsInternalizedString()) {
1451           if (property->emit_store()) {
1452             VisitForAccumulatorValue(value);
1453             DCHECK(StoreDescriptor::ValueRegister().is(eax));
1454             __ mov(StoreDescriptor::NameRegister(), Immediate(key->value()));
1455             __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
1456             EmitLoadStoreICSlot(property->GetSlot(0));
1457             CallStoreIC();
1458             PrepareForBailoutForId(key->id(), NO_REGISTERS);
1459             if (NeedsHomeObject(value)) {
1460               EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1461             }
1462           } else {
1463             VisitForEffect(value);
1464           }
1465           break;
1466         }
1467         __ push(Operand(esp, 0));  // Duplicate receiver.
1468         VisitForStackValue(key);
1469         VisitForStackValue(value);
1470         if (property->emit_store()) {
1471           if (NeedsHomeObject(value)) {
1472             EmitSetHomeObject(value, 2, property->GetSlot());
1473           }
1474           __ push(Immediate(Smi::FromInt(SLOPPY)));  // Language mode
1475           __ CallRuntime(Runtime::kSetProperty);
1476         } else {
1477           __ Drop(3);
1478         }
1479         break;
1480       case ObjectLiteral::Property::PROTOTYPE:
1481         __ push(Operand(esp, 0));  // Duplicate receiver.
1482         VisitForStackValue(value);
1483         DCHECK(property->emit_store());
1484         __ CallRuntime(Runtime::kInternalSetPrototype);
1485         PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1486                                NO_REGISTERS);
1487         break;
1488       case ObjectLiteral::Property::GETTER:
1489         if (property->emit_store()) {
1490           accessor_table.lookup(key)->second->getter = property;
1491         }
1492         break;
1493       case ObjectLiteral::Property::SETTER:
1494         if (property->emit_store()) {
1495           accessor_table.lookup(key)->second->setter = property;
1496         }
1497         break;
1498     }
1499   }
1500 
1501   // Emit code to define accessors, using only a single call to the runtime for
1502   // each pair of corresponding getters and setters.
1503   for (AccessorTable::Iterator it = accessor_table.begin();
1504        it != accessor_table.end();
1505        ++it) {
1506     __ push(Operand(esp, 0));  // Duplicate receiver.
1507     VisitForStackValue(it->first);
1508 
1509     EmitAccessor(it->second->getter);
1510     EmitAccessor(it->second->setter);
1511 
1512     __ push(Immediate(Smi::FromInt(NONE)));
1513     __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked);
1514   }
1515 
1516   // Object literals have two parts. The "static" part on the left contains no
1517   // computed property names, and so we can compute its map ahead of time; see
1518   // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1519   // starts with the first computed property name, and continues with all
1520   // properties to its right.  All the code from above initializes the static
1521   // component of the object literal, and arranges for the map of the result to
1522   // reflect the static order in which the keys appear. For the dynamic
1523   // properties, we compile them into a series of "SetOwnProperty" runtime
1524   // calls. This will preserve insertion order.
1525   for (; property_index < expr->properties()->length(); property_index++) {
1526     ObjectLiteral::Property* property = expr->properties()->at(property_index);
1527 
1528     Expression* value = property->value();
1529     if (!result_saved) {
1530       __ push(eax);  // Save result on the stack
1531       result_saved = true;
1532     }
1533 
1534     __ push(Operand(esp, 0));  // Duplicate receiver.
1535 
1536     if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1537       DCHECK(!property->is_computed_name());
1538       VisitForStackValue(value);
1539       DCHECK(property->emit_store());
1540       __ CallRuntime(Runtime::kInternalSetPrototype);
1541       PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1542                              NO_REGISTERS);
1543     } else {
1544       EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1545       VisitForStackValue(value);
1546       if (NeedsHomeObject(value)) {
1547         EmitSetHomeObject(value, 2, property->GetSlot());
1548       }
1549 
1550       switch (property->kind()) {
1551         case ObjectLiteral::Property::CONSTANT:
1552         case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1553         case ObjectLiteral::Property::COMPUTED:
1554           if (property->emit_store()) {
1555             __ push(Immediate(Smi::FromInt(NONE)));
1556             __ CallRuntime(Runtime::kDefineDataPropertyUnchecked);
1557           } else {
1558             __ Drop(3);
1559           }
1560           break;
1561 
1562         case ObjectLiteral::Property::PROTOTYPE:
1563           UNREACHABLE();
1564           break;
1565 
1566         case ObjectLiteral::Property::GETTER:
1567           __ push(Immediate(Smi::FromInt(NONE)));
1568           __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked);
1569           break;
1570 
1571         case ObjectLiteral::Property::SETTER:
1572           __ push(Immediate(Smi::FromInt(NONE)));
1573           __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked);
1574           break;
1575       }
1576     }
1577   }
1578 
1579   if (expr->has_function()) {
1580     DCHECK(result_saved);
1581     __ push(Operand(esp, 0));
1582     __ CallRuntime(Runtime::kToFastProperties);
1583   }
1584 
1585   if (result_saved) {
1586     context()->PlugTOS();
1587   } else {
1588     context()->Plug(eax);
1589   }
1590 }
1591 
1592 
// Materializes an array literal. The boilerplate backing store is created
// either by the %CreateArrayLiteral runtime function or by
// FastCloneShallowArrayStub; then each non-constant element expression is
// evaluated and stored into the clone via a keyed store IC. If the literal
// contains a spread element, all elements from the first spread onward are
// instead appended one at a time (second loop below). The final array ends
// up plugged into the expression context (TOS or eax).
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  if (MustCreateArrayLiteralWithRuntime(expr)) {
    // Slow path: pass closure, literal index, constant elements and flags to
    // the runtime on the stack.
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_elements));
    __ push(Immediate(Smi::FromInt(expr->ComputeFlags())));
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    // Fast path: the stub takes its arguments in eax/ebx/ecx.
    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_elements));
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  int array_index = 0;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    // Spread elements (and everything after them) are handled by the
    // append loop below.
    if (subexpr->IsSpread()) break;

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ push(eax);  // array literal.
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    // Store the element through the keyed store IC: name = index (smi),
    // receiver = the array literal saved on top of the stack.
    __ mov(StoreDescriptor::NameRegister(),
           Immediate(Smi::FromInt(array_index)));
    __ mov(StoreDescriptor::ReceiverRegister(), Operand(esp, 0));
    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
    Handle<Code> ic =
        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
    CallIC(ic);
    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  }

  // In case the array literal contains spread expressions it has two parts.
  // The first part is the "static" array with a known literal index, handled
  // above. The second part starts at the first spread expression (inclusive);
  // those elements get appended to the array one by one, since the number of
  // elements an iterable produces is unknown ahead of time.
  if (array_index < length && result_saved) {
    // The append loop keeps the array in eax rather than on the stack.
    __ Pop(eax);
    result_saved = false;
  }
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);

    __ Push(eax);
    if (subexpr->IsSpread()) {
      // Concatenate the spread iterable's elements onto the array.
      VisitForStackValue(subexpr->AsSpread()->expression());
      __ InvokeBuiltin(Context::CONCAT_ITERABLE_TO_ARRAY_BUILTIN_INDEX,
                       CALL_FUNCTION);
    } else {
      VisitForStackValue(subexpr);
      __ CallRuntime(Runtime::kAppendElement);
    }

    PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
1684 
1685 
// Generates code for an assignment expression. Three phases:
//   1. Evaluate the LHS (receiver/key) as needed by the assignment kind,
//      leaving the operands on the stack and/or in IC registers.
//   2. For compound assignments (e.g. +=), load the current value, evaluate
//      the RHS, and apply the binary operation (inline smi fast path or the
//      generic BinaryOpIC).
//   3. Store the result through the kind-specific store path.
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");
  SetExpressionPosition(expr, INSERT_BREAK);

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_SUPER_PROPERTY:
      // Stack after this case: this, home_object
      // (plus duplicates of both for the compound load).
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      __ push(result_register());
      if (expr->is_compound()) {
        __ push(MemOperand(esp, kPointerSize));
        __ push(result_register());
      }
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_SUPER_PROPERTY:
      // Stack after this case: this, home_object, key
      // (plus duplicates of all three for the compound load).
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(property->key());
      __ Push(result_register());
      if (expr->is_compound()) {
        __ push(MemOperand(esp, 2 * kPointerSize));
        __ push(MemOperand(esp, 2 * kPointerSize));
        __ push(result_register());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        // Receiver and key both on the stack and in the load IC registers.
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, kPointerSize));
        __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    AccumulatorValueContext result_context(this);
    { AccumulatorValueContext left_operand_context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(eax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    // Simple assignment: just evaluate the RHS into the accumulator.
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op(), expr->AssignmentSlot());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(eax);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(result_register());
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(result_register());
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}
1821 
1822 
// Generates code for a 'yield' expression. The yielded value is evaluated
// first and kept on the stack while the generator object is updated.
// Per yield kind:
//  - kSuspend / kInitial: record the continuation position and current
//    context in the generator object, return the value to the caller, and
//    mark the resume point after the suspend.
//  - kFinal: mark the generator closed and return a boxed {value, done:true}
//    iterator result.
//  - kDelegating: emit a full iteration loop over the delegate iterator
//    (yield*), including its own try/catch handler, suspend point, and
//    result.done / result.value property loads.
void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  SetExpressionPosition(expr);

  // Evaluate yielded value first; the initial iterator definition depends on
  // this.  It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  switch (expr->yield_kind()) {
    case Yield::kSuspend:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ push(result_register());
      // Fall through.
    case Yield::kInitial: {
      Label suspend, continuation, post_runtime, resume;

      // The jump/bind dance below places &continuation exactly at the
      // instruction the generator resumes at (recorded into the generator
      // object as a code offset further down).
      __ jmp(&suspend);
      __ bind(&continuation);
      __ RecordGeneratorContinuation();
      __ jmp(&resume);

      __ bind(&suspend);
      VisitForAccumulatorValue(expr->generator_object());
      DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
      // Persist the continuation offset and the current context in the
      // generator object (eax); the context store needs a write barrier.
      __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
             Immediate(Smi::FromInt(continuation.pos())));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
      __ mov(ecx, esi);
      __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
                          kDontSaveFPRegs);
      // If the operand stack is empty (esp at the frame's expressions base),
      // skip the runtime call that would capture it.
      __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
      __ cmp(esp, ebx);
      __ j(equal, &post_runtime);
      __ push(eax);  // generator object
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
      __ mov(context_register(),
             Operand(ebp, StandardFrameConstants::kContextOffset));
      __ bind(&post_runtime);
      __ pop(result_register());
      EmitReturnSequence();

      __ bind(&resume);
      context()->Plug(result_register());
      break;
    }

    case Yield::kFinal: {
      // Mark the generator closed, then return {value, done: true}.
      VisitForAccumulatorValue(expr->generator_object());
      __ mov(FieldOperand(result_register(),
                          JSGeneratorObject::kContinuationOffset),
             Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
      // Pop value from top-of-stack slot, box result into result register.
      EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }

    case Yield::kDelegating: {
      VisitForStackValue(expr->generator_object());

      // Initial stack layout is as follows:
      // [sp + 1 * kPointerSize] iter
      // [sp + 0 * kPointerSize] g
      
      Label l_catch, l_try, l_suspend, l_continuation, l_resume;
      Label l_next, l_call, l_loop;
      Register load_receiver = LoadDescriptor::ReceiverRegister();
      Register load_name = LoadDescriptor::NameRegister();

      // Initial send value is undefined.
      __ mov(eax, isolate()->factory()->undefined_value());
      __ jmp(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ bind(&l_catch);
      __ mov(load_name, isolate()->factory()->throw_string());  // "throw"
      __ push(load_name);                                       // "throw"
      __ push(Operand(esp, 2 * kPointerSize));                  // iter
      __ push(eax);                                             // exception
      __ jmp(&l_call);

      // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ bind(&l_try);
      __ pop(eax);                                       // result
      int handler_index = NewHandlerTableEntry();
      EnterTryBlock(handler_index, &l_catch);
      const int try_block_size = TryCatch::kElementCount * kPointerSize;
      __ push(eax);                                      // result

      // Same continuation-recording dance as in the kInitial case above.
      __ jmp(&l_suspend);
      __ bind(&l_continuation);
      __ RecordGeneratorContinuation();
      __ jmp(&l_resume);

      __ bind(&l_suspend);
      const int generator_object_depth = kPointerSize + try_block_size;
      __ mov(eax, Operand(esp, generator_object_depth));
      __ push(eax);                                      // g
      __ push(Immediate(Smi::FromInt(handler_index)));   // handler-index
      DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
             Immediate(Smi::FromInt(l_continuation.pos())));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
      __ mov(ecx, esi);
      __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
                          kDontSaveFPRegs);
      __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
      __ mov(context_register(),
             Operand(ebp, StandardFrameConstants::kContextOffset));
      __ pop(eax);                                       // result
      EmitReturnSequence();
      __ bind(&l_resume);                                // received in eax
      ExitTryBlock(handler_index);

      // receiver = iter; f = iter.next; arg = received;
      __ bind(&l_next);

      __ mov(load_name, isolate()->factory()->next_string());
      __ push(load_name);                           // "next"
      __ push(Operand(esp, 2 * kPointerSize));      // iter
      __ push(eax);                                 // received

      // result = receiver[f](arg);
      __ bind(&l_call);
      __ mov(load_receiver, Operand(esp, kPointerSize));
      __ mov(LoadDescriptor::SlotRegister(),
             Immediate(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
      Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
      CallIC(ic, TypeFeedbackId::None());
      __ mov(edi, eax);
      __ mov(Operand(esp, 2 * kPointerSize), edi);
      SetCallPosition(expr);
      __ Set(eax, 1);  // Argument count.
      __ Call(
          isolate()->builtins()->Call(ConvertReceiverMode::kNotNullOrUndefined),
          RelocInfo::CODE_TARGET);

      __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
      __ Drop(1);  // The function is still on the stack; drop it.

      // if (!result.done) goto l_try;
      __ bind(&l_loop);
      __ push(eax);                                      // save result
      __ Move(load_receiver, eax);                       // result
      __ mov(load_name,
             isolate()->factory()->done_string());       // "done"
      __ mov(LoadDescriptor::SlotRegister(),
             Immediate(SmiFromSlot(expr->DoneFeedbackSlot())));
      CallLoadIC(NOT_INSIDE_TYPEOF);  // result.done in eax
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
      __ j(not_equal, &l_try);

      // result.value
      __ pop(load_receiver);                              // result
      __ mov(load_name,
             isolate()->factory()->value_string());       // "value"
      __ mov(LoadDescriptor::SlotRegister(),
             Immediate(SmiFromSlot(expr->ValueFeedbackSlot())));
      CallLoadIC(NOT_INSIDE_TYPEOF);                      // result.value in eax
      context()->DropAndPlug(2, eax);                     // drop iter and g
      break;
    }
  }
}
1993 
1994 
// Resumes a suspended generator activation. Rebuilds the generator's
// JavaScript frame (receiver, argument holes, context, function), then
// either jumps straight back to the recorded continuation offset (fast
// path, NEXT mode with an empty operand stack) or calls
// %ResumeJSGeneratorObject to restore the operand stack and handlers.
void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  // The value stays in eax, and is ultimately read by the resumed generator, as
  // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
  // is read to throw the value when the resumed generator is already closed.
  // ebx will hold the generator object until the activation has been resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ pop(ebx);

  // Load suspended function and context.
  __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
  __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));

  // Push receiver.
  __ push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));

  // Push holes for arguments to generator function. edx holds the formal
  // parameter count as a smi; subtracting the smi constant 1 decrements it,
  // and the carry flag signals it went below zero (loop done).
  __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(edx,
         FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ mov(ecx, isolate()->factory()->the_hole_value());
  Label push_argument_holes, push_frame;
  __ bind(&push_argument_holes);
  __ sub(edx, Immediate(Smi::FromInt(1)));
  __ j(carry, &push_frame);
  __ push(ecx);
  __ jmp(&push_argument_holes);

  // Enter a new JavaScript frame, and initialize its slots as they were when
  // the generator was suspended. The call pushes a return address so the
  // resumed activation can return to &done.
  Label resume_frame, done;
  __ bind(&push_frame);
  __ call(&resume_frame);
  __ jmp(&done);
  __ bind(&resume_frame);
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS Function.

  // Load the operand stack size.
  __ mov(edx, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
  __ mov(edx, FieldOperand(edx, FixedArray::kLengthOffset));
  __ SmiUntag(edx);

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly: compute code entry + continuation offset, mark the generator
  // as executing, and jump.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ cmp(edx, Immediate(0));
    __ j(not_zero, &slow_resume);
    __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
    __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(ecx);
    __ add(edx, ecx);
    __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
           Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ jmp(edx);
    __ bind(&slow_resume);
  }

  // Otherwise, we push holes for the operand stack and call the runtime to fix
  // up the stack and the handlers. (ecx still holds the hole value.)
  Label push_operand_holes, call_resume;
  __ bind(&push_operand_holes);
  __ sub(edx, Immediate(1));
  __ j(carry, &call_resume);
  __ push(ecx);
  __ jmp(&push_operand_holes);
  __ bind(&call_resume);
  __ push(ebx);
  __ push(result_register());
  __ Push(Smi::FromInt(resume_mode));
  __ CallRuntime(Runtime::kResumeJSGeneratorObject);
  // Not reached: the runtime call returns elsewhere.
  __ Abort(kGeneratorFailedToResume);

  __ bind(&done);
  context()->Plug(result_register());
}
2077 
2078 
EmitCreateIteratorResult(bool done)2079 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2080   Label allocate, done_allocate;
2081 
2082   __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &allocate, TAG_OBJECT);
2083   __ jmp(&done_allocate, Label::kNear);
2084 
2085   __ bind(&allocate);
2086   __ Push(Smi::FromInt(JSIteratorResult::kSize));
2087   __ CallRuntime(Runtime::kAllocateInNewSpace);
2088 
2089   __ bind(&done_allocate);
2090   __ mov(ebx, NativeContextOperand());
2091   __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
2092   __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
2093   __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
2094          isolate()->factory()->empty_fixed_array());
2095   __ mov(FieldOperand(eax, JSObject::kElementsOffset),
2096          isolate()->factory()->empty_fixed_array());
2097   __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
2098   __ mov(FieldOperand(eax, JSIteratorResult::kDoneOffset),
2099          isolate()->factory()->ToBoolean(done));
2100   STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
2101 }
2102 
2103 
EmitNamedPropertyLoad(Property * prop)2104 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2105   SetExpressionPosition(prop);
2106   Literal* key = prop->key()->AsLiteral();
2107   DCHECK(!key->value()->IsSmi());
2108   DCHECK(!prop->IsSuperAccess());
2109 
2110   __ mov(LoadDescriptor::NameRegister(), Immediate(key->value()));
2111   __ mov(LoadDescriptor::SlotRegister(),
2112          Immediate(SmiFromSlot(prop->PropertyFeedbackSlot())));
2113   CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2114 }
2115 
2116 
EmitNamedSuperPropertyLoad(Property * prop)2117 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2118   // Stack: receiver, home_object.
2119   SetExpressionPosition(prop);
2120   Literal* key = prop->key()->AsLiteral();
2121   DCHECK(!key->value()->IsSmi());
2122   DCHECK(prop->IsSuperAccess());
2123 
2124   __ push(Immediate(key->value()));
2125   __ push(Immediate(Smi::FromInt(language_mode())));
2126   __ CallRuntime(Runtime::kLoadFromSuper);
2127 }
2128 
2129 
EmitKeyedPropertyLoad(Property * prop)2130 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2131   SetExpressionPosition(prop);
2132   Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2133   __ mov(LoadDescriptor::SlotRegister(),
2134          Immediate(SmiFromSlot(prop->PropertyFeedbackSlot())));
2135   CallIC(ic);
2136 }
2137 
2138 
// Emits a keyed property load from a super object (super[key]).
// The caller has pushed receiver, home_object, and key; only the language
// mode is added here before delegating to %LoadKeyedFromSuper. The result
// presumably follows the usual runtime-call convention (accumulator/eax) —
// callers bail out with TOS_REG after this.
void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
  // Stack: receiver, home_object, key.
  SetExpressionPosition(prop);
  __ push(Immediate(Smi::FromInt(language_mode())));
  __ CallRuntime(Runtime::kLoadKeyedFromSuper);
}
2145 
2146 
// Emits an inline fast path for a binary operation when both operands turn
// out to be smis, with a fallback to the generic BinaryOpIC. On entry the
// left operand is on the stack and the right operand is in eax; the result
// is left in eax. The JumpPatchSite marks the smi check so the IC can later
// patch it (see JumpPatchSite at the top of this file) — keep the emitted
// instruction sequence exactly as-is.
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left,
                                              Expression* right) {
  // Do combined smi check of the operands. Left operand is on the
  // stack. Right operand is in eax.
  Label smi_case, done, stub_call;
  __ pop(edx);       // Left operand.
  __ mov(ecx, eax);  // Save right operand; eax is clobbered by the or below.
  __ or_(eax, edx);  // Tag bits of both operands combined for one check.
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);

  // Generic path: restore the right operand and call the BinaryOpIC.
  __ bind(&stub_call);
  __ mov(eax, ecx);
  Handle<Code> code =
      CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
  CallIC(code, expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done, Label::kNear);

  // Smi case. Invariant below: left in eax, right in ecx, both tagged.
  __ bind(&smi_case);
  __ mov(eax, edx);  // Copy left operand in case of a stub call.

  switch (op) {
    case Token::SAR:
      // Shift the still-tagged value, then clear the tag bit; an arithmetic
      // right shift of a smi always yields a smi.
      __ SmiUntag(ecx);
      __ sar_cl(eax);  // No checks of result necessary
      __ and_(eax, Immediate(~kSmiTagMask));
      break;
    case Token::SHL: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shl_cl(eax);
      // Check that the *signed* result fits in a smi.
      __ cmp(eax, 0xc0000000);
      __ j(positive, &result_ok);
      // Doesn't fit: re-tag the right operand and take the stub path.
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::SHR: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shr_cl(eax);
      // Logical shift may produce a value outside the positive smi range.
      __ test(eax, Immediate(0xc0000000));
      __ j(zero, &result_ok);
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::ADD:
      // Tagged addition; overflow means the result is not a smi.
      __ add(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::SUB:
      __ sub(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::MUL: {
      // Untag one operand so the product is correctly tagged once.
      __ SmiUntag(eax);
      __ imul(eax, ecx);
      __ j(overflow, &stub_call);
      __ test(eax, eax);
      __ j(not_zero, &done, Label::kNear);
      // Product is zero: if either operand was negative the result should
      // be -0, which is not a smi, so fall back to the stub.
      __ mov(ebx, edx);
      __ or_(ebx, ecx);
      __ j(negative, &stub_call);
      break;
    }
    case Token::BIT_OR:
      // Bitwise ops on tagged smis stay valid smis — no checks needed.
      __ or_(eax, ecx);
      break;
    case Token::BIT_AND:
      __ and_(eax, ecx);
      break;
    case Token::BIT_XOR:
      __ xor_(eax, ecx);
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(eax);
}
2240 
2241 
// Defines the members of a class literal: methods, getters and setters are
// installed on the prototype (or on the constructor itself for static
// members) via per-kind runtime calls, then the class definition is
// finalized. On entry the constructor is in eax; the constructor and its
// prototype stay on the stack for the duration of the loop.
void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
  // Constructor is in eax.
  DCHECK(lit != NULL);
  __ push(eax);

  // No access check is needed here since the constructor is created by the
  // class literal.
  Register scratch = ebx;
  __ mov(scratch, FieldOperand(eax, JSFunction::kPrototypeOrInitialMapOffset));
  __ Push(scratch);

  for (int i = 0; i < lit->properties()->length(); i++) {
    ObjectLiteral::Property* property = lit->properties()->at(i);
    Expression* value = property->value();

    // Push the home object for this member: the constructor for statics,
    // the prototype otherwise.
    if (property->is_static()) {
      __ push(Operand(esp, kPointerSize));  // constructor
    } else {
      __ push(Operand(esp, 0));  // prototype
    }
    EmitPropertyKey(property, lit->GetIdForProperty(i));

    // The static prototype property is read only. We handle the non computed
    // property name case in the parser. Since this is the only case where we
    // need to check for an own read only property we special case this so we do
    // not need to do this for every property.
    if (property->is_static() && property->is_computed_name()) {
      __ CallRuntime(Runtime::kThrowIfStaticPrototype);
      __ push(eax);
    }

    VisitForStackValue(value);
    if (NeedsHomeObject(value)) {
      EmitSetHomeObject(value, 2, property->GetSlot());
    }

    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::PROTOTYPE:
        UNREACHABLE();
      // Intentional fall-through: UNREACHABLE() never returns, so no break
      // is needed before the COMPUTED case.
      case ObjectLiteral::Property::COMPUTED:
        __ CallRuntime(Runtime::kDefineClassMethod);
        break;

      case ObjectLiteral::Property::GETTER:
        __ push(Immediate(Smi::FromInt(DONT_ENUM)));
        __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked);
        break;

      case ObjectLiteral::Property::SETTER:
        __ push(Immediate(Smi::FromInt(DONT_ENUM)));
        __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked);
        break;
    }
  }

  // Set both the prototype and constructor to have fast properties, and also
  // freeze them in strong mode.
  __ CallRuntime(Runtime::kFinalizeClassDefinition);
}
2303 
2304 
EmitBinaryOp(BinaryOperation * expr,Token::Value op)2305 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2306   __ pop(edx);
2307   Handle<Code> code =
2308       CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2309   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
2310   CallIC(code, expr->BinaryOperationFeedbackId());
2311   patch_site.EmitPatchInfo();
2312   context()->Plug(eax);
2313 }
2314 
2315 
// Emits a plain (Token::ASSIGN) store of the value currently in eax to the
// reference expression |expr|, dispatching on the kind of left-hand side.
// The value is preserved and left in eax afterwards (plugged into the
// current expression context). |slot| is the feedback vector slot used by
// the store ICs.
void FullCodeGenerator::EmitAssignment(Expression* expr,
                                       FeedbackVectorSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      // EffectContext: the variable store itself produces no value here;
      // eax still holds the value for the final context()->Plug below.
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN, slot);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(eax);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ Move(StoreDescriptor::ReceiverRegister(), eax);
      __ pop(StoreDescriptor::ValueRegister());  // Restore value.
      __ mov(StoreDescriptor::NameRegister(),
             prop->key()->AsLiteral()->value());
      EmitLoadStoreICSlot(slot);
      CallStoreIC();
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      __ push(eax);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      // stack: value, this; eax: home_object
      // Swap the two stack slots and move the value back into eax so the
      // layout matches what EmitNamedSuperPropertyStore expects.
      Register scratch = ecx;
      Register scratch2 = edx;
      __ mov(scratch, result_register());               // home_object
      __ mov(eax, MemOperand(esp, kPointerSize));       // value
      __ mov(scratch2, MemOperand(esp, 0));             // this
      __ mov(MemOperand(esp, kPointerSize), scratch2);  // this
      __ mov(MemOperand(esp, 0), scratch);              // home_object
      // stack: this, home_object. eax: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      __ push(eax);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(prop->key());
      // Rotate the three stack slots so that the order becomes
      // this, home_object, key with the value in eax, as expected by
      // EmitKeyedSuperPropertyStore.
      Register scratch = ecx;
      Register scratch2 = edx;
      __ mov(scratch2, MemOperand(esp, 2 * kPointerSize));  // value
      // stack: value, this, home_object; eax: key, edx: value
      __ mov(scratch, MemOperand(esp, kPointerSize));  // this
      __ mov(MemOperand(esp, 2 * kPointerSize), scratch);
      __ mov(scratch, MemOperand(esp, 0));  // home_object
      __ mov(MemOperand(esp, kPointerSize), scratch);
      __ mov(MemOperand(esp, 0), eax);
      __ mov(eax, scratch2);
      // stack: this, home_object, key; eax: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
    case KEYED_PROPERTY: {
      __ push(eax);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Move(StoreDescriptor::NameRegister(), eax);
      __ pop(StoreDescriptor::ReceiverRegister());  // Receiver.
      __ pop(StoreDescriptor::ValueRegister());     // Restore value.
      EmitLoadStoreICSlot(slot);
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(eax);
}
2394 
2395 
EmitStoreToStackLocalOrContextSlot(Variable * var,MemOperand location)2396 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2397     Variable* var, MemOperand location) {
2398   __ mov(location, eax);
2399   if (var->IsContextSlot()) {
2400     __ mov(edx, eax);
2401     int offset = Context::SlotOffset(var->index());
2402     __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
2403   }
2404 }
2405 
2406 
// Stores the value in eax into the variable |var|. |op| distinguishes an
// initializing store (Token::INIT) from a regular assignment, which matters
// for let/const hole checks and legacy-const semantics. |slot| is the
// feedback vector slot used when the store goes through a store IC.
void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackVectorSlot slot) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(StoreDescriptor::NameRegister(), var->name());
    // The receiver is the global object, loaded via the native context's
    // extension slot.
    __ mov(StoreDescriptor::ReceiverRegister(), NativeContextOperand());
    __ mov(StoreDescriptor::ReceiverRegister(),
           ContextOperand(StoreDescriptor::ReceiverRegister(),
                          Context::EXTENSION_INDEX));
    EmitLoadStoreICSlot(slot);
    CallStoreIC();

  } else if (var->mode() == LET && op != Token::INIT) {
    // Non-initializing assignment to let variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, ecx);
    // TDZ check: the slot holds the hole until the binding is initialized.
    __ mov(edx, location);
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(not_equal, &assign, Label::kNear);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (var->mode() == CONST && op != Token::INIT) {
    // Assignment to const variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label const_error;
    MemOperand location = VarOperand(var, ecx);
    // Uninitialized const (hole) -> ReferenceError; otherwise any
    // assignment to a const binding throws.
    __ mov(edx, location);
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(not_equal, &const_error, Label::kNear);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&const_error);
    __ CallRuntime(Runtime::kThrowConstAssignError);

  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, ecx);
    // {this} may only be initialized once; a second init (e.g. a repeated
    // super() call) throws a ReferenceError.
    __ mov(edx, location);
    __ cmp(edx, isolate()->factory()->the_hole_value());
    __ j(equal, &uninitialized_this);
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (!var->is_const_mode() ||
             (var->mode() == CONST && op == Token::INIT)) {
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ push(eax);  // Value.
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ push(Immediate(Smi::FromInt(language_mode())));
      __ CallRuntime(Runtime::kStoreLookupSlot);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      MemOperand location = VarOperand(var, ecx);
      if (generate_debug_code_ && var->mode() == LET && op == Token::INIT) {
        // Check for an uninitialized let binding.
        __ mov(edx, location);
        __ cmp(edx, isolate()->factory()->the_hole_value());
        __ Check(equal, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }

  } else if (var->mode() == CONST_LEGACY && op == Token::INIT) {
    // Const initializers need a write barrier.
    DCHECK(!var->IsParameter());  // No const parameters.
    if (var->IsLookupSlot()) {
      __ push(eax);
      __ push(esi);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot);
    } else {
      DCHECK(var->IsStackLocal() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, ecx);
      // Only the first initialization takes effect; later ones are ignored.
      __ mov(edx, location);
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ j(not_equal, &skip, Label::kNear);
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ bind(&skip);
    }

  } else {
    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
    if (is_strict(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError);
    }
    // Silently ignore store in sloppy mode.
  }
}
2510 
2511 
EmitNamedPropertyAssignment(Assignment * expr)2512 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2513   // Assignment to a property, using a named store IC.
2514   // eax    : value
2515   // esp[0] : receiver
2516   Property* prop = expr->target()->AsProperty();
2517   DCHECK(prop != NULL);
2518   DCHECK(prop->key()->IsLiteral());
2519 
2520   __ mov(StoreDescriptor::NameRegister(), prop->key()->AsLiteral()->value());
2521   __ pop(StoreDescriptor::ReceiverRegister());
2522   EmitLoadStoreICSlot(expr->AssignmentSlot());
2523   CallStoreIC();
2524   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2525   context()->Plug(eax);
2526 }
2527 
2528 
EmitNamedSuperPropertyStore(Property * prop)2529 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2530   // Assignment to named property of super.
2531   // eax : value
2532   // stack : receiver ('this'), home_object
2533   DCHECK(prop != NULL);
2534   Literal* key = prop->key()->AsLiteral();
2535   DCHECK(key != NULL);
2536 
2537   __ push(Immediate(key->value()));
2538   __ push(eax);
2539   __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2540                                              : Runtime::kStoreToSuper_Sloppy));
2541 }
2542 
2543 
EmitKeyedSuperPropertyStore(Property * prop)2544 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2545   // Assignment to named property of super.
2546   // eax : value
2547   // stack : receiver ('this'), home_object, key
2548 
2549   __ push(eax);
2550   __ CallRuntime((is_strict(language_mode())
2551                       ? Runtime::kStoreKeyedToSuper_Strict
2552                       : Runtime::kStoreKeyedToSuper_Sloppy));
2553 }
2554 
2555 
EmitKeyedPropertyAssignment(Assignment * expr)2556 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2557   // Assignment to a property, using a keyed store IC.
2558   // eax               : value
2559   // esp[0]            : key
2560   // esp[kPointerSize] : receiver
2561 
2562   __ pop(StoreDescriptor::NameRegister());  // Key.
2563   __ pop(StoreDescriptor::ReceiverRegister());
2564   DCHECK(StoreDescriptor::ValueRegister().is(eax));
2565   Handle<Code> ic =
2566       CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2567   EmitLoadStoreICSlot(expr->AssignmentSlot());
2568   CallIC(ic);
2569   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2570   context()->Plug(eax);
2571 }
2572 
2573 
VisitProperty(Property * expr)2574 void FullCodeGenerator::VisitProperty(Property* expr) {
2575   Comment cmnt(masm_, "[ Property");
2576   SetExpressionPosition(expr);
2577 
2578   Expression* key = expr->key();
2579 
2580   if (key->IsPropertyName()) {
2581     if (!expr->IsSuperAccess()) {
2582       VisitForAccumulatorValue(expr->obj());
2583       __ Move(LoadDescriptor::ReceiverRegister(), result_register());
2584       EmitNamedPropertyLoad(expr);
2585     } else {
2586       VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2587       VisitForStackValue(
2588           expr->obj()->AsSuperPropertyReference()->home_object());
2589       EmitNamedSuperPropertyLoad(expr);
2590     }
2591   } else {
2592     if (!expr->IsSuperAccess()) {
2593       VisitForStackValue(expr->obj());
2594       VisitForAccumulatorValue(expr->key());
2595       __ pop(LoadDescriptor::ReceiverRegister());                  // Object.
2596       __ Move(LoadDescriptor::NameRegister(), result_register());  // Key.
2597       EmitKeyedPropertyLoad(expr);
2598     } else {
2599       VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2600       VisitForStackValue(
2601           expr->obj()->AsSuperPropertyReference()->home_object());
2602       VisitForStackValue(expr->key());
2603       EmitKeyedSuperPropertyLoad(expr);
2604     }
2605   }
2606   PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2607   context()->Plug(eax);
2608 }
2609 
2610 
// Emits a call to the given IC stub, tagging the call site with |ast_id|
// for type feedback, and bumps the per-function IC call counter.
void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId ast_id) {
  ic_total_count_++;
  __ call(code, RelocInfo::CODE_TARGET, ast_id);
}
2616 
2617 
2618 // Code common for calls using the IC.
// Emits a call where the callee is either a variable or a (non-super) named
// property. Leaves the stack as [function, receiver] and delegates argument
// evaluation and the actual call to EmitCall.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  // Get the target function.
  ConvertReceiverMode convert_mode;
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    __ push(Immediate(isolate()->factory()->undefined_value()));
    convert_mode = ConvertReceiverMode::kNullOrUndefined;
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    __ push(Operand(esp, 0));
    __ mov(Operand(esp, kPointerSize), eax);
    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
  }

  EmitCall(expr, convert_mode);
}
2648 
2649 
// Emits a call of the form super.name(...): loads the target method via
// the LoadFromSuper runtime call, then leaves [function, receiver] on the
// stack for EmitCall.
void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  SetExpressionPosition(expr);
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  // 'this' is pushed twice: once as the eventual call receiver, once as the
  // receiver argument consumed by the LoadFromSuper runtime call.
  __ push(eax);
  __ push(eax);
  __ push(Operand(esp, kPointerSize * 2));
  __ push(Immediate(key->value()));
  __ push(Immediate(Smi::FromInt(language_mode())));
  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  //  - language_mode
  __ CallRuntime(Runtime::kLoadFromSuper);

  // Replace home_object with target function.
  __ mov(Operand(esp, kPointerSize), eax);

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}
2685 
2686 
2687 // Code common for calls using the IC.
// Emits a call where the callee is a keyed property, e.g. obj[key](...).
// Loads the function via the keyed load IC, then leaves the stack as
// [function, receiver] for EmitCall.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
  __ mov(LoadDescriptor::NameRegister(), eax);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  __ push(Operand(esp, 0));
  __ mov(Operand(esp, kPointerSize), eax);

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}
2708 
2709 
// Emits a call of the form super[key](...): loads the target method via
// the LoadKeyedFromSuper runtime call, then leaves [function, receiver]
// on the stack for EmitCall.
void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetExpressionPosition(prop);
  // Load the function from the receiver.
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  // 'this' is pushed twice: once as the eventual call receiver, once as the
  // receiver argument consumed by the LoadKeyedFromSuper runtime call.
  __ push(eax);
  __ push(eax);
  __ push(Operand(esp, kPointerSize * 2));
  VisitForStackValue(prop->key());
  __ push(Immediate(Smi::FromInt(language_mode())));
  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  //  - language_mode
  __ CallRuntime(Runtime::kLoadKeyedFromSuper);

  // Replace home_object with target function.
  __ mov(Operand(esp, kPointerSize), eax);

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}
2743 
2744 
// Common tail for all call kinds: expects [function, receiver] on the stack,
// evaluates the arguments, and invokes the Call IC. |mode| tells the IC how
// the receiver should be converted.
void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
  SetCallPosition(expr);
  Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, mode).code();
  // edx: feedback slot (as smi); edi: target function (under the arguments).
  __ Move(edx, Immediate(SmiFromSlot(expr->CallFeedbackICSlot())));
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);

  RecordJSReturnSite(expr);

  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));

  // Drop the function left on the stack and plug the result.
  context()->DropAndPlug(1, eax);
}
2769 
2770 
// Pushes the arguments for, and calls, the ResolvePossiblyDirectEval runtime
// function, which determines the actual function to invoke for a call that
// syntactically looks like a direct eval. |arg_count| is the number of call
// arguments already on the stack.
void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // Push copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ push(Operand(esp, arg_count * kPointerSize));
  } else {
    __ push(Immediate(isolate()->factory()->undefined_value()));
  }

  // Push the enclosing function.
  __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));

  // Push the language mode.
  __ push(Immediate(Smi::FromInt(language_mode())));

  // Push the start position of the scope the calls resides in.
  __ push(Immediate(Smi::FromInt(scope()->start_position())));

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
}
2791 
2792 
2793 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
// Pushes the callee function and its receiver ("with base object") for a
// call through a variable. Lookup slots (variables possibly shadowed by
// eval-introduced bindings) take a fast path that may fall through to a
// runtime lookup.
void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
  VariableProxy* callee = expr->expression()->AsVariableProxy();
  if (callee->var()->IsLookupSlot()) {
    Label slow, done;
    SetExpressionPosition(callee);
    // Generate code for loading from variables potentially shadowed by
    // eval-introduced variables.
    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in eax) and
    // the object holding it (returned in edx).
    __ push(context_register());
    __ push(Immediate(callee->name()));
    __ CallRuntime(Runtime::kLoadLookupSlot);
    __ push(eax);  // Function.
    __ push(edx);  // Receiver.
    PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);

    // If fast case code has been generated, emit code to push the function
    // and receiver and have the slow path jump around this code.
    if (done.is_linked()) {
      Label call;
      __ jmp(&call, Label::kNear);
      __ bind(&done);
      // Push function.
      __ push(eax);
      // The receiver is implicitly the global receiver. Indicate this by
      // passing the hole to the call function stub.
      __ push(Immediate(isolate()->factory()->undefined_value()));
      __ bind(&call);
    }
  } else {
    VisitForStackValue(callee);
    // refEnv.WithBaseObject()
    __ push(Immediate(isolate()->factory()->undefined_value()));
  }
}
2832 
2833 
// Emits a call that may be a direct eval. The callee is resolved at runtime
// via ResolvePossiblyDirectEval, then invoked through the Call builtin.
void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
  // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
  // to resolve the function we need to call.  Then we call the resolved
  // function using the given arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  PushCalleeAndWithBaseObject(expr);

  // Push the arguments.
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Push a copy of the function (found below the arguments) and
  // resolve eval.
  __ push(Operand(esp, (arg_count + 1) * kPointerSize));
  EmitResolvePossiblyDirectEval(arg_count);

  // Touch up the stack with the resolved function.
  __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);

  PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);

  SetCallPosition(expr);
  // edi: resolved target function; eax: argument count.
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Set(eax, arg_count);
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  RecordJSReturnSite(expr);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, eax);
}
2867 
2868 
// Emits code for a 'new' expression: evaluates the constructor and the
// arguments, records call-site type feedback, and invokes the construct
// stub. The result object ends up in eax.
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  __ EmitLoadTypeFeedbackVector(ebx);
  __ mov(edx, Immediate(SmiFromSlot(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate());
  __ call(stub.GetCode(), RelocInfo::CODE_TARGET);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}
2907 
2908 
// Emits a super(...) constructor call: pushes the active function's
// prototype (the super constructor), evaluates the arguments, loads
// new.target, and invokes the Construct builtin.
void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
  SuperCallReference* super_call_ref =
      expr->expression()->AsSuperCallReference();
  DCHECK_NOT_NULL(super_call_ref);

  // Push the super constructor target on the stack (may be null,
  // but the Construct builtin can deal with that properly).
  VisitForAccumulatorValue(super_call_ref->this_function_var());
  __ AssertFunction(result_register());
  // The super constructor is the prototype of the active function's map.
  __ mov(result_register(),
         FieldOperand(result_register(), HeapObject::kMapOffset));
  __ Push(FieldOperand(result_register(), Map::kPrototypeOffset));

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load new target into edx.
  VisitForAccumulatorValue(super_call_ref->new_target_var());
  __ mov(edx, result_register());

  // Load function and argument count into edi and eax.
  __ Move(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);

  RecordJSReturnSite(expr);

  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}
2949 
2950 
EmitIsSmi(CallRuntime * expr)2951 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2952   ZoneList<Expression*>* args = expr->arguments();
2953   DCHECK(args->length() == 1);
2954 
2955   VisitForAccumulatorValue(args->at(0));
2956 
2957   Label materialize_true, materialize_false;
2958   Label* if_true = NULL;
2959   Label* if_false = NULL;
2960   Label* fall_through = NULL;
2961   context()->PrepareTest(&materialize_true, &materialize_false,
2962                          &if_true, &if_false, &fall_through);
2963 
2964   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2965   __ test(eax, Immediate(kSmiTagMask));
2966   Split(zero, if_true, if_false, fall_through);
2967 
2968   context()->Plug(if_true, if_false);
2969 }
2970 
2971 
EmitIsJSReceiver(CallRuntime * expr)2972 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2973   ZoneList<Expression*>* args = expr->arguments();
2974   DCHECK(args->length() == 1);
2975 
2976   VisitForAccumulatorValue(args->at(0));
2977 
2978   Label materialize_true, materialize_false;
2979   Label* if_true = NULL;
2980   Label* if_false = NULL;
2981   Label* fall_through = NULL;
2982   context()->PrepareTest(&materialize_true, &materialize_false,
2983                          &if_true, &if_false, &fall_through);
2984 
2985   __ JumpIfSmi(eax, if_false);
2986   __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ebx);
2987   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2988   Split(above_equal, if_true, if_false, fall_through);
2989 
2990   context()->Plug(if_true, if_false);
2991 }
2992 
2993 
EmitIsSimdValue(CallRuntime * expr)2994 void FullCodeGenerator::EmitIsSimdValue(CallRuntime* expr) {
2995   ZoneList<Expression*>* args = expr->arguments();
2996   DCHECK(args->length() == 1);
2997 
2998   VisitForAccumulatorValue(args->at(0));
2999 
3000   Label materialize_true, materialize_false;
3001   Label* if_true = NULL;
3002   Label* if_false = NULL;
3003   Label* fall_through = NULL;
3004   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3005                          &if_false, &fall_through);
3006 
3007   __ JumpIfSmi(eax, if_false);
3008   __ CmpObjectType(eax, SIMD128_VALUE_TYPE, ebx);
3009   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3010   Split(equal, if_true, if_false, fall_through);
3011 
3012   context()->Plug(if_true, if_false);
3013 }
3014 
3015 
EmitIsFunction(CallRuntime * expr)3016 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3017   ZoneList<Expression*>* args = expr->arguments();
3018   DCHECK(args->length() == 1);
3019 
3020   VisitForAccumulatorValue(args->at(0));
3021 
3022   Label materialize_true, materialize_false;
3023   Label* if_true = NULL;
3024   Label* if_false = NULL;
3025   Label* fall_through = NULL;
3026   context()->PrepareTest(&materialize_true, &materialize_false,
3027                          &if_true, &if_false, &fall_through);
3028 
3029   __ JumpIfSmi(eax, if_false);
3030   __ CmpObjectType(eax, FIRST_FUNCTION_TYPE, ebx);
3031   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3032   Split(above_equal, if_true, if_false, fall_through);
3033 
3034   context()->Plug(if_true, if_false);
3035 }
3036 
3037 
// %_IsMinusZero(value): tests whether the argument is the heap number -0.0.
// A heap number is -0 iff its exponent word is 0x80000000 (sign bit set,
// all other bits clear) and its mantissa word is zero.
void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Non-heap-numbers (including smis) can never be -0.
  Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
  __ CheckMap(eax, map, if_false, DO_SMI_CHECK);
  // Check if the exponent half is 0x80000000. Comparing against 1 and
  // checking for overflow is the shortest possible encoding: cmp sets the
  // overflow flag only when the operand is INT_MIN (0x80000000).
  __ cmp(FieldOperand(eax, HeapNumber::kExponentOffset), Immediate(0x1));
  __ j(no_overflow, if_false);
  // The mantissa half must additionally be zero.
  __ cmp(FieldOperand(eax, HeapNumber::kMantissaOffset), Immediate(0x0));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3063 
3064 
EmitIsArray(CallRuntime * expr)3065 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3066   ZoneList<Expression*>* args = expr->arguments();
3067   DCHECK(args->length() == 1);
3068 
3069   VisitForAccumulatorValue(args->at(0));
3070 
3071   Label materialize_true, materialize_false;
3072   Label* if_true = NULL;
3073   Label* if_false = NULL;
3074   Label* fall_through = NULL;
3075   context()->PrepareTest(&materialize_true, &materialize_false,
3076                          &if_true, &if_false, &fall_through);
3077 
3078   __ JumpIfSmi(eax, if_false);
3079   __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
3080   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3081   Split(equal, if_true, if_false, fall_through);
3082 
3083   context()->Plug(if_true, if_false);
3084 }
3085 
3086 
// %_IsTypedArray(arg): emits a test of whether the single argument is a
// JSTypedArray, feeding the surrounding test context.
void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // Evaluate the argument into the accumulator (eax).
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  // Smis can't be typed arrays; otherwise check the map's instance type.
  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_TYPED_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3107 
3108 
// %_IsRegExp(arg): emits a test of whether the single argument is a JSRegExp,
// feeding the surrounding test context.
void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // Evaluate the argument into the accumulator (eax).
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Smis can't be regexps; otherwise check the map's instance type.
  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3129 
3130 
// %_IsJSProxy(arg): emits a test of whether the single argument is a JSProxy,
// feeding the surrounding test context.
void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // Evaluate the argument into the accumulator (eax).
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  // Smis can't be proxies; otherwise check the map's instance type.
  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_PROXY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3151 
3152 
// %_ObjectEquals(a, b): emits a raw pointer-identity comparison of the two
// arguments (no coercion), feeding the surrounding test context.
void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  // First argument goes on the stack, second into the accumulator (eax).
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Pop the first argument into ebx and compare the raw values.
  __ pop(ebx);
  __ cmp(eax, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3175 
3176 
// %_Arguments(index): reads one element of the current function's arguments
// via ArgumentsAccessStub. Result is plugged into the context in eax.
void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // ArgumentsAccessStub expects the key in edx and the formal
  // parameter count in eax.
  VisitForAccumulatorValue(args->at(0));
  __ mov(edx, eax);
  // Formal parameter count is a compile-time constant, passed as a smi.
  __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(eax);
}
3190 
3191 
// %_ArgumentsLength(): produces the actual argument count as a smi in eax.
// Defaults to the formal parameter count unless the caller went through an
// arguments adaptor frame, in which case the adaptor's length is used.
void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);

  Label exit;
  // Get the number of formal parameters (smi-tagged) as the default answer.
  __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &exit);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  __ AssertSmi(eax);
  context()->Plug(eax);
}
3213 
3214 
// %_ClassOf(obj): computes the "class name" string of the argument in eax:
//   - null for non-JSReceivers,
//   - "Function" for JSFunctions,
//   - "Object" when the map's constructor is not a JS function,
//   - otherwise the constructor's instance class name.
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  // Evaluate the argument into the accumulator (eax).
  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(eax, &null, Label::kNear);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  // CmpObjectType leaves the object's map in eax (reused as scratch here).
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, eax);
  __ j(below, &null, Label::kNear);

  // Return 'Function' for JSFunction objects.
  __ CmpInstanceType(eax, JS_FUNCTION_TYPE);
  __ j(equal, &function, Label::kNear);

  // Check if the constructor in the map is a JS function.
  __ GetMapConstructor(eax, eax, ebx);
  __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
  __ j(not_equal, &non_function_constructor, Label::kNear);

  // eax now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done, Label::kNear);

  // Non-JS objects have class null.
  __ bind(&null);
  __ mov(eax, isolate()->factory()->null_value());
  __ jmp(&done, Label::kNear);

  // Functions have class 'Function'.
  __ bind(&function);
  __ mov(eax, isolate()->factory()->Function_string());
  __ jmp(&done, Label::kNear);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ mov(eax, isolate()->factory()->Object_string());

  // All done.
  __ bind(&done);

  context()->Plug(eax);
}
3262 
3263 
// %_ValueOf(obj): unwraps a JSValue wrapper object; any other input (smi or
// non-JSValue heap object) is returned unchanged in eax.
void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(eax, &done, Label::kNear);
  // If the object is not a value type, return the object.
  __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
  __ j(not_equal, &done, Label::kNear);
  // JSValue: replace the wrapper with its boxed value.
  __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(eax);
}
3281 
3282 
// %_IsDate(arg): emits a test of whether the single argument is a JSDate,
// feeding the surrounding test context.
void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());

  // Evaluate the argument into the accumulator (eax).
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = nullptr;
  Label* if_false = nullptr;
  Label* fall_through = nullptr;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  // Smis can't be dates; otherwise check the map's instance type.
  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_DATE_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3303 
3304 
// %_OneByteSeqStringSetChar(index, value, string): stores a character value
// into a sequential one-byte string in place and returns the string.
// Debug-mode-only checks verify the smi-ness of index/value and the string's
// representation; release code trusts the caller.
void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string

  // Pop in reverse push order: value was pushed last before the string.
  __ pop(value);
  __ pop(index);

  if (FLAG_debug_code) {
    // Both index and value must be smis.
    __ test(value, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    __ test(index, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
  }

  __ SmiUntag(value);
  __ SmiUntag(index);

  if (FLAG_debug_code) {
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
  }

  // One byte per character: scale the untagged index by 1.
  __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
           value);
  context()->Plug(string);
}
3339 
3340 
// %_TwoByteSeqStringSetChar(index, value, string): stores a character value
// into a sequential two-byte string in place and returns the string.
// The smi-tagged index doubles as a byte offset for two-byte characters
// (smi tag shift == times-2 scaling), so only the value is untagged.
void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = eax;
  Register index = ebx;
  Register value = ecx;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  // Pop in reverse push order.
  __ pop(value);
  __ pop(index);

  if (FLAG_debug_code) {
    // Both index and value must be smis.
    __ test(value, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    __ test(index, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    // The check helper wants an untagged index; re-tag afterwards so the
    // store below can keep using the tagged form as a byte offset.
    __ SmiUntag(index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
    __ SmiTag(index);
  }

  __ SmiUntag(value);
  // No need to untag a smi for two-byte addressing.
  __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
           value);
  context()->Plug(string);
}
3372 
3373 
// %_SetValueOf(obj, value): if obj is a JSValue wrapper, stores value into it
// (with a write barrier) and returns value; otherwise returns value untouched.
void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));  // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(ebx);  // eax = value. ebx = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(ebx, &done, Label::kNear);

  // If the object is not a value type, return the value.
  __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
  __ j(not_equal, &done, Label::kNear);

  // Store the value.
  __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);

  // Update the write barrier.  Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(edx, eax);
  __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(eax);
}
3401 
3402 
// %_ToInteger(arg): smis are already integers and pass through untouched;
// everything else is converted by calling Runtime::kToInteger.
void FullCodeGenerator::EmitToInteger(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());

  // Load the argument into eax and convert it.
  VisitForAccumulatorValue(args->at(0));

  // Convert the object to an integer.
  Label done_convert;
  __ JumpIfSmi(eax, &done_convert, Label::kNear);
  __ Push(eax);
  __ CallRuntime(Runtime::kToInteger);
  __ bind(&done_convert);
  context()->Plug(eax);
}
3418 
3419 
// %_ToName(arg): values that are already names (strings/symbols) pass through
// untouched; smis and other heap objects are converted via Runtime::kToName.
void FullCodeGenerator::EmitToName(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());

  // Load the argument into eax and convert it.
  VisitForAccumulatorValue(args->at(0));

  // Convert the object to a name.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  // Name types occupy the low end of the instance-type range, so a single
  // <= LAST_NAME_TYPE comparison identifies them.
  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
  __ CmpObjectType(eax, LAST_NAME_TYPE, ecx);
  __ j(below_equal, &done_convert, Label::kNear);
  __ bind(&convert);
  __ Push(eax);
  __ CallRuntime(Runtime::kToName);
  __ bind(&done_convert);
  context()->Plug(eax);
}
3439 
3440 
// %_StringCharFromCode(code): builds a one-character string from a character
// code using StringCharFromCodeGenerator (fast path inline, slow path via the
// generator's fallback). The result register is ebx.
void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // Character code in eax; result will be produced in ebx.
  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(eax, ebx);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  // Slow-path code emitted out of line; no extra runtime-call setup needed.
  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(ebx);
}
3458 
3459 
// %_StringCharCodeAt(string, index): loads the character code at the given
// index via StringCharCodeAtGenerator. Out-of-range indices yield NaN; a
// non-smi index is replaced with undefined to trigger conversion in the
// generator's slow path. Result lands in edx.
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // string -> stack, index -> accumulator.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register result = edx;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ Move(result, Immediate(isolate()->factory()->nan_value()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ Move(result, Immediate(isolate()->factory()->undefined_value()));
  __ jmp(&done);

  // Slow-path code emitted out of line.
  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
3504 
3505 
// %_StringCharAt(string, index): loads the one-character substring at the
// given index via StringCharAtGenerator. Out-of-range indices yield the empty
// string; a non-smi index is replaced with smi zero to trigger conversion in
// the generator's slow path. Result lands in eax.
void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // string -> stack, index -> accumulator.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register scratch = edx;
  Register result = eax;  // Aliases index; the generator consumes index first.

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ Move(result, Immediate(isolate()->factory()->empty_string()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Move(result, Immediate(Smi::FromInt(0)));
  __ jmp(&done);

  // Slow-path code emitted out of line.
  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
3552 
3553 
// %_Call(target, receiver, ...args): pushes target, receiver and all call
// arguments, then invokes the Call builtin. Stack layout after the pushes is
// [target, receiver, arg0..argN-1], so the target sits argc+1 slots deep.
void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
  // Move target to edi.
  int const argc = args->length() - 2;  // Exclude target and receiver.
  __ mov(edi, Operand(esp, (argc + 1) * kPointerSize));
  // Call the target.
  __ mov(eax, Immediate(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  // Discard the function left on TOS.
  context()->DropAndPlug(1, eax);
}
3573 
3574 
// %_HasCachedArrayIndex(string): tests the string's hash field against
// kContainsCachedArrayIndexMask; the mask bits being clear (zero flag set)
// means a cached array index is present. Feeds the surrounding test context.
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // Evaluate the argument into the accumulator (eax).
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ test(FieldOperand(eax, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  // Note: zero (not equal) — the mask marks strings WITHOUT a cached index.
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
3597 
3598 
// %_GetCachedArrayIndex(string): extracts the array index cached in the
// string's hash field (via IndexFromHash) and plugs it into the context.
void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
  __ IndexFromHash(eax, eax);

  context()->Plug(eax);
}
3611 
3612 
// %_GetSuperConstructor(fn): loads the prototype from the function's map
// (i.e. the [[Prototype]] of the constructor) into eax.
void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(eax);
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ mov(eax, FieldOperand(eax, Map::kPrototypeOffset));
  context()->Plug(eax);
}
3622 
3623 
// %_FastOneByteArrayJoin(array, separator): inline fast path for joining a
// JSArray of flat sequential one-byte strings with a one-byte separator.
// Falls back (result = undefined, signalling the caller to use the generic
// path) whenever any precondition fails: non-array input, non-fast elements,
// a non-seq-one-byte element or separator, length overflow, or a result too
// large for a regular-page allocation.
//
// Stack layout while this code runs (separator pushed by us, two temp slots
// reserved with the sub(esp, ...) below):
//   esp + 2*kPointerSize : separator
//   esp + 1*kPointerSize : result slot
//   esp + 0              : untagged array length
void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;

  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  // We will leave the separator on the stack until the end of the function.
  VisitForStackValue(args->at(1));
  // Load this to eax (= array)
  VisitForAccumulatorValue(args->at(0));
  // All aliases of the same register have disjoint lifetimes.
  Register array = eax;
  Register elements = no_reg;  // Will be eax.

  Register index = edx;

  Register string_length = ecx;

  Register string = esi;

  Register scratch = ebx;

  Register array_length = edi;
  Register result_pos = no_reg;  // Will be edi.

  // Separator operand is already pushed.
  Operand separator_operand = Operand(esp, 2 * kPointerSize);
  Operand result_operand = Operand(esp, 1 * kPointerSize);
  Operand array_length_operand = Operand(esp, 0);
  // Reserve the result and array-length slots described above.
  __ sub(esp, Immediate(2 * kPointerSize));
  // Clear the direction flag so the byte-copy loops below move forward.
  __ cld();
  // Check that the array is a JSArray
  __ JumpIfSmi(array, &bailout);
  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(scratch, &bailout);

  // If the array has length zero, return the empty string.
  __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length);
  __ j(not_zero, &non_trivial_array);
  __ mov(result_operand, isolate()->factory()->empty_string());
  __ jmp(&done);

  // Save the array length.
  __ bind(&non_trivial_array);
  __ mov(array_length_operand, array_length);

  // Save the FixedArray containing array's elements.
  // End of array's live range.
  elements = array;
  __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
  array = no_reg;


  // Check that all array elements are sequential one-byte strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ Move(index, Immediate(0));
  __ Move(string_length, Immediate(0));
  // Loop condition: while (index < length).
  // Live loop registers: index, array_length, string,
  //                      scratch, string_length, elements.
  if (generate_debug_code_) {
    __ cmp(index, array_length);
    __ Assert(less, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
  }
  __ bind(&loop);
  __ mov(string, FieldOperand(elements,
                              index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ JumpIfSmi(string, &bailout);
  // Element must be a sequential one-byte string: mask the instance type
  // down to the string/encoding/representation bits and compare.
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
  __ j(not_equal, &bailout);
  // Accumulate smi-tagged lengths; bail out on overflow.
  __ add(string_length,
         FieldOperand(string, SeqOneByteString::kLengthOffset));
  __ j(overflow, &bailout);
  __ add(index, Immediate(1));
  __ cmp(index, array_length);
  __ j(less, &loop);

  // If array_length is 1, return elements[0], a string.
  __ cmp(array_length, 1);
  __ j(not_equal, &not_size_one_array);
  __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
  __ mov(result_operand, scratch);
  __ jmp(&done);

  __ bind(&not_size_one_array);

  // End of array_length live range.
  result_pos = array_length;
  array_length = no_reg;

  // Live registers:
  // string_length: Sum of string lengths, as a smi.
  // elements: FixedArray of strings.

  // Check that the separator is a flat one-byte string.
  __ mov(string, separator_operand);
  __ JumpIfSmi(string, &bailout);
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
  __ j(not_equal, &bailout);

  // Add (separator length times array_length) - separator length
  // to string_length.
  __ mov(scratch, separator_operand);
  __ mov(scratch, FieldOperand(scratch, SeqOneByteString::kLengthOffset));
  __ sub(string_length, scratch);  // May be negative, temporarily.
  __ imul(scratch, array_length_operand);
  __ j(overflow, &bailout);
  __ add(string_length, scratch);
  __ j(overflow, &bailout);

  // Shift out the smi tag bit to get the untagged total length.
  __ shr(string_length, 1);

  // Bailout for large object allocations.
  __ cmp(string_length, Page::kMaxRegularHeapObjectSize);
  __ j(greater, &bailout);

  // Live registers and stack values:
  //   string_length
  //   elements
  __ AllocateOneByteString(result_pos, string_length, scratch, index, string,
                           &bailout);
  // Stash the result object, then point result_pos at its first character.
  __ mov(result_operand, result_pos);
  __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));


  // Dispatch on separator length: empty, one character, or longer.
  __ mov(string, separator_operand);
  __ cmp(FieldOperand(string, SeqOneByteString::kLengthOffset),
         Immediate(Smi::FromInt(1)));
  __ j(equal, &one_char_separator);
  __ j(greater, &long_separator);


  // Empty separator case
  __ mov(index, Immediate(0));
  __ jmp(&loop_1_condition);
  // Loop condition: while (index < length).
  __ bind(&loop_1);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.
  //   elements: the FixedArray of strings we are joining.

  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);  // Untag the smi length.
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));
  __ bind(&loop_1_condition);
  __ cmp(index, array_length_operand);
  __ j(less, &loop_1);  // End while (index < length).
  __ jmp(&done);



  // One-character separator case
  __ bind(&one_char_separator);
  // Replace separator with its one-byte character value.
  __ mov_b(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ mov_b(separator_operand, scratch);

  __ Move(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator
  __ jmp(&loop_2_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_2);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.

  // Copy the separator character to the result.
  __ mov_b(scratch, separator_operand);
  __ mov_b(Operand(result_pos, 0), scratch);
  __ inc(result_pos);

  __ bind(&loop_2_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);  // Untag the smi length.
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_2);  // End while (index < length).
  __ jmp(&done);


  // Long separator case (separator is more than one character).
  __ bind(&long_separator);

  __ Move(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator
  __ jmp(&loop_3_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_3);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.

  // Copy the separator to the result.
  __ mov(string, separator_operand);
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);  // Untag the smi length.
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);

  __ bind(&loop_3_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);  // Untag the smi length.
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_3);  // End while (index < length).
  __ jmp(&done);


  // Any failed precondition lands here: undefined tells the caller to use
  // the generic join path.
  __ bind(&bailout);
  __ mov(result_operand, isolate()->factory()->undefined_value());
  __ bind(&done);
  __ mov(eax, result_operand);
  // Drop temp values from the stack, and restore context register.
  // (2 reserved slots + the separator pushed at function entry.)
  __ add(esp, Immediate(3 * kPointerSize));

  // string aliased esi above, so the context register must be reloaded.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}
3891 
3892 
// %_DebugIsActive(): loads the isolate's debug-is-active byte from its
// external reference, smi-tags it, and plugs the result (smi 0 or 1) in eax.
void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
  __ SmiTag(eax);
  context()->Plug(eax);
}
3901 
3902 
// %_CreateIterResultObject(value, done): allocates a JSIteratorResult
// ({value, done}) inline in new space; falls back to
// Runtime::kCreateIterResultObject if allocation fails.
void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  // Both operands stay on the stack until popped straight into the
  // freshly allocated object's fields below.
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  __ Allocate(JSIteratorResult::kSize, eax, ecx, edx, &runtime, TAG_OBJECT);
  // Initialize map, properties, and elements from the native context.
  __ mov(ebx, NativeContextOperand());
  __ mov(ebx, ContextOperand(ebx, Context::ITERATOR_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  // Pop in reverse push order: done was pushed last.
  __ pop(FieldOperand(eax, JSIteratorResult::kDoneOffset));
  __ pop(FieldOperand(eax, JSIteratorResult::kValueOffset));
  // The code above initializes exactly all five words of the object.
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done, Label::kNear);

  // Slow path: let the runtime allocate and fill the object.
  // NOTE(review): the runtime call is assumed to consume the two stack
  // operands as its arguments — confirm against kCreateIterResultObject.
  __ bind(&runtime);
  __ CallRuntime(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(eax);
}
3930 
3931 
// Pushes the implicit undefined receiver and loads the JS runtime function
// (identified by the expression's context index) into eax. The caller
// (VisitCallRuntime) arranges the target/receiver stack layout afterwards.
void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push undefined as receiver.
  __ push(Immediate(isolate()->factory()->undefined_value()));

  __ LoadGlobalFunction(expr->context_index(), eax);
}
3938 
3939 
// Invokes a JS runtime function whose target and arguments are already on the
// stack (target at depth arg_count+1, as set up by VisitCallRuntime), using
// the Call builtin with a null-or-undefined receiver mode.
void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  // Record the source position so the call maps back to the expression.
  SetCallPosition(expr);
  // Load the target function from under the pushed arguments into edi.
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ Set(eax, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
}
3950 
3951 
// Compiles a %foo(...) runtime call. JS runtime functions are dispatched
// through the generic Call builtin; intrinsics with inline full-codegen
// implementations are expanded via the Emit##Name generators; everything
// else falls through to a C runtime call.
VisitCallRuntime(CallRuntime * expr)3952 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3953   ZoneList<Expression*>* args = expr->arguments();
3954   int arg_count = args->length();
3955 
3956   if (expr->is_jsruntime()) {
3957     Comment cmnt(masm_, "[ CallRuntime");
    // Pushes undefined receiver and loads the callee into eax.
3958     EmitLoadJSRuntimeFunction(expr);
3959 
3960     // Push the target function under the receiver.
3961     __ push(Operand(esp, 0));
3962     __ mov(Operand(esp, kPointerSize), eax);
3963 
3964     // Push the arguments ("left-to-right").
3965     for (int i = 0; i < arg_count; i++) {
3966       VisitForStackValue(args->at(i));
3967     }
3968 
3969     PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3970     EmitCallJSRuntimeFunction(expr);
3971 
3972     // Restore context register.
3973     __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
    // Drop the function left on the stack; result is in eax.
3974     context()->DropAndPlug(1, eax);
3975 
3976   } else {
3977     const Runtime::Function* function = expr->function();
3978     switch (function->function_id) {
#define CALL_INTRINSIC_GENERATOR(Name)     \
  case Runtime::kInline##Name: {           \
    Comment cmnt(masm_, "[ Inline" #Name); \
    return Emit##Name(expr);               \
  }
      // Expand each intrinsic that has an inline implementation.
3984       FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
3985 #undef CALL_INTRINSIC_GENERATOR
3986       default: {
3987         Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
3988         // Push the arguments ("left-to-right").
3989         for (int i = 0; i < arg_count; i++) {
3990           VisitForStackValue(args->at(i));
3991         }
3992 
3993         // Call the C runtime function.
3994         PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3995         __ CallRuntime(expr->function(), arg_count);
3996         context()->Plug(eax);
3997       }
3998     }
3999   }
4000 }
4001 
4002 
// Compiles the unary operators delete, void, !, and typeof. Each case plugs
// its result into the current expression context; other unary ops are
// handled elsewhere, hence UNREACHABLE in the default case.
VisitUnaryOperation(UnaryOperation * expr)4003 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4004   switch (expr->op()) {
4005     case Token::DELETE: {
4006       Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4007       Property* property = expr->expression()->AsProperty();
4008       VariableProxy* proxy = expr->expression()->AsVariableProxy();
4009 
4010       if (property != NULL) {
        // delete obj[key] / delete obj.key: push receiver and key, then let
        // the runtime apply the language-mode-specific semantics.
4011         VisitForStackValue(property->obj());
4012         VisitForStackValue(property->key());
4013         __ CallRuntime(is_strict(language_mode())
4014                            ? Runtime::kDeleteProperty_Strict
4015                            : Runtime::kDeleteProperty_Sloppy);
4016         context()->Plug(eax);
4017       } else if (proxy != NULL) {
4018         Variable* var = proxy->var();
4019         // Delete of an unqualified identifier is disallowed in strict mode but
4020         // "delete this" is allowed.
4021         bool is_this = var->HasThisName(isolate());
4022         DCHECK(is_sloppy(language_mode()) || is_this);
4023         if (var->IsUnallocatedOrGlobalSlot()) {
          // Global variable: delete it from the global object via the runtime.
4024           __ mov(eax, NativeContextOperand());
4025           __ push(ContextOperand(eax, Context::EXTENSION_INDEX));
4026           __ push(Immediate(var->name()));
4027           __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
4028           context()->Plug(eax);
4029         } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4030           // Result of deleting non-global variables is false.  'this' is
4031           // not really a variable, though we implement it as one.  The
4032           // subexpression does not have side effects.
4033           context()->Plug(is_this);
4034         } else {
4035           // Non-global variable.  Call the runtime to try to delete from the
4036           // context where the variable was introduced.
4037           __ push(context_register());
4038           __ push(Immediate(var->name()));
4039           __ CallRuntime(Runtime::kDeleteLookupSlot);
4040           context()->Plug(eax);
4041         }
4042       } else {
4043         // Result of deleting non-property, non-variable reference is true.
4044         // The subexpression may have side effects.
4045         VisitForEffect(expr->expression());
4046         context()->Plug(true);
4047       }
4048       break;
4049     }
4050 
4051     case Token::VOID: {
4052       Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      // void expr: evaluate for side effects, result is always undefined.
4053       VisitForEffect(expr->expression());
4054       context()->Plug(isolate()->factory()->undefined_value());
4055       break;
4056     }
4057 
4058     case Token::NOT: {
4059       Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4060       if (context()->IsEffect()) {
4061         // Unary NOT has no side effects so it's only necessary to visit the
4062         // subexpression.  Match the optimizing compiler by not branching.
4063         VisitForEffect(expr->expression());
4064       } else if (context()->IsTest()) {
4065         const TestContext* test = TestContext::cast(context());
4066         // The labels are swapped for the recursive call.
4067         VisitForControl(expr->expression(),
4068                         test->false_label(),
4069                         test->true_label(),
4070                         test->fall_through());
4071         context()->Plug(test->true_label(), test->false_label());
4072       } else {
4073         // We handle value contexts explicitly rather than simply visiting
4074         // for control and plugging the control flow into the context,
4075         // because we need to prepare a pair of extra administrative AST ids
4076         // for the optimizing compiler.
4077         DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4078         Label materialize_true, materialize_false, done;
        // Labels are swapped: control reaching "true" of the subexpression
        // materializes false, and vice versa.
4079         VisitForControl(expr->expression(),
4080                         &materialize_false,
4081                         &materialize_true,
4082                         &materialize_true);
4083         __ bind(&materialize_true);
4084         PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4085         if (context()->IsAccumulatorValue()) {
4086           __ mov(eax, isolate()->factory()->true_value());
4087         } else {
4088           __ Push(isolate()->factory()->true_value());
4089         }
4090         __ jmp(&done, Label::kNear);
4091         __ bind(&materialize_false);
4092         PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4093         if (context()->IsAccumulatorValue()) {
4094           __ mov(eax, isolate()->factory()->false_value());
4095         } else {
4096           __ Push(isolate()->factory()->false_value());
4097         }
4098         __ bind(&done);
4099       }
4100       break;
4101     }
4102 
4103     case Token::TYPEOF: {
4104       Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4105       {
        // Evaluate into eax; VisitForTypeofValue tolerates unresolvable
        // references (typeof on undeclared names must not throw).
4106         AccumulatorValueContext context(this);
4107         VisitForTypeofValue(expr->expression());
4108       }
      // TypeofStub takes its input in ebx and returns the type string in eax.
4109       __ mov(ebx, eax);
4110       TypeofStub typeof_stub(isolate());
4111       __ CallStub(&typeof_stub);
4112       context()->Plug(eax);
4113       break;
4114     }
4115 
4116     default:
4117       UNREACHABLE();
4118   }
4119 }
4120 
4121 
// Compiles ++/-- (prefix and postfix) for variables and the four property
// kinds. Loads the old value, applies an inline Smi fast path (patchable for
// the BinaryOpIC), falls back to ToNumber + the binary-op stub, and finally
// stores the result back via the assignment machinery. Postfix expressions
// in value contexts additionally keep the pre-increment value on the stack.
VisitCountOperation(CountOperation * expr)4122 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4123   DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4124 
4125   Comment cmnt(masm_, "[ CountOperation");
4126 
4127   Property* prop = expr->expression()->AsProperty();
4128   LhsKind assign_type = Property::GetAssignType(prop);
4129 
4130   // Evaluate expression and get value.
4131   if (assign_type == VARIABLE) {
4132     DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4133     AccumulatorValueContext context(this);
4134     EmitVariableLoad(expr->expression()->AsVariableProxy());
4135   } else {
4136     // Reserve space for result of postfix operation.
4137     if (expr->is_postfix() && !context()->IsEffect()) {
4138       __ push(Immediate(Smi::FromInt(0)));
4139     }
4140     switch (assign_type) {
4141       case NAMED_PROPERTY: {
4142         // Put the object both on the stack and in the register.
4143         VisitForStackValue(prop->obj());
4144         __ mov(LoadDescriptor::ReceiverRegister(), Operand(esp, 0));
4145         EmitNamedPropertyLoad(prop);
4146         break;
4147       }
4148 
4149       case NAMED_SUPER_PROPERTY: {
        // Stack after this case: this, home_object, this (duplicated for the
        // load); the load consumes the top copy.
4150         VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4151         VisitForAccumulatorValue(
4152             prop->obj()->AsSuperPropertyReference()->home_object());
4153         __ push(result_register());
4154         __ push(MemOperand(esp, kPointerSize));
4155         __ push(result_register());
4156         EmitNamedSuperPropertyLoad(prop);
4157         break;
4158       }
4159 
4160       case KEYED_SUPER_PROPERTY: {
        // Duplicate this/home_object/key on top of the stack for the load.
4161         VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4162         VisitForStackValue(
4163             prop->obj()->AsSuperPropertyReference()->home_object());
4164         VisitForAccumulatorValue(prop->key());
4165         __ push(result_register());
4166         __ push(MemOperand(esp, 2 * kPointerSize));
4167         __ push(MemOperand(esp, 2 * kPointerSize));
4168         __ push(result_register());
4169         EmitKeyedSuperPropertyLoad(prop);
4170         break;
4171       }
4172 
4173       case KEYED_PROPERTY: {
4174         VisitForStackValue(prop->obj());
4175         VisitForStackValue(prop->key());
4176         __ mov(LoadDescriptor::ReceiverRegister(),
4177                Operand(esp, kPointerSize));                       // Object.
4178         __ mov(LoadDescriptor::NameRegister(), Operand(esp, 0));  // Key.
4179         EmitKeyedPropertyLoad(prop);
4180         break;
4181       }
4182 
4183       case VARIABLE:
4184         UNREACHABLE();
4185     }
4186   }
4187 
4188   // We need a second deoptimization point after loading the value
4189   // in case evaluating the property load may have a side effect.
4190   if (assign_type == VARIABLE) {
4191     PrepareForBailout(expr->expression(), TOS_REG);
4192   } else {
4193     PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4194   }
4195 
4196   // Inline smi case if we are in a loop.
4197   Label done, stub_call;
  // The patch site lets the BinaryOpIC rewrite the not-smi jump once it has
  // collected type feedback.
4198   JumpPatchSite patch_site(masm_);
4199   if (ShouldInlineSmiCase(expr->op())) {
4200     Label slow;
4201     patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);
4202 
4203     // Save result for postfix expressions.
4204     if (expr->is_postfix()) {
4205       if (!context()->IsEffect()) {
4206         // Save the result on the stack. If we have a named or keyed property
4207         // we store the result under the receiver that is currently on top
4208         // of the stack.
4209         switch (assign_type) {
4210           case VARIABLE:
4211             __ push(eax);
4212             break;
4213           case NAMED_PROPERTY:
4214             __ mov(Operand(esp, kPointerSize), eax);
4215             break;
4216           case NAMED_SUPER_PROPERTY:
4217             __ mov(Operand(esp, 2 * kPointerSize), eax);
4218             break;
4219           case KEYED_PROPERTY:
4220             __ mov(Operand(esp, 2 * kPointerSize), eax);
4221             break;
4222           case KEYED_SUPER_PROPERTY:
4223             __ mov(Operand(esp, 3 * kPointerSize), eax);
4224             break;
4225         }
4226       }
4227     }
4228 
    // Smi fast path: add/sub a tagged 1 directly; overflow means the result
    // left Smi range, so undo and go through the stub.
4229     if (expr->op() == Token::INC) {
4230       __ add(eax, Immediate(Smi::FromInt(1)));
4231     } else {
4232       __ sub(eax, Immediate(Smi::FromInt(1)));
4233     }
4234     __ j(no_overflow, &done, Label::kNear);
4235     // Call stub. Undo operation first.
4236     if (expr->op() == Token::INC) {
4237       __ sub(eax, Immediate(Smi::FromInt(1)));
4238     } else {
4239       __ add(eax, Immediate(Smi::FromInt(1)));
4240     }
4241     __ jmp(&stub_call, Label::kNear);
4242     __ bind(&slow);
4243   }
  // Non-smi slow path: convert the operand to a number first. In strong mode
  // the implicit conversion is skipped (the stub will throw instead;
  // its bailout point is registered after the stub call below).
4244   if (!is_strong(language_mode())) {
4245     ToNumberStub convert_stub(isolate());
4246     __ CallStub(&convert_stub);
4247     PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4248   }
4249 
4250   // Save result for postfix expressions.
4251   if (expr->is_postfix()) {
4252     if (!context()->IsEffect()) {
4253       // Save the result on the stack. If we have a named or keyed property
4254       // we store the result under the receiver that is currently on top
4255       // of the stack.
4256       switch (assign_type) {
4257         case VARIABLE:
4258           __ push(eax);
4259           break;
4260         case NAMED_PROPERTY:
4261           __ mov(Operand(esp, kPointerSize), eax);
4262           break;
4263         case NAMED_SUPER_PROPERTY:
4264           __ mov(Operand(esp, 2 * kPointerSize), eax);
4265           break;
4266         case KEYED_PROPERTY:
4267           __ mov(Operand(esp, 2 * kPointerSize), eax);
4268           break;
4269         case KEYED_SUPER_PROPERTY:
4270           __ mov(Operand(esp, 3 * kPointerSize), eax);
4271           break;
4272       }
4273     }
4274   }
4275 
4276   SetExpressionPosition(expr);
4277 
4278   // Call stub for +1/-1.
4279   __ bind(&stub_call);
  // BinaryOpIC convention: left operand in edx, right operand (Smi 1) in eax.
4280   __ mov(edx, eax);
4281   __ mov(eax, Immediate(Smi::FromInt(1)));
4282   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), expr->binary_op(),
4283                                               strength(language_mode())).code();
4284   CallIC(code, expr->CountBinOpFeedbackId());
4285   patch_site.EmitPatchInfo();
4286   __ bind(&done);
4287 
4288   if (is_strong(language_mode())) {
4289     PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4290   }
4291   // Store the value returned in eax.
4292   switch (assign_type) {
4293     case VARIABLE:
4294       if (expr->is_postfix()) {
4295         // Perform the assignment as if via '='.
4296         { EffectContext context(this);
4297           EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4298                                  Token::ASSIGN, expr->CountSlot());
4299           PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4300           context.Plug(eax);
4301         }
4302         // For all contexts except EffectContext we have the result on
4303         // top of the stack.
4304         if (!context()->IsEffect()) {
4305           context()->PlugTOS();
4306         }
4307       } else {
4308         // Perform the assignment as if via '='.
4309         EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4310                                Token::ASSIGN, expr->CountSlot());
4311         PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4312         context()->Plug(eax);
4313       }
4314       break;
4315     case NAMED_PROPERTY: {
4316       __ mov(StoreDescriptor::NameRegister(),
4317              prop->key()->AsLiteral()->value());
4318       __ pop(StoreDescriptor::ReceiverRegister());
4319       EmitLoadStoreICSlot(expr->CountSlot());
4320       CallStoreIC();
4321       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4322       if (expr->is_postfix()) {
4323         if (!context()->IsEffect()) {
4324           context()->PlugTOS();
4325         }
4326       } else {
4327         context()->Plug(eax);
4328       }
4329       break;
4330     }
4331     case NAMED_SUPER_PROPERTY: {
4332       EmitNamedSuperPropertyStore(prop);
4333       if (expr->is_postfix()) {
4334         if (!context()->IsEffect()) {
4335           context()->PlugTOS();
4336         }
4337       } else {
4338         context()->Plug(eax);
4339       }
4340       break;
4341     }
4342     case KEYED_SUPER_PROPERTY: {
4343       EmitKeyedSuperPropertyStore(prop);
4344       if (expr->is_postfix()) {
4345         if (!context()->IsEffect()) {
4346           context()->PlugTOS();
4347         }
4348       } else {
4349         context()->Plug(eax);
4350       }
4351       break;
4352     }
4353     case KEYED_PROPERTY: {
4354       __ pop(StoreDescriptor::NameRegister());
4355       __ pop(StoreDescriptor::ReceiverRegister());
4356       Handle<Code> ic =
4357           CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
4358       EmitLoadStoreICSlot(expr->CountSlot());
4359       CallIC(ic);
4360       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4361       if (expr->is_postfix()) {
4362         // Result is on the stack
4363         if (!context()->IsEffect()) {
4364           context()->PlugTOS();
4365         }
4366       } else {
4367         context()->Plug(eax);
4368       }
4369       break;
4370     }
4371   }
4372 }
4373 
4374 
// Fast path for `typeof sub_expr == "literal"` comparisons: instead of
// materializing the typeof string, test the value's tag/map/instance-type
// directly against the literal `check` and branch to the prepared
// true/false labels. An unknown literal is always false.
EmitLiteralCompareTypeof(Expression * expr,Expression * sub_expr,Handle<String> check)4375 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4376                                                  Expression* sub_expr,
4377                                                  Handle<String> check) {
4378   Label materialize_true, materialize_false;
4379   Label* if_true = NULL;
4380   Label* if_false = NULL;
4381   Label* fall_through = NULL;
4382   context()->PrepareTest(&materialize_true, &materialize_false,
4383                          &if_true, &if_false, &fall_through);
4384 
4385   { AccumulatorValueContext context(this);
4386     VisitForTypeofValue(sub_expr);
4387   }
4388   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4389 
4390   Factory* factory = isolate()->factory();
4391   if (String::Equals(check, factory->number_string())) {
    // "number": Smi or HeapNumber.
4392     __ JumpIfSmi(eax, if_true);
4393     __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
4394            isolate()->factory()->heap_number_map());
4395     Split(equal, if_true, if_false, fall_through);
4396   } else if (String::Equals(check, factory->string_string())) {
4397     __ JumpIfSmi(eax, if_false);
4398     __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
4399     Split(below, if_true, if_false, fall_through);
4400   } else if (String::Equals(check, factory->symbol_string())) {
4401     __ JumpIfSmi(eax, if_false);
4402     __ CmpObjectType(eax, SYMBOL_TYPE, edx);
4403     Split(equal, if_true, if_false, fall_through);
4404   } else if (String::Equals(check, factory->boolean_string())) {
    // "boolean": compare against the two oddball singletons.
4405     __ cmp(eax, isolate()->factory()->true_value());
4406     __ j(equal, if_true);
4407     __ cmp(eax, isolate()->factory()->false_value());
4408     Split(equal, if_true, if_false, fall_through);
4409   } else if (String::Equals(check, factory->undefined_string())) {
4410     __ cmp(eax, isolate()->factory()->undefined_value());
4411     __ j(equal, if_true);
4412     __ JumpIfSmi(eax, if_false);
4413     // Check for undetectable objects => true.
4414     __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
4415     __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
4416               1 << Map::kIsUndetectable);
4417     Split(not_zero, if_true, if_false, fall_through);
4418   } else if (String::Equals(check, factory->function_string())) {
4419     __ JumpIfSmi(eax, if_false);
4420     // Check for callable and not undetectable objects => true.
4421     __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
4422     __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
4423     __ and_(ecx, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
4424     __ cmp(ecx, 1 << Map::kIsCallable);
4425     Split(equal, if_true, if_false, fall_through);
4426   } else if (String::Equals(check, factory->object_string())) {
4427     __ JumpIfSmi(eax, if_false);
    // null reports "object".
4428     __ cmp(eax, isolate()->factory()->null_value());
4429     __ j(equal, if_true);
4430     STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
4431     __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, edx);
4432     __ j(below, if_false);
4433     // Check for callable or undetectable objects => false.
4434     __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
4435               (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
4436     Split(zero, if_true, if_false, fall_through);
4437 // clang-format off
  // One branch per SIMD type: typeof matches when the value carries that
  // type's canonical map.
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)   \
  } else if (String::Equals(check, factory->type##_string())) { \
    __ JumpIfSmi(eax, if_false);                                \
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),           \
           isolate()->factory()->type##_map());                 \
    Split(equal, if_true, if_false, fall_through);
4444   SIMD128_TYPES(SIMD128_TYPE)
4445 #undef SIMD128_TYPE
4446     // clang-format on
4447   } else {
    // Literal matches no typeof result: statically false.
4448     if (if_false != fall_through) __ jmp(if_false);
4449   }
4450   context()->Plug(if_true, if_false);
4451 }
4452 
4453 
// Compiles comparison operators. Literal comparisons (typeof/null/undefined)
// take a specialized path; `in` and `instanceof` call out to the runtime/
// stub; all other operators use an inline Smi compare with a patchable
// fallback to the CompareIC.
VisitCompareOperation(CompareOperation * expr)4454 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4455   Comment cmnt(masm_, "[ CompareOperation");
4456   SetExpressionPosition(expr);
4457 
4458   // First we try a fast inlined version of the compare when one of
4459   // the operands is a literal.
4460   if (TryLiteralCompare(expr)) return;
4461 
4462   // Always perform the comparison for its control flow.  Pack the result
4463   // into the expression's context after the comparison is performed.
4464   Label materialize_true, materialize_false;
4465   Label* if_true = NULL;
4466   Label* if_false = NULL;
4467   Label* fall_through = NULL;
4468   context()->PrepareTest(&materialize_true, &materialize_false,
4469                          &if_true, &if_false, &fall_through);
4470 
4471   Token::Value op = expr->op();
4472   VisitForStackValue(expr->left());
4473   switch (op) {
4474     case Token::IN:
      // `in`: the runtime consumes both stack operands and returns a boolean.
4475       VisitForStackValue(expr->right());
4476       __ CallRuntime(Runtime::kHasProperty);
4477       PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4478       __ cmp(eax, isolate()->factory()->true_value());
4479       Split(equal, if_true, if_false, fall_through);
4480       break;
4481 
4482     case Token::INSTANCEOF: {
      // InstanceOfStub convention: object in edx, constructor in eax.
4483       VisitForAccumulatorValue(expr->right());
4484       __ Pop(edx);
4485       InstanceOfStub stub(isolate());
4486       __ CallStub(&stub);
4487       PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4488       __ cmp(eax, isolate()->factory()->true_value());
4489       Split(equal, if_true, if_false, fall_through);
4490       break;
4491     }
4492 
4493     default: {
4494       VisitForAccumulatorValue(expr->right());
4495       Condition cc = CompareIC::ComputeCondition(op);
4496       __ pop(edx);
4497 
4498       bool inline_smi_code = ShouldInlineSmiCase(op);
      // Patchable site: the CompareIC rewrites the not-smi jump once type
      // feedback is available.
4499       JumpPatchSite patch_site(masm_);
4500       if (inline_smi_code) {
4501         Label slow_case;
        // Both operands are Smis iff (edx | eax) has the Smi tag clear.
4502         __ mov(ecx, edx);
4503         __ or_(ecx, eax);
4504         patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
4505         __ cmp(edx, eax);
4506         Split(cc, if_true, if_false, NULL);
4507         __ bind(&slow_case);
4508       }
4509 
4510       Handle<Code> ic = CodeFactory::CompareIC(
4511                             isolate(), op, strength(language_mode())).code();
4512       CallIC(ic, expr->CompareOperationFeedbackId());
4513       patch_site.EmitPatchInfo();
4514 
4515       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The CompareIC returns a value whose sign/zero-ness encodes the
      // comparison result; test it and branch on the operator's condition.
4516       __ test(eax, eax);
4517       Split(cc, if_true, if_false, fall_through);
4518     }
4519   }
4520 
4521   // Convert the result of the comparison into one expected for this
4522   // expression's context.
4523   context()->Plug(if_true, if_false);
4524 }
4525 
4526 
// Fast path for comparisons against the null/undefined literal. Strict
// equality is a direct pointer compare against the oddball; loose equality
// goes through the CompareNilIC (which also matches the other nil value and
// undetectable objects).
EmitLiteralCompareNil(CompareOperation * expr,Expression * sub_expr,NilValue nil)4527 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4528                                               Expression* sub_expr,
4529                                               NilValue nil) {
4530   Label materialize_true, materialize_false;
4531   Label* if_true = NULL;
4532   Label* if_false = NULL;
4533   Label* fall_through = NULL;
4534   context()->PrepareTest(&materialize_true, &materialize_false,
4535                          &if_true, &if_false, &fall_through);
4536 
4537   VisitForAccumulatorValue(sub_expr);
4538   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4539 
4540   Handle<Object> nil_value = nil == kNullValue
4541       ? isolate()->factory()->null_value()
4542       : isolate()->factory()->undefined_value();
4543   if (expr->op() == Token::EQ_STRICT) {
    // Oddballs are singletons, so identity compare suffices for ===.
4544     __ cmp(eax, nil_value);
4545     Split(equal, if_true, if_false, fall_through);
4546   } else {
4547     Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4548     CallIC(ic, expr->CompareOperationFeedbackId());
4549     __ cmp(eax, isolate()->factory()->true_value());
4550     Split(equal, if_true, if_false, fall_through);
4551   }
4552   context()->Plug(if_true, if_false);
4553 }
4554 
4555 
// Loads the current JSFunction from the standard frame slot into eax.
VisitThisFunction(ThisFunction * expr)4556 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4557   __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
4558   context()->Plug(eax);
4559 }
4560 
4561 
// eax is the canonical result/accumulator register on ia32/x87.
result_register()4562 Register FullCodeGenerator::result_register() {
4563   return eax;
4564 }
4565 
4566 
// esi holds the current JS context on ia32/x87.
context_register()4567 Register FullCodeGenerator::context_register() {
4568   return esi;
4569 }
4570 
4571 
// Stores `value` into the current frame at the given ebp-relative offset;
// the offset must be pointer-aligned.
StoreToFrameField(int frame_offset,Register value)4572 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4573   DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4574   __ mov(Operand(ebp, frame_offset), value);
4575 }
4576 
4577 
// Loads the given slot of the current context (esi) into dst.
LoadContextField(Register dst,int context_index)4578 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4579   __ mov(dst, ContextOperand(esi, context_index));
4580 }
4581 
4582 
// Pushes the closure argument expected by the context-allocation runtime
// calls. Which closure is pushed depends on the kind of the enclosing scope.
PushFunctionArgumentForContextAllocation()4583 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4584   Scope* closure_scope = scope()->ClosureScope();
4585   if (closure_scope->is_script_scope() ||
4586       closure_scope->is_module_scope()) {
4587     // Contexts nested in the native context have a canonical empty function
4588     // as their closure, not the anonymous closure containing the global
4589     // code.
4590     __ mov(eax, NativeContextOperand());
4591     __ push(ContextOperand(eax, Context::CLOSURE_INDEX));
4592   } else if (closure_scope->is_eval_scope()) {
4593     // Contexts nested inside eval code have the same closure as the context
4594     // calling eval, not the anonymous closure containing the eval code.
4595     // Fetch it from the context.
4596     __ push(ContextOperand(esi, Context::CLOSURE_INDEX));
4597   } else {
4598     DCHECK(closure_scope->is_function_scope());
    // Ordinary function scope: the closure is the function in this frame.
4599     __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
4600   }
4601 }
4602 
4603 
4604 // ----------------------------------------------------------------------------
4605 // Non-local control flow support.
4606 
// Saves state on entry to a finally block: the return address (cooked into a
// GC-safe Smi delta from the code object start), the result register, and
// the pending message. ExitFinallyBlock restores them in reverse order.
EnterFinallyBlock()4607 void FullCodeGenerator::EnterFinallyBlock() {
4608   // Cook return address on top of stack (smi encoded Code* delta)
4609   DCHECK(!result_register().is(edx));
4610   __ pop(edx);
4611   __ sub(edx, Immediate(masm_->CodeObject()));
  // The delta must survive a single-bit Smi shift without losing bits.
4612   STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
4613   STATIC_ASSERT(kSmiTag == 0);
4614   __ SmiTag(edx);
4615   __ push(edx);
4616 
4617   // Store result register while executing finally block.
4618   __ push(result_register());
4619 
4620   // Store pending message while executing finally block.
4621   ExternalReference pending_message_obj =
4622       ExternalReference::address_of_pending_message_obj(isolate());
4623   __ mov(edx, Operand::StaticVariable(pending_message_obj));
4624   __ push(edx);
4625 
4626   ClearPendingMessage();
4627 }
4628 
4629 
// Restores the state saved by EnterFinallyBlock (pending message, result
// register, return address) and resumes at the uncooked return address.
ExitFinallyBlock()4630 void FullCodeGenerator::ExitFinallyBlock() {
4631   DCHECK(!result_register().is(edx));
4632   // Restore pending message from stack.
4633   __ pop(edx);
4634   ExternalReference pending_message_obj =
4635       ExternalReference::address_of_pending_message_obj(isolate());
4636   __ mov(Operand::StaticVariable(pending_message_obj), edx);
4637 
4638   // Restore result register from stack.
4639   __ pop(result_register());
4640 
4641   // Uncook return address.
4642   __ pop(edx);
4643   __ SmiUntag(edx);
  // Re-add the code object base saved off in EnterFinallyBlock, then jump.
4644   __ add(edx, Immediate(masm_->CodeObject()));
4645   __ jmp(edx);
4646 }
4647 
4648 
// Resets the isolate's pending-message slot to the hole sentinel.
ClearPendingMessage()4649 void FullCodeGenerator::ClearPendingMessage() {
4650   DCHECK(!result_register().is(edx));
4651   ExternalReference pending_message_obj =
4652       ExternalReference::address_of_pending_message_obj(isolate());
4653   __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
4654   __ mov(Operand::StaticVariable(pending_message_obj), edx);
4655 }
4656 
4657 
// Loads the (Smi-encoded) feedback-vector slot into the store IC's slot
// register ahead of a store IC call.
EmitLoadStoreICSlot(FeedbackVectorSlot slot)4658 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorSlot slot) {
4659   DCHECK(!slot.IsInvalid());
4660   __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
4661          Immediate(SmiFromSlot(slot)));
4662 }
4663 
4664 
4665 #undef __
4666 
4667 
// Instruction bytes used by BackEdgeTable::PatchAt/GetBackEdgeState to
// toggle a back-edge site between the interrupt check (jns over the call)
// and OSR (two-byte nop, so the call is always taken).
4668 static const byte kJnsInstruction = 0x79;  // jns rel8 opcode.
4669 static const byte kJnsOffset = 0x11;       // rel8 displacement over the call.
4670 static const byte kNopByteOne = 0x66;      // operand-size prefix of 2-byte nop.
4671 static const byte kNopByteTwo = 0x90;      // nop.
4672 #ifdef DEBUG
4673 static const byte kCallInstruction = 0xe8;  // call rel32, for sanity checks.
4674 #endif
4675 
4676 
// Patches a back-edge site in unoptimized code to the given state. `pc`
// points just past the call instruction; the two bytes before the call's
// rel32 operand are either a jns (interrupt check skipped when the counter
// stays non-negative) or a 2-byte nop (call always taken, for OSR). The
// call target itself is redirected to `replacement_code`.
PatchAt(Code * unoptimized_code,Address pc,BackEdgeState target_state,Code * replacement_code)4677 void BackEdgeTable::PatchAt(Code* unoptimized_code,
4678                             Address pc,
4679                             BackEdgeState target_state,
4680                             Code* replacement_code) {
  // Layout before pc: ... jns/nop(2 bytes) call-opcode rel32(4 bytes).
4681   Address call_target_address = pc - kIntSize;
4682   Address jns_instr_address = call_target_address - 3;
4683   Address jns_offset_address = call_target_address - 2;
4684 
4685   switch (target_state) {
4686     case INTERRUPT:
4687       //     sub <profiling_counter>, <delta>  ;; Not changed
4688       //     jns ok
4689       //     call <interrupt stub>
4690       //   ok:
4691       *jns_instr_address = kJnsInstruction;
4692       *jns_offset_address = kJnsOffset;
4693       break;
4694     case ON_STACK_REPLACEMENT:
4695     case OSR_AFTER_STACK_CHECK:
4696       //     sub <profiling_counter>, <delta>  ;; Not changed
4697       //     nop
4698       //     nop
4699       //     call <on-stack replacement>
4700       //   ok:
4701       *jns_instr_address = kNopByteOne;
4702       *jns_offset_address = kNopByteTwo;
4703       break;
4704   }
4705 
  // Redirect the call and tell incremental marking about the code patch.
4706   Assembler::set_target_address_at(unoptimized_code->GetIsolate(),
4707                                    call_target_address, unoptimized_code,
4708                                    replacement_code->entry());
4709   unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4710       unoptimized_code, call_target_address, replacement_code);
4711 }
4712 
4713 
// Decodes the current state of a back-edge site (see PatchAt): a jns byte
// means the interrupt check is live; otherwise the two-byte nop is expected
// and the call target distinguishes ON_STACK_REPLACEMENT from
// OSR_AFTER_STACK_CHECK.
GetBackEdgeState(Isolate * isolate,Code * unoptimized_code,Address pc)4714 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4715     Isolate* isolate,
4716     Code* unoptimized_code,
4717     Address pc) {
4718   Address call_target_address = pc - kIntSize;
4719   Address jns_instr_address = call_target_address - 3;
  // The byte preceding the rel32 operand must always be the call opcode.
4720   DCHECK_EQ(kCallInstruction, *(call_target_address - 1));
4721 
4722   if (*jns_instr_address == kJnsInstruction) {
4723     DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
4724     DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
4725               Assembler::target_address_at(call_target_address,
4726                                            unoptimized_code));
4727     return INTERRUPT;
4728   }
4729 
4730   DCHECK_EQ(kNopByteOne, *jns_instr_address);
4731   DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));
4732 
4733   if (Assembler::target_address_at(call_target_address, unoptimized_code) ==
4734       isolate->builtins()->OnStackReplacement()->entry()) {
4735     return ON_STACK_REPLACEMENT;
4736   }
4737 
  // Only one possibility left; verify in debug builds.
4738   DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
4739             Assembler::target_address_at(call_target_address,
4740                                          unoptimized_code));
4741   return OSR_AFTER_STACK_CHECK;
4742 }
4743 
4744 
4745 }  // namespace internal
4746 }  // namespace v8
4747 
4748 #endif  // V8_TARGET_ARCH_X87
4749