// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS64

// Note on Mips implementation:
//
// The result_register() for mips is the 'v0' register, which is defined
// by the ABI to contain function return values. However, the first
// parameter to a function is defined to be 'a0'. So there are many
// places where we have to move a previous result in v0 to a0 for the
// next call: mov(a0, v0). This is not needed on the other architectures.

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

#include "src/mips64/code-stubs-mips64.h"
#include "src/mips64/macro-assembler-mips64.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())
// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
// marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
// (the raw 16-bit immediate value is used) is the delta from the pc to the
// first instruction of the patchable code.
// The marker instruction is effectively a NOP (its destination is zero_reg)
// and will never be emitted by normal code.
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  // When initially emitting this ensure that a jump is always generated to skip
  // the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Always taken before patched.
    __ BranchShort(target, eq, at, Operand(zero_reg));
  }

  // When initially emitting this ensure that a jump is never generated to skip
  // the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    __ bind(&patch_site_);
    __ andi(at, reg, 0);
    // Never taken before patched.
    __ BranchShort(target, ne, at, Operand(zero_reg));
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
      Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
      __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
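
// Example of the patch-info encoding (illustrative, derived from
// EmitPatchInfo above): the delta back to the patch site is packed as
// reg_code * kImm16Mask + imm16. A delta of 20 instructions becomes
// "andi zero_reg, zero_reg, 20" (20 / 0xffff == 0, so the register field is
// code 0, i.e. zero_reg), while a delta of 70000 becomes
// "andi zero_reg, at, 4465" (70000 / 0xffff == 1, remainder 4465). The smi
// patching machinery reads the marker back and inverts this arithmetic to
// locate the inlined smi check it needs to rewrite.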


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right.  The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   o a1: the JS function object being called (i.e. ourselves)
//   o a3: the new target value
//   o cp: our context
//   o fp: our caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-mips.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kPointerSize;
    __ ld(a2, MemOperand(sp, receiver_offset));
    __ AssertNotSmi(a2);
    __ GetObjectType(a2, a2, a2);
    __ Check(ge, kSloppyFunctionExpectsJSReceiverReceiver, a2,
             Operand(FIRST_JS_RECEIVER_TYPE));
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);
  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        __ Dsubu(t1, sp, Operand(locals_count * kPointerSize));
        __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
        __ Branch(&ok, hs, t1, Operand(a2));
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ bind(&ok);
      }
      __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
      int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ li(a2, Operand(loop_iterations));
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        __ Dsubu(sp, sp, Operand(kMaxPushes * kPointerSize));
        for (int i = 0; i < kMaxPushes; i++) {
          __ sd(t1, MemOperand(sp, i * kPointerSize));
        }
        // Continue loop if not done.
        __ Dsubu(a2, a2, Operand(1));
        __ Branch(&loop_header, ne, a2, Operand(zero_reg));
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      __ Dsubu(sp, sp, Operand(remaining * kPointerSize));
      for (int i = 0; i < remaining; i++) {
        __ sd(t1, MemOperand(sp, i * kPointerSize));
      }
    }
  }

  bool function_in_register_a1 = true;

  // Possibly allocate a local context.
  if (info->scope()->num_heap_slots() > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in a1.
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ push(a1);
      __ Push(info->scope()->GetScopeInfo(info->isolate()));
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ push(a3);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ push(a1);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ pop(a3);  // Restore new target.
      }
    }
    function_in_register_a1 = false;
    // Context is returned in v0. It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ mov(cp, v0);
    __ sd(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
                               (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ ld(a0, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ sd(a0, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, target.offset(), a0, a2,
                                    kRAHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, a0, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // The registers holding this function and the new target are both trashed
  // if we bail out here. But since that can happen only when the new target
  // is not used and we allocate a context, the value of
  // |function_in_register_a1| is correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // Possibly set up a local binding to the this function, which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_a1) {
      __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as such.
    }
    SetVar(this_function_var, a1, a0, a2);
  }

  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, a3, a0, a2);
  }

  // Possibly allocate a rest parameter array.
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register_a1) {
      __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register_a1 = false;
    SetVar(rest_param, v0, a1, a2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_a1) {
      // Load this again, if it's used by the local context below.
      __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(a1);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, v0, a1, a2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    __ LoadRoot(at, Heap::kStackLimitRootIndex);
    __ Branch(&ok, hs, sp, Operand(at));
    Handle<Code> stack_check = isolate()->builtins()->StackCheck();
    PredictableCodeSizeScope predictable(
        masm_, masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
    __ Call(stack_check, RelocInfo::CODE_TARGET);
    __ bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);

    VisitStatements(literal()->body());

    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();
}


void FullCodeGenerator::ClearAccumulator() {
  DCHECK(Smi::FromInt(0) == 0);
  __ mov(v0, zero_reg);
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ li(a2, Operand(profiling_counter_));
  __ ld(a3, FieldMemOperand(a2, Cell::kValueOffset));
  __ Dsubu(a3, a3, Operand(Smi::FromInt(delta)));
  __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->is_debug()) {
    // Detect debug break requests as soon as possible.
    reset_value = FLAG_interrupt_budget >> 4;
  }
  __ li(a2, Operand(profiling_counter_));
  __ li(a3, Operand(Smi::FromInt(reset_value)));
  __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
}


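// A rough sketch of the interrupt-budget mechanism implemented by the two
// helpers above: the counter cell starts at FLAG_interrupt_budget, each back
// edge and return subtracts a weight, and when the counter goes negative the
// generated code calls the InterruptCheck builtin (giving the runtime a
// chance to handle interrupts and make tiering decisions) before the counter
// is reset.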
void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
  // to make sure it is constant. Branch may emit a skip-or-jump sequence
  // instead of the normal Branch. It seems that the "skip" part of that
  // sequence is about as long as this Branch would be so it is safe to ignore
  // that.
  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;
  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
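  // The decrement is proportional to the code size of the loop body, clamped
  // to [1, kMaxBackEdgeWeight], so the budget roughly tracks the volume of
  // unoptimized code executed rather than the raw trip count.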
  EmitProfilingCounterDecrement(weight);
  __ slt(at, a3, zero_reg);
  __ beq(at, zero_reg, &ok);
  // Call will emit a li t9 first, so it is safe to use the delay slot.
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());
  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset();
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ Branch(&ok, ge, a3, Operand(zero_reg));
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ push(v0);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ pop(v0);
  }
  EmitProfilingCounterReset();
  __ bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ Branch(&return_label_);
  } else {
    __ bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in v0.
      __ push(v0);
      __ CallRuntime(Runtime::kTraceExit);
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    // Make sure that the trampoline pool is not emitted inside of the return
    // sequence.
    { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
      // Here we use masm_-> instead of the __ macro to prevent the code
      // coverage tool from instrumenting, as we rely on the code size here.
      int32_t arg_count = info_->scope()->num_parameters() + 1;
      int32_t sp_delta = arg_count * kPointerSize;
      SetReturnPosition(literal());
      masm_->mov(sp, fp);
      masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
      masm_->Daddu(sp, sp, Operand(sp_delta));
      masm_->Jump(ra);
    }
  }
}
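
// Stack math in the sequence above (illustrative): arg_count counts the
// receiver plus the formal parameters, so a two-parameter function removes
// 3 * kPointerSize == 24 bytes of arguments on mips64 after restoring fp and
// ra. The trampoline pool is blocked because the sequence's size must remain
// predictable.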

void FullCodeGenerator::RestoreContext() {
  __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ li(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ li(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull(isolate()) || lit->IsUndefined(isolate()) ||
         !lit->IsUndetectable());
  if (lit->IsUndefined(isolate()) || lit->IsNull(isolate()) ||
      lit->IsFalse(isolate())) {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ Branch(false_label_);
    } else {
      if (true_label_ != fall_through_) __ Branch(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ li(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ sd(reg, MemOperand(sp, 0));
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  codegen()->OperandStackDepthIncrement(1);
  Label done;
  __ bind(materialize_true);
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  // Push the value as the following branch can clobber at in long branch mode.
  __ push(at);
  __ Branch(&done);
  __ bind(materialize_false);
  __ LoadRoot(at, Heap::kFalseValueRootIndex);
  __ push(at);
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(at, value_root_index);
  codegen()->PushOperand(at);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ Branch(true_label_);
  } else {
    if (false_label_ != fall_through_) __ Branch(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  __ mov(a0, result_register());
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ LoadRoot(at, Heap::kTrueValueRootIndex);
  Split(eq, result_register(), Operand(at), if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Register lhs,
                              const Operand& rhs,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ Branch(if_true, cc, lhs, rhs);
  } else if (if_true == fall_through) {
    __ Branch(if_false, NegateCondition(cc), lhs, rhs);
  } else {
    __ Branch(if_true, cc, lhs, rhs);
    __ Branch(if_false);
  }
}
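
// Fall-through example for Split (illustrative): when if_false is the
// fall-through label, a "lt" test compiles to the single conditional
// "Branch(if_true, lt, lhs, rhs)" and execution simply falls into the false
// code; only when neither label is the fall-through do we pay for the
// second, unconditional branch.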


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}
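
// Offset arithmetic example for StackOperand (illustrative): with
// kPointerSize == 8, parameter index 0 of a two-parameter function yields
// offset == -0 * 8 + (2 + 1) * 8 == 24, i.e. MemOperand(fp, 24) in the
// caller's argument area, while local index 1 yields
// kLocal0Offset - 8 below the frame pointer.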


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ ld(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ sd(src, location);
  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    __ RecordWriteContextSlot(scratch0,
                              location.offset(),
                              src,
                              scratch1,
                              kRAHasBeenSaved,
                              kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  Label skip;
  if (should_normalize) __ Branch(&skip);
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ LoadRoot(a4, Heap::kTrueValueRootIndex);
    Split(eq, a0, Operand(a4), if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ ld(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ LoadRoot(a4, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext, a1, Operand(a4));
    __ LoadRoot(a4, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext, a1, Operand(a4));
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST;
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      DCHECK(!variable->binding_needs_init());
      globals_->Add(variable->name(), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
        __ sd(a4, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ sd(at, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      DCHECK_EQ(VAR, mode);
      DCHECK(!hole_init);
      __ li(a2, Operand(variable->name()));
      __ Push(a2);
      __ CallRuntime(Runtime::kDeclareEvalVar);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ sd(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ sd(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                a2,
                                kRAHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ li(a2, Operand(variable->name()));
      PushOperand(a2);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      CallRuntimeWithOperands(Runtime::kDeclareEvalFunction);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ li(a1, Operand(pairs));
  __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
  __ Push(a1, a0);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());
    __ mov(a0, result_register());  // CompareStub requires args in a0, a1.

    // Perform the comparison as if via '==='.
    __ ld(a1, MemOperand(sp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
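      // Note: ORing the operands lets one test cover both values, since the
      // result has a clear smi tag bit only if both inputs do. Before
      // patching, the jump below is always taken and the CompareIC handles
      // everything; once the IC observes smi operands, the patch site is
      // rewritten so that only genuine non-smis take the slow path.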
      __ or_(a2, a1, a0);
      patch_site.EmitJumpIfNotSmi(a2, &slow_case);

      __ Branch(&next_test, ne, a1, Operand(a0));
      __ Drop(1);  // Switch value is no longer needed.
      __ Branch(clause->body_target());

      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ Branch(&skip);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(&next_test, ne, v0, Operand(at));
    __ Drop(1);
    __ Branch(clause->body_target());
    __ bind(&skip);

    __ Branch(&next_test, ne, v0, Operand(zero_reg));
    __ Drop(1);  // Switch value is no longer needed.
    __ Branch(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ Branch(nested_statement.break_label());
  } else {
    __ Branch(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  __ mov(a0, result_register());
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(a0, &convert);
  __ GetObjectType(a0, a1, a1);
  __ Branch(USE_DELAY_SLOT, &done_convert, ge, a1,
            Operand(FIRST_JS_RECEIVER_TYPE));
  __ LoadRoot(at, Heap::kNullValueRootIndex);  // In delay slot.
  __ Branch(USE_DELAY_SLOT, &exit, eq, a0, Operand(at));
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);  // In delay slot.
  __ Branch(&exit, eq, a0, Operand(at));
  __ bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ mov(a0, v0);
  __ bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ push(a0);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime;
  __ CheckEnumCache(&call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ ld(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ Branch(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(a0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array;
  __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kMetaMapRootIndex);
  __ Branch(&fixed_array, ne, a2, Operand(at));

  // We got a map in register v0. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(a1, v0);
  __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));

  __ LoadInstanceDescriptors(v0, a2);
  __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
  __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ li(a0, Operand(Smi::FromInt(0)));
  // Push map, enumeration cache, enumeration cache length (as smi) and zero.
  __ Push(v0, a2, a1, a0);
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ Drop(1);
  __ jmp(&exit);

  // We got a fixed array in register v0. Iterate through that.
  __ bind(&fixed_array);

  __ li(a1, Operand(Smi::FromInt(1)));  // Smi(1) indicates slow check.
  __ Push(a1, v0);  // Smi and array.
  __ ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
  __ Push(a1);  // Fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ li(a0, Operand(Smi::FromInt(0)));
  __ Push(a0);  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to a0, load the length to a1.
  __ ld(a0, MemOperand(sp, 0 * kPointerSize));
  __ ld(a1, MemOperand(sp, 1 * kPointerSize));
  __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));

  // Get the current entry of the array into register a3.
  __ ld(a2, MemOperand(sp, 2 * kPointerSize));
  __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ SmiScale(a4, a0, kPointerSizeLog2);
  __ daddu(a4, a2, a4);  // Array base + scaled (smi) index.
  __ ld(a3, MemOperand(a4));  // Current entry.

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register a2.
  __ ld(a2, MemOperand(sp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ ld(a1, MemOperand(sp, 4 * kPointerSize));
  __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ Branch(&update_each, eq, a4, Operand(a2));

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(a0);
  __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ sd(a2, FieldMemOperand(a0, FixedArray::OffsetOfElementAt(vector_index)));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(a1, a3);  // Enumerable and current entry.
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ mov(a3, result_register());
  __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
  __ Branch(loop_statement.continue_label(), eq, a3, Operand(at));

  // Update the 'each' property or variable from the possibly filtered
  // entry in register a3.
  __ bind(&update_each);
  __ mov(result_register(), a3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ pop(a0);
  __ Daddu(a0, a0, Operand(Smi::FromInt(1)));
  __ push(a0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ Branch(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
  __ li(StoreDescriptor::NameRegister(),
        Operand(isolate()->factory()->home_object_symbol()));
  __ ld(StoreDescriptor::ValueRegister(),
        MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), v0);
  __ li(StoreDescriptor::NameRegister(),
        Operand(isolate()->factory()->home_object_symbol()));
  __ ld(StoreDescriptor::ValueRegister(),
        MemOperand(sp, offset * kPointerSize));
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = a1;
  Register temp = a2;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ ld(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ ld(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    if (!current.is(next)) {
      __ Move(next, current);
    }
    __ bind(&loop);
    // Terminate at native context.
    __ ld(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ LoadRoot(a4, Heap::kNativeContextMapRootIndex);
    __ Branch(&fast, eq, temp, Operand(a4));
    // Check that extension is "the hole".
    __ ld(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ ld(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ Branch(&loop);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use the normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = a3;
  Register temp = a4;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ ld(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      __ ld(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is "the hole".
  __ ld(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
  return ContextMemOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
    __ Branch(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ ld(v0, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST) {
      __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
      __ dsubu(at, v0, at);  // Sub as compare: at == 0 on eq.
      __ Branch(done, ne, at, Operand(zero_reg));
      __ li(a0, Operand(var->name()));
      __ push(a0);
      __ CallRuntime(Runtime::kThrowReferenceError);
    }
    __ Branch(done);
  }
}


void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
#ifdef DEBUG
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocatedOrGlobalSlot() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
#endif
  __ li(LoadGlobalDescriptor::SlotRegister(),
        Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
  CallLoadGlobalIC(typeof_mode);
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
  // Record position before possible IC call.
  SetExpressionPosition(proxy);
  PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      EmitGlobalVariableLoad(proxy, typeof_mode);
      context()->Plug(v0);
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
    case VariableLocation::CONTEXT: {
      DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (NeedsHoleCheckForLoad(proxy)) {
        // Let and const need a read barrier.
        GetVar(v0, var);
        __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
        __ dsubu(at, v0, at);  // Sub as compare: at == 0 on eq.
        if (var->mode() == LET || var->mode() == CONST) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          Label done;
          __ Branch(&done, ne, at, Operand(zero_reg));
          __ li(a0, Operand(var->name()));
          __ push(a0);
          __ CallRuntime(Runtime::kThrowReferenceError);
          __ bind(&done);
        }
        context()->Plug(v0);
        break;
      }
      context()->Plug(var);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
      __ bind(&slow);
      __ Push(var->name());
      Runtime::FunctionId function_id =
          typeof_mode == NOT_INSIDE_TYPEOF
              ? Runtime::kLoadLookupSlot
              : Runtime::kLoadLookupSlotInsideTypeof;
      __ CallRuntime(function_id);
      __ bind(&done);
      context()->Plug(v0);
    }
  }
}

1368 
1369 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1370   Expression* expression = (property == NULL) ? NULL : property->value();
1371   if (expression == NULL) {
1372     __ LoadRoot(a1, Heap::kNullValueRootIndex);
1373     PushOperand(a1);
1374   } else {
1375     VisitForStackValue(expression);
1376     if (NeedsHomeObject(expression)) {
1377       DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1378              property->kind() == ObjectLiteral::Property::SETTER);
1379       int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1380       EmitSetHomeObject(expression, offset, property->GetSlot());
1381     }
1382   }
1383 }
1384 
1385 
1386 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1387   Comment cmnt(masm_, "[ ObjectLiteral");
1388 
1389   Handle<FixedArray> constant_properties = expr->constant_properties();
1390   __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1391   __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1392   __ li(a1, Operand(constant_properties));
1393   __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1394   if (MustCreateObjectLiteralWithRuntime(expr)) {
1395     __ Push(a3, a2, a1, a0);
1396     __ CallRuntime(Runtime::kCreateObjectLiteral);
1397   } else {
1398     FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1399     __ CallStub(&stub);
1400     RestoreContext();
1401   }
1402   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1403 
1404   // If result_saved is true the result is on top of the stack.  If
1405   // result_saved is false the result is in v0.
1406   bool result_saved = false;
1407 
1408   AccessorTable accessor_table(zone());
1409   int property_index = 0;
1410   for (; property_index < expr->properties()->length(); property_index++) {
1411     ObjectLiteral::Property* property = expr->properties()->at(property_index);
1412     if (property->is_computed_name()) break;
1413     if (property->IsCompileTimeValue()) continue;
1414 
1415     Literal* key = property->key()->AsLiteral();
1416     Expression* value = property->value();
1417     if (!result_saved) {
1418       PushOperand(v0);  // Save result on stack.
1419       result_saved = true;
1420     }
1421     switch (property->kind()) {
1422       case ObjectLiteral::Property::CONSTANT:
1423         UNREACHABLE();
1424       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1425         DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1426         // Fall through.
1427       case ObjectLiteral::Property::COMPUTED:
1428         // It is safe to use [[Put]] here because the boilerplate already
1429         // contains computed properties with an uninitialized value.
1430         if (key->value()->IsInternalizedString()) {
1431           if (property->emit_store()) {
1432             VisitForAccumulatorValue(value);
1433             __ mov(StoreDescriptor::ValueRegister(), result_register());
1434             DCHECK(StoreDescriptor::ValueRegister().is(a0));
1435             __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
1436             __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1437             EmitLoadStoreICSlot(property->GetSlot(0));
1438             CallStoreIC();
1439             PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
1440 
1441             if (NeedsHomeObject(value)) {
1442               EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1443             }
1444           } else {
1445             VisitForEffect(value);
1446           }
1447           break;
1448         }
1449         // Duplicate receiver on stack.
1450         __ ld(a0, MemOperand(sp));
1451         PushOperand(a0);
1452         VisitForStackValue(key);
1453         VisitForStackValue(value);
1454         if (property->emit_store()) {
1455           if (NeedsHomeObject(value)) {
1456             EmitSetHomeObject(value, 2, property->GetSlot());
1457           }
1458           __ li(a0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes.
1459           PushOperand(a0);
1460           CallRuntimeWithOperands(Runtime::kSetProperty);
1461         } else {
1462           DropOperands(3);
1463         }
1464         break;
1465       case ObjectLiteral::Property::PROTOTYPE:
1466         // Duplicate receiver on stack.
1467         __ ld(a0, MemOperand(sp));
1468         PushOperand(a0);
1469         VisitForStackValue(value);
1470         DCHECK(property->emit_store());
1471         CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1472         PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1473                                BailoutState::NO_REGISTERS);
1474         break;
1475       case ObjectLiteral::Property::GETTER:
1476         if (property->emit_store()) {
1477           AccessorTable::Iterator it = accessor_table.lookup(key);
1478           it->second->bailout_id = expr->GetIdForPropertySet(property_index);
1479           it->second->getter = property;
1480         }
1481         break;
1482       case ObjectLiteral::Property::SETTER:
1483         if (property->emit_store()) {
1484           AccessorTable::Iterator it = accessor_table.lookup(key);
1485           it->second->bailout_id = expr->GetIdForPropertySet(property_index);
1486           it->second->setter = property;
1487         }
1488         break;
1489     }
1490   }
1491 
1492   // Emit code to define accessors, using only a single call to the runtime for
1493   // each pair of corresponding getters and setters.
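  // For example, a literal such as
  //   { get x() { return 1; }, set x(v) {} }
  // produces a single accessor_table entry for the key 'x' with both halves
  // filled in, so the pair is defined by one runtime call below; a lone
  // getter or setter leaves the other half null (pushed by EmitAccessor).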
1494   for (AccessorTable::Iterator it = accessor_table.begin();
1495        it != accessor_table.end();
1496        ++it) {
1497     __ ld(a0, MemOperand(sp));  // Duplicate receiver.
1498     PushOperand(a0);
1499     VisitForStackValue(it->first);
1500     EmitAccessor(it->second->getter);
1501     EmitAccessor(it->second->setter);
1502     __ li(a0, Operand(Smi::FromInt(NONE)));
1503     PushOperand(a0);
1504     CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
1505     PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
1506   }
1507 
1508   // Object literals have two parts. The "static" part on the left contains no
1509   // computed property names, and so we can compute its map ahead of time; see
1510   // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1511   // starts with the first computed property name, and continues with all
1512   // properties to its right.  All the code from above initializes the static
1513   // component of the object literal, and arranges for the map of the result to
1514   // reflect the static order in which the keys appear. For the dynamic
1515   // properties, we compile them into a series of "SetOwnProperty" runtime
1516   // calls. This will preserve insertion order.
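  // As an informal example: in
  //   var o = { a: 1, b: 2, [key]: 3, c: 4 };
  // 'a' and 'b' belong to the static part and are covered by the boilerplate
  // map, while '[key]' and everything after it (including 'c') are defined
  // one by one by the runtime calls emitted in the loop below.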
1517   for (; property_index < expr->properties()->length(); property_index++) {
1518     ObjectLiteral::Property* property = expr->properties()->at(property_index);
1519 
1520     Expression* value = property->value();
1521     if (!result_saved) {
1522       PushOperand(v0);  // Save result on the stack
1523       result_saved = true;
1524     }
1525 
1526     __ ld(a0, MemOperand(sp));  // Duplicate receiver.
1527     PushOperand(a0);
1528 
1529     if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1530       DCHECK(!property->is_computed_name());
1531       VisitForStackValue(value);
1532       DCHECK(property->emit_store());
1533       CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1534       PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1535                              BailoutState::NO_REGISTERS);
1536     } else {
1537       EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1538       VisitForStackValue(value);
1539       if (NeedsHomeObject(value)) {
1540         EmitSetHomeObject(value, 2, property->GetSlot());
1541       }
1542 
1543       switch (property->kind()) {
1544         case ObjectLiteral::Property::CONSTANT:
1545         case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1546         case ObjectLiteral::Property::COMPUTED:
1547           if (property->emit_store()) {
1548             PushOperand(Smi::FromInt(NONE));
1549             PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1550             CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
1551             PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1552                                    BailoutState::NO_REGISTERS);
1553           } else {
1554             DropOperands(3);
1555           }
1556           break;
1557 
1558         case ObjectLiteral::Property::PROTOTYPE:
1559           UNREACHABLE();
1560           break;
1561 
1562         case ObjectLiteral::Property::GETTER:
1563           PushOperand(Smi::FromInt(NONE));
1564           CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
1565           break;
1566 
1567         case ObjectLiteral::Property::SETTER:
1568           PushOperand(Smi::FromInt(NONE));
1569           CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
1570           break;
1571       }
1572     }
1573   }
1574 
1575   if (result_saved) {
1576     context()->PlugTOS();
1577   } else {
1578     context()->Plug(v0);
1579   }
1580 }
1581 
1582 
1583 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1584   Comment cmnt(masm_, "[ ArrayLiteral");
1585 
1586   Handle<FixedArray> constant_elements = expr->constant_elements();
1587   bool has_fast_elements =
1588       IsFastObjectElementsKind(expr->constant_elements_kind());
1589 
1590   AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1591   if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1592     // If the only customer of allocation sites is element-kind transitioning,
1593     // we can turn tracking off when there is nowhere left to transition to.
1594     allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1595   }
1596 
1597   __ mov(a0, result_register());
1598   __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1599   __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1600   __ li(a1, Operand(constant_elements));
1601   if (MustCreateArrayLiteralWithRuntime(expr)) {
1602     __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1603     __ Push(a3, a2, a1, a0);
1604     __ CallRuntime(Runtime::kCreateArrayLiteral);
1605   } else {
1606     FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1607     __ CallStub(&stub);
1608   }
1609   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1610 
1611   bool result_saved = false;  // Is the result saved to the stack?
1612   ZoneList<Expression*>* subexprs = expr->values();
1613   int length = subexprs->length();
1614 
1615   // Emit code to evaluate all the non-constant subexpressions and to store
1616   // them into the newly cloned array.
1617   int array_index = 0;
1618   for (; array_index < length; array_index++) {
1619     Expression* subexpr = subexprs->at(array_index);
1620     DCHECK(!subexpr->IsSpread());
1621 
1622     // If the subexpression is a literal or a simple materialized literal it
1623     // is already set in the cloned array.
1624     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1625 
1626     if (!result_saved) {
1627       PushOperand(v0);  // array literal
1628       result_saved = true;
1629     }
1630 
1631     VisitForAccumulatorValue(subexpr);
1632 
1633     __ li(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
1634     __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1635     __ mov(StoreDescriptor::ValueRegister(), result_register());
1636     EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1637     Handle<Code> ic =
1638         CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1639     CallIC(ic);
1640 
1641     PrepareForBailoutForId(expr->GetIdForElement(array_index),
1642                            BailoutState::NO_REGISTERS);
1643   }
1644 
1645   // In case the array literal contains spread expressions it has two parts.
1646   // The first part is the "static" array, which has a literal index and is
1647   // handled above. The second part starts at the first spread expression
1648   // (inclusive), and its elements get appended to the array one by one. Note
1649   // that the number of elements an iterable produces is unknown ahead of time.
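  // As an informal example: for
  //   var a = [1, 2, ...xs, 4];
  // the elements 1 and 2 are stored with the keyed store IC above, while the
  // values produced by xs and the trailing 4 are appended one at a time via
  // Runtime::kAppendElement in the loop below.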
1650   if (array_index < length && result_saved) {
1651     PopOperand(v0);
1652     result_saved = false;
1653   }
1654   for (; array_index < length; array_index++) {
1655     Expression* subexpr = subexprs->at(array_index);
1656 
1657     PushOperand(v0);
1658     DCHECK(!subexpr->IsSpread());
1659     VisitForStackValue(subexpr);
1660     CallRuntimeWithOperands(Runtime::kAppendElement);
1661 
1662     PrepareForBailoutForId(expr->GetIdForElement(array_index),
1663                            BailoutState::NO_REGISTERS);
1664   }
1665 
1666   if (result_saved) {
1667     context()->PlugTOS();
1668   } else {
1669     context()->Plug(v0);
1670   }
1671 }
1672 
1673 
1674 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1675   DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1676 
1677   Comment cmnt(masm_, "[ Assignment");
1678 
1679   Property* property = expr->target()->AsProperty();
1680   LhsKind assign_type = Property::GetAssignType(property);
1681 
1682   // Evaluate LHS expression.
1683   switch (assign_type) {
1684     case VARIABLE:
1685       // Nothing to do here.
1686       break;
1687     case NAMED_PROPERTY:
1688       if (expr->is_compound()) {
1689         // We need the receiver both on the stack and in the register.
1690         VisitForStackValue(property->obj());
1691         __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1692       } else {
1693         VisitForStackValue(property->obj());
1694       }
1695       break;
1696     case NAMED_SUPER_PROPERTY:
1697       VisitForStackValue(
1698           property->obj()->AsSuperPropertyReference()->this_var());
1699       VisitForAccumulatorValue(
1700           property->obj()->AsSuperPropertyReference()->home_object());
1701       PushOperand(result_register());
1702       if (expr->is_compound()) {
1703         const Register scratch = a1;
1704         __ ld(scratch, MemOperand(sp, kPointerSize));
1705         PushOperands(scratch, result_register());
1706       }
1707       break;
1708     case KEYED_SUPER_PROPERTY: {
1709       const Register scratch = a1;
1710       VisitForStackValue(
1711           property->obj()->AsSuperPropertyReference()->this_var());
1712       VisitForAccumulatorValue(
1713           property->obj()->AsSuperPropertyReference()->home_object());
1714       __ Move(scratch, result_register());
1715       VisitForAccumulatorValue(property->key());
1716       PushOperands(scratch, result_register());
1717       if (expr->is_compound()) {
1718         const Register scratch1 = a4;
1719         __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
1720         PushOperands(scratch1, scratch, result_register());
1721       }
1722       break;
1723     }
1724     case KEYED_PROPERTY:
1725       // We need the key and receiver on both the stack and in v0 and a1.
1726       if (expr->is_compound()) {
1727         VisitForStackValue(property->obj());
1728         VisitForStackValue(property->key());
1729         __ ld(LoadDescriptor::ReceiverRegister(),
1730               MemOperand(sp, 1 * kPointerSize));
1731         __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1732       } else {
1733         VisitForStackValue(property->obj());
1734         VisitForStackValue(property->key());
1735       }
1736       break;
1737   }
1738 
1739   // For compound assignments we need another deoptimization point after the
1740   // variable/property load.
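  // For example, 'o.x += y' first loads 'o.x' (with its own bailout id), then
  // evaluates 'y', applies the binary operation, and finally stores the
  // result; each step gets a deoptimization point so unoptimized code can
  // resume mid-sequence without redoing earlier side effects.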
1741   if (expr->is_compound()) {
1742     { AccumulatorValueContext context(this);
1743       switch (assign_type) {
1744         case VARIABLE:
1745           EmitVariableLoad(expr->target()->AsVariableProxy());
1746           PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
1747           break;
1748         case NAMED_PROPERTY:
1749           EmitNamedPropertyLoad(property);
1750           PrepareForBailoutForId(property->LoadId(),
1751                                  BailoutState::TOS_REGISTER);
1752           break;
1753         case NAMED_SUPER_PROPERTY:
1754           EmitNamedSuperPropertyLoad(property);
1755           PrepareForBailoutForId(property->LoadId(),
1756                                  BailoutState::TOS_REGISTER);
1757           break;
1758         case KEYED_SUPER_PROPERTY:
1759           EmitKeyedSuperPropertyLoad(property);
1760           PrepareForBailoutForId(property->LoadId(),
1761                                  BailoutState::TOS_REGISTER);
1762           break;
1763         case KEYED_PROPERTY:
1764           EmitKeyedPropertyLoad(property);
1765           PrepareForBailoutForId(property->LoadId(),
1766                                  BailoutState::TOS_REGISTER);
1767           break;
1768       }
1769     }
1770 
1771     Token::Value op = expr->binary_op();
1772     PushOperand(v0);  // Left operand goes on the stack.
1773     VisitForAccumulatorValue(expr->value());
1774 
1775     AccumulatorValueContext context(this);
1776     if (ShouldInlineSmiCase(op)) {
1777       EmitInlineSmiBinaryOp(expr->binary_operation(),
1778                             op,
1779                             expr->target(),
1780                             expr->value());
1781     } else {
1782       EmitBinaryOp(expr->binary_operation(), op);
1783     }
1784 
1785     // Deoptimization point in case the binary operation may have side effects.
1786     PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
1787   } else {
1788     VisitForAccumulatorValue(expr->value());
1789   }
1790 
1791   SetExpressionPosition(expr);
1792 
1793   // Store the value.
1794   switch (assign_type) {
1795     case VARIABLE:
1796       EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1797                              expr->op(), expr->AssignmentSlot());
1798       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
1799       context()->Plug(v0);
1800       break;
1801     case NAMED_PROPERTY:
1802       EmitNamedPropertyAssignment(expr);
1803       break;
1804     case NAMED_SUPER_PROPERTY:
1805       EmitNamedSuperPropertyStore(property);
1806       context()->Plug(v0);
1807       break;
1808     case KEYED_SUPER_PROPERTY:
1809       EmitKeyedSuperPropertyStore(property);
1810       context()->Plug(v0);
1811       break;
1812     case KEYED_PROPERTY:
1813       EmitKeyedPropertyAssignment(expr);
1814       break;
1815   }
1816 }
1817 
1818 
1819 void FullCodeGenerator::VisitYield(Yield* expr) {
1820   Comment cmnt(masm_, "[ Yield");
1821   SetExpressionPosition(expr);
1822 
1823   // Evaluate yielded value first; the initial iterator definition depends on
1824   // this.  It stays on the stack while we update the iterator.
1825   VisitForStackValue(expr->expression());
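  // Rough shape of the code below: control first jumps to &suspend, where the
  // continuation position and context are saved into the generator object and
  // control returns to the caller. A later resume re-enters at &continuation
  // with the generator object in v0; depending on the resume mode the code
  // continues at &resume (.next), rethrows via kThrow (.throw), or returns a
  // done iterator result (.return).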
1826 
1827   Label suspend, continuation, post_runtime, resume, exception;
1828 
1829   __ jmp(&suspend);
1830   __ bind(&continuation);
1831   // When we arrive here, v0 holds the generator object.
1832   __ RecordGeneratorContinuation();
1833   __ ld(a1, FieldMemOperand(v0, JSGeneratorObject::kResumeModeOffset));
1834   __ ld(v0, FieldMemOperand(v0, JSGeneratorObject::kInputOrDebugPosOffset));
1835   __ Branch(&resume, eq, a1, Operand(Smi::FromInt(JSGeneratorObject::kNext)));
1836   __ Push(result_register());
1837   __ Branch(&exception, eq, a1,
1838             Operand(Smi::FromInt(JSGeneratorObject::kThrow)));
1839   EmitCreateIteratorResult(true);
1840   EmitUnwindAndReturn();
1841 
1842   __ bind(&exception);
1843   __ CallRuntime(Runtime::kThrow);
1844 
1845   __ bind(&suspend);
1846   OperandStackDepthIncrement(1);  // Not popped on this path.
1847   VisitForAccumulatorValue(expr->generator_object());
1848   DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1849   __ li(a1, Operand(Smi::FromInt(continuation.pos())));
1850   __ sd(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
1851   __ sd(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
1852   __ mov(a1, cp);
1853   __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
1854                       kRAHasBeenSaved, kDontSaveFPRegs);
1855   __ Daddu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1856   __ Branch(&post_runtime, eq, sp, Operand(a1));
1857   __ push(v0);  // generator object
1858   __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1859   RestoreContext();
1860   __ bind(&post_runtime);
1861   PopOperand(result_register());
1862   EmitReturnSequence();
1863 
1864   __ bind(&resume);
1865   context()->Plug(result_register());
1866 }
1867 
1868 void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
1869   OperandStackDepthIncrement(2);
1870   __ Push(reg1, reg2);
1871 }
1872 
1873 void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1874                                      Register reg3) {
1875   OperandStackDepthIncrement(3);
1876   __ Push(reg1, reg2, reg3);
1877 }
1878 
1879 void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1880                                      Register reg3, Register reg4) {
1881   OperandStackDepthIncrement(4);
1882   __ Push(reg1, reg2, reg3, reg4);
1883 }
1884 
1885 void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
1886   OperandStackDepthDecrement(2);
1887   __ Pop(reg1, reg2);
1888 }
1889 
1890 void FullCodeGenerator::EmitOperandStackDepthCheck() {
1891   if (FLAG_debug_code) {
1892     int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1893                         operand_stack_depth_ * kPointerSize;
1894     __ Dsubu(v0, fp, sp);
1895     __ Assert(eq, kUnexpectedStackDepth, v0, Operand(expected_diff));
1896   }
1897 }
1898 
1899 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
1900   Label allocate, done_allocate;
1901 
1902   __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &allocate,
1903               NO_ALLOCATION_FLAGS);
1904   __ jmp(&done_allocate);
1905 
1906   __ bind(&allocate);
1907   __ Push(Smi::FromInt(JSIteratorResult::kSize));
1908   __ CallRuntime(Runtime::kAllocateInNewSpace);
1909 
1910   __ bind(&done_allocate);
1911   __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
1912   PopOperand(a2);
1913   __ LoadRoot(a3,
1914               done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1915   __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex);
1916   __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
1917   __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
1918   __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
1919   __ sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
1920   __ sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
1921   STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
1922 }
1923 
1924 
1925 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1926                                               Token::Value op,
1927                                               Expression* left_expr,
1928                                               Expression* right_expr) {
1929   Label done, smi_case, stub_call;
1930 
1931   Register scratch1 = a2;
1932   Register scratch2 = a3;
1933 
1934   // Get the arguments.
1935   Register left = a1;
1936   Register right = a0;
1937   PopOperand(left);
1938   __ mov(a0, result_register());
1939 
1940   // Perform combined smi check on both operands.
1941   __ Or(scratch1, left, Operand(right));
1942   STATIC_ASSERT(kSmiTag == 0);
1943   JumpPatchSite patch_site(masm_);
1944   patch_site.EmitJumpIfSmi(scratch1, &smi_case);
1945 
1946   __ bind(&stub_call);
1947   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1948   CallIC(code, expr->BinaryOperationFeedbackId());
1949   patch_site.EmitPatchInfo();
1950   __ jmp(&done);
1951 
1952   __ bind(&smi_case);
1953   // Smi case. This code works the same way as the smi-smi case in the
1954   // type-recording binary operation stub.
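  // Each case below operates directly on the tagged operands and bails out
  // to &stub_call whenever the result might not be a valid smi: SHR when the
  // unsigned result has bit 31 set (too large for the 32-bit smi payload),
  // ADD/SUB on overflow, and MUL on overflow or when the product would be a
  // negative zero.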
1955   switch (op) {
1956     case Token::SAR:
1957       __ GetLeastBitsFromSmi(scratch1, right, 5);
1958       __ dsrav(right, left, scratch1);
1959       __ And(v0, right, Operand(0xffffffff00000000L));
1960       break;
1961     case Token::SHL: {
1962       __ SmiUntag(scratch1, left);
1963       __ GetLeastBitsFromSmi(scratch2, right, 5);
1964       __ dsllv(scratch1, scratch1, scratch2);
1965       __ SmiTag(v0, scratch1);
1966       break;
1967     }
1968     case Token::SHR: {
1969       __ SmiUntag(scratch1, left);
1970       __ GetLeastBitsFromSmi(scratch2, right, 5);
1971       __ dsrlv(scratch1, scratch1, scratch2);
1972       __ And(scratch2, scratch1, 0x80000000);
1973       __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
1974       __ SmiTag(v0, scratch1);
1975       break;
1976     }
1977     case Token::ADD:
1978       __ DaddBranchOvf(v0, left, Operand(right), &stub_call);
1979       break;
1980     case Token::SUB:
1981       __ DsubBranchOvf(v0, left, Operand(right), &stub_call);
1982       break;
1983     case Token::MUL: {
1984       __ Dmulh(v0, left, right);
1985       __ dsra32(scratch2, v0, 0);
1986       __ sra(scratch1, v0, 31);
1987       __ Branch(USE_DELAY_SLOT, &stub_call, ne, scratch2, Operand(scratch1));
1988       __ SmiTag(v0);
1989       __ Branch(USE_DELAY_SLOT, &done, ne, v0, Operand(zero_reg));
1990       __ Daddu(scratch2, right, left);
1991       __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
1992       DCHECK(Smi::FromInt(0) == 0);
1993       __ mov(v0, zero_reg);
1994       break;
1995     }
1996     case Token::BIT_OR:
1997       __ Or(v0, left, Operand(right));
1998       break;
1999     case Token::BIT_AND:
2000       __ And(v0, left, Operand(right));
2001       break;
2002     case Token::BIT_XOR:
2003       __ Xor(v0, left, Operand(right));
2004       break;
2005     default:
2006       UNREACHABLE();
2007   }
2008 
2009   __ bind(&done);
2010   context()->Plug(v0);
2011 }
2012 
2013 
2014 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2015   for (int i = 0; i < lit->properties()->length(); i++) {
2016     ObjectLiteral::Property* property = lit->properties()->at(i);
2017     Expression* value = property->value();
2018 
2019     Register scratch = a1;
2020     if (property->is_static()) {
2021       __ ld(scratch, MemOperand(sp, kPointerSize));  // constructor
2022     } else {
2023       __ ld(scratch, MemOperand(sp, 0));  // prototype
2024     }
2025     PushOperand(scratch);
2026     EmitPropertyKey(property, lit->GetIdForProperty(i));
2027 
2028     // The static "prototype" property is read-only. The non-computed
2029     // property name case is handled in the parser. Since this is the only
2030     // case where we need to check for an own read-only property, we special
2031     // case it here instead of checking on every property.
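    // As an informal example: in
    //   class C { static [k]() {} }
    // the property key is only known at runtime, so this runtime call is
    // presumably what rejects a key that evaluates to "prototype".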
2032     if (property->is_static() && property->is_computed_name()) {
2033       __ CallRuntime(Runtime::kThrowIfStaticPrototype);
2034       __ push(v0);
2035     }
2036 
2037     VisitForStackValue(value);
2038     if (NeedsHomeObject(value)) {
2039       EmitSetHomeObject(value, 2, property->GetSlot());
2040     }
2041 
2042     switch (property->kind()) {
2043       case ObjectLiteral::Property::CONSTANT:
2044       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2045       case ObjectLiteral::Property::PROTOTYPE:
2046         UNREACHABLE();
2047       case ObjectLiteral::Property::COMPUTED:
2048         PushOperand(Smi::FromInt(DONT_ENUM));
2049         PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
2050         CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
2051         break;
2052 
2053       case ObjectLiteral::Property::GETTER:
2054         PushOperand(Smi::FromInt(DONT_ENUM));
2055         CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
2056         break;
2057 
2058       case ObjectLiteral::Property::SETTER:
2059         PushOperand(Smi::FromInt(DONT_ENUM));
2060         CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
2061         break;
2062 
2063       default:
2064         UNREACHABLE();
2065     }
2066   }
2067 }
2068 
2069 
2070 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2071   __ mov(a0, result_register());
2072   PopOperand(a1);
2073   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2074   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
2075   CallIC(code, expr->BinaryOperationFeedbackId());
2076   patch_site.EmitPatchInfo();
2077   context()->Plug(v0);
2078 }
2079 
2080 
2081 void FullCodeGenerator::EmitAssignment(Expression* expr,
2082                                        FeedbackVectorSlot slot) {
2083   DCHECK(expr->IsValidReferenceExpressionOrThis());
2084 
2085   Property* prop = expr->AsProperty();
2086   LhsKind assign_type = Property::GetAssignType(prop);
2087 
2088   switch (assign_type) {
2089     case VARIABLE: {
2090       Variable* var = expr->AsVariableProxy()->var();
2091       EffectContext context(this);
2092       EmitVariableAssignment(var, Token::ASSIGN, slot);
2093       break;
2094     }
2095     case NAMED_PROPERTY: {
2096       PushOperand(result_register());  // Preserve value.
2097       VisitForAccumulatorValue(prop->obj());
2098       __ mov(StoreDescriptor::ReceiverRegister(), result_register());
2099       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
2100       __ li(StoreDescriptor::NameRegister(),
2101             Operand(prop->key()->AsLiteral()->value()));
2102       EmitLoadStoreICSlot(slot);
2103       CallStoreIC();
2104       break;
2105     }
2106     case NAMED_SUPER_PROPERTY: {
2107       PushOperand(v0);
2108       VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2109       VisitForAccumulatorValue(
2110           prop->obj()->AsSuperPropertyReference()->home_object());
2111       // stack: value, this; v0: home_object
2112       Register scratch = a2;
2113       Register scratch2 = a3;
2114       __ mov(scratch, result_register());             // home_object
2115       __ ld(v0, MemOperand(sp, kPointerSize));        // value
2116       __ ld(scratch2, MemOperand(sp, 0));             // this
2117       __ sd(scratch2, MemOperand(sp, kPointerSize));  // this
2118       __ sd(scratch, MemOperand(sp, 0));              // home_object
2119       // stack: this, home_object; v0: value
2120       EmitNamedSuperPropertyStore(prop);
2121       break;
2122     }
2123     case KEYED_SUPER_PROPERTY: {
2124       PushOperand(v0);
2125       VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2126       VisitForStackValue(
2127           prop->obj()->AsSuperPropertyReference()->home_object());
2128       VisitForAccumulatorValue(prop->key());
2129       Register scratch = a2;
2130       Register scratch2 = a3;
2131       __ ld(scratch2, MemOperand(sp, 2 * kPointerSize));  // value
2132       // stack: value, this, home_object; v0: key, a3: value
2133       __ ld(scratch, MemOperand(sp, kPointerSize));  // this
2134       __ sd(scratch, MemOperand(sp, 2 * kPointerSize));
2135       __ ld(scratch, MemOperand(sp, 0));  // home_object
2136       __ sd(scratch, MemOperand(sp, kPointerSize));
2137       __ sd(v0, MemOperand(sp, 0));
2138       __ Move(v0, scratch2);
2139       // stack: this, home_object, key; v0: value.
2140       EmitKeyedSuperPropertyStore(prop);
2141       break;
2142     }
2143     case KEYED_PROPERTY: {
2144       PushOperand(result_register());  // Preserve value.
2145       VisitForStackValue(prop->obj());
2146       VisitForAccumulatorValue(prop->key());
2147       __ Move(StoreDescriptor::NameRegister(), result_register());
2148       PopOperands(StoreDescriptor::ValueRegister(),
2149                   StoreDescriptor::ReceiverRegister());
2150       EmitLoadStoreICSlot(slot);
2151       Handle<Code> ic =
2152           CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2153       CallIC(ic);
2154       break;
2155     }
2156   }
2157   context()->Plug(v0);
2158 }
2159 
2160 
2161 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2162     Variable* var, MemOperand location) {
2163   __ sd(result_register(), location);
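  // Only context slots need the write barrier below: a context is a heap
  // object, so storing a pointer into it may require remembered-set updates,
  // while stack slots are always scanned directly by the GC.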
2164   if (var->IsContextSlot()) {
2165     // RecordWrite may destroy all its register arguments.
2166     __ Move(a3, result_register());
2167     int offset = Context::SlotOffset(var->index());
2168     __ RecordWriteContextSlot(
2169         a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2170   }
2171 }
2172 
2173 
2174 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2175                                                FeedbackVectorSlot slot) {
2176   if (var->IsUnallocated()) {
2177     // Global var, const, or let.
2178     __ mov(StoreDescriptor::ValueRegister(), result_register());
2179     __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
2180     __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2181     EmitLoadStoreICSlot(slot);
2182     CallStoreIC();
2183 
2184   } else if (var->mode() == LET && op != Token::INIT) {
2185     // Non-initializing assignment to let variable needs a write barrier.
2186     DCHECK(!var->IsLookupSlot());
2187     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2188     Label assign;
2189     MemOperand location = VarOperand(var, a1);
2190     __ ld(a3, location);
2191     __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2192     __ Branch(&assign, ne, a3, Operand(a4));
2193     __ li(a3, Operand(var->name()));
2194     __ push(a3);
2195     __ CallRuntime(Runtime::kThrowReferenceError);
2196     // Perform the assignment.
2197     __ bind(&assign);
2198     EmitStoreToStackLocalOrContextSlot(var, location);
2199 
2200   } else if (var->mode() == CONST && op != Token::INIT) {
2201     // Assignment to const variable needs a write barrier.
2202     DCHECK(!var->IsLookupSlot());
2203     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2204     Label const_error;
2205     MemOperand location = VarOperand(var, a1);
2206     __ ld(a3, location);
2207     __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2208     __ Branch(&const_error, ne, a3, Operand(at));
2209     __ li(a3, Operand(var->name()));
2210     __ push(a3);
2211     __ CallRuntime(Runtime::kThrowReferenceError);
2212     __ bind(&const_error);
2213     __ CallRuntime(Runtime::kThrowConstAssignError);
2214 
2215   } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2216     // Initializing assignment to const {this} needs a write barrier.
2217     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2218     Label uninitialized_this;
2219     MemOperand location = VarOperand(var, a1);
2220     __ ld(a3, location);
2221     __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2222     __ Branch(&uninitialized_this, eq, a3, Operand(at));
2223     __ li(a0, Operand(var->name()));
2224     __ Push(a0);
2225     __ CallRuntime(Runtime::kThrowReferenceError);
2226     __ bind(&uninitialized_this);
2227     EmitStoreToStackLocalOrContextSlot(var, location);
2228 
2229   } else if (!var->is_const_mode() || op == Token::INIT) {
2230     if (var->IsLookupSlot()) {
2231       __ Push(var->name());
2232       __ Push(v0);
2233       __ CallRuntime(is_strict(language_mode())
2234                          ? Runtime::kStoreLookupSlot_Strict
2235                          : Runtime::kStoreLookupSlot_Sloppy);
2236     } else {
2237       // Assignment to var or initializing assignment to let/const in harmony
2238       // mode.
2239       DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2240       MemOperand location = VarOperand(var, a1);
2241       if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
2242         // Check for an uninitialized let binding.
2243         __ ld(a2, location);
2244         __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2245         __ Check(eq, kLetBindingReInitialization, a2, Operand(a4));
2246       }
2247       EmitStoreToStackLocalOrContextSlot(var, location);
2248     }
2249 
2250   } else {
2251     DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2252     if (is_strict(language_mode())) {
2253       __ CallRuntime(Runtime::kThrowConstAssignError);
2254     }
2255     // Silently ignore store in sloppy mode.
2256   }
2257 }
2258 
2259 
2260 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2261   // Assignment to a property, using a named store IC.
2262   Property* prop = expr->target()->AsProperty();
2263   DCHECK(prop != NULL);
2264   DCHECK(prop->key()->IsLiteral());
2265 
2266   __ mov(StoreDescriptor::ValueRegister(), result_register());
2267   __ li(StoreDescriptor::NameRegister(),
2268         Operand(prop->key()->AsLiteral()->value()));
2269   PopOperand(StoreDescriptor::ReceiverRegister());
2270   EmitLoadStoreICSlot(expr->AssignmentSlot());
2271   CallStoreIC();
2272 
2273   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2274   context()->Plug(v0);
2275 }
2276 
2277 
2278 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2279   // Assignment to named property of super.
2280   // v0 : value
2281   // stack : receiver ('this'), home_object
2282   DCHECK(prop != NULL);
2283   Literal* key = prop->key()->AsLiteral();
2284   DCHECK(key != NULL);
2285 
2286   PushOperand(key->value());
2287   PushOperand(v0);
2288   CallRuntimeWithOperands(is_strict(language_mode())
2289                               ? Runtime::kStoreToSuper_Strict
2290                               : Runtime::kStoreToSuper_Sloppy);
2291 }
2292 
2293 
2294 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2295   // Assignment to keyed property of super.
2296   // v0 : value
2297   // stack : receiver ('this'), home_object, key
2298   DCHECK(prop != NULL);
2299 
2300   PushOperand(v0);
2301   CallRuntimeWithOperands(is_strict(language_mode())
2302                               ? Runtime::kStoreKeyedToSuper_Strict
2303                               : Runtime::kStoreKeyedToSuper_Sloppy);
2304 }
2305 
2306 
2307 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2308   // Assignment to a property, using a keyed store IC.
2309   // Call keyed store IC.
2310   // The arguments are:
2311   // - a0 is the value,
2312   // - a1 is the key,
2313   // - a2 is the receiver.
2314   __ mov(StoreDescriptor::ValueRegister(), result_register());
2315   PopOperands(StoreDescriptor::ReceiverRegister(),
2316               StoreDescriptor::NameRegister());
2317   DCHECK(StoreDescriptor::ValueRegister().is(a0));
2318 
2319   Handle<Code> ic =
2320       CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2321   EmitLoadStoreICSlot(expr->AssignmentSlot());
2322   CallIC(ic);
2323 
2324   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2325   context()->Plug(v0);
2326 }
2327 
2328 
2329 void FullCodeGenerator::CallIC(Handle<Code> code,
2330                                TypeFeedbackId id) {
2331   ic_total_count_++;
2332   __ Call(code, RelocInfo::CODE_TARGET, id);
2333 }
2334 
2335 
2336 // Code common for calls using the IC.
2337 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2338   Expression* callee = expr->expression();
2339 
2340   // Get the target function.
2341   ConvertReceiverMode convert_mode;
2342   if (callee->IsVariableProxy()) {
2343     { StackValueContext context(this);
2344       EmitVariableLoad(callee->AsVariableProxy());
2345       PrepareForBailout(callee, BailoutState::NO_REGISTERS);
2346     }
2347     // Push undefined as receiver. This is patched in the method prologue if it
2348     // is a sloppy mode method.
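    // For example, a plain call 'f()' pushes undefined here; a sloppy-mode
    // callee then replaces the undefined receiver with the global proxy in
    // its prologue, while a strict-mode callee sees undefined unchanged.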
2349     __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
2350     PushOperand(at);
2351     convert_mode = ConvertReceiverMode::kNullOrUndefined;
2352   } else {
2353     // Load the function from the receiver.
2354     DCHECK(callee->IsProperty());
2355     DCHECK(!callee->AsProperty()->IsSuperAccess());
2356     __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2357     EmitNamedPropertyLoad(callee->AsProperty());
2358     PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2359                            BailoutState::TOS_REGISTER);
2360     // Push the target function under the receiver.
2361     __ ld(at, MemOperand(sp, 0));
2362     PushOperand(at);
2363     __ sd(v0, MemOperand(sp, kPointerSize));
2364     convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2365   }
2366 
2367   EmitCall(expr, convert_mode);
2368 }
2369 
2370 
2371 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2372   SetExpressionPosition(expr);
2373   Expression* callee = expr->expression();
2374   DCHECK(callee->IsProperty());
2375   Property* prop = callee->AsProperty();
2376   DCHECK(prop->IsSuperAccess());
2377 
2378   Literal* key = prop->key()->AsLiteral();
2379   DCHECK(!key->value()->IsSmi());
2380   // Load the function from the receiver.
2381   const Register scratch = a1;
2382   SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2383   VisitForAccumulatorValue(super_ref->home_object());
2384   __ mov(scratch, v0);
2385   VisitForAccumulatorValue(super_ref->this_var());
2386   PushOperands(scratch, v0, v0, scratch);
2387   PushOperand(key->value());
2388 
2389   // Stack here:
2390   //  - home_object
2391   //  - this (receiver)
2392   //  - this (receiver) <-- LoadFromSuper will pop here and below.
2393   //  - home_object
2394   //  - key
2395   CallRuntimeWithOperands(Runtime::kLoadFromSuper);
2396   PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2397 
2398   // Replace home_object with target function.
2399   __ sd(v0, MemOperand(sp, kPointerSize));
2400 
2401   // Stack here:
2402   // - target function
2403   // - this (receiver)
2404   EmitCall(expr);
2405 }
2406 
2407 
2408 // Code common for calls using the IC.
2409 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2410                                                 Expression* key) {
2411   // Load the key.
2412   VisitForAccumulatorValue(key);
2413 
2414   Expression* callee = expr->expression();
2415 
2416   // Load the function from the receiver.
2417   DCHECK(callee->IsProperty());
2418   __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2419   __ Move(LoadDescriptor::NameRegister(), v0);
2420   EmitKeyedPropertyLoad(callee->AsProperty());
2421   PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2422                          BailoutState::TOS_REGISTER);
2423 
2424   // Push the target function under the receiver.
2425   __ ld(at, MemOperand(sp, 0));
2426   PushOperand(at);
2427   __ sd(v0, MemOperand(sp, kPointerSize));
2428 
2429   EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2430 }
2431 
2432 
2433 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2434   Expression* callee = expr->expression();
2435   DCHECK(callee->IsProperty());
2436   Property* prop = callee->AsProperty();
2437   DCHECK(prop->IsSuperAccess());
2438 
2439   SetExpressionPosition(prop);
2440   // Load the function from the receiver.
2441   const Register scratch = a1;
2442   SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2443   VisitForAccumulatorValue(super_ref->home_object());
2444   __ Move(scratch, v0);
2445   VisitForAccumulatorValue(super_ref->this_var());
2446   PushOperands(scratch, v0, v0, scratch);
2447   VisitForStackValue(prop->key());
2448 
2449   // Stack here:
2450   //  - home_object
2451   //  - this (receiver)
2452   //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2453   //  - home_object
2454   //  - key
2455   CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
2456   PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2457 
2458   // Replace home_object with target function.
2459   __ sd(v0, MemOperand(sp, kPointerSize));
2460 
2461   // Stack here:
2462   // - target function
2463   // - this (receiver)
2464   EmitCall(expr);
2465 }
2466 
2467 
2468 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2469   // Load the arguments.
2470   ZoneList<Expression*>* args = expr->arguments();
2471   int arg_count = args->length();
2472   for (int i = 0; i < arg_count; i++) {
2473     VisitForStackValue(args->at(i));
2474   }
2475 
2476   PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
2477   // Record source position of the IC call.
2478   SetCallPosition(expr, expr->tail_call_mode());
2479   if (expr->tail_call_mode() == TailCallMode::kAllow) {
2480     if (FLAG_trace) {
2481       __ CallRuntime(Runtime::kTraceTailCall);
2482     }
2483     // Update profiling counters before the tail call since we will
2484     // not return to this function.
2485     EmitProfilingCounterHandlingForReturnSequence(true);
2486   }
2487   Handle<Code> ic =
2488       CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
2489           .code();
2490   __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
2491   __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2492   // Don't assign a type feedback id to the IC, since type feedback is provided
2493   // by the vector above.
2494   CallIC(ic);
2495   OperandStackDepthDecrement(arg_count + 1);
2496 
2497   RecordJSReturnSite(expr);
2498   RestoreContext();
2499   context()->DropAndPlug(1, v0);
2500 }
2501 
2502 void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
2503   int arg_count = expr->arguments()->length();
2504   // a6: copy of the first argument or undefined if it doesn't exist.
2505   if (arg_count > 0) {
2506     __ ld(a6, MemOperand(sp, arg_count * kPointerSize));
2507   } else {
2508     __ LoadRoot(a6, Heap::kUndefinedValueRootIndex);
2509   }
2510 
2511   // a5: the enclosing function.
2512   __ ld(a5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2513 
2514   // a4: the language mode.
2515   __ li(a4, Operand(Smi::FromInt(language_mode())));
2516 
2517   // a1: the start position of the scope the call resides in.
2518   __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
2519 
2520   // a0: the source position of the eval call.
2521   __ li(a0, Operand(Smi::FromInt(expr->position())));
2522 
2523   // Do the runtime call.
2524   __ Push(a6, a5, a4, a1, a0);
2525   __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2526 }
2527 
2528 
2529 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2530 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2531   VariableProxy* callee = expr->expression()->AsVariableProxy();
2532   if (callee->var()->IsLookupSlot()) {
2533     Label slow, done;
2534 
2535     SetExpressionPosition(callee);
2536     // Generate code for loading from variables potentially shadowed by
2537     // eval-introduced variables.
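    // As an informal example: in
    //   function f() { eval("var g = function() {}"); return g(); }
    // 'g' cannot be bound statically, so any fast path emitted here is
    // guarded and the &slow path performs a full runtime lookup.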
2538     EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2539 
2540     __ bind(&slow);
2541     // Call the runtime to find the function to call (returned in v0)
2542     // and the object holding it (returned in v1).
2543     __ Push(callee->name());
2544     __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2545     PushOperands(v0, v1);  // Function, receiver.
2546     PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);
2547 
2548     // If fast case code has been generated, emit code to push the
2549     // function and receiver and have the slow path jump around this
2550     // code.
2551     if (done.is_linked()) {
2552       Label call;
2553       __ Branch(&call);
2554       __ bind(&done);
2555       // Push function.
2556       __ push(v0);
2557       // The receiver is implicitly the global receiver. Indicate this
2558       // by passing undefined to the call builtin.
2559       __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2560       __ push(a1);
2561       __ bind(&call);
2562     }
2563   } else {
2564     VisitForStackValue(callee);
2565     // refEnv.WithBaseObject()
2566     __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2567     PushOperand(a2);  // Reserved receiver slot.
2568   }
2569 }
2570 
2571 
2572 void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
2573   // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval
2574   // to resolve the function we need to call.  Then we call the resolved
2575   // function using the given arguments.
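  // For example, for 'eval(src)' the callee may be the real global eval
  // (a direct eval that must see the caller's scope) or something else that
  // merely shadows the name; the runtime call below decides which case
  // applies and returns the function that should actually be invoked.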
2576   ZoneList<Expression*>* args = expr->arguments();
2577   int arg_count = args->length();
2578   PushCalleeAndWithBaseObject(expr);
2579 
2580   // Push the arguments.
2581   for (int i = 0; i < arg_count; i++) {
2582     VisitForStackValue(args->at(i));
2583   }
2584 
2585   // Push a copy of the function (found below the arguments) and
2586   // resolve eval.
2587   __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2588   __ push(a1);
2589   EmitResolvePossiblyDirectEval(expr);
2590 
2591   // Touch up the stack with the resolved function.
2592   __ sd(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2593 
2594   PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);
2595   // Record source position for debugger.
2596   SetCallPosition(expr);
2597   __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2598   __ li(a0, Operand(arg_count));
2599   __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2600                                       expr->tail_call_mode()),
2601           RelocInfo::CODE_TARGET);
2602   OperandStackDepthDecrement(arg_count + 1);
2603   RecordJSReturnSite(expr);
2604   RestoreContext();
2605   context()->DropAndPlug(1, v0);
2606 }
2607 
2608 
2609 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2610   Comment cmnt(masm_, "[ CallNew");
2611   // According to ECMA-262, section 11.2.2, page 44, the function
2612   // expression in new calls must be evaluated before the
2613   // arguments.
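  // For example, in 'new f(g())' the expression 'f' is evaluated and pushed
  // before 'g()' runs, so side effects in 'g' cannot change which
  // constructor is invoked.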
2614 
2615   // Push constructor on the stack.  If it's not a function it's used as
2616   // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2617   // ignored.
2618   DCHECK(!expr->expression()->IsSuperPropertyReference());
2619   VisitForStackValue(expr->expression());
2620 
2621   // Push the arguments ("left-to-right") on the stack.
2622   ZoneList<Expression*>* args = expr->arguments();
2623   int arg_count = args->length();
2624   for (int i = 0; i < arg_count; i++) {
2625     VisitForStackValue(args->at(i));
2626   }
2627 
2628   // Call the construct call builtin that handles allocation and
2629   // constructor invocation.
2630   SetConstructCallPosition(expr);
2631 
2632   // Load function and argument count into a1 and a0.
2633   __ li(a0, Operand(arg_count));
2634   __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
2635 
2636   // Record call targets in unoptimized code.
2637   __ EmitLoadTypeFeedbackVector(a2);
2638   __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
2639 
2640   CallConstructStub stub(isolate());
2641   __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
2642   OperandStackDepthDecrement(arg_count + 1);
2643   PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
2644   RestoreContext();
2645   context()->Plug(v0);
2646 }
2647 
2648 
2649 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2650   SuperCallReference* super_call_ref =
2651       expr->expression()->AsSuperCallReference();
2652   DCHECK_NOT_NULL(super_call_ref);
2653 
2654   // Push the super constructor target on the stack (may be null,
2655   // but the Construct builtin can deal with that properly).
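  // For example, for 'class B extends A {}' the [[Prototype]] of B is A, so
  // loading the prototype from this_function's map below yields the super
  // constructor to pass to the Construct builtin.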
2656   VisitForAccumulatorValue(super_call_ref->this_function_var());
2657   __ AssertFunction(result_register());
2658   __ ld(result_register(),
2659         FieldMemOperand(result_register(), HeapObject::kMapOffset));
2660   __ ld(result_register(),
2661         FieldMemOperand(result_register(), Map::kPrototypeOffset));
2662   PushOperand(result_register());
2663 
2664   // Push the arguments ("left-to-right") on the stack.
2665   ZoneList<Expression*>* args = expr->arguments();
2666   int arg_count = args->length();
2667   for (int i = 0; i < arg_count; i++) {
2668     VisitForStackValue(args->at(i));
2669   }
2670 
2671   // Call the construct call builtin that handles allocation and
2672   // constructor invocation.
2673   SetConstructCallPosition(expr);
2674 
2675   // Load new target into a3.
2676   VisitForAccumulatorValue(super_call_ref->new_target_var());
2677   __ mov(a3, result_register());
2678 
2679   // Load function and argument count into a1 and a0.
2680   __ li(a0, Operand(arg_count));
2681   __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
2682 
2683   __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2684   OperandStackDepthDecrement(arg_count + 1);
2685 
2686   RecordJSReturnSite(expr);
2687   RestoreContext();
2688   context()->Plug(v0);
2689 }
2690 
2691 
2692 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2693   ZoneList<Expression*>* args = expr->arguments();
2694   DCHECK(args->length() == 1);
2695 
2696   VisitForAccumulatorValue(args->at(0));
2697 
2698   Label materialize_true, materialize_false;
2699   Label* if_true = NULL;
2700   Label* if_false = NULL;
2701   Label* fall_through = NULL;
2702   context()->PrepareTest(&materialize_true, &materialize_false,
2703                          &if_true, &if_false, &fall_through);
2704 
2705   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2706   __ SmiTst(v0, a4);
2707   Split(eq, a4, Operand(zero_reg), if_true, if_false, fall_through);
2708 
2709   context()->Plug(if_true, if_false);
2710 }
2711 
2712 
2713 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2714   ZoneList<Expression*>* args = expr->arguments();
2715   DCHECK(args->length() == 1);
2716 
2717   VisitForAccumulatorValue(args->at(0));
2718 
2719   Label materialize_true, materialize_false;
2720   Label* if_true = NULL;
2721   Label* if_false = NULL;
2722   Label* fall_through = NULL;
2723   context()->PrepareTest(&materialize_true, &materialize_false,
2724                          &if_true, &if_false, &fall_through);
2725 
2726   __ JumpIfSmi(v0, if_false);
2727   __ GetObjectType(v0, a1, a1);
2728   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2729   Split(ge, a1, Operand(FIRST_JS_RECEIVER_TYPE),
2730         if_true, if_false, fall_through);
2731 
2732   context()->Plug(if_true, if_false);
2733 }
2734 
2735 
EmitIsArray(CallRuntime * expr)2736 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2737   ZoneList<Expression*>* args = expr->arguments();
2738   DCHECK(args->length() == 1);
2739 
2740   VisitForAccumulatorValue(args->at(0));
2741 
2742   Label materialize_true, materialize_false;
2743   Label* if_true = NULL;
2744   Label* if_false = NULL;
2745   Label* fall_through = NULL;
2746   context()->PrepareTest(&materialize_true, &materialize_false,
2747                          &if_true, &if_false, &fall_through);
2748 
2749   __ JumpIfSmi(v0, if_false);
2750   __ GetObjectType(v0, a1, a1);
2751   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2752   Split(eq, a1, Operand(JS_ARRAY_TYPE),
2753         if_true, if_false, fall_through);
2754 
2755   context()->Plug(if_true, if_false);
2756 }
2757 
2758 
EmitIsTypedArray(CallRuntime * expr)2759 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2760   ZoneList<Expression*>* args = expr->arguments();
2761   DCHECK(args->length() == 1);
2762 
2763   VisitForAccumulatorValue(args->at(0));
2764 
2765   Label materialize_true, materialize_false;
2766   Label* if_true = NULL;
2767   Label* if_false = NULL;
2768   Label* fall_through = NULL;
2769   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2770                          &if_false, &fall_through);
2771 
2772   __ JumpIfSmi(v0, if_false);
2773   __ GetObjectType(v0, a1, a1);
2774   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2775   Split(eq, a1, Operand(JS_TYPED_ARRAY_TYPE), if_true, if_false, fall_through);
2776 
2777   context()->Plug(if_true, if_false);
2778 }
2779 
2780 
EmitIsRegExp(CallRuntime * expr)2781 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2782   ZoneList<Expression*>* args = expr->arguments();
2783   DCHECK(args->length() == 1);
2784 
2785   VisitForAccumulatorValue(args->at(0));
2786 
2787   Label materialize_true, materialize_false;
2788   Label* if_true = NULL;
2789   Label* if_false = NULL;
2790   Label* fall_through = NULL;
2791   context()->PrepareTest(&materialize_true, &materialize_false,
2792                          &if_true, &if_false, &fall_through);
2793 
2794   __ JumpIfSmi(v0, if_false);
2795   __ GetObjectType(v0, a1, a1);
2796   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2797   Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
2798 
2799   context()->Plug(if_true, if_false);
2800 }
2801 
2802 
EmitIsJSProxy(CallRuntime * expr)2803 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2804   ZoneList<Expression*>* args = expr->arguments();
2805   DCHECK(args->length() == 1);
2806 
2807   VisitForAccumulatorValue(args->at(0));
2808 
2809   Label materialize_true, materialize_false;
2810   Label* if_true = NULL;
2811   Label* if_false = NULL;
2812   Label* fall_through = NULL;
2813   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2814                          &if_false, &fall_through);
2815 
2816   __ JumpIfSmi(v0, if_false);
2817   __ GetObjectType(v0, a1, a1);
2818   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2819   Split(eq, a1, Operand(JS_PROXY_TYPE), if_true, if_false, fall_through);
2820 
2821   context()->Plug(if_true, if_false);
2822 }
2823 
2824 
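// Illustrative examples (hypothetical, not from this file): %_ClassOf
// approximates the internal [[Class]] of a value, roughly
//
//   %_ClassOf([])        // "Array"
//   %_ClassOf(Math.sin)  // "Function"
//   %_ClassOf(42)        // null (not a JSReceiver)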
void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(v0, &null);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ GetObjectType(v0, v0, a1);  // Map is now in v0.
  __ Branch(&null, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));

  // Return 'Function' for JSFunction and JSBoundFunction objects.
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  __ Branch(&function, hs, a1, Operand(FIRST_FUNCTION_TYPE));

  // Check if the constructor in the map is a JS function.
  Register instance_type = a2;
  __ GetMapConstructor(v0, v0, a1, instance_type);
  __ Branch(&non_function_constructor, ne, instance_type,
            Operand(JS_FUNCTION_TYPE));

  // v0 now contains the constructor function. Grab the
  // instance class name from there.
  __ ld(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
  __ ld(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ Branch(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(v0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(v0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(v0, &done);
  // If the object is not a value type, return the object.
  __ GetObjectType(v0, a1, a1);
  __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));

  __ ld(v0, FieldMemOperand(v0, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(v0);
}


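// Illustrative note (editorial, not original commentary): the string
// intrinsics below split into an inline fast path and an out-of-line slow
// path. GenerateFast emits the inline code; GenerateSlow is reached only
// through the labels passed to the generator (e.g. a non-smi index that
// still needs conversion).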
void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(v0, a1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(a1);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register result = v0;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
                                      &need_conversion, &index_out_of_range);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


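// Illustrative note (editorial, not original commentary): %_Call(target,
// receiver, ...args) pushes everything left-to-right, reloads the target
// from the stack into a1, and dispatches through the generic Call builtin;
// presumably self-hosted builtins use it to make calls with an explicit
// receiver.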
void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  // Move target to a1.
  int const argc = args->length() - 2;
  __ ld(a1, MemOperand(sp, (argc + 1) * kPointerSize));
  // Call the target.
  __ li(a0, Operand(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, v0);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ lwu(a0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(v0);

  __ lwu(v0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ IndexFromHash(v0, v0);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(v0);
  __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ ld(v0, FieldMemOperand(v0, Map::kPrototypeOffset));
  context()->Plug(v0);
}

void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ li(at, Operand(debug_is_active));
  __ lbu(v0, MemOperand(at));
  __ SmiTag(v0);
  context()->Plug(v0);
}


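// Illustrative note (editorial, not original commentary): an iterator result
// is a plain JSObject of shape { value, done }. The fast path below
// allocates it inline and stores the map, empty properties/elements, value
// and done fields directly, falling back to the runtime if the inline
// allocation fails.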
void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &runtime,
              NO_ALLOCATION_FLAGS);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
  __ Pop(a2, a3);
  __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex);
  __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
  __ sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
  __ sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done);

  __ bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadNativeContextSlot(expr->context_index(), v0);
  PushOperand(v0);

  // Push undefined as the receiver.
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  PushOperand(v0);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ li(a0, Operand(arg_count));
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}


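// Illustrative mapping (hypothetical JS, not from this file) of the unary
// cases handled below:
//
//   delete o.p   // runtime call; strict vs. sloppy semantics
//   void e       // evaluate e for effect, produce undefined
//   !e           // visit e for control with swapped labels
//   typeof e     // TypeofStub applied to the accumulator value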
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(v0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          __ LoadGlobalObject(a2);
          __ li(a1, Operand(var->name()));
          __ Push(a2, a1);
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(v0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          DCHECK(!context_register().is(a2));
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(v0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(v0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(v0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ mov(a3, v0);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(v0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


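// Illustrative note (editorial, not original commentary): for postfix
// operations the old value is the expression result, e.g. with x == 5,
// "x++" yields 5 while storing 6 back. That is why a dummy slot is reserved
// below for named/keyed properties: the saved old value is stored under the
// receiver (and key) already on the operand stack.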
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ li(at, Operand(Smi::FromInt(0)));
      PushOperand(at);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        PushOperand(result_register());
        const Register scratch = a1;
        __ ld(scratch, MemOperand(sp, kPointerSize));
        PushOperands(scratch, result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        const Register scratch = a1;
        const Register scratch1 = a4;
        __ Move(scratch, result_register());
        VisitForAccumulatorValue(prop->key());
        PushOperands(scratch, result_register());
        __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
        PushOperands(scratch1, scratch, result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ ld(LoadDescriptor::ReceiverRegister(),
              MemOperand(sp, 1 * kPointerSize));
        __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
  } else {
    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  __ mov(a0, v0);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(v0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(v0);
            break;
          case NAMED_PROPERTY:
            __ sd(v0, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
            __ sd(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ sd(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ sd(v0, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }

    Register scratch1 = a1;
    __ li(scratch1, Operand(Smi::FromInt(count_value)));
    __ DaddBranchNoOvf(v0, v0, Operand(scratch1), &done);
    // Call stub. Undo operation first.
    __ Move(v0, a0);
    __ jmp(&stub_call);
    __ bind(&slow);
  }

  // Convert old value into a number.
  __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(v0);
          break;
        case NAMED_PROPERTY:
          __ sd(v0, MemOperand(sp, kPointerSize));
          break;
        case NAMED_SUPER_PROPERTY:
          __ sd(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ sd(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_SUPER_PROPERTY:
          __ sd(v0, MemOperand(sp, 3 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(count_value)));

  SetExpressionPosition(expr);

  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in v0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(),
                                 BailoutState::TOS_REGISTER);
          context.Plug(v0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(v0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      __ li(StoreDescriptor::NameRegister(),
            Operand(prop->key()->AsLiteral()->value()));
      PopOperand(StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(expr->CountSlot());
      CallStoreIC();
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      PopOperands(StoreDescriptor::ReceiverRegister(),
                  StoreDescriptor::NameRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      EmitLoadStoreICSlot(expr->CountSlot());
      CallIC(ic);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
  }
}


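// Illustrative note (editorial, not original commentary): this fast path
// handles literal comparisons of the form
//
//   typeof x === "number"   // and the other literal type strings
//
// by testing the value's map or instance type directly, without ever
// materializing the typeof result string.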
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(v0, if_true);
    __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(lt, a1, Operand(FIRST_NONSTRING_TYPE), if_true, if_false,
          fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ LoadRoot(at, Heap::kFalseValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    __ Branch(if_false, eq, v0, Operand(at));
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => true.
    __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(v0, if_false);
    __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1,
           Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(eq, a1, Operand(1 << Map::kIsCallable), if_true, if_false,
          fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(v0, if_false);
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
    // Check for callable or undetectable objects => false.
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1,
           Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)    \
  } else if (String::Equals(check, factory->type##_string())) {  \
    __ JumpIfSmi(v0, if_false);                                  \
    __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));      \
    __ LoadRoot(at, Heap::k##Type##MapRootIndex);                \
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
    // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


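// Illustrative note (editorial, not original commentary): in the generic
// comparison below, the inline smi path is skipped until the CompareIC
// patches the JumpPatchSite marker; after that, e.g. "a < b" on two smis is
// decided by the inline branch without calling the IC.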
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(a4, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      __ mov(a0, result_register());
      PopOperand(a1);
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(a4, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cc = CompareIC::ComputeCondition(op);
      __ mov(a0, result_register());
      PopOperand(a1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ Or(a2, a0, Operand(a1));
        patch_site.EmitJumpIfNotSmi(a2, &slow_case);
        Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(a1, nil_value);
    Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
  } else {
    __ JumpIfSmi(v0, if_false);
    __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


Register FullCodeGenerator::result_register() {
  return v0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}

void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  // DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  DCHECK(IsAligned(frame_offset, kPointerSize));
  //  __ sw(value, MemOperand(fp, frame_offset));
  __ ld(value, MemOperand(fp, frame_offset));
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  // DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  DCHECK(IsAligned(frame_offset, kPointerSize));
  //  __ sw(value, MemOperand(fp, frame_offset));
  __ sd(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ ld(dst, ContextMemOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ LoadNativeContextSlot(Context::CLOSURE_INDEX, at);
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    __ ld(at, ContextMemOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  PushOperand(at);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ ld(a1, MemOperand(at));
  PushOperand(a1);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Restore pending message from stack.
  PopOperand(a1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ sd(a1, MemOperand(at));
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(a1));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
  __ li(at, Operand(pending_message_obj));
  __ sd(a1, MemOperand(at));
}


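// Illustrative note (editorial, not original commentary): when control
// leaves a try..finally early (return, throw, break, continue), the pending
// command is encoded as a token and deferred until the finally block has
// run; the loop below re-dispatches on that token, e.g. a deferred return
// resumes through EmitUnwindAndReturn.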
void FullCodeGenerator::DeferredCommands::EmitCommands() {
  __ Pop(result_register());  // Restore the accumulator.
  __ Pop(a1);                 // Get the token.
  for (DeferredCommand cmd : commands_) {
    Label skip;
    __ li(at, Operand(Smi::FromInt(cmd.token)));
    __ Branch(&skip, ne, a1, Operand(at));
    switch (cmd.command) {
      case kReturn:
        codegen_->EmitUnwindAndReturn();
        break;
      case kThrow:
        __ Push(result_register());
        __ CallRuntime(Runtime::kReThrow);
        break;
      case kContinue:
        codegen_->EmitContinue(cmd.target);
        break;
      case kBreak:
        codegen_->EmitBreak(cmd.target);
        break;
    }
    __ bind(&skip);
  }
}

#undef __


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address pc_immediate_load_address =
      Assembler::target_address_from_return_address(pc);
  Address branch_address = pc_immediate_load_address - 2 * kInstrSize;
  Isolate* isolate = unoptimized_code->GetIsolate();
  CodePatcher patcher(isolate, branch_address, 1);

  switch (target_state) {
    case INTERRUPT:
      // slt  at, a3, zero_reg (in case of count based interrupts)
      // beq  at, zero_reg, ok
      // lui  t9, <interrupt stub address> upper
      // ori  t9, <interrupt stub address> u-middle
      // dsll t9, t9, 16
      // ori  t9, <interrupt stub address> lower
      // jalr t9
      // nop
      // ok-label ----- pc_after points here
      patcher.masm()->slt(at, a3, zero_reg);
      break;
    case ON_STACK_REPLACEMENT:
      // addiu at, zero_reg, 1
      // beq  at, zero_reg, ok  ;; Not changed
      // lui  t9, <on-stack replacement address> upper
      // ori  t9, <on-stack replacement address> middle
      // dsll t9, t9, 16
      // ori  t9, <on-stack replacement address> lower
      // jalr t9  ;; Not changed
      // nop  ;; Not changed
      // ok-label ----- pc_after points here
      patcher.masm()->daddiu(at, zero_reg, 1);
      break;
  }
  // Replace the stack check address in the load-immediate (6-instr sequence)
  // with the entry address of the replacement code.
  Assembler::set_target_address_at(isolate, pc_immediate_load_address,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address pc_immediate_load_address =
      Assembler::target_address_from_return_address(pc);
  Address branch_address = pc_immediate_load_address - 2 * kInstrSize;

  DCHECK(Assembler::IsBeq(Assembler::instr_at(branch_address + kInstrSize)));
  if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
    DCHECK(reinterpret_cast<uint64_t>(
        Assembler::target_address_at(pc_immediate_load_address)) ==
           reinterpret_cast<uint64_t>(
               isolate->builtins()->InterruptCheck()->entry()));
    return INTERRUPT;
  }

  DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));

  DCHECK(reinterpret_cast<uint64_t>(
             Assembler::target_address_at(pc_immediate_load_address)) ==
         reinterpret_cast<uint64_t>(
             isolate->builtins()->OnStackReplacement()->entry()));
  return ON_STACK_REPLACEMENT;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS64