1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #if V8_TARGET_ARCH_MIPS64
6 
7 // Note on the MIPS implementation:
8 //
9 // The result_register() for MIPS is the 'v0' register, which is defined
10 // by the ABI to contain function return values. However, the first
11 // parameter to a function is defined to be 'a0'. So there are many
12 // places where we have to move a previous result in v0 to a0 for the
13 // next call: mov(a0, v0). This is not needed on the other architectures.
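//
// Illustrative sketch of that pattern (not a quote from this file; the stub
// name is hypothetical):
//
//   __ mov(a0, v0);           // move the previous result into the first arg
//   __ CallStub(&some_stub);  // the callee then reads its operand from a0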
14 
15 #include "src/ast/scopes.h"
16 #include "src/code-factory.h"
17 #include "src/code-stubs.h"
18 #include "src/codegen.h"
19 #include "src/debug/debug.h"
20 #include "src/full-codegen/full-codegen.h"
21 #include "src/ic/ic.h"
22 #include "src/parsing/parser.h"
23 
24 #include "src/mips64/code-stubs-mips64.h"
25 #include "src/mips64/macro-assembler-mips64.h"
26 
27 namespace v8 {
28 namespace internal {
29 
30 #define __ ACCESS_MASM(masm_)
31 
32 
33 // A patch site is a location in the code that can be patched. This class
34 // has a number of methods to emit the patchable code and the method
35 // EmitPatchInfo to record a marker back to the patchable code. The
36 // marker is an andi zero_reg, rx, #yyyy instruction, where rx * 0x0000ffff +
37 // yyyy (yyyy being the raw 16-bit immediate value) is the delta from the pc
38 // to the first instruction of the patchable code.
39 // The marker instruction is effectively a NOP (its destination is zero_reg)
40 // and will never be emitted by normal code.
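//
// Illustrative encoding example (assumed numbers, not taken from a real
// dump): for a delta of 12 instructions, EmitPatchInfo emits
// andi zero_reg, zero_reg, 12 (12 / kImm16Mask == 0, i.e. register code 0);
// a hypothetical delta of 70000 would encode as andi zero_reg, at, 4465,
// since 70000 / 0xffff == 1 (register code 1 is 'at') and
// 70000 % 0xffff == 4465.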
41 class JumpPatchSite BASE_EMBEDDED {
42  public:
43   explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
44 #ifdef DEBUG
45     info_emitted_ = false;
46 #endif
47   }
48 
49   ~JumpPatchSite() {
50     DCHECK(patch_site_.is_bound() == info_emitted_);
51   }
52 
53   // When initially emitting this, ensure that a jump is always generated to
54   // skip the inlined smi code.
55   void EmitJumpIfNotSmi(Register reg, Label* target) {
56     DCHECK(!patch_site_.is_bound() && !info_emitted_);
57     Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
58     __ bind(&patch_site_);
59     __ andi(at, reg, 0);
60     // Always taken before patched.
61     __ BranchShort(target, eq, at, Operand(zero_reg));
62   }
63 
64   // When initially emitting this, ensure that a jump is never generated to
65   // skip the inlined smi code.
66   void EmitJumpIfSmi(Register reg, Label* target) {
67     Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
68     DCHECK(!patch_site_.is_bound() && !info_emitted_);
69     __ bind(&patch_site_);
70     __ andi(at, reg, 0);
71     // Never taken before patched.
72     __ BranchShort(target, ne, at, Operand(zero_reg));
73   }
74 
75   void EmitPatchInfo() {
76     if (patch_site_.is_bound()) {
77       int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
78       Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
79       __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
80 #ifdef DEBUG
81       info_emitted_ = true;
82 #endif
83     } else {
84       __ nop();  // Signals no inlined code.
85     }
86   }
87 
88  private:
89   MacroAssembler* masm_;
90   Label patch_site_;
91 #ifdef DEBUG
92   bool info_emitted_;
93 #endif
94 };
95 
96 
97 // Generate code for a JS function.  On entry to the function the receiver
98 // and arguments have been pushed on the stack left to right.  The actual
99 // argument count matches the formal parameter count expected by the
100 // function.
101 //
102 // The live registers are:
103 //   o a1: the JS function object being called (i.e. ourselves)
104 //   o a3: the new target value
105 //   o cp: our context
106 //   o fp: our caller's frame pointer
107 //   o sp: stack pointer
108 //   o ra: return address
109 //
110 // The function builds a JS frame.  Please see JavaScriptFrameConstants in
111 // frames-mips64.h for its layout.
112 void FullCodeGenerator::Generate() {
113   CompilationInfo* info = info_;
114   profiling_counter_ = isolate()->factory()->NewCell(
115       Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
116   SetFunctionPosition(literal());
117   Comment cmnt(masm_, "[ function compiled by full code generator");
118 
119   ProfileEntryHookStub::MaybeCallEntryHook(masm_);
120 
121 #ifdef DEBUG
122   if (strlen(FLAG_stop_at) > 0 &&
123       info->literal()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
124     __ stop("stop-at");
125   }
126 #endif
127 
128   if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
129     int receiver_offset = info->scope()->num_parameters() * kPointerSize;
130     __ ld(a2, MemOperand(sp, receiver_offset));
131     __ AssertNotSmi(a2);
132     __ GetObjectType(a2, a2, a2);
133     __ Check(ge, kSloppyFunctionExpectsJSReceiverReceiver, a2,
134              Operand(FIRST_JS_RECEIVER_TYPE));
135   }
136 
137   // Open a frame scope to indicate that there is a frame on the stack.  The
138   // MANUAL indicates that the scope shouldn't actually generate code to set up
139   // the frame (that is done below).
140   FrameScope frame_scope(masm_, StackFrame::MANUAL);
141   info->set_prologue_offset(masm_->pc_offset());
142   __ Prologue(info->GeneratePreagedPrologue());
143 
144   { Comment cmnt(masm_, "[ Allocate locals");
145     int locals_count = info->scope()->num_stack_slots();
146     // Generators allocate locals, if any, in context slots.
147     DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
148     if (locals_count > 0) {
149       if (locals_count >= 128) {
150         Label ok;
151         __ Dsubu(t1, sp, Operand(locals_count * kPointerSize));
152         __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
153         __ Branch(&ok, hs, t1, Operand(a2));
154         __ CallRuntime(Runtime::kThrowStackOverflow);
155         __ bind(&ok);
156       }
157       __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
158       int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
159       if (locals_count >= kMaxPushes) {
160         int loop_iterations = locals_count / kMaxPushes;
161         __ li(a2, Operand(loop_iterations));
162         Label loop_header;
163         __ bind(&loop_header);
164         // Do pushes.
165         __ Dsubu(sp, sp, Operand(kMaxPushes * kPointerSize));
166         for (int i = 0; i < kMaxPushes; i++) {
167           __ sd(t1, MemOperand(sp, i * kPointerSize));
168         }
169         // Continue loop if not done.
170         __ Dsubu(a2, a2, Operand(1));
171         __ Branch(&loop_header, ne, a2, Operand(zero_reg));
172       }
173       int remaining = locals_count % kMaxPushes;
174       // Emit the remaining pushes.
175       __ Dsubu(sp, sp, Operand(remaining * kPointerSize));
176       for (int i = 0; i < remaining; i++) {
177         __ sd(t1, MemOperand(sp, i * kPointerSize));
178       }
179     }
180   }
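  // Worked example (illustrative numbers): with locals_count == 70 and
  // kMaxPushes == 32, the loop above runs 70 / 32 == 2 iterations and pushes
  // 64 slots; the remainder path then stores the final 70 % 32 == 6
  // undefined values.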
181 
182   bool function_in_register_a1 = true;
183 
184   // Possibly allocate a local context.
185   if (info->scope()->num_heap_slots() > 0) {
186     Comment cmnt(masm_, "[ Allocate context");
187     // Argument to NewContext is the function, which is still in a1.
188     bool need_write_barrier = true;
189     int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
190     if (info->scope()->is_script_scope()) {
191       __ push(a1);
192       __ Push(info->scope()->GetScopeInfo(info->isolate()));
193       __ CallRuntime(Runtime::kNewScriptContext);
194       PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
195       // The new target value is not used; clobbering is safe.
196       DCHECK_NULL(info->scope()->new_target_var());
197     } else {
198       if (info->scope()->new_target_var() != nullptr) {
199         __ push(a3);  // Preserve new target.
200       }
201       if (slots <= FastNewContextStub::kMaximumSlots) {
202         FastNewContextStub stub(isolate(), slots);
203         __ CallStub(&stub);
204         // Result of FastNewContextStub is always in new space.
205         need_write_barrier = false;
206       } else {
207         __ push(a1);
208         __ CallRuntime(Runtime::kNewFunctionContext);
209       }
210       if (info->scope()->new_target_var() != nullptr) {
211         __ pop(a3);  // Restore new target.
212       }
213     }
214     function_in_register_a1 = false;
215     // Context is returned in v0. It replaces the context passed to us.
216   // It's saved on the stack and kept live in cp.
217     __ mov(cp, v0);
218     __ sd(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
219     // Copy any necessary parameters into the context.
220     int num_parameters = info->scope()->num_parameters();
221     int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
222     for (int i = first_parameter; i < num_parameters; i++) {
223       Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
224       if (var->IsContextSlot()) {
225         int parameter_offset = StandardFrameConstants::kCallerSPOffset +
226                                  (num_parameters - 1 - i) * kPointerSize;
227         // Load parameter from stack.
228         __ ld(a0, MemOperand(fp, parameter_offset));
229         // Store it in the context.
230         MemOperand target = ContextMemOperand(cp, var->index());
231         __ sd(a0, target);
232 
233         // Update the write barrier.
234         if (need_write_barrier) {
235           __ RecordWriteContextSlot(cp, target.offset(), a0, a2,
236                                     kRAHasBeenSaved, kDontSaveFPRegs);
237         } else if (FLAG_debug_code) {
238           Label done;
239           __ JumpIfInNewSpace(cp, a0, &done);
240           __ Abort(kExpectedNewSpaceObject);
241           __ bind(&done);
242         }
243       }
244     }
245   }
246 
247   // The registers holding this function and new target are both trashed if we
248   // bail out here. But since that can happen only when new target is not used
249   // and we allocate a context, the value of |function_in_register_a1| is correct.
250   PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);
251 
252   // Possibly set up a local binding to the this-function, which is used in
253   // derived constructors with super calls.
254   Variable* this_function_var = scope()->this_function_var();
255   if (this_function_var != nullptr) {
256     Comment cmnt(masm_, "[ This function");
257     if (!function_in_register_a1) {
258       __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
259       // The write barrier clobbers the register again; keep it marked as such.
260     }
261     SetVar(this_function_var, a1, a0, a2);
262   }
263 
264   Variable* new_target_var = scope()->new_target_var();
265   if (new_target_var != nullptr) {
266     Comment cmnt(masm_, "[ new.target");
267     SetVar(new_target_var, a3, a0, a2);
268   }
269 
270   // Possibly allocate a rest parameter array.
271   int rest_index;
272   Variable* rest_param = scope()->rest_parameter(&rest_index);
273   if (rest_param) {
274     Comment cmnt(masm_, "[ Allocate rest parameter array");
275 
276     int num_parameters = info->scope()->num_parameters();
277     int offset = num_parameters * kPointerSize;
278 
279     __ li(RestParamAccessDescriptor::parameter_count(),
280           Operand(Smi::FromInt(num_parameters)));
281     __ Daddu(RestParamAccessDescriptor::parameter_pointer(), fp,
282              Operand(StandardFrameConstants::kCallerSPOffset + offset));
283     __ li(RestParamAccessDescriptor::rest_parameter_index(),
284           Operand(Smi::FromInt(rest_index)));
285     function_in_register_a1 = false;
286 
287     RestParamAccessStub stub(isolate());
288     __ CallStub(&stub);
289 
290     SetVar(rest_param, v0, a1, a2);
291   }
292 
293   Variable* arguments = scope()->arguments();
294   if (arguments != NULL) {
295     // Function uses arguments object.
296     Comment cmnt(masm_, "[ Allocate arguments object");
297     DCHECK(a1.is(ArgumentsAccessNewDescriptor::function()));
298     if (!function_in_register_a1) {
299       // Reload the function; the context or rest parameter code above may
300       __ ld(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
301     }
302     // Receiver is just before the parameters on the caller's stack.
303     int num_parameters = info->scope()->num_parameters();
304     int offset = num_parameters * kPointerSize;
305     __ li(ArgumentsAccessNewDescriptor::parameter_count(),
306           Operand(Smi::FromInt(num_parameters)));
307     __ Daddu(ArgumentsAccessNewDescriptor::parameter_pointer(), fp,
308              Operand(StandardFrameConstants::kCallerSPOffset + offset));
309 
310     // Arguments to ArgumentsAccessStub:
311     //   function, parameter pointer, parameter count.
312     // The stub will rewrite parameter pointer and parameter count if the
313     // previous stack frame was an arguments adapter frame.
314     bool is_unmapped = is_strict(language_mode()) || !has_simple_parameters();
315     ArgumentsAccessStub::Type type = ArgumentsAccessStub::ComputeType(
316         is_unmapped, literal()->has_duplicate_parameters());
317     ArgumentsAccessStub stub(isolate(), type);
318     __ CallStub(&stub);
319 
320     SetVar(arguments, v0, a1, a2);
321   }
322 
323   if (FLAG_trace) {
324     __ CallRuntime(Runtime::kTraceEnter);
325   }
326 
327   // Visit the declarations and body unless there is an illegal
328   // redeclaration.
329   if (scope()->HasIllegalRedeclaration()) {
330     Comment cmnt(masm_, "[ Declarations");
331     VisitForEffect(scope()->GetIllegalRedeclaration());
332 
333   } else {
334     PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
335     { Comment cmnt(masm_, "[ Declarations");
336       VisitDeclarations(scope()->declarations());
337     }
338 
339     // Assert that the declarations do not use ICs. Otherwise the debugger
340     // won't be able to redirect a PC at an IC to the correct IC in newly
341     // recompiled code.
342     DCHECK_EQ(0, ic_total_count_);
343 
344     { Comment cmnt(masm_, "[ Stack check");
345       PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
346       Label ok;
347       __ LoadRoot(at, Heap::kStackLimitRootIndex);
348       __ Branch(&ok, hs, sp, Operand(at));
349       Handle<Code> stack_check = isolate()->builtins()->StackCheck();
350       PredictableCodeSizeScope predictable(masm_,
351           masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
352       __ Call(stack_check, RelocInfo::CODE_TARGET);
353       __ bind(&ok);
354     }
355 
356     { Comment cmnt(masm_, "[ Body");
357       DCHECK(loop_depth() == 0);
358 
359       VisitStatements(literal()->body());
360 
361       DCHECK(loop_depth() == 0);
362     }
363   }
364 
365   // Always emit a 'return undefined' in case control fell off the end of
366   // the body.
367   { Comment cmnt(masm_, "[ return <undefined>;");
368     __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
369   }
370   EmitReturnSequence();
371 }
372 
373 
374 void FullCodeGenerator::ClearAccumulator() {
375   DCHECK(Smi::FromInt(0) == 0);
376   __ mov(v0, zero_reg);
377 }
378 
379 
380 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
381   __ li(a2, Operand(profiling_counter_));
382   __ ld(a3, FieldMemOperand(a2, Cell::kValueOffset));
383   __ Dsubu(a3, a3, Operand(Smi::FromInt(delta)));
384   __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
385 }
386 
387 
388 void FullCodeGenerator::EmitProfilingCounterReset() {
389   int reset_value = FLAG_interrupt_budget;
390   if (info_->is_debug()) {
391     // Detect debug break requests as soon as possible.
392     reset_value = FLAG_interrupt_budget >> 4;
393   }
394   __ li(a2, Operand(profiling_counter_));
395   __ li(a3, Operand(Smi::FromInt(reset_value)));
396   __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
397 }
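// Illustrative arithmetic for the debug case above: resetting the counter to
// FLAG_interrupt_budget >> 4 leaves one sixteenth of the normal budget, so a
// pending debug break request is noticed roughly sixteen times sooner.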
398 
399 
400 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
401                                                 Label* back_edge_target) {
402   // The generated code is used in Deoptimizer::PatchStackCheckCodeAt, so we
403   // need to make sure its size is constant. Branch may emit a skip-or-jump
404   // sequence instead of the normal Branch. It seems that the "skip" part of
405   // that sequence is about as long as this Branch would be, so it is safe to
406   // ignore it.
407   Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
408   Comment cmnt(masm_, "[ Back edge bookkeeping");
409   Label ok;
410   DCHECK(back_edge_target->is_bound());
411   int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
412   int weight = Min(kMaxBackEdgeWeight,
413                    Max(1, distance / kCodeSizeMultiplier));
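  // Illustrative: a back edge whose body spans 10 * kCodeSizeMultiplier bytes
  // of code gets weight 10 (capped at kMaxBackEdgeWeight), so larger loop
  // bodies drain the profiling counter proportionally faster.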
414   EmitProfilingCounterDecrement(weight);
415   __ slt(at, a3, zero_reg);
416   __ beq(at, zero_reg, &ok);
417   // Call will emit a 'li t9' first, so it is safe to use the delay slot.
418   __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
419   // Record a mapping of this PC offset to the OSR id.  This is used to find
420   // the AST id from the unoptimized code in order to use it as a key into
421   // the deoptimization input data found in the optimized code.
422   RecordBackEdge(stmt->OsrEntryId());
423   EmitProfilingCounterReset();
424 
425   __ bind(&ok);
426   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
427   // Record a mapping of the OSR id to this PC.  This is used if the OSR
428   // entry becomes the target of a bailout.  We don't expect it to be, but
429   // we want it to work if it is.
430   PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
431 }
432 
433 
434 void FullCodeGenerator::EmitReturnSequence() {
435   Comment cmnt(masm_, "[ Return sequence");
436   if (return_label_.is_bound()) {
437     __ Branch(&return_label_);
438   } else {
439     __ bind(&return_label_);
440     if (FLAG_trace) {
441       // Push the return value on the stack as the parameter.
442       // Runtime::TraceExit returns its parameter in v0.
443       __ push(v0);
444       __ CallRuntime(Runtime::kTraceExit);
445     }
446     // Pretend that the exit is a backwards jump to the entry.
447     int weight = 1;
448     if (info_->ShouldSelfOptimize()) {
449       weight = FLAG_interrupt_budget / FLAG_self_opt_count;
450     } else {
451       int distance = masm_->pc_offset();
452       weight = Min(kMaxBackEdgeWeight,
453                    Max(1, distance / kCodeSizeMultiplier));
454     }
455     EmitProfilingCounterDecrement(weight);
456     Label ok;
457     __ Branch(&ok, ge, a3, Operand(zero_reg));
458     __ push(v0);
459     __ Call(isolate()->builtins()->InterruptCheck(),
460             RelocInfo::CODE_TARGET);
461     __ pop(v0);
462     EmitProfilingCounterReset();
463     __ bind(&ok);
464 
465     // Make sure that the trampoline pool is not emitted inside of the return
466     // sequence.
467     { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
468       // Here we use masm_-> instead of the __ macro to keep the code coverage
469       // tool from instrumenting it, as we rely on the code size here.
470       int32_t arg_count = info_->scope()->num_parameters() + 1;
471       int32_t sp_delta = arg_count * kPointerSize;
472       SetReturnPosition(literal());
473       masm_->mov(sp, fp);
474       masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
475       masm_->Daddu(sp, sp, Operand(sp_delta));
476       masm_->Jump(ra);
477     }
478   }
479 }
480 
481 
482 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
483   DCHECK(var->IsStackAllocated() || var->IsContextSlot());
484   codegen()->GetVar(result_register(), var);
485   __ push(result_register());
486 }
487 
488 
489 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
490 }
491 
492 
493 void FullCodeGenerator::AccumulatorValueContext::Plug(
494     Heap::RootListIndex index) const {
495   __ LoadRoot(result_register(), index);
496 }
497 
498 
499 void FullCodeGenerator::StackValueContext::Plug(
500     Heap::RootListIndex index) const {
501   __ LoadRoot(result_register(), index);
502   __ push(result_register());
503 }
504 
505 
506 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
507   codegen()->PrepareForBailoutBeforeSplit(condition(),
508                                           true,
509                                           true_label_,
510                                           false_label_);
511   if (index == Heap::kUndefinedValueRootIndex ||
512       index == Heap::kNullValueRootIndex ||
513       index == Heap::kFalseValueRootIndex) {
514     if (false_label_ != fall_through_) __ Branch(false_label_);
515   } else if (index == Heap::kTrueValueRootIndex) {
516     if (true_label_ != fall_through_) __ Branch(true_label_);
517   } else {
518     __ LoadRoot(result_register(), index);
519     codegen()->DoTest(this);
520   }
521 }
522 
523 
524 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
525 }
526 
527 
528 void FullCodeGenerator::AccumulatorValueContext::Plug(
529     Handle<Object> lit) const {
530   __ li(result_register(), Operand(lit));
531 }
532 
533 
534 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
535   // Immediates cannot be pushed directly.
536   __ li(result_register(), Operand(lit));
537   __ push(result_register());
538 }
539 
540 
541 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
542   codegen()->PrepareForBailoutBeforeSplit(condition(),
543                                           true,
544                                           true_label_,
545                                           false_label_);
546   DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
547   if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
548     if (false_label_ != fall_through_) __ Branch(false_label_);
549   } else if (lit->IsTrue() || lit->IsJSObject()) {
550     if (true_label_ != fall_through_) __ Branch(true_label_);
551   } else if (lit->IsString()) {
552     if (String::cast(*lit)->length() == 0) {
553       if (false_label_ != fall_through_) __ Branch(false_label_);
554     } else {
555       if (true_label_ != fall_through_) __ Branch(true_label_);
556     }
557   } else if (lit->IsSmi()) {
558     if (Smi::cast(*lit)->value() == 0) {
559       if (false_label_ != fall_through_) __ Branch(false_label_);
560     } else {
561       if (true_label_ != fall_through_) __ Branch(true_label_);
562     }
563   } else {
564     // For simplicity we always test the accumulator register.
565     __ li(result_register(), Operand(lit));
566     codegen()->DoTest(this);
567   }
568 }
569 
570 
571 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
572                                                    Register reg) const {
573   DCHECK(count > 0);
574   __ Drop(count);
575 }
576 
577 
578 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
579     int count,
580     Register reg) const {
581   DCHECK(count > 0);
582   __ Drop(count);
583   __ Move(result_register(), reg);
584 }
585 
586 
587 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
588                                                        Register reg) const {
589   DCHECK(count > 0);
590   if (count > 1) __ Drop(count - 1);
591   __ sd(reg, MemOperand(sp, 0));
592 }
593 
594 
595 void FullCodeGenerator::TestContext::DropAndPlug(int count,
596                                                  Register reg) const {
597   DCHECK(count > 0);
598   // For simplicity we always test the accumulator register.
599   __ Drop(count);
600   __ Move(result_register(), reg);
601   codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
602   codegen()->DoTest(this);
603 }
604 
605 
606 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
607                                             Label* materialize_false) const {
608   DCHECK(materialize_true == materialize_false);
609   __ bind(materialize_true);
610 }
611 
612 
613 void FullCodeGenerator::AccumulatorValueContext::Plug(
614     Label* materialize_true,
615     Label* materialize_false) const {
616   Label done;
617   __ bind(materialize_true);
618   __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
619   __ Branch(&done);
620   __ bind(materialize_false);
621   __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
622   __ bind(&done);
623 }
624 
625 
626 void FullCodeGenerator::StackValueContext::Plug(
627     Label* materialize_true,
628     Label* materialize_false) const {
629   Label done;
630   __ bind(materialize_true);
631   __ LoadRoot(at, Heap::kTrueValueRootIndex);
632   // Push the value, as the following branch can clobber 'at' in long branch mode.
633   __ push(at);
634   __ Branch(&done);
635   __ bind(materialize_false);
636   __ LoadRoot(at, Heap::kFalseValueRootIndex);
637   __ push(at);
638   __ bind(&done);
639 }
640 
641 
642 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
643                                           Label* materialize_false) const {
644   DCHECK(materialize_true == true_label_);
645   DCHECK(materialize_false == false_label_);
646 }
647 
648 
649 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
650   Heap::RootListIndex value_root_index =
651       flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
652   __ LoadRoot(result_register(), value_root_index);
653 }
654 
655 
656 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
657   Heap::RootListIndex value_root_index =
658       flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
659   __ LoadRoot(at, value_root_index);
660   __ push(at);
661 }
662 
663 
664 void FullCodeGenerator::TestContext::Plug(bool flag) const {
665   codegen()->PrepareForBailoutBeforeSplit(condition(),
666                                           true,
667                                           true_label_,
668                                           false_label_);
669   if (flag) {
670     if (true_label_ != fall_through_) __ Branch(true_label_);
671   } else {
672     if (false_label_ != fall_through_) __ Branch(false_label_);
673   }
674 }
675 
676 
677 void FullCodeGenerator::DoTest(Expression* condition,
678                                Label* if_true,
679                                Label* if_false,
680                                Label* fall_through) {
681   __ mov(a0, result_register());
682   Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
683   CallIC(ic, condition->test_id());
684   __ LoadRoot(at, Heap::kTrueValueRootIndex);
685   Split(eq, result_register(), Operand(at), if_true, if_false, fall_through);
686 }
687 
688 
689 void FullCodeGenerator::Split(Condition cc,
690                               Register lhs,
691                               const Operand&  rhs,
692                               Label* if_true,
693                               Label* if_false,
694                               Label* fall_through) {
695   if (if_false == fall_through) {
696     __ Branch(if_true, cc, lhs, rhs);
697   } else if (if_true == fall_through) {
698     __ Branch(if_false, NegateCondition(cc), lhs, rhs);
699   } else {
700     __ Branch(if_true, cc, lhs, rhs);
701     __ Branch(if_false);
702   }
703 }
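// Usage sketch (illustrative, with hypothetical labels): a caller that wants
// the true case to fall through passes fall_through == if_true, e.g.
//
//   Split(eq, v0, Operand(at), &if_true, &if_false, &if_true);
//
// which emits a single branch to if_false on the negated condition.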
704 
705 
706 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
707   DCHECK(var->IsStackAllocated());
708   // Offset is negative because higher indexes are at lower addresses.
709   int offset = -var->index() * kPointerSize;
710   // Adjust by a (parameter or local) base offset.
711   if (var->IsParameter()) {
712     offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
713   } else {
714     offset += JavaScriptFrameConstants::kLocal0Offset;
715   }
716   return MemOperand(fp, offset);
717 }
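// Worked example (illustrative): a stack local with index 0 resolves to
// MemOperand(fp, JavaScriptFrameConstants::kLocal0Offset); index 1 lands one
// kPointerSize below that, and so on toward lower addresses.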
718 
719 
720 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
721   DCHECK(var->IsContextSlot() || var->IsStackAllocated());
722   if (var->IsContextSlot()) {
723     int context_chain_length = scope()->ContextChainLength(var->scope());
724     __ LoadContext(scratch, context_chain_length);
725     return ContextMemOperand(scratch, var->index());
726   } else {
727     return StackOperand(var);
728   }
729 }
730 
731 
732 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
733   // Use destination as scratch.
734   MemOperand location = VarOperand(var, dest);
735   __ ld(dest, location);
736 }
737 
738 
739 void FullCodeGenerator::SetVar(Variable* var,
740                                Register src,
741                                Register scratch0,
742                                Register scratch1) {
743   DCHECK(var->IsContextSlot() || var->IsStackAllocated());
744   DCHECK(!scratch0.is(src));
745   DCHECK(!scratch0.is(scratch1));
746   DCHECK(!scratch1.is(src));
747   MemOperand location = VarOperand(var, scratch0);
748   __ sd(src, location);
749   // Emit the write barrier code if the location is in the heap.
750   if (var->IsContextSlot()) {
751     __ RecordWriteContextSlot(scratch0,
752                               location.offset(),
753                               src,
754                               scratch1,
755                               kRAHasBeenSaved,
756                               kDontSaveFPRegs);
757   }
758 }
759 
760 
761 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
762                                                      bool should_normalize,
763                                                      Label* if_true,
764                                                      Label* if_false) {
765   // Only prepare for bailouts before splits if we're in a test
766   // context. Otherwise, we let the Visit function deal with the
767   // preparation to avoid preparing with the same AST id twice.
768   if (!context()->IsTest()) return;
769 
770   Label skip;
771   if (should_normalize) __ Branch(&skip);
772   PrepareForBailout(expr, TOS_REG);
773   if (should_normalize) {
774     __ LoadRoot(a4, Heap::kTrueValueRootIndex);
775     Split(eq, a0, Operand(a4), if_true, if_false, NULL);
776     __ bind(&skip);
777   }
778 }
779 
780 
781 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
782   // The variable in the declaration always resides in the current function
783   // context.
784   DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
785   if (generate_debug_code_) {
786     // Check that we're not inside a with or catch context.
787     __ ld(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
788     __ LoadRoot(a4, Heap::kWithContextMapRootIndex);
789     __ Check(ne, kDeclarationInWithContext,
790         a1, Operand(a4));
791     __ LoadRoot(a4, Heap::kCatchContextMapRootIndex);
792     __ Check(ne, kDeclarationInCatchContext,
793         a1, Operand(a4));
794   }
795 }
796 
797 
798 void FullCodeGenerator::VisitVariableDeclaration(
799     VariableDeclaration* declaration) {
800   // If it was not possible to allocate the variable at compile time, we
801   // need to "declare" it at runtime to make sure it actually exists in the
802   // local context.
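  // For example (illustrative): a `var` introduced by sloppy-mode eval code
  // ends up in a VariableLocation::LOOKUP slot and takes the
  // Runtime::kDeclareLookupSlot path below.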
803   VariableProxy* proxy = declaration->proxy();
804   VariableMode mode = declaration->mode();
805   Variable* variable = proxy->var();
806   bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
807   switch (variable->location()) {
808     case VariableLocation::GLOBAL:
809     case VariableLocation::UNALLOCATED:
810       globals_->Add(variable->name(), zone());
811       globals_->Add(variable->binding_needs_init()
812                         ? isolate()->factory()->the_hole_value()
813                         : isolate()->factory()->undefined_value(),
814                     zone());
815       break;
816 
817     case VariableLocation::PARAMETER:
818     case VariableLocation::LOCAL:
819       if (hole_init) {
820         Comment cmnt(masm_, "[ VariableDeclaration");
821         __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
822         __ sd(a4, StackOperand(variable));
823       }
824       break;
825 
826     case VariableLocation::CONTEXT:
827       if (hole_init) {
828         Comment cmnt(masm_, "[ VariableDeclaration");
829         EmitDebugCheckDeclarationContext(variable);
830         __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
831         __ sd(at, ContextMemOperand(cp, variable->index()));
832         // No write barrier since the_hole_value is in old space.
833         PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
834       }
835       break;
836 
837     case VariableLocation::LOOKUP: {
838       Comment cmnt(masm_, "[ VariableDeclaration");
839       __ li(a2, Operand(variable->name()));
840       // Declaration nodes are always introduced in one of four modes.
841       DCHECK(IsDeclaredVariableMode(mode));
842       // Push initial value, if any.
843       // Note: For variables we must not push an initial value (such as
844       // 'undefined') because we may have a (legal) redeclaration and we
845       // must not destroy the current value.
846       if (hole_init) {
847         __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
848       } else {
849         DCHECK(Smi::FromInt(0) == 0);
850         __ mov(a0, zero_reg);  // Smi::FromInt(0) indicates no initial value.
851       }
852       __ Push(a2, a0);
853       __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
854       __ CallRuntime(Runtime::kDeclareLookupSlot);
855       break;
856     }
857   }
858 }
859 
860 
861 void FullCodeGenerator::VisitFunctionDeclaration(
862     FunctionDeclaration* declaration) {
863   VariableProxy* proxy = declaration->proxy();
864   Variable* variable = proxy->var();
865   switch (variable->location()) {
866     case VariableLocation::GLOBAL:
867     case VariableLocation::UNALLOCATED: {
868       globals_->Add(variable->name(), zone());
869       Handle<SharedFunctionInfo> function =
870           Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
871       // Check for stack-overflow exception.
872       if (function.is_null()) return SetStackOverflow();
873       globals_->Add(function, zone());
874       break;
875     }
876 
877     case VariableLocation::PARAMETER:
878     case VariableLocation::LOCAL: {
879       Comment cmnt(masm_, "[ FunctionDeclaration");
880       VisitForAccumulatorValue(declaration->fun());
881       __ sd(result_register(), StackOperand(variable));
882       break;
883     }
884 
885     case VariableLocation::CONTEXT: {
886       Comment cmnt(masm_, "[ FunctionDeclaration");
887       EmitDebugCheckDeclarationContext(variable);
888       VisitForAccumulatorValue(declaration->fun());
889       __ sd(result_register(), ContextMemOperand(cp, variable->index()));
890       int offset = Context::SlotOffset(variable->index());
891       // We know that we have written a function, which is not a smi.
892       __ RecordWriteContextSlot(cp,
893                                 offset,
894                                 result_register(),
895                                 a2,
896                                 kRAHasBeenSaved,
897                                 kDontSaveFPRegs,
898                                 EMIT_REMEMBERED_SET,
899                                 OMIT_SMI_CHECK);
900       PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
901       break;
902     }
903 
904     case VariableLocation::LOOKUP: {
905       Comment cmnt(masm_, "[ FunctionDeclaration");
906       __ li(a2, Operand(variable->name()));
907       __ Push(a2);
908       // Push initial value for function declaration.
909       VisitForStackValue(declaration->fun());
910       __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
911       __ CallRuntime(Runtime::kDeclareLookupSlot);
912       break;
913     }
914   }
915 }
916 
917 
918 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
919   // Call the runtime to declare the globals.
920   __ li(a1, Operand(pairs));
921   __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
922   __ Push(a1, a0);
923   __ CallRuntime(Runtime::kDeclareGlobals);
924   // Return value is ignored.
925 }
926 
927 
928 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
929   // Call the runtime to declare the modules.
930   __ Push(descriptions);
931   __ CallRuntime(Runtime::kDeclareModules);
932   // Return value is ignored.
933 }
934 
935 
936 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
937   Comment cmnt(masm_, "[ SwitchStatement");
938   Breakable nested_statement(this, stmt);
939   SetStatementPosition(stmt);
940 
941   // Keep the switch value on the stack until a case matches.
942   VisitForStackValue(stmt->tag());
943   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
944 
945   ZoneList<CaseClause*>* clauses = stmt->cases();
946   CaseClause* default_clause = NULL;  // Can occur anywhere in the list.
947 
948   Label next_test;  // Recycled for each test.
949   // Compile all the tests with branches to their bodies.
950   for (int i = 0; i < clauses->length(); i++) {
951     CaseClause* clause = clauses->at(i);
952     clause->body_target()->Unuse();
953 
954     // The default is not a test, but remember it as the final fall-through.
955     if (clause->is_default()) {
956       default_clause = clause;
957       continue;
958     }
959 
960     Comment cmnt(masm_, "[ Case comparison");
961     __ bind(&next_test);
962     next_test.Unuse();
963 
964     // Compile the label expression.
965     VisitForAccumulatorValue(clause->label());
966     __ mov(a0, result_register());  // CompareStub requires args in a0, a1.
967 
968     // Perform the comparison as if via '==='.
969     __ ld(a1, MemOperand(sp, 0));  // Switch value.
970     bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
971     JumpPatchSite patch_site(masm_);
972     if (inline_smi_code) {
973       Label slow_case;
974       __ or_(a2, a1, a0);
975       patch_site.EmitJumpIfNotSmi(a2, &slow_case);
976 
977       __ Branch(&next_test, ne, a1, Operand(a0));
978       __ Drop(1);  // Switch value is no longer needed.
979       __ Branch(clause->body_target());
980 
981       __ bind(&slow_case);
982     }
983 
984     // Record position before stub call for type feedback.
985     SetExpressionPosition(clause);
986     Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
987                                              strength(language_mode())).code();
988     CallIC(ic, clause->CompareId());
989     patch_site.EmitPatchInfo();
990 
991     Label skip;
992     __ Branch(&skip);
993     PrepareForBailout(clause, TOS_REG);
994     __ LoadRoot(at, Heap::kTrueValueRootIndex);
995     __ Branch(&next_test, ne, v0, Operand(at));
996     __ Drop(1);
997     __ Branch(clause->body_target());
998     __ bind(&skip);
999 
1000     __ Branch(&next_test, ne, v0, Operand(zero_reg));
1001     __ Drop(1);  // Switch value is no longer needed.
1002     __ Branch(clause->body_target());
1003   }
1004 
1005   // Discard the test value and jump to the default if present, otherwise to
1006   // the end of the statement.
1007   __ bind(&next_test);
1008   __ Drop(1);  // Switch value is no longer needed.
1009   if (default_clause == NULL) {
1010     __ Branch(nested_statement.break_label());
1011   } else {
1012     __ Branch(default_clause->body_target());
1013   }
1014 
1015   // Compile all the case bodies.
1016   for (int i = 0; i < clauses->length(); i++) {
1017     Comment cmnt(masm_, "[ Case body");
1018     CaseClause* clause = clauses->at(i);
1019     __ bind(clause->body_target());
1020     PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1021     VisitStatements(clause->statements());
1022   }
1023 
1024   __ bind(nested_statement.break_label());
1025   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1026 }
1027 
1028 
1029 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1030   Comment cmnt(masm_, "[ ForInStatement");
1031   SetStatementPosition(stmt, SKIP_BREAK);
1032 
1033   FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
1034 
1035   Label loop, exit;
1036   ForIn loop_statement(this, stmt);
1037   increment_loop_depth();
1038 
1039   // Get the object to enumerate over. If the object is null or undefined, skip
1040   // over the loop.  See ECMA-262 version 5, section 12.6.4.
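  // In JS terms (illustrative): `for (var k in null) {}` and
  // `for (var k in undefined) {}` execute their bodies zero times, hence the
  // two early branches to &exit below.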
1041   SetExpressionAsStatementPosition(stmt->enumerable());
1042   VisitForAccumulatorValue(stmt->enumerable());
1043   __ mov(a0, result_register());  // Result as argument to ToObject below.
1044   __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1045   __ Branch(&exit, eq, a0, Operand(at));
1046   Register null_value = a5;
1047   __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1048   __ Branch(&exit, eq, a0, Operand(null_value));
1049   PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1050   __ mov(a0, v0);
1051   // Convert the object to a JS object.
1052   Label convert, done_convert;
1053   __ JumpIfSmi(a0, &convert);
1054   __ GetObjectType(a0, a1, a1);
1055   __ Branch(&done_convert, ge, a1, Operand(FIRST_JS_RECEIVER_TYPE));
1056   __ bind(&convert);
1057   ToObjectStub stub(isolate());
1058   __ CallStub(&stub);
1059   __ mov(a0, v0);
1060   __ bind(&done_convert);
1061   PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1062   __ push(a0);
1063 
1064   // Check for proxies.
1065   Label call_runtime;
1066   __ GetObjectType(a0, a1, a1);
1067   __ Branch(&call_runtime, eq, a1, Operand(JS_PROXY_TYPE));
1068 
1069   // Check cache validity in generated code. This is a fast case for
1070   // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1071   // guarantee cache validity, call the runtime system to check cache
1072   // validity or get the property names in a fixed array.
1073   __ CheckEnumCache(null_value, &call_runtime);
1074 
1075   // The enum cache is valid.  Load the map of the object being
1076   // iterated over and use the cache for the iteration.
1077   Label use_cache;
1078   __ ld(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
1079   __ Branch(&use_cache);
1080 
1081   // Get the set of properties to enumerate.
1082   __ bind(&call_runtime);
1083   __ push(a0);  // Duplicate the enumerable object on the stack.
1084   __ CallRuntime(Runtime::kGetPropertyNamesFast);
1085   PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1086 
1087   // If we got a map from the runtime call, we can do a fast
1088   // modification check. Otherwise, we got a fixed array, and we have
1089   // to do a slow check.
1090   Label fixed_array;
1091   __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
1092   __ LoadRoot(at, Heap::kMetaMapRootIndex);
1093   __ Branch(&fixed_array, ne, a2, Operand(at));
1094 
1095   // We got a map in register v0. Get the enumeration cache from it.
1096   Label no_descriptors;
1097   __ bind(&use_cache);
1098 
1099   __ EnumLength(a1, v0);
1100   __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));
1101 
1102   __ LoadInstanceDescriptors(v0, a2);
1103   __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
1104   __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1105 
1106   // Set up the four remaining stack slots.
1107   __ li(a0, Operand(Smi::FromInt(0)));
1108   // Push map, enumeration cache, enumeration cache length (as smi) and zero.
1109   __ Push(v0, a2, a1, a0);
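  // Resulting fast-path stack layout (illustrative), from sp upward:
  // sp[0] the index (smi 0), sp[1] the enum cache length, sp[2] the enum
  // cache array, sp[3] the map of the enumerable, sp[4] the enumerable
  // object pushed earlier.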
1110   __ jmp(&loop);
1111 
1112   __ bind(&no_descriptors);
1113   __ Drop(1);
1114   __ jmp(&exit);
1115 
1116   // We got a fixed array in register v0. Iterate through that.
1117   __ bind(&fixed_array);
1118 
1119   __ EmitLoadTypeFeedbackVector(a1);
1120   __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1121   int vector_index = SmiFromSlot(slot)->value();
1122   __ sd(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(vector_index)));
1123 
1124   __ li(a1, Operand(Smi::FromInt(1)));  // Smi(1) indicates slow check
1125   __ Push(a1, v0);  // Smi and array
1126   __ ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
1127   __ li(a0, Operand(Smi::FromInt(0)));
1128   __ Push(a1, a0);  // Fixed array length (as smi) and initial index.
1129 
1130   // Generate code for doing the condition check.
1131   __ bind(&loop);
1132   SetExpressionAsStatementPosition(stmt->each());
1133 
1134   // Load the current count to a0, load the length to a1.
1135   __ ld(a0, MemOperand(sp, 0 * kPointerSize));
1136   __ ld(a1, MemOperand(sp, 1 * kPointerSize));
1137   __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
1138 
1139   // Get the current entry of the array into register a3.
1140   __ ld(a2, MemOperand(sp, 2 * kPointerSize));
1141   __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1142   __ SmiScale(a4, a0, kPointerSizeLog2);
1143   __ daddu(a4, a2, a4);  // Array base + scaled (smi) index.
1144   __ ld(a3, MemOperand(a4));  // Current entry.
1145 
1146   // Get the expected map from the stack (or, in the permanent slow case,
1147   // a smi) into register a2.
1148   __ ld(a2, MemOperand(sp, 3 * kPointerSize));
1149 
1150   // Check if the expected map still matches that of the enumerable.
1151   // If not, we may have to filter the key.
1152   Label update_each;
1153   __ ld(a1, MemOperand(sp, 4 * kPointerSize));
1154   __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
1155   __ Branch(&update_each, eq, a4, Operand(a2));
1156 
1157   // Convert the entry to a string or (smi) 0 if it isn't a property
1158   // any more. If the property has been removed while iterating, we
1159   // just skip it.
1160   __ Push(a1, a3);  // Enumerable and current entry.
1161   __ CallRuntime(Runtime::kForInFilter);
1162   PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1163   __ mov(a3, result_register());
1164   __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1165   __ Branch(loop_statement.continue_label(), eq, a3, Operand(at));
1166 
1167   // Update the 'each' property or variable from the possibly filtered
1168   // entry in register a3.
1169   __ bind(&update_each);
1170   __ mov(result_register(), a3);
1171   // Perform the assignment as if via '='.
1172   { EffectContext context(this);
1173     EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1174     PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1175   }
1176 
1177   // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1178   PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1179   // Generate code for the body of the loop.
1180   Visit(stmt->body());
1181 
1182   // Generate code for going to the next element by incrementing
1183   // the index (smi) stored on top of the stack.
1184   __ bind(loop_statement.continue_label());
1185   __ pop(a0);
1186   __ Daddu(a0, a0, Operand(Smi::FromInt(1)));
1187   __ push(a0);
1188 
1189   EmitBackEdgeBookkeeping(stmt, &loop);
1190   __ Branch(&loop);
1191 
1192   // Remove the pointers stored on the stack.
1193   __ bind(loop_statement.break_label());
1194   __ Drop(5);
1195 
1196   // Exit and decrement the loop depth.
1197   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1198   __ bind(&exit);
1199   decrement_loop_depth();
1200 }
1201 
1202 
1203 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1204                                        bool pretenure) {
1205   // Use the fast case closure allocation code that allocates in new
1206   // space for nested functions that don't need literal cloning. If
1207   // we're running with the --always-opt or the --prepare-always-opt
1208   // flag, we need to use the runtime function so that the new function
1209   // we are creating here gets a chance to have its code optimized and
1210   // doesn't just get a copy of the existing unoptimized code.
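  // For example (illustrative): a plain nested function with no literals
  // takes the FastNewClosureStub path below, while running with --always-opt
  // forces the runtime path so the new closure can get optimized code of its
  // own rather than a copy of the unoptimized code.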
1211   if (!FLAG_always_opt &&
1212       !FLAG_prepare_always_opt &&
1213       !pretenure &&
1214       scope()->is_function_scope() &&
1215       info->num_literals() == 0) {
1216     FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1217     __ li(a2, Operand(info));
1218     __ CallStub(&stub);
1219   } else {
1220     __ Push(info);
1221     __ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
1222                              : Runtime::kNewClosure);
1223   }
1224   context()->Plug(v0);
1225 }
1226 
1227 
1228 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1229                                           FeedbackVectorSlot slot) {
1230   DCHECK(NeedsHomeObject(initializer));
1231   __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1232   __ li(StoreDescriptor::NameRegister(),
1233         Operand(isolate()->factory()->home_object_symbol()));
1234   __ ld(StoreDescriptor::ValueRegister(),
1235         MemOperand(sp, offset * kPointerSize));
1236   EmitLoadStoreICSlot(slot);
1237   CallStoreIC();
1238 }
1239 
1240 
1241 void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1242                                                      int offset,
1243                                                      FeedbackVectorSlot slot) {
1244   DCHECK(NeedsHomeObject(initializer));
1245   __ Move(StoreDescriptor::ReceiverRegister(), v0);
1246   __ li(StoreDescriptor::NameRegister(),
1247         Operand(isolate()->factory()->home_object_symbol()));
1248   __ ld(StoreDescriptor::ValueRegister(),
1249         MemOperand(sp, offset * kPointerSize));
1250   EmitLoadStoreICSlot(slot);
1251   CallStoreIC();
1252 }
1253 
1254 
1255 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1256                                                       TypeofMode typeof_mode,
1257                                                       Label* slow) {
1258   Register current = cp;
1259   Register next = a1;
1260   Register temp = a2;
1261 
1262   Scope* s = scope();
1263   while (s != NULL) {
1264     if (s->num_heap_slots() > 0) {
1265       if (s->calls_sloppy_eval()) {
1266         // Check that extension is "the hole".
1267         __ ld(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1268         __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1269       }
1270       // Load next context in chain.
1271       __ ld(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1272       // Walk the rest of the chain without clobbering cp.
1273       current = next;
1274     }
1275     // If no outer scope calls eval, we do not need to check more
1276     // context extensions.
1277     if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1278     s = s->outer_scope();
1279   }
1280 
1281   if (s->is_eval_scope()) {
1282     Label loop, fast;
1283     if (!current.is(next)) {
1284       __ Move(next, current);
1285     }
1286     __ bind(&loop);
1287     // Terminate at native context.
1288     __ ld(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1289     __ LoadRoot(a4, Heap::kNativeContextMapRootIndex);
1290     __ Branch(&fast, eq, temp, Operand(a4));
1291     // Check that extension is "the hole".
1292     __ ld(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
1293     __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1294     // Load next context in chain.
1295     __ ld(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
1296     __ Branch(&loop);
1297     __ bind(&fast);
1298   }
1299 
1300   // All extension objects were empty, and it is safe to use the normal
1301   // global load machinery.
1302   EmitGlobalVariableLoad(proxy, typeof_mode);
1303 }
1304 
1305 
1306 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1307                                                                 Label* slow) {
1308   DCHECK(var->IsContextSlot());
1309   Register context = cp;
1310   Register next = a3;
1311   Register temp = a4;
1312 
1313   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1314     if (s->num_heap_slots() > 0) {
1315       if (s->calls_sloppy_eval()) {
1316         // Check that extension is "the hole".
1317         __ ld(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1318         __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1319       }
1320       __ ld(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1321       // Walk the rest of the chain without clobbering cp.
1322       context = next;
1323     }
1324   }
1325   // Check that the last extension is "the hole".
1326   __ ld(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1327   __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1328 
1329   // This function is used only for loads, not stores, so it's safe to
1330   // return a cp-based operand (the write barrier cannot be allowed to
1331   // destroy the cp register).
1332   return ContextMemOperand(context, var->index());
1333 }
1334 
1335 
1336 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1337                                                   TypeofMode typeof_mode,
1338                                                   Label* slow, Label* done) {
1339   // Generate fast-case code for variables that might be shadowed by
1340   // eval-introduced variables. Eval is often used without actually
1341   // introducing new variables. In those cases, we do not want to
1342   // perform a runtime call for every variable in the scope
1343   // containing the eval.
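  // For example (illustrative): in `function f(x) { eval(src); return x; }`
  // the parameter `x` is DYNAMIC_LOCAL; unless the eval introduced its own
  // `x`, the context-slot fast path below loads it without a runtime call.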
1344   Variable* var = proxy->var();
1345   if (var->mode() == DYNAMIC_GLOBAL) {
1346     EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1347     __ Branch(done);
1348   } else if (var->mode() == DYNAMIC_LOCAL) {
1349     Variable* local = var->local_if_not_shadowed();
1350     __ ld(v0, ContextSlotOperandCheckExtensions(local, slow));
1351     if (local->mode() == LET || local->mode() == CONST ||
1352         local->mode() == CONST_LEGACY) {
1353       __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1354       __ dsubu(at, v0, at);  // Sub as compare: at == 0 on eq.
1355       if (local->mode() == CONST_LEGACY) {
1356         __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1357         __ Movz(v0, a0, at);  // Conditional move: return Undefined if TheHole.
1358       } else {  // LET || CONST
1359         __ Branch(done, ne, at, Operand(zero_reg));
1360         __ li(a0, Operand(var->name()));
1361         __ push(a0);
1362         __ CallRuntime(Runtime::kThrowReferenceError);
1363       }
1364     }
1365     __ Branch(done);
1366   }
1367 }
1368 
1369 
1370 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1371                                                TypeofMode typeof_mode) {
1372   Variable* var = proxy->var();
1373   DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1374          (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1375   __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
1376   __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
1377   __ li(LoadDescriptor::SlotRegister(),
1378         Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1379   CallLoadIC(typeof_mode);
1380 }
1381 
1382 
1383 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1384                                          TypeofMode typeof_mode) {
1385   // Record position before possible IC call.
1386   SetExpressionPosition(proxy);
1387   PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1388   Variable* var = proxy->var();
1389 
1390   // Three cases: global variables, lookup variables, and all other types of
1391   // variables.
1392   switch (var->location()) {
1393     case VariableLocation::GLOBAL:
1394     case VariableLocation::UNALLOCATED: {
1395       Comment cmnt(masm_, "[ Global variable");
1396       EmitGlobalVariableLoad(proxy, typeof_mode);
1397       context()->Plug(v0);
1398       break;
1399     }
1400 
1401     case VariableLocation::PARAMETER:
1402     case VariableLocation::LOCAL:
1403     case VariableLocation::CONTEXT: {
1404       DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1405       Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1406                                                : "[ Stack variable");
1407       if (NeedsHoleCheckForLoad(proxy)) {
1408         // Let and const need a read barrier.
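        // For example (a sketch): in `{ f(); let x = 1; function f() { x; } }`
        // f() reads x before its initialization, so the load must check for
        // the-hole and throw a ReferenceError (temporal dead zone).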
1409         GetVar(v0, var);
1410         __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1411         __ dsubu(at, v0, at);  // Sub as compare: at == 0 on eq.
1412         if (var->mode() == LET || var->mode() == CONST) {
1413           // Throw a reference error when using an uninitialized let/const
1414           // binding in harmony mode.
1415           Label done;
1416           __ Branch(&done, ne, at, Operand(zero_reg));
1417           __ li(a0, Operand(var->name()));
1418           __ push(a0);
1419           __ CallRuntime(Runtime::kThrowReferenceError);
1420           __ bind(&done);
1421         } else {
1422           // Uninitialized legacy const bindings read as undefined ("unholed").
1423           DCHECK(var->mode() == CONST_LEGACY);
1424           __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1425           __ Movz(v0, a0, at);  // Conditional move: Undefined if TheHole.
1426         }
1427         context()->Plug(v0);
1428         break;
1429       }
1430       context()->Plug(var);
1431       break;
1432     }
1433 
1434     case VariableLocation::LOOKUP: {
1435       Comment cmnt(masm_, "[ Lookup variable");
1436       Label done, slow;
1437       // Generate code for loading from variables potentially shadowed
1438       // by eval-introduced variables.
1439       EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1440       __ bind(&slow);
1441       __ li(a1, Operand(var->name()));
1442       __ Push(cp, a1);  // Context and name.
1443       Runtime::FunctionId function_id =
1444           typeof_mode == NOT_INSIDE_TYPEOF
1445               ? Runtime::kLoadLookupSlot
1446               : Runtime::kLoadLookupSlotNoReferenceError;
1447       __ CallRuntime(function_id);
1448       __ bind(&done);
1449       context()->Plug(v0);
1450     }
1451   }
1452 }
1453 
1454 
1455 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1456   Comment cmnt(masm_, "[ RegExpLiteral");
1457   __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1458   __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1459   __ li(a1, Operand(expr->pattern()));
1460   __ li(a0, Operand(Smi::FromInt(expr->flags())));
1461   FastCloneRegExpStub stub(isolate());
1462   __ CallStub(&stub);
1463   context()->Plug(v0);
1464 }
1465 
1466 
1467 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1468   Expression* expression = (property == NULL) ? NULL : property->value();
1469   if (expression == NULL) {
1470     __ LoadRoot(a1, Heap::kNullValueRootIndex);
1471     __ push(a1);
1472   } else {
1473     VisitForStackValue(expression);
1474     if (NeedsHomeObject(expression)) {
1475       DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1476              property->kind() == ObjectLiteral::Property::SETTER);
1477       int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1478       EmitSetHomeObject(expression, offset, property->GetSlot());
1479     }
1480   }
1481 }
1482 
1483 
1484 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1485   Comment cmnt(masm_, "[ ObjectLiteral");
1486 
1487   Handle<FixedArray> constant_properties = expr->constant_properties();
1488   __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1489   __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1490   __ li(a1, Operand(constant_properties));
1491   __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1492   if (MustCreateObjectLiteralWithRuntime(expr)) {
1493     __ Push(a3, a2, a1, a0);
1494     __ CallRuntime(Runtime::kCreateObjectLiteral);
1495   } else {
1496     FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1497     __ CallStub(&stub);
1498   }
1499   PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1500 
1501   // If result_saved is true the result is on top of the stack.  If
1502   // result_saved is false the result is in v0.
1503   bool result_saved = false;
1504 
1505   AccessorTable accessor_table(zone());
1506   int property_index = 0;
1507   for (; property_index < expr->properties()->length(); property_index++) {
1508     ObjectLiteral::Property* property = expr->properties()->at(property_index);
1509     if (property->is_computed_name()) break;
1510     if (property->IsCompileTimeValue()) continue;
1511 
1512     Literal* key = property->key()->AsLiteral();
1513     Expression* value = property->value();
1514     if (!result_saved) {
1515       __ push(v0);  // Save result on stack.
1516       result_saved = true;
1517     }
1518     switch (property->kind()) {
1519       case ObjectLiteral::Property::CONSTANT:
1520         UNREACHABLE();
1521       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1522         DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1523         // Fall through.
1524       case ObjectLiteral::Property::COMPUTED:
1525         // It is safe to use [[Put]] here because the boilerplate already
1526         // contains computed properties with an uninitialized value.
1527         if (key->value()->IsInternalizedString()) {
1528           if (property->emit_store()) {
1529             VisitForAccumulatorValue(value);
1530             __ mov(StoreDescriptor::ValueRegister(), result_register());
1531             DCHECK(StoreDescriptor::ValueRegister().is(a0));
1532             __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
1533             __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1534             EmitLoadStoreICSlot(property->GetSlot(0));
1535             CallStoreIC();
1536             PrepareForBailoutForId(key->id(), NO_REGISTERS);
1537 
1538             if (NeedsHomeObject(value)) {
1539               EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1540             }
1541           } else {
1542             VisitForEffect(value);
1543           }
1544           break;
1545         }
1546         // Duplicate receiver on stack.
1547         __ ld(a0, MemOperand(sp));
1548         __ push(a0);
1549         VisitForStackValue(key);
1550         VisitForStackValue(value);
1551         if (property->emit_store()) {
1552           if (NeedsHomeObject(value)) {
1553             EmitSetHomeObject(value, 2, property->GetSlot());
1554           }
1555           __ li(a0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes.
1556           __ push(a0);
1557           __ CallRuntime(Runtime::kSetProperty);
1558         } else {
1559           __ Drop(3);
1560         }
1561         break;
1562       case ObjectLiteral::Property::PROTOTYPE:
1563         // Duplicate receiver on stack.
1564         __ ld(a0, MemOperand(sp));
1565         __ push(a0);
1566         VisitForStackValue(value);
1567         DCHECK(property->emit_store());
1568         __ CallRuntime(Runtime::kInternalSetPrototype);
1569         PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1570                                NO_REGISTERS);
1571         break;
1572       case ObjectLiteral::Property::GETTER:
1573         if (property->emit_store()) {
1574           accessor_table.lookup(key)->second->getter = property;
1575         }
1576         break;
1577       case ObjectLiteral::Property::SETTER:
1578         if (property->emit_store()) {
1579           accessor_table.lookup(key)->second->setter = property;
1580         }
1581         break;
1582     }
1583   }
1584 
1585   // Emit code to define accessors, using only a single call to the runtime for
1586   // each pair of corresponding getters and setters.
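  // For example (a sketch): in `{ get x() {}, set x(v) {} }` both accessors
  // for "x" share one accessor_table entry and are installed with a single
  // Runtime::kDefineAccessorPropertyUnchecked call.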
1587   for (AccessorTable::Iterator it = accessor_table.begin();
1588        it != accessor_table.end();
1589        ++it) {
1590     __ ld(a0, MemOperand(sp));  // Duplicate receiver.
1591     __ push(a0);
1592     VisitForStackValue(it->first);
1593     EmitAccessor(it->second->getter);
1594     EmitAccessor(it->second->setter);
1595     __ li(a0, Operand(Smi::FromInt(NONE)));
1596     __ push(a0);
1597     __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked);
1598   }
1599 
1600   // Object literals have two parts. The "static" part on the left contains no
1601   // computed property names, and so we can compute its map ahead of time; see
1602   // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1603   // starts with the first computed property name, and continues with all
1604   // properties to its right.  All the code from above initializes the static
1605   // component of the object literal, and arranges for the map of the result to
1606   // reflect the static order in which the keys appear. For the dynamic
1607   // properties, we compile them into a series of "SetOwnProperty" runtime
1608   // calls. This will preserve insertion order.
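  // For example (a sketch): in
  //
  //   var o = { a: 1, b: 2, ["c"]: 3, d: 4 };
  //
  // "a" and "b" form the static part handled above, while ["c"] and every
  // property after it ("d") form the dynamic part and are defined one by one
  // by the runtime calls emitted below.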
1609   for (; property_index < expr->properties()->length(); property_index++) {
1610     ObjectLiteral::Property* property = expr->properties()->at(property_index);
1611 
1612     Expression* value = property->value();
1613     if (!result_saved) {
1614       __ push(v0);  // Save result on the stack
1615       result_saved = true;
1616     }
1617 
1618     __ ld(a0, MemOperand(sp));  // Duplicate receiver.
1619     __ push(a0);
1620 
1621     if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1622       DCHECK(!property->is_computed_name());
1623       VisitForStackValue(value);
1624       DCHECK(property->emit_store());
1625       __ CallRuntime(Runtime::kInternalSetPrototype);
1626       PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1627                              NO_REGISTERS);
1628     } else {
1629       EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1630       VisitForStackValue(value);
1631       if (NeedsHomeObject(value)) {
1632         EmitSetHomeObject(value, 2, property->GetSlot());
1633       }
1634 
1635       switch (property->kind()) {
1636         case ObjectLiteral::Property::CONSTANT:
1637         case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1638         case ObjectLiteral::Property::COMPUTED:
1639           if (property->emit_store()) {
1640             __ li(a0, Operand(Smi::FromInt(NONE)));
1641             __ push(a0);
1642             __ CallRuntime(Runtime::kDefineDataPropertyUnchecked);
1643           } else {
1644             __ Drop(3);
1645           }
1646           break;
1647 
1648         case ObjectLiteral::Property::PROTOTYPE:
1649           UNREACHABLE();
1650           break;
1651 
1652         case ObjectLiteral::Property::GETTER:
1653           __ li(a0, Operand(Smi::FromInt(NONE)));
1654           __ push(a0);
1655           __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked);
1656           break;
1657 
1658         case ObjectLiteral::Property::SETTER:
1659           __ li(a0, Operand(Smi::FromInt(NONE)));
1660           __ push(a0);
1661           __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked);
1662           break;
1663       }
1664     }
1665   }
1666 
1667   if (expr->has_function()) {
1668     DCHECK(result_saved);
1669     __ ld(a0, MemOperand(sp));
1670     __ push(a0);
1671     __ CallRuntime(Runtime::kToFastProperties);
1672   }
1673 
1674   if (result_saved) {
1675     context()->PlugTOS();
1676   } else {
1677     context()->Plug(v0);
1678   }
1679 }
1680 
1681 
1682 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1683   Comment cmnt(masm_, "[ ArrayLiteral");
1684 
1685   Handle<FixedArray> constant_elements = expr->constant_elements();
1686   bool has_fast_elements =
1687       IsFastObjectElementsKind(expr->constant_elements_kind());
1688 
1689   AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1690   if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1691     // If the only customer of allocation sites is element-kind transitioning,
1692     // we can turn tracking off, since fast object elements cannot transition further.
1693     allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1694   }
1695 
1696   __ mov(a0, result_register());
1697   __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1698   __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1699   __ li(a1, Operand(constant_elements));
1700   if (MustCreateArrayLiteralWithRuntime(expr)) {
1701     __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1702     __ Push(a3, a2, a1, a0);
1703     __ CallRuntime(Runtime::kCreateArrayLiteral);
1704   } else {
1705     FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1706     __ CallStub(&stub);
1707   }
1708   PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1709 
1710   bool result_saved = false;  // Is the result saved to the stack?
1711   ZoneList<Expression*>* subexprs = expr->values();
1712   int length = subexprs->length();
1713 
1714   // Emit code to evaluate all the non-constant subexpressions and to store
1715   // them into the newly cloned array.
1716   int array_index = 0;
1717   for (; array_index < length; array_index++) {
1718     Expression* subexpr = subexprs->at(array_index);
1719     if (subexpr->IsSpread()) break;
1720 
1721     // If the subexpression is a literal or a simple materialized literal it
1722     // is already set in the cloned array.
1723     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1724 
1725     if (!result_saved) {
1726       __ push(v0);  // array literal
1727       result_saved = true;
1728     }
1729 
1730     VisitForAccumulatorValue(subexpr);
1731 
1732     __ li(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
1733     __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1734     __ mov(StoreDescriptor::ValueRegister(), result_register());
1735     EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1736     Handle<Code> ic =
1737         CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1738     CallIC(ic);
1739 
1740     PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1741   }
1742 
1743   // In case the array literal contains spread expressions it has two parts.
1744   // The first part is the "static" array, which has a literal index and was
1745   // handled above. The second part starts at the first spread expression
1746   // (inclusive); these elements get appended to the array. Note that the
1747   // number of elements an iterable produces is unknown ahead of time.
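  // For example (a sketch): in
  //
  //   var a = [1, 2, ...iter, 3];
  //
  // 1 and 2 were stored by index above, while ...iter and the trailing 3 are
  // appended one element at a time below, because the number of elements the
  // iterable produces is only known once it has been consumed.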
1748   if (array_index < length && result_saved) {
1749     __ Pop(v0);
1750     result_saved = false;
1751   }
1752   for (; array_index < length; array_index++) {
1753     Expression* subexpr = subexprs->at(array_index);
1754 
1755     __ Push(v0);
1756     if (subexpr->IsSpread()) {
1757       VisitForStackValue(subexpr->AsSpread()->expression());
1758       __ InvokeBuiltin(Context::CONCAT_ITERABLE_TO_ARRAY_BUILTIN_INDEX,
1759                        CALL_FUNCTION);
1760     } else {
1761       VisitForStackValue(subexpr);
1762       __ CallRuntime(Runtime::kAppendElement);
1763     }
1764 
1765     PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1766   }
1767 
1768   if (result_saved) {
1769     context()->PlugTOS();
1770   } else {
1771     context()->Plug(v0);
1772   }
1773 }
1774 
1775 
1776 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1777   DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1778 
1779   Comment cmnt(masm_, "[ Assignment");
1780   SetExpressionPosition(expr, INSERT_BREAK);
1781 
1782   Property* property = expr->target()->AsProperty();
1783   LhsKind assign_type = Property::GetAssignType(property);
1784 
1785   // Evaluate LHS expression.
1786   switch (assign_type) {
1787     case VARIABLE:
1788       // Nothing to do here.
1789       break;
1790     case NAMED_PROPERTY:
1791       if (expr->is_compound()) {
1792         // We need the receiver both on the stack and in the register.
1793         VisitForStackValue(property->obj());
1794         __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1795       } else {
1796         VisitForStackValue(property->obj());
1797       }
1798       break;
1799     case NAMED_SUPER_PROPERTY:
1800       VisitForStackValue(
1801           property->obj()->AsSuperPropertyReference()->this_var());
1802       VisitForAccumulatorValue(
1803           property->obj()->AsSuperPropertyReference()->home_object());
1804       __ Push(result_register());
1805       if (expr->is_compound()) {
1806         const Register scratch = a1;
1807         __ ld(scratch, MemOperand(sp, kPointerSize));
1808         __ Push(scratch, result_register());
1809       }
1810       break;
1811     case KEYED_SUPER_PROPERTY: {
1812       const Register scratch = a1;
1813       VisitForStackValue(
1814           property->obj()->AsSuperPropertyReference()->this_var());
1815       VisitForAccumulatorValue(
1816           property->obj()->AsSuperPropertyReference()->home_object());
1817       __ Move(scratch, result_register());
1818       VisitForAccumulatorValue(property->key());
1819       __ Push(scratch, result_register());
1820       if (expr->is_compound()) {
1821         const Register scratch1 = a4;
1822         __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
1823         __ Push(scratch1, scratch, result_register());
1824       }
1825       break;
1826     }
1827     case KEYED_PROPERTY:
1828       // We need the key and receiver both on the stack and in the load IC's register arguments.
1829       if (expr->is_compound()) {
1830         VisitForStackValue(property->obj());
1831         VisitForStackValue(property->key());
1832         __ ld(LoadDescriptor::ReceiverRegister(),
1833               MemOperand(sp, 1 * kPointerSize));
1834         __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1835       } else {
1836         VisitForStackValue(property->obj());
1837         VisitForStackValue(property->key());
1838       }
1839       break;
1840   }
1841 
1842   // For compound assignments we need another deoptimization point after the
1843   // variable/property load.
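  // For example (a sketch): `o.x += 1` loads o.x (recording a bailout point
  // right after the load), pushes it as the left operand, evaluates 1,
  // applies Token::ADD, and only then stores the result back into o.x.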
1844   if (expr->is_compound()) {
1845     { AccumulatorValueContext context(this);
1846       switch (assign_type) {
1847         case VARIABLE:
1848           EmitVariableLoad(expr->target()->AsVariableProxy());
1849           PrepareForBailout(expr->target(), TOS_REG);
1850           break;
1851         case NAMED_PROPERTY:
1852           EmitNamedPropertyLoad(property);
1853           PrepareForBailoutForId(property->LoadId(), TOS_REG);
1854           break;
1855         case NAMED_SUPER_PROPERTY:
1856           EmitNamedSuperPropertyLoad(property);
1857           PrepareForBailoutForId(property->LoadId(), TOS_REG);
1858           break;
1859         case KEYED_SUPER_PROPERTY:
1860           EmitKeyedSuperPropertyLoad(property);
1861           PrepareForBailoutForId(property->LoadId(), TOS_REG);
1862           break;
1863         case KEYED_PROPERTY:
1864           EmitKeyedPropertyLoad(property);
1865           PrepareForBailoutForId(property->LoadId(), TOS_REG);
1866           break;
1867       }
1868     }
1869 
1870     Token::Value op = expr->binary_op();
1871     __ push(v0);  // Left operand goes on the stack.
1872     VisitForAccumulatorValue(expr->value());
1873 
1874     AccumulatorValueContext context(this);
1875     if (ShouldInlineSmiCase(op)) {
1876       EmitInlineSmiBinaryOp(expr->binary_operation(),
1877                             op,
1878                             expr->target(),
1879                             expr->value());
1880     } else {
1881       EmitBinaryOp(expr->binary_operation(), op);
1882     }
1883 
1884     // Deoptimization point in case the binary operation may have side effects.
1885     PrepareForBailout(expr->binary_operation(), TOS_REG);
1886   } else {
1887     VisitForAccumulatorValue(expr->value());
1888   }
1889 
1890   SetExpressionPosition(expr);
1891 
1892   // Store the value.
1893   switch (assign_type) {
1894     case VARIABLE:
1895       EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1896                              expr->op(), expr->AssignmentSlot());
1897       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1898       context()->Plug(v0);
1899       break;
1900     case NAMED_PROPERTY:
1901       EmitNamedPropertyAssignment(expr);
1902       break;
1903     case NAMED_SUPER_PROPERTY:
1904       EmitNamedSuperPropertyStore(property);
1905       context()->Plug(v0);
1906       break;
1907     case KEYED_SUPER_PROPERTY:
1908       EmitKeyedSuperPropertyStore(property);
1909       context()->Plug(v0);
1910       break;
1911     case KEYED_PROPERTY:
1912       EmitKeyedPropertyAssignment(expr);
1913       break;
1914   }
1915 }
1916 
1917 
1918 void FullCodeGenerator::VisitYield(Yield* expr) {
1919   Comment cmnt(masm_, "[ Yield");
1920   SetExpressionPosition(expr);
1921 
1922   // Evaluate yielded value first; the initial iterator definition depends on
1923   // this.  It stays on the stack while we update the iterator.
1924   VisitForStackValue(expr->expression());
1925 
1926   switch (expr->yield_kind()) {
1927     case Yield::kSuspend:
1928       // Pop value from top-of-stack slot; box result into result register.
1929       EmitCreateIteratorResult(false);
1930       __ push(result_register());
1931       // Fall through.
1932     case Yield::kInitial: {
1933       Label suspend, continuation, post_runtime, resume;
1934 
1935       __ jmp(&suspend);
1936       __ bind(&continuation);
1937       __ RecordGeneratorContinuation();
1938       __ jmp(&resume);
1939 
1940       __ bind(&suspend);
1941       VisitForAccumulatorValue(expr->generator_object());
1942       DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1943       __ li(a1, Operand(Smi::FromInt(continuation.pos())));
1944       __ sd(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
1945       __ sd(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
1946       __ mov(a1, cp);
1947       __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
1948                           kRAHasBeenSaved, kDontSaveFPRegs);
1949       __ Daddu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1950       __ Branch(&post_runtime, eq, sp, Operand(a1));
1951       __ push(v0);  // generator object
1952       __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1953       __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1954       __ bind(&post_runtime);
1955       __ pop(result_register());
1956       EmitReturnSequence();
1957 
1958       __ bind(&resume);
1959       context()->Plug(result_register());
1960       break;
1961     }
1962 
1963     case Yield::kFinal: {
1964       VisitForAccumulatorValue(expr->generator_object());
1965       __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
1966       __ sd(a1, FieldMemOperand(result_register(),
1967                                 JSGeneratorObject::kContinuationOffset));
1968       // Pop value from top-of-stack slot, box result into result register.
1969       EmitCreateIteratorResult(true);
1970       EmitUnwindBeforeReturn();
1971       EmitReturnSequence();
1972       break;
1973     }
1974 
1975     case Yield::kDelegating: {
1976       VisitForStackValue(expr->generator_object());
1977 
1978       // Initial stack layout is as follows:
1979       // [sp + 1 * kPointerSize] iter
1980       // [sp + 0 * kPointerSize] g
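      // Taken together, the labeled blocks below implement roughly this
      // sketch (iter and g are the two stack slots shown above):
      //
      //   var f = "next", received = undefined, result;
      //   do {
      //     result = iter[f](received);            // l_call
      //     if (result.done) break;
      //     received = yield result;               // l_try .. l_resume
      //     f = "next";                            // l_next
      //   } while (true);
      //   // The yield* expression's value is result.value. An exception
      //   // thrown into the suspended generator instead sets f = "throw"
      //   // and received = e before re-entering l_call (l_catch).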
1981 
1982       Label l_catch, l_try, l_suspend, l_continuation, l_resume;
1983       Label l_next, l_call;
1984       Register load_receiver = LoadDescriptor::ReceiverRegister();
1985       Register load_name = LoadDescriptor::NameRegister();
1986       // Initial send value is undefined.
1987       __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1988       __ Branch(&l_next);
1989 
1990       // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
1991       __ bind(&l_catch);
1992       __ mov(a0, v0);
1993       __ LoadRoot(a2, Heap::kthrow_stringRootIndex);  // "throw"
1994       __ ld(a3, MemOperand(sp, 1 * kPointerSize));    // iter
1995       __ Push(a2, a3, a0);                            // "throw", iter, except
1996       __ jmp(&l_call);
1997 
1998       // try { received = %yield result }
1999       // Shuffle the received result above a try handler and yield it without
2000       // re-boxing.
2001       __ bind(&l_try);
2002       __ pop(a0);                                        // result
2003       int handler_index = NewHandlerTableEntry();
2004       EnterTryBlock(handler_index, &l_catch);
2005       const int try_block_size = TryCatch::kElementCount * kPointerSize;
2006       __ push(a0);                                       // result
2007 
2008       __ jmp(&l_suspend);
2009       __ bind(&l_continuation);
2010       __ RecordGeneratorContinuation();
2011       __ mov(a0, v0);
2012       __ jmp(&l_resume);
2013 
2014       __ bind(&l_suspend);
2015       const int generator_object_depth = kPointerSize + try_block_size;
2016       __ ld(a0, MemOperand(sp, generator_object_depth));
2017       __ push(a0);                                       // g
2018       __ Push(Smi::FromInt(handler_index));              // handler-index
2019       DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2020       __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
2021       __ sd(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
2022       __ sd(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
2023       __ mov(a1, cp);
2024       __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
2025                           kRAHasBeenSaved, kDontSaveFPRegs);
2026       __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
2027       __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2028       __ pop(v0);                                      // result
2029       EmitReturnSequence();
2030       __ mov(a0, v0);
2031       __ bind(&l_resume);                              // received in a0
2032       ExitTryBlock(handler_index);
2033 
2034       // receiver = iter; f = 'next'; arg = received;
2035       __ bind(&l_next);
2036       __ LoadRoot(load_name, Heap::knext_stringRootIndex);  // "next"
2037       __ ld(a3, MemOperand(sp, 1 * kPointerSize));          // iter
2038       __ Push(load_name, a3, a0);                      // "next", iter, received
2039 
2040       // result = receiver[f](arg);
2041       __ bind(&l_call);
2042       __ ld(load_receiver, MemOperand(sp, kPointerSize));
2043       __ ld(load_name, MemOperand(sp, 2 * kPointerSize));
2044       __ li(LoadDescriptor::SlotRegister(),
2045             Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2046       Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
2047       CallIC(ic, TypeFeedbackId::None());
2048       __ mov(a0, v0);
2049       __ mov(a1, a0);
2050       __ sd(a1, MemOperand(sp, 2 * kPointerSize));
2051       SetCallPosition(expr);
2052       __ li(a0, Operand(1));
2053       __ Call(
2054           isolate()->builtins()->Call(ConvertReceiverMode::kNotNullOrUndefined),
2055           RelocInfo::CODE_TARGET);
2056 
2057       __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2058       __ Drop(1);  // The function is still on the stack; drop it.
2059 
2060       // if (!result.done) goto l_try;
2061       __ Move(load_receiver, v0);
2062 
2063       __ push(load_receiver);                               // save result
2064       __ LoadRoot(load_name, Heap::kdone_stringRootIndex);  // "done"
2065       __ li(LoadDescriptor::SlotRegister(),
2066             Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2067       CallLoadIC(NOT_INSIDE_TYPEOF);  // v0=result.done
2068       __ mov(a0, v0);
2069       Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2070       CallIC(bool_ic);
2071       __ LoadRoot(at, Heap::kTrueValueRootIndex);
2072       __ Branch(&l_try, ne, result_register(), Operand(at));
2073 
2074       // result.value
2075       __ pop(load_receiver);                                 // result
2076       __ LoadRoot(load_name, Heap::kvalue_stringRootIndex);  // "value"
2077       __ li(LoadDescriptor::SlotRegister(),
2078             Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2079       CallLoadIC(NOT_INSIDE_TYPEOF);                         // v0=result.value
2080       context()->DropAndPlug(2, v0);                         // drop iter and g
2081       break;
2082     }
2083   }
2084 }
2085 
2086 
2087 void FullCodeGenerator::EmitGeneratorResume(Expression* generator,
2088                                             Expression* value,
2089                                             JSGeneratorObject::ResumeMode resume_mode) {
2090   // The value stays in a0, and is ultimately read by the resumed generator, as
2091   // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2092   // is read to throw the value when the resumed generator is already closed.
2093   // a1 will hold the generator object until the activation has been resumed.
2094   VisitForStackValue(generator);
2095   VisitForAccumulatorValue(value);
2096   __ pop(a1);
2097 
2098   // Load suspended function and context.
2099   __ ld(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
2100   __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
2101 
2102   // Load receiver and store as the first argument.
2103   __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
2104   __ push(a2);
2105 
2106   // Push holes for the rest of the arguments to the generator function.
2107   __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
2108   // The argument count is stored as int32_t on 64-bit platforms.
2109   // TODO(plind): Smi on 32-bit platforms.
2110   __ lw(a3,
2111         FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
2112   __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
2113   Label push_argument_holes, push_frame;
2114   __ bind(&push_argument_holes);
2115   __ Dsubu(a3, a3, Operand(1));
2116   __ Branch(&push_frame, lt, a3, Operand(zero_reg));
2117   __ push(a2);
2118   __ jmp(&push_argument_holes);
2119 
2120   // Enter a new JavaScript frame, and initialize its slots as they were when
2121   // the generator was suspended.
2122   Label resume_frame, done;
2123   __ bind(&push_frame);
2124   __ Call(&resume_frame);
2125   __ jmp(&done);
2126   __ bind(&resume_frame);
2127   // ra = return address.
2128   // fp = caller's frame pointer.
2129   // cp = callee's context.
2130   // a4 = callee's JS function.
2131   __ Push(ra, fp, cp, a4);
2132   // Adjust FP to point to saved FP.
2133   __ Daddu(fp, sp, 2 * kPointerSize);
2134 
2135   // Load the operand stack size.
2136   __ ld(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
2137   __ ld(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
2138   __ SmiUntag(a3);
2139 
2140   // If we are sending a value and there is no operand stack, we can jump back
2141   // in directly.
2142   if (resume_mode == JSGeneratorObject::NEXT) {
2143     Label slow_resume;
2144     __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
2145     __ ld(a3, FieldMemOperand(a4, JSFunction::kCodeEntryOffset));
2146     __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2147     __ SmiUntag(a2);
2148     __ Daddu(a3, a3, Operand(a2));
2149     __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2150     __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2151     __ Jump(a3);
2152     __ bind(&slow_resume);
2153   }
2154 
2155   // Otherwise, we push holes for the operand stack and call the runtime to fix
2156   // up the stack and the handlers.
2157   Label push_operand_holes, call_resume;
2158   __ bind(&push_operand_holes);
2159   __ Dsubu(a3, a3, Operand(1));
2160   __ Branch(&call_resume, lt, a3, Operand(zero_reg));
2161   __ push(a2);
2162   __ Branch(&push_operand_holes);
2163   __ bind(&call_resume);
2164   DCHECK(!result_register().is(a1));
2165   __ Push(a1, result_register());
2166   __ Push(Smi::FromInt(resume_mode));
2167   __ CallRuntime(Runtime::kResumeJSGeneratorObject);
2168   // Not reached: the runtime call returns elsewhere.
2169   __ stop("not-reached");
2170 
2171   __ bind(&done);
2172   context()->Plug(result_register());
2173 }
2174 
2175 
2176 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
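  // Allocates a JSIteratorResult and fills it in, producing (roughly) the
  // object { value: <popped from the stack>, done: <done> } with the native
  // context's iterator result map. The result is left in v0.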
2177   Label allocate, done_allocate;
2178 
2179   __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &allocate, TAG_OBJECT);
2180   __ jmp(&done_allocate);
2181 
2182   __ bind(&allocate);
2183   __ Push(Smi::FromInt(JSIteratorResult::kSize));
2184   __ CallRuntime(Runtime::kAllocateInNewSpace);
2185 
2186   __ bind(&done_allocate);
2187   __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
2188   __ pop(a2);
2189   __ LoadRoot(a3,
2190               done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
2191   __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex);
2192   __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2193   __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2194   __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
2195   __ sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
2196   __ sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
2197   STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
2198 }
2199 
2200 
2201 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2202   SetExpressionPosition(prop);
2203   Literal* key = prop->key()->AsLiteral();
2204   DCHECK(!prop->IsSuperAccess());
2205 
2206   __ li(LoadDescriptor::NameRegister(), Operand(key->value()));
2207   __ li(LoadDescriptor::SlotRegister(),
2208         Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2209   CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2210 }
2211 
2212 
2213 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2214   // Stack: receiver, home_object.
2215   SetExpressionPosition(prop);
2216 
2217   Literal* key = prop->key()->AsLiteral();
2218   DCHECK(!key->value()->IsSmi());
2219   DCHECK(prop->IsSuperAccess());
2220 
2221   __ Push(key->value());
2222   __ Push(Smi::FromInt(language_mode()));
2223   __ CallRuntime(Runtime::kLoadFromSuper);
2224 }
2225 
2226 
2227 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2228   // Call keyed load IC. It has register arguments receiver and key.
2229   SetExpressionPosition(prop);
2230 
2231   Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2232   __ li(LoadDescriptor::SlotRegister(),
2233         Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2234   CallIC(ic);
2235 }
2236 
2237 
2238 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2239   // Stack: receiver, home_object, key.
2240   SetExpressionPosition(prop);
2241   __ Push(Smi::FromInt(language_mode()));
2242   __ CallRuntime(Runtime::kLoadKeyedFromSuper);
2243 }
2244 
2245 
2246 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2247                                               Token::Value op,
2248                                               Expression* left_expr,
2249                                               Expression* right_expr) {
2250   Label done, smi_case, stub_call;
2251 
2252   Register scratch1 = a2;
2253   Register scratch2 = a3;
2254 
2255   // Get the arguments.
2256   Register left = a1;
2257   Register right = a0;
2258   __ pop(left);
2259   __ mov(a0, result_register());
2260 
2261   // Perform combined smi check on both operands.
2262   __ Or(scratch1, left, Operand(right));
2263   STATIC_ASSERT(kSmiTag == 0);
2264   JumpPatchSite patch_site(masm_);
2265   patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2266 
2267   __ bind(&stub_call);
2268   Handle<Code> code =
2269       CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2270   CallIC(code, expr->BinaryOperationFeedbackId());
2271   patch_site.EmitPatchInfo();
2272   __ jmp(&done);
2273 
2274   __ bind(&smi_case);
2275   // Smi case. This code works the same way as the smi-smi case in the
2276   // type-recording binary operation stub.
2277   switch (op) {
2278     case Token::SAR:
2279       __ GetLeastBitsFromSmi(scratch1, right, 5);
2280       __ dsrav(right, left, scratch1);
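      // On MIPS64 a smi is stored in the upper 32 bits of the register, so
      // clearing the lower 32 bits of the shifted value retags it as a smi.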
2281       __ And(v0, right, Operand(0xffffffff00000000L));
2282       break;
2283     case Token::SHL: {
2284       __ SmiUntag(scratch1, left);
2285       __ GetLeastBitsFromSmi(scratch2, right, 5);
2286       __ dsllv(scratch1, scratch1, scratch2);
2287       __ SmiTag(v0, scratch1);
2288       break;
2289     }
2290     case Token::SHR: {
2291       __ SmiUntag(scratch1, left);
2292       __ GetLeastBitsFromSmi(scratch2, right, 5);
2293       __ dsrlv(scratch1, scratch1, scratch2);
2294       __ And(scratch2, scratch1, 0x80000000);
2295       __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
2296       __ SmiTag(v0, scratch1);
2297       break;
2298     }
2299     case Token::ADD:
2300       __ DadduAndCheckForOverflow(v0, left, right, scratch1);
2301       __ BranchOnOverflow(&stub_call, scratch1);
2302       break;
2303     case Token::SUB:
2304       __ DsubuAndCheckForOverflow(v0, left, right, scratch1);
2305       __ BranchOnOverflow(&stub_call, scratch1);
2306       break;
2307     case Token::MUL: {
2308       __ Dmulh(v0, left, right);
2309       __ dsra32(scratch2, v0, 0);
2310       __ sra(scratch1, v0, 31);
2311       __ Branch(USE_DELAY_SLOT, &stub_call, ne, scratch2, Operand(scratch1));
2312       __ SmiTag(v0);
2313       __ Branch(USE_DELAY_SLOT, &done, ne, v0, Operand(zero_reg));
2314       __ Daddu(scratch2, right, left);
2315       __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2316       DCHECK(Smi::FromInt(0) == 0);
2317       __ mov(v0, zero_reg);
2318       break;
2319     }
2320     case Token::BIT_OR:
2321       __ Or(v0, left, Operand(right));
2322       break;
2323     case Token::BIT_AND:
2324       __ And(v0, left, Operand(right));
2325       break;
2326     case Token::BIT_XOR:
2327       __ Xor(v0, left, Operand(right));
2328       break;
2329     default:
2330       UNREACHABLE();
2331   }
2332 
2333   __ bind(&done);
2334   context()->Plug(v0);
2335 }
2336 
2337 
2338 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2339   // Constructor is in v0.
2340   DCHECK(lit != NULL);
2341   __ push(v0);
2342 
2343   // No access check is needed here since the constructor is created by the
2344   // class literal.
2345   Register scratch = a1;
2346   __ ld(scratch,
2347         FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset));
2348   __ push(scratch);
2349 
2350   for (int i = 0; i < lit->properties()->length(); i++) {
2351     ObjectLiteral::Property* property = lit->properties()->at(i);
2352     Expression* value = property->value();
2353 
2354     if (property->is_static()) {
2355       __ ld(scratch, MemOperand(sp, kPointerSize));  // constructor
2356     } else {
2357       __ ld(scratch, MemOperand(sp, 0));  // prototype
2358     }
2359     __ push(scratch);
2360     EmitPropertyKey(property, lit->GetIdForProperty(i));
2361 
2362     // The static prototype property is read-only. We handle the non-computed
2363     // property name case in the parser. Since this is the only case where we
2364     // need to check for an own read-only property, we special-case it so we
2365     // do not need to perform the check for every property.
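    // For example (a sketch): `class C { static ["prototype"]() {} }` must
    // throw a TypeError here at runtime, whereas the non-computed form
    // `class C { static prototype() {} }` is already rejected by the parser.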
2366     if (property->is_static() && property->is_computed_name()) {
2367       __ CallRuntime(Runtime::kThrowIfStaticPrototype);
2368       __ push(v0);
2369     }
2370 
2371     VisitForStackValue(value);
2372     if (NeedsHomeObject(value)) {
2373       EmitSetHomeObject(value, 2, property->GetSlot());
2374     }
2375 
2376     switch (property->kind()) {
2377       case ObjectLiteral::Property::CONSTANT:
2378       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2379       case ObjectLiteral::Property::PROTOTYPE:
2380         UNREACHABLE();
2381       case ObjectLiteral::Property::COMPUTED:
2382         __ CallRuntime(Runtime::kDefineClassMethod);
2383         break;
2384 
2385       case ObjectLiteral::Property::GETTER:
2386         __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
2387         __ push(a0);
2388         __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked);
2389         break;
2390 
2391       case ObjectLiteral::Property::SETTER:
2392         __ li(a0, Operand(Smi::FromInt(DONT_ENUM)));
2393         __ push(a0);
2394         __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked);
2395         break;
2396 
2397       default:
2398         UNREACHABLE();
2399     }
2400   }
2401 
2402   // Set both the prototype and constructor to have fast properties, and also
2403   // freeze them in strong mode.
2404   __ CallRuntime(Runtime::kFinalizeClassDefinition);
2405 }
2406 
2407 
2408 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2409   __ mov(a0, result_register());
2410   __ pop(a1);
2411   Handle<Code> code =
2412       CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2413   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
2414   CallIC(code, expr->BinaryOperationFeedbackId());
2415   patch_site.EmitPatchInfo();
2416   context()->Plug(v0);
2417 }
2418 
2419 
2420 void FullCodeGenerator::EmitAssignment(Expression* expr,
2421                                        FeedbackVectorSlot slot) {
2422   DCHECK(expr->IsValidReferenceExpressionOrThis());
2423 
2424   Property* prop = expr->AsProperty();
2425   LhsKind assign_type = Property::GetAssignType(prop);
2426 
2427   switch (assign_type) {
2428     case VARIABLE: {
2429       Variable* var = expr->AsVariableProxy()->var();
2430       EffectContext context(this);
2431       EmitVariableAssignment(var, Token::ASSIGN, slot);
2432       break;
2433     }
2434     case NAMED_PROPERTY: {
2435       __ push(result_register());  // Preserve value.
2436       VisitForAccumulatorValue(prop->obj());
2437       __ mov(StoreDescriptor::ReceiverRegister(), result_register());
2438       __ pop(StoreDescriptor::ValueRegister());  // Restore value.
2439       __ li(StoreDescriptor::NameRegister(),
2440             Operand(prop->key()->AsLiteral()->value()));
2441       EmitLoadStoreICSlot(slot);
2442       CallStoreIC();
2443       break;
2444     }
2445     case NAMED_SUPER_PROPERTY: {
2446       __ Push(v0);
2447       VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2448       VisitForAccumulatorValue(
2449           prop->obj()->AsSuperPropertyReference()->home_object());
2450       // stack: value, this; v0: home_object
2451       Register scratch = a2;
2452       Register scratch2 = a3;
2453       __ mov(scratch, result_register());             // home_object
2454       __ ld(v0, MemOperand(sp, kPointerSize));        // value
2455       __ ld(scratch2, MemOperand(sp, 0));             // this
2456       __ sd(scratch2, MemOperand(sp, kPointerSize));  // this
2457       __ sd(scratch, MemOperand(sp, 0));              // home_object
2458       // stack: this, home_object; v0: value
2459       EmitNamedSuperPropertyStore(prop);
2460       break;
2461     }
2462     case KEYED_SUPER_PROPERTY: {
2463       __ Push(v0);
2464       VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2465       VisitForStackValue(
2466           prop->obj()->AsSuperPropertyReference()->home_object());
2467       VisitForAccumulatorValue(prop->key());
2468       Register scratch = a2;
2469       Register scratch2 = a3;
2470       __ ld(scratch2, MemOperand(sp, 2 * kPointerSize));  // value
2471       // stack: value, this, home_object; v0: key, a3: value
2472       __ ld(scratch, MemOperand(sp, kPointerSize));  // this
2473       __ sd(scratch, MemOperand(sp, 2 * kPointerSize));
2474       __ ld(scratch, MemOperand(sp, 0));  // home_object
2475       __ sd(scratch, MemOperand(sp, kPointerSize));
2476       __ sd(v0, MemOperand(sp, 0));
2477       __ Move(v0, scratch2);
2478       // stack: this, home_object, key; v0: value.
2479       EmitKeyedSuperPropertyStore(prop);
2480       break;
2481     }
2482     case KEYED_PROPERTY: {
2483       __ push(result_register());  // Preserve value.
2484       VisitForStackValue(prop->obj());
2485       VisitForAccumulatorValue(prop->key());
2486       __ Move(StoreDescriptor::NameRegister(), result_register());
2487       __ Pop(StoreDescriptor::ValueRegister(),
2488              StoreDescriptor::ReceiverRegister());
2489       EmitLoadStoreICSlot(slot);
2490       Handle<Code> ic =
2491           CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2492       CallIC(ic);
2493       break;
2494     }
2495   }
2496   context()->Plug(v0);
2497 }
2498 
2499 
2500 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2501     Variable* var, MemOperand location) {
2502   __ sd(result_register(), location);
2503   if (var->IsContextSlot()) {
2504     // RecordWrite may destroy all its register arguments.
2505     __ Move(a3, result_register());
2506     int offset = Context::SlotOffset(var->index());
2507     __ RecordWriteContextSlot(
2508         a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2509   }
2510 }
2511 
2512 
2513 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2514                                                FeedbackVectorSlot slot) {
2515   if (var->IsUnallocated()) {
2516     // Global var, const, or let.
2517     __ mov(StoreDescriptor::ValueRegister(), result_register());
2518     __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
2519     __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2520     EmitLoadStoreICSlot(slot);
2521     CallStoreIC();
2522 
2523   } else if (var->mode() == LET && op != Token::INIT) {
2524     // Non-initializing assignment to let variable needs a write barrier.
2525     DCHECK(!var->IsLookupSlot());
2526     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2527     Label assign;
2528     MemOperand location = VarOperand(var, a1);
2529     __ ld(a3, location);
2530     __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2531     __ Branch(&assign, ne, a3, Operand(a4));
2532     __ li(a3, Operand(var->name()));
2533     __ push(a3);
2534     __ CallRuntime(Runtime::kThrowReferenceError);
2535     // Perform the assignment.
2536     __ bind(&assign);
2537     EmitStoreToStackLocalOrContextSlot(var, location);
2538 
2539   } else if (var->mode() == CONST && op != Token::INIT) {
2540     // Assignment to const variable needs a write barrier.
2541     DCHECK(!var->IsLookupSlot());
2542     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2543     Label const_error;
2544     MemOperand location = VarOperand(var, a1);
2545     __ ld(a3, location);
2546     __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2547     __ Branch(&const_error, ne, a3, Operand(at));
2548     __ li(a3, Operand(var->name()));
2549     __ push(a3);
2550     __ CallRuntime(Runtime::kThrowReferenceError);
2551     __ bind(&const_error);
2552     __ CallRuntime(Runtime::kThrowConstAssignError);
2553 
2554   } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2555     // Initializing assignment to const {this} needs a write barrier.
2556     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2557     Label uninitialized_this;
2558     MemOperand location = VarOperand(var, a1);
2559     __ ld(a3, location);
2560     __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2561     __ Branch(&uninitialized_this, eq, a3, Operand(at));
2562     __ li(a0, Operand(var->name()));
2563     __ Push(a0);
2564     __ CallRuntime(Runtime::kThrowReferenceError);
2565     __ bind(&uninitialized_this);
2566     EmitStoreToStackLocalOrContextSlot(var, location);
2567 
2568   } else if (!var->is_const_mode() ||
2569              (var->mode() == CONST && op == Token::INIT)) {
2570     if (var->IsLookupSlot()) {
2571       // Assignment to var.
2572       __ li(a4, Operand(var->name()));
2573       __ li(a3, Operand(Smi::FromInt(language_mode())));
2574       // sp[0]  : language mode.
2575       // sp[8]  : name.
2576       // sp[16] : context.
2577       // sp[24] : value.
2578       __ Push(v0, cp, a4, a3);
2579       __ CallRuntime(Runtime::kStoreLookupSlot);
2580     } else {
2581       // Assignment to var or initializing assignment to let/const in harmony
2582       // mode.
2583       DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2584       MemOperand location = VarOperand(var, a1);
2585       if (generate_debug_code_ && var->mode() == LET && op == Token::INIT) {
2586         // Check for an uninitialized let binding.
2587         __ ld(a2, location);
2588         __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2589         __ Check(eq, kLetBindingReInitialization, a2, Operand(a4));
2590       }
2591       EmitStoreToStackLocalOrContextSlot(var, location);
2592     }
2593 
2594   } else if (var->mode() == CONST_LEGACY && op == Token::INIT) {
2595     // Const initializers need a write barrier.
2596     DCHECK(!var->IsParameter());  // No const parameters.
2597     if (var->IsLookupSlot()) {
2598       __ li(a0, Operand(var->name()));
2599       __ Push(v0, cp, a0);  // Context and name.
2600       __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot);
2601     } else {
2602       DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2603       Label skip;
2604       MemOperand location = VarOperand(var, a1);
2605       __ ld(a2, location);
2606       __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2607       __ Branch(&skip, ne, a2, Operand(at));
2608       EmitStoreToStackLocalOrContextSlot(var, location);
2609       __ bind(&skip);
2610     }
2611 
2612   } else {
2613     DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2614     if (is_strict(language_mode())) {
2615       __ CallRuntime(Runtime::kThrowConstAssignError);
2616     }
2617     // Silently ignore store in sloppy mode.
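    // For example (a sketch): in sloppy code, after `const x = 1;` a later
    // `x = 2;` takes this path and is ignored rather than throwing.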
2618   }
2619 }
2620 
2621 
2622 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2623   // Assignment to a property, using a named store IC.
2624   Property* prop = expr->target()->AsProperty();
2625   DCHECK(prop != NULL);
2626   DCHECK(prop->key()->IsLiteral());
2627 
2628   __ mov(StoreDescriptor::ValueRegister(), result_register());
2629   __ li(StoreDescriptor::NameRegister(),
2630         Operand(prop->key()->AsLiteral()->value()));
2631   __ pop(StoreDescriptor::ReceiverRegister());
2632   EmitLoadStoreICSlot(expr->AssignmentSlot());
2633   CallStoreIC();
2634 
2635   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2636   context()->Plug(v0);
2637 }
2638 
2639 
2640 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2641   // Assignment to named property of super.
2642   // v0 : value
2643   // stack : receiver ('this'), home_object
2644   DCHECK(prop != NULL);
2645   Literal* key = prop->key()->AsLiteral();
2646   DCHECK(key != NULL);
2647 
2648   __ Push(key->value());
2649   __ Push(v0);
2650   __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2651                                              : Runtime::kStoreToSuper_Sloppy));
2652 }
2653 
2654 
2655 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2656   // Assignment to keyed property of super.
2657   // v0 : value
2658   // stack : receiver ('this'), home_object, key
2659   DCHECK(prop != NULL);
2660 
2661   __ Push(v0);
2662   __ CallRuntime((is_strict(language_mode())
2663                       ? Runtime::kStoreKeyedToSuper_Strict
2664                       : Runtime::kStoreKeyedToSuper_Sloppy));
2665 }
2666 
2667 
2668 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2669   // Assignment to a property, using a keyed store IC.
2670   // Call the keyed store IC.
2671   // Its register arguments are:
2672   // - a0 : the value,
2673   // - a1 : the receiver,
2674   // - a2 : the key.
2675   __ mov(StoreDescriptor::ValueRegister(), result_register());
2676   __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2677   DCHECK(StoreDescriptor::ValueRegister().is(a0));
2678 
2679   Handle<Code> ic =
2680       CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2681   EmitLoadStoreICSlot(expr->AssignmentSlot());
2682   CallIC(ic);
2683 
2684   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2685   context()->Plug(v0);
2686 }
2687 
2688 
2689 void FullCodeGenerator::VisitProperty(Property* expr) {
2690   Comment cmnt(masm_, "[ Property");
2691   SetExpressionPosition(expr);
2692 
2693   Expression* key = expr->key();
2694 
2695   if (key->IsPropertyName()) {
2696     if (!expr->IsSuperAccess()) {
2697       VisitForAccumulatorValue(expr->obj());
2698       __ Move(LoadDescriptor::ReceiverRegister(), v0);
2699       EmitNamedPropertyLoad(expr);
2700     } else {
2701       VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2702       VisitForStackValue(
2703           expr->obj()->AsSuperPropertyReference()->home_object());
2704       EmitNamedSuperPropertyLoad(expr);
2705     }
2706   } else {
2707     if (!expr->IsSuperAccess()) {
2708       VisitForStackValue(expr->obj());
2709       VisitForAccumulatorValue(expr->key());
2710       __ Move(LoadDescriptor::NameRegister(), v0);
2711       __ pop(LoadDescriptor::ReceiverRegister());
2712       EmitKeyedPropertyLoad(expr);
2713     } else {
2714       VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2715       VisitForStackValue(
2716           expr->obj()->AsSuperPropertyReference()->home_object());
2717       VisitForStackValue(expr->key());
2718       EmitKeyedSuperPropertyLoad(expr);
2719     }
2720   }
2721   PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2722   context()->Plug(v0);
2723 }
2724 
2725 
void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId id) {
  ic_total_count_++;
  __ Call(code, RelocInfo::CODE_TARGET, id);
}


// Code common for calls using the IC.
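// A call taking this path is either an unqualified call, e.g. foo(), or a
// named property call, e.g. obj.foo() (both illustrative JS). The branches
// below differ in how the target function and the receiver are pushed.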
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();

  // Get the target function.
  ConvertReceiverMode convert_mode;
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ push(at);
    convert_mode = ConvertReceiverMode::kNullOrUndefined;
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
    // Push the target function under the receiver.
    __ ld(at, MemOperand(sp, 0));
    __ push(at);
    __ sd(v0, MemOperand(sp, kPointerSize));
    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
  }

  EmitCall(expr, convert_mode);
}


void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  SetExpressionPosition(expr);
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  // Load the function from the receiver.
  const Register scratch = a1;
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForAccumulatorValue(super_ref->home_object());
  __ mov(scratch, v0);
  VisitForAccumulatorValue(super_ref->this_var());
  __ Push(scratch, v0, v0, scratch);
  __ Push(key->value());
  __ Push(Smi::FromInt(language_mode()));

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  //  - language_mode
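  // The (home_object, this) pair is pushed twice on purpose: one copy is
  // consumed by Runtime::kLoadFromSuper together with the key and language
  // mode, while the other survives as the receiver for the method call, with
  // its home_object slot then overwritten by the loaded target function.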
  __ CallRuntime(Runtime::kLoadFromSuper);

  // Replace home_object with target function.
  __ sd(v0, MemOperand(sp, kPointerSize));

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
  __ Move(LoadDescriptor::NameRegister(), v0);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);

  // Push the target function under the receiver.
  __ ld(at, MemOperand(sp, 0));
  __ push(at);
  __ sd(v0, MemOperand(sp, kPointerSize));

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}


void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());

  SetExpressionPosition(prop);
  // Load the function from the receiver.
  const Register scratch = a1;
  SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
  VisitForAccumulatorValue(super_ref->home_object());
  __ Move(scratch, v0);
  VisitForAccumulatorValue(super_ref->this_var());
  __ Push(scratch, v0, v0, scratch);
  VisitForStackValue(prop->key());
  __ Push(Smi::FromInt(language_mode()));

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  //  - language_mode
  __ CallRuntime(Runtime::kLoadKeyedFromSuper);

  // Replace home_object with target function.
  __ sd(v0, MemOperand(sp, kPointerSize));

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
  // Record source position of the IC call.
  SetCallPosition(expr);
  Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, mode).code();
  __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
  __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);
  RecordJSReturnSite(expr);
  // Restore context register.
  __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, v0);
}


void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
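  // Resolves the target of a direct eval call, e.g. (illustrative JS):
  //
  //   eval("x + 1");
  //
  // which must see the caller's scope. The resolver receives the first
  // argument, the enclosing function, the language mode and the scope start
  // position, all pushed below.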
  // a6: copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ ld(a6, MemOperand(sp, arg_count * kPointerSize));
  } else {
    __ LoadRoot(a6, Heap::kUndefinedValueRootIndex);
  }

  // a5: the enclosing function.
  __ ld(a5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  // a4: the language mode.
  __ li(a4, Operand(Smi::FromInt(language_mode())));

  // a1: the start position of the scope the call resides in.
  __ li(a1, Operand(Smi::FromInt(scope()->start_position())));

  // Do the runtime call.
  __ Push(a6, a5, a4, a1);
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
}


// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
  VariableProxy* callee = expr->expression()->AsVariableProxy();
  if (callee->var()->IsLookupSlot()) {
    Label slow, done;

    SetExpressionPosition(callee);
    // Generate code for loading from variables potentially shadowed by
    // eval-introduced variables.
    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    // Call the runtime to find the function to call (returned in v0)
    // and the object holding it (returned in v1).
    DCHECK(!context_register().is(a2));
    __ li(a2, Operand(callee->name()));
    __ Push(context_register(), a2);
    __ CallRuntime(Runtime::kLoadLookupSlot);
    __ Push(v0, v1);  // Function, receiver.
    PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ Branch(&call);
      __ bind(&done);
      // Push function.
      __ push(v0);
      // The receiver is implicitly the global receiver. Indicate this
      // by passing undefined as the receiver slot.
      __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
      __ push(a1);
      __ bind(&call);
    }
  } else {
    VisitForStackValue(callee);
    // refEnv.WithBaseObject()
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ push(a2);  // Reserved receiver slot.
  }
}


void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
  // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
  // to resolve the function we need to call.  Then we call the resolved
  // function using the given arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  PushCalleeAndWithBaseObject(expr);

  // Push the arguments.
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Push a copy of the function (found below the arguments) and
  // resolve eval.
  __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ push(a1);
  EmitResolvePossiblyDirectEval(arg_count);

  // Touch up the stack with the resolved function.
  __ sd(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));

  PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
  // Record source position for debugger.
  SetCallPosition(expr);
  __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ li(a0, Operand(arg_count));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  RecordJSReturnSite(expr);
  // Restore context register.
  __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, v0);
}


void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.
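  // For example (illustrative JS):
  //
  //   var p = new Point(1, 2);
  //
  // evaluates 'Point' before '1' and '2'.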

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into a1 and a0.
  __ li(a0, Operand(arg_count));
  __ ld(a1, MemOperand(sp, arg_count * kPointerSize));

  // Record call targets in unoptimized code.
  __ EmitLoadTypeFeedbackVector(a2);
  __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));

  CallConstructStub stub(isolate());
  __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  // Restore context register.
  __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->Plug(v0);
}


void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
  SuperCallReference* super_call_ref =
      expr->expression()->AsSuperCallReference();
  DCHECK_NOT_NULL(super_call_ref);

  // Push the super constructor target on the stack (may be null,
  // but the Construct builtin can deal with that properly).
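  // The super constructor is the [[Prototype]] of the active function,
  // i.e. roughly this_function.__proto__, read here via map->prototype.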
  VisitForAccumulatorValue(super_call_ref->this_function_var());
  __ AssertFunction(result_register());
  __ ld(result_register(),
        FieldMemOperand(result_register(), HeapObject::kMapOffset));
  __ ld(result_register(),
        FieldMemOperand(result_register(), Map::kPrototypeOffset));
  __ Push(result_register());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load new target into a3.
  VisitForAccumulatorValue(super_call_ref->new_target_var());
  __ mov(a3, result_register());

  // Load function and argument count into a1 and a0.
  __ li(a0, Operand(arg_count));
  __ ld(a1, MemOperand(sp, arg_count * kPointerSize));

  __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);

  RecordJSReturnSite(expr);

  // Restore context register.
  __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  context()->Plug(v0);
}


void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ SmiTst(v0, a4);
  Split(eq, a4, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, a1, Operand(FIRST_JS_RECEIVER_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsSimdValue(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(SIMD128_VALUE_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a2);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ Branch(if_true, hs, a2, Operand(FIRST_FUNCTION_TYPE));
  __ Branch(if_false);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
  __ lwu(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
  __ lwu(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
  __ li(a4, 0x80000000);
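  // -0 is the only heap number whose upper word is 0x80000000 (just the IEEE
  // 754 sign bit set) and whose lower word is also zero; the comparisons
  // below test exactly that bit pattern.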
  Label not_nan;
  __ Branch(&not_nan, ne, a2, Operand(a4));
  __ mov(a4, zero_reg);
  __ mov(a2, a1);
  __ bind(&not_nan);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a2, Operand(a4), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_ARRAY_TYPE),
        if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_TYPED_ARRAY_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_PROXY_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ pop(a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, v0, Operand(a1), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  // ArgumentsAccessStub expects the key in a1 and the formal
  // parameter count in a0.
  VisitForAccumulatorValue(args->at(0));
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  Label exit;
  // Get the number of formal parameters.
  __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
  __ Branch(&exit, ne, a3,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ ld(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is not a JSReceiver, we return null.
  __ JumpIfSmi(v0, &null);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ GetObjectType(v0, v0, a1);  // Map is now in v0.
  __ Branch(&null, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));

  // Return 'Function' for JSFunction objects.
  __ Branch(&function, eq, a1, Operand(JS_FUNCTION_TYPE));

  // Check if the constructor in the map is a JS function.
  Register instance_type = a2;
  __ GetMapConstructor(v0, v0, a1, instance_type);
  __ Branch(&non_function_constructor, ne, instance_type,
            Operand(JS_FUNCTION_TYPE));

  // v0 now contains the constructor function. Grab the
  // instance class name from there.
  __ ld(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
  __ ld(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
  __ Branch(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ LoadRoot(v0, Heap::kObject_stringRootIndex);
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ LoadRoot(v0, Heap::kNullValueRootIndex);

  // All done.
  __ bind(&done);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(v0, &done);
  // If the object is not a value type, return the object.
  __ GetObjectType(v0, a1, a1);
  __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));

  __ ld(v0, FieldMemOperand(v0, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = nullptr;
  Label* if_false = nullptr;
  Label* fall_through = nullptr;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(v0, if_false);
  __ GetObjectType(v0, a1, a1);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a1, Operand(JS_DATE_TYPE), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = v0;
  Register index = a1;
  Register value = a2;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value, at);
    __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
    __ SmiTst(index, at);
    __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
    __ SmiUntag(index, index);
    static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    Register scratch = t1;
    __ EmitSeqStringSetCharCheck(
        string, index, value, scratch, one_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ Daddu(at,
          string,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ SmiUntag(index);
  __ Daddu(at, at, index);
  __ sb(value, MemOperand(at));
  context()->Plug(string);
}


void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(3, args->length());

  Register string = v0;
  Register index = a1;
  Register value = a2;

  VisitForStackValue(args->at(0));        // index
  VisitForStackValue(args->at(1));        // value
  VisitForAccumulatorValue(args->at(2));  // string
  __ Pop(index, value);

  if (FLAG_debug_code) {
    __ SmiTst(value, at);
    __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
    __ SmiTst(index, at);
    __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
    __ SmiUntag(index, index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    Register scratch = t1;
    __ EmitSeqStringSetCharCheck(
        string, index, value, scratch, two_byte_seq_type);
    __ SmiTag(index, index);
  }

  __ SmiUntag(value, value);
  __ Daddu(at,
          string,
          Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  __ dsra(index, index, 32 - 1);
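  // On MIPS64 a smi keeps its 32-bit payload in the upper word, so an
  // arithmetic shift right by 31 yields index * 2, i.e. the byte offset of a
  // two-byte (UC16) character.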
  __ Daddu(at, at, index);
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ sh(value, MemOperand(at));
  context()->Plug(string);
}


void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));  // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(a1);  // v0 = value. a1 = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(a1, &done);

  // If the object is not a value type, return the value.
  __ GetObjectType(a1, a2, a2);
  __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));

  // Store the value.
  __ sd(v0, FieldMemOperand(a1, JSValue::kValueOffset));
  // Update the write barrier.  Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(a2, v0);
  __ RecordWriteField(
      a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitToInteger(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());

  // Load the argument into v0 and convert it.
  VisitForAccumulatorValue(args->at(0));

  // Convert the object to an integer.
  Label done_convert;
  __ JumpIfSmi(v0, &done_convert);
  __ Push(v0);
  __ CallRuntime(Runtime::kToInteger);
  __ bind(&done_convert);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitToName(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());

  // Load the argument into v0 and convert it.
  VisitForAccumulatorValue(args->at(0));

  Label convert, done_convert;
  __ JumpIfSmi(v0, &convert);
  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
  __ GetObjectType(v0, a1, a1);
  __ Branch(&done_convert, le, a1, Operand(LAST_NAME_TYPE));
  __ bind(&convert);
  __ Push(v0);
  __ CallRuntime(Runtime::kToName);
  __ bind(&done_convert);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(v0, a1);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(a1);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register result = v0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ mov(a0, result_register());

  Register object = a1;
  Register index = a0;
  Register scratch = a3;
  Register result = v0;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ LoadRoot(result, Heap::kempty_stringRootIndex);
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ li(result, Operand(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
  // Move target to a1.
  int const argc = args->length() - 2;
  __ ld(a1, MemOperand(sp, (argc + 1) * kPointerSize));
  // Call the target.
  __ li(a0, Operand(argc));
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  // Restore context register.
  __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  // Discard the function left on TOS.
  context()->DropAndPlug(1, v0);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ lwu(a0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(v0);

  __ lwu(v0, FieldMemOperand(v0, String::kHashFieldOffset));
  __ IndexFromHash(v0, v0);

  context()->Plug(v0);
}


void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(v0);
  __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ ld(v0, FieldMemOperand(v0, Map::kPrototypeOffset));
  context()->Plug(v0);
}


void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
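  // Fast path for Array.prototype.join when the elements and the separator
  // are all sequential one-byte strings, e.g. (illustrative JS):
  //
  //   ["a", "b", "c"].join("-");  // "a-b-c"
  //
  // Anything else branches to the 'bailout' label and returns undefined,
  // signalling the caller to fall back to the full implementation.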
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      empty_separator_loop, one_char_separator_loop,
      one_char_separator_loop_entry, long_separator_loop;
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(0));

  // All aliases of the same register have disjoint lifetimes.
  Register array = v0;
  Register elements = no_reg;  // Will be v0.
  Register result = no_reg;  // Will be v0.
  Register separator = a1;
  Register array_length = a2;
  Register result_pos = no_reg;  // Will be a2.
  Register string_length = a3;
  Register string = a4;
  Register element = a5;
  Register elements_end = a6;
  Register scratch1 = a7;
  Register scratch2 = t1;
  Register scratch3 = t0;

  // Separator operand is on the stack.
  __ pop(separator);

  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ GetObjectType(array, scratch1, scratch2);
  __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));

  // Check that the array has fast elements.
  __ CheckFastElements(scratch1, scratch2, &bailout);

  // If the array has length zero, return the empty string.
  __ ld(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length);
  __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
  __ LoadRoot(v0, Heap::kempty_stringRootIndex);
  __ Branch(&done);

  __ bind(&non_trivial_array);

  // Get the FixedArray containing array's elements.
  elements = array;
  __ ld(elements, FieldMemOperand(array, JSArray::kElementsOffset));
  array = no_reg;  // End of array's live range.

  // Check that all array elements are sequential one-byte strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ mov(string_length, zero_reg);
  __ Daddu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ dsll(elements_end, array_length, kPointerSizeLog2);
  __ Daddu(elements_end, element, elements_end);
  // Loop condition: while (element < elements_end).
  // Live values in registers:
  //   elements: Fixed array of strings.
  //   array_length: Length of the fixed array of strings (not smi)
  //   separator: Separator string
  //   string_length: Accumulated sum of string lengths (smi).
  //   element: Current array element.
  //   elements_end: Array end.
  if (generate_debug_code_) {
    __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin, array_length,
              Operand(zero_reg));
  }
  __ bind(&loop);
  __ ld(string, MemOperand(element));
  __ Daddu(element, element, kPointerSize);
  __ JumpIfSmi(string, &bailout);
  __ ld(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
  __ ld(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
  __ DadduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);
  __ Branch(&loop, lt, element, Operand(elements_end));

  // If array_length is 1, return elements[0], a string.
  __ Branch(&not_size_one_array, ne, array_length, Operand(1));
  __ ld(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
  __ Branch(&done);

  __ bind(&not_size_one_array);

  // Live values in registers:
  //   separator: Separator string
  //   array_length: Length of the array.
  //   string_length: Sum of string lengths (smi).
  //   elements: FixedArray of strings.

  // Check that the separator is a flat one-byte string.
  __ JumpIfSmi(separator, &bailout);
  __ ld(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
  __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
  __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);

  // Add (separator length times array_length) - separator length to the
  // string_length to get the length of the result string. array_length is not
  // smi but the other values are, so the result is a smi.
  __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ Dsubu(string_length, string_length, Operand(scratch1));
  __ SmiUntag(scratch1);
  __ Dmul(scratch2, array_length, scratch1);
  // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
  // zero.
  __ dsra32(scratch1, scratch2, 0);
  __ Branch(&bailout, ne, scratch1, Operand(zero_reg));
  __ SmiUntag(string_length);
  __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
  __ BranchOnOverflow(&bailout, scratch3);

  // Bailout for large object allocations.
  __ Branch(&bailout, gt, string_length,
            Operand(Page::kMaxRegularHeapObjectSize));

  // Get first element in the array to free up the elements register to be used
  // for the result.
  __ Daddu(element,
          elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  result = elements;  // End of live range for elements.
  elements = no_reg;
  // Live values in registers:
  //   element: First array element
  //   separator: Separator string
  //   string_length: Length of result string (not smi)
  //   array_length: Length of the array.
  __ AllocateOneByteString(result, string_length, scratch1, scratch2,
                           elements_end, &bailout);
  // Prepare for looping. Set up elements_end to end of the array. Set
  // result_pos to the position of the result where to write the first
  // character.
  __ dsll(elements_end, array_length, kPointerSizeLog2);
  __ Daddu(elements_end, element, elements_end);
  result_pos = array_length;  // End of live range for array_length.
  array_length = no_reg;
  __ Daddu(result_pos,
          result,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));

  // Check the length of the separator.
  __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
  __ li(at, Operand(Smi::FromInt(1)));
  __ Branch(&one_char_separator, eq, scratch1, Operand(at));
  __ Branch(&long_separator, gt, scratch1, Operand(at));

  // Empty separator case.
  __ bind(&empty_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.

  // Copy next array element to the result.
  __ ld(string, MemOperand(element));
  __ Daddu(element, element, kPointerSize);
  __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);

  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace separator with its one-byte character value.
  __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&one_char_separator_loop_entry);

  __ bind(&one_char_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Single separator one-byte char (in lower byte).

  // Copy the separator character to the result.
  __ sb(separator, MemOperand(result_pos));
  __ Daddu(result_pos, result_pos, 1);

  // Copy next array element to the result.
  __ bind(&one_char_separator_loop_entry);
  __ ld(string, MemOperand(element));
  __ Daddu(element, element, kPointerSize);
  __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);

  // Long separator case (separator is more than one character). Entry is at
  // the label long_separator below.
  __ bind(&long_separator_loop);
  // Live values in registers:
  //   result_pos: the position to which we are currently copying characters.
  //   element: Current array element.
  //   elements_end: Array end.
  //   separator: Separator string.

  // Copy the separator to the result.
  __ ld(string_length, FieldMemOperand(separator, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Daddu(string,
          separator,
          Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ CopyBytes(string, result_pos, string_length, scratch1);

  __ bind(&long_separator);
  __ ld(string, MemOperand(element));
  __ Daddu(element, element, kPointerSize);
  __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
  __ SmiUntag(string_length);
  __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
  __ CopyBytes(string, result_pos, string_length, scratch1);
  // End while (element < elements_end).
  __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
  DCHECK(result.is(v0));
  __ Branch(&done);

  __ bind(&bailout);
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ li(at, Operand(debug_is_active));
  __ lbu(v0, MemOperand(at));
  __ SmiTag(v0);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
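  // Builds the iterator result object inline; roughly the JS literal
  // { value: args[0], done: args[1] } allocated with the native iterator
  // result map, falling back to the runtime if allocation fails.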

  Label runtime, done;

  __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &runtime, TAG_OBJECT);
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
  __ Pop(a2, a3);
  __ LoadRoot(a4, Heap::kEmptyFixedArrayRootIndex);
  __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
  __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
  __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
  __ sd(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
  __ sd(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ jmp(&done);

  __ bind(&runtime);
  __ CallRuntime(Runtime::kCreateIterResultObject);

  __ bind(&done);
  context()->Plug(v0);
}


void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push undefined as the receiver.
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ push(v0);

  __ LoadNativeContextSlot(expr->context_index(), v0);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
  __ li(a0, Operand(arg_count));
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    Comment cmnt(masm_, "[ CallRuntime");
    EmitLoadJSRuntimeFunction(expr);

    // Push the target function under the receiver.
    __ ld(at, MemOperand(sp, 0));
    __ push(at);
    __ sd(v0, MemOperand(sp, kPointerSize));

    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
    EmitCallJSRuntimeFunction(expr);

    // Restore context register.
    __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    context()->DropAndPlug(1, v0);
  } else {
    const Runtime::Function* function = expr->function();
    switch (function->function_id) {
#define CALL_INTRINSIC_GENERATOR(Name)     \
  case Runtime::kInline##Name: {           \
    Comment cmnt(masm_, "[ Inline" #Name); \
    return Emit##Name(expr);               \
  }
      FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
#undef CALL_INTRINSIC_GENERATOR
      default: {
        Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
        // Push the arguments ("left-to-right").
        for (int i = 0; i < arg_count; i++) {
          VisitForStackValue(args->at(i));
        }

        // Call the C runtime function.
        PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
        __ CallRuntime(expr->function(), arg_count);
        context()->Plug(v0);
      }
    }
  }
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ CallRuntime(is_strict(language_mode())
                           ? Runtime::kDeleteProperty_Strict
                           : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(v0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          __ LoadGlobalObject(a2);
          __ li(a1, Operand(var->name()));
          __ Push(a2, a1);
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(v0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          DCHECK(!context_register().is(a2));
          __ li(a2, Operand(var->name()));
          __ Push(context_register(), a2);
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(v0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kTrueValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ jmp(&done);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        __ LoadRoot(v0, Heap::kFalseValueRootIndex);
        if (context()->IsStackValue()) __ push(v0);
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ mov(a3, v0);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(v0);
      break;
    }

    default:
      UNREACHABLE();
  }
}


VisitCountOperation(CountOperation * expr)4218 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4219   DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4220 
4221   Comment cmnt(masm_, "[ CountOperation");
4222 
4223   Property* prop = expr->expression()->AsProperty();
4224   LhsKind assign_type = Property::GetAssignType(prop);
4225 
4226   // Evaluate expression and get value.
4227   if (assign_type == VARIABLE) {
4228     DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4229     AccumulatorValueContext context(this);
4230     EmitVariableLoad(expr->expression()->AsVariableProxy());
4231   } else {
4232     // Reserve space for result of postfix operation.
4233     if (expr->is_postfix() && !context()->IsEffect()) {
4234       __ li(at, Operand(Smi::FromInt(0)));
4235       __ push(at);
4236     }
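    // For example, compiling 'obj.p++' in a value context builds the stack
    // as [ reserved slot, obj ] (receiver on top); the reserved slot is
    // later overwritten with the old value via an sd below the receiver.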
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        __ Push(result_register());
        const Register scratch = a1;
        __ ld(scratch, MemOperand(sp, kPointerSize));
        __ Push(scratch, result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        const Register scratch = a1;
        const Register scratch1 = a4;
        __ Move(scratch, result_register());
        VisitForAccumulatorValue(prop->key());
        __ Push(scratch, result_register());
        __ ld(scratch1, MemOperand(sp, 2 * kPointerSize));
        __ Push(scratch1, scratch, result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ ld(LoadDescriptor::ReceiverRegister(),
              MemOperand(sp, 1 * kPointerSize));
        __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  __ mov(a0, v0);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(v0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(v0);
            break;
          case NAMED_PROPERTY:
            __ sd(v0, MemOperand(sp, kPointerSize));
            break;
          case NAMED_SUPER_PROPERTY:
            __ sd(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_PROPERTY:
            __ sd(v0, MemOperand(sp, 2 * kPointerSize));
            break;
          case KEYED_SUPER_PROPERTY:
            __ sd(v0, MemOperand(sp, 3 * kPointerSize));
            break;
        }
      }
    }

    Register scratch1 = a1;
    Register scratch2 = a4;
    __ li(scratch1, Operand(Smi::FromInt(count_value)));
    __ DadduAndCheckForOverflow(v0, v0, scratch1, scratch2);
    __ BranchOnNoOverflow(&done, scratch2);
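    // Roughly: on MIPS64 a smi occupies the upper 32 bits of the tagged
    // word, so adding Smi::FromInt(count_value) is an ordinary 64-bit add;
    // scratch2 signals overflow (e.g. incrementing Smi::kMaxValue), in
    // which case the operation is undone and redirected to the stub.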
    // Call stub. Undo operation first.
    __ Move(v0, a0);
    __ jmp(&stub_call);
    __ bind(&slow);
  }
  if (!is_strong(language_mode())) {
    ToNumberStub convert_stub(isolate());
    __ CallStub(&convert_stub);
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(v0);
          break;
        case NAMED_PROPERTY:
          __ sd(v0, MemOperand(sp, kPointerSize));
          break;
        case NAMED_SUPER_PROPERTY:
          __ sd(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_PROPERTY:
          __ sd(v0, MemOperand(sp, 2 * kPointerSize));
          break;
        case KEYED_SUPER_PROPERTY:
          __ sd(v0, MemOperand(sp, 3 * kPointerSize));
          break;
      }
    }
  }

  __ bind(&stub_call);
  __ mov(a1, v0);
  __ li(a0, Operand(Smi::FromInt(count_value)));

  SetExpressionPosition(expr);

  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD,
                                              strength(language_mode())).code();
  CallIC(code, expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  if (is_strong(language_mode())) {
    PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
  }
  // Store the value returned in v0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(v0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(v0);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      __ li(StoreDescriptor::NameRegister(),
            Operand(prop->key()->AsLiteral()->value()));
      __ pop(StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(expr->CountSlot());
      CallStoreIC();
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ mov(StoreDescriptor::ValueRegister(), result_register());
      __ Pop(StoreDescriptor::ReceiverRegister(),
             StoreDescriptor::NameRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      EmitLoadStoreICSlot(expr->CountSlot());
      CallIC(ic);
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(v0);
      }
      break;
    }
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

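  // This fast path serves comparisons like (typeof x == 'number'): the
  // typeof string is never materialized; instead each branch below tests
  // the map or instance type of the value in v0 directly.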
  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(v0, if_true);
    __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(lt, a1, Operand(FIRST_NONSTRING_TYPE), if_true, if_false,
          fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(v0, if_false);
    __ GetObjectType(v0, v0, a1);
    Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ LoadRoot(at, Heap::kTrueValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ LoadRoot(at, Heap::kFalseValueRootIndex);
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    __ JumpIfSmi(v0, if_false);
    // Check for undetectable objects => true.
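    // (e.g. document.all, the canonical undetectable object, must report
    // 'undefined' here.)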
    __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(v0, if_false);
    __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1,
           Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(eq, a1, Operand(1 << Map::kIsCallable), if_true, if_false,
          fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(v0, if_false);
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
    // Check for callable or undetectable objects => false.
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1,
           Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
    Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)    \
  } else if (String::Equals(check, factory->type##_string())) {  \
    __ JumpIfSmi(v0, if_false);                                  \
    __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));      \
    __ LoadRoot(at, Heap::k##Type##MapRootIndex);                \
    Split(eq, v0, Operand(at), if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
    // clang-format on
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetExpressionPosition(expr);

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;
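  // For example, (typeof x == 'string') and (x === null) are emitted by
  // the specialized literal-compare paths and never reach the generic
  // code below.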

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ CallRuntime(Runtime::kHasProperty);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(a4, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      __ mov(a0, result_register());
      __ pop(a1);
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(a4, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ mov(a0, result_register());
      __ pop(a1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ Or(a2, a0, Operand(a1));
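        // A heap object has its low tag bit set, so the or of both words
        // is a smi exactly when both operands are smis.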
        patch_site.EmitJumpIfNotSmi(a2, &slow_case);
        Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(
                            isolate(), op, strength(language_mode())).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ mov(a0, result_register());
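  // e.g. (x === null) compares directly against the null root below,
  // while (x == null) must also match undefined and undetectable objects,
  // which the CompareNilIC handles.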
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(a1, nil_value);
    Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ LoadRoot(a1, Heap::kTrueValueRootIndex);
    Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ ld(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(v0);
}


Register FullCodeGenerator::result_register() {
  return v0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK(IsAligned(frame_offset, kPointerSize));
  __ sd(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ ld(dst, ContextMemOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    __ LoadNativeContextSlot(Context::CLOSURE_INDEX, at);
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    __ ld(at, ContextMemOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(at);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook the return address in the link register into a smi-encoded
  // Code* delta so it can safely live on the stack.
  __ Dsubu(a1, ra, Operand(masm_->CodeObject()));
  __ SmiTag(a1);
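  // The cooked value is a smi, so the GC ignores it, and it stays valid
  // even if the code object moves: the delta always names the same return
  // site within this code object.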

  // Store the cooked return address while executing the finally block.
  __ push(a1);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ ld(a1, MemOperand(at));
  __ push(a1);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Restore pending message from stack.
  __ pop(a1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ sd(a1, MemOperand(at));

  // Restore the cooked return address from the stack.
  __ pop(a1);

  // Restore result register from stack.
  __ pop(result_register());

  // Uncook the return address and return.
  __ SmiUntag(a1);
  __ Daddu(at, a1, Operand(masm_->CodeObject()));
  __ Jump(at);
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(a1));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
  __ li(at, Operand(pending_message_obj));
  __ sd(a1, MemOperand(at));
}


void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorSlot slot) {
  DCHECK(!slot.IsInvalid());
  __ li(VectorStoreICTrampolineDescriptor::SlotRegister(),
        Operand(SmiFromSlot(slot)));
}


#undef __


void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 8 * kInstrSize;
  Isolate* isolate = unoptimized_code->GetIsolate();
  CodePatcher patcher(isolate, branch_address, 1);

  switch (target_state) {
    case INTERRUPT:
      // slt  at, a3, zero_reg (in case of count based interrupts)
      // beq  at, zero_reg, ok
      // lui  t9, <interrupt stub address> upper
      // ori  t9, <interrupt stub address> u-middle
      // dsll t9, t9, 16
      // ori  t9, <interrupt stub address> lower
      // jalr t9
      // nop
      // ok-label ----- pc_after points here
      patcher.masm()->slt(at, a3, zero_reg);
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      // daddiu at, zero_reg, 1
      // beq  at, zero_reg, ok  ;; Not changed
      // lui  t9, <on-stack replacement address> upper
      // ori  t9, <on-stack replacement address> middle
      // dsll t9, t9, 16
      // ori  t9, <on-stack replacement address> lower
      // jalr t9  ;; Not changed
      // nop  ;; Not changed
      // ok-label ----- pc_after points here
      patcher.masm()->daddiu(at, zero_reg, 1);
      break;
  }
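  // Layout relative to pc (the ok-label): the patched slt/daddiu sits 8
  // instructions back, the beq 7 back, and the lui/ori/dsll/ori immediate
  // load (starting 6 back) is what set_target_address_at retargets below,
  // followed by the jalr and its delay-slot nop.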
  Address pc_immediate_load_address = pc - 6 * kInstrSize;
  // Replace the stack check address in the load-immediate (6-instr sequence)
  // with the entry address of the replacement code.
  Assembler::set_target_address_at(isolate, pc_immediate_load_address,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 8 * kInstrSize;
  Address pc_immediate_load_address = pc - 6 * kInstrSize;

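  // The instruction at branch_address discriminates the state: slt (not an
  // add-immediate) means INTERRUPT; 'daddiu at, zero_reg, 1' means one of
  // the OSR states, told apart by the patched call target below.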
  DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 7 * kInstrSize)));
  if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
    DCHECK(reinterpret_cast<uint64_t>(
        Assembler::target_address_at(pc_immediate_load_address)) ==
           reinterpret_cast<uint64_t>(
               isolate->builtins()->InterruptCheck()->entry()));
    return INTERRUPT;
  }

  DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));

  if (reinterpret_cast<uint64_t>(
      Assembler::target_address_at(pc_immediate_load_address)) ==
          reinterpret_cast<uint64_t>(
              isolate->builtins()->OnStackReplacement()->entry())) {
    return ON_STACK_REPLACEMENT;
  }

  DCHECK(reinterpret_cast<uint64_t>(
      Assembler::target_address_at(pc_immediate_load_address)) ==
         reinterpret_cast<uint64_t>(
             isolate->builtins()->OsrAfterStackCheck()->entry()));
  return OSR_AFTER_STACK_CHECK;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS64