1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 //     * Redistributions of source code must retain the above copyright
7 //       notice, this list of conditions and the following disclaimer.
8 //     * Redistributions in binary form must reproduce the above
9 //       copyright notice, this list of conditions and the following
10 //       disclaimer in the documentation and/or other materials provided
11 //       with the distribution.
12 //     * Neither the name of Google Inc. nor the names of its
13 //       contributors may be used to endorse or promote products derived
14 //       from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if defined(V8_TARGET_ARCH_MIPS)
31 
32 // Note on Mips implementation:
33 //
34 // The result_register() for mips is the 'v0' register, which is defined
35 // by the ABI to contain function return values. However, the first
36 // parameter to a function is defined to be 'a0'. So there are many
37 // places where we have to move a previous result in v0 to a0 for the
38 // next call: mov(a0, v0). This is not needed on the other architectures.
39 
40 #include "code-stubs.h"
41 #include "codegen.h"
42 #include "compiler.h"
43 #include "debug.h"
44 #include "full-codegen.h"
45 #include "isolate-inl.h"
46 #include "parser.h"
47 #include "scopes.h"
48 #include "stub-cache.h"
49 
50 #include "mips/code-stubs-mips.h"
51 #include "mips/macro-assembler-mips.h"
52 
53 namespace v8 {
54 namespace internal {
55 
56 #define __ ACCESS_MASM(masm_)
57 
58 
59 // A patch site is a location in the code which it is possible to patch. This
60 // class has a number of methods to emit the code which is patchable and the
61 // method EmitPatchInfo to record a marker back to the patchable code. This
62 // marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
63 // (raw 16 bit immediate value is used) is the delta from the pc to the first
64 // instruction of the patchable code.
65 // The marker instruction is effectively a NOP (dest is zero_reg) and will
66 // never be emitted by normal code.
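// A note derived from EmitPatchInfo() below: the delta is measured in
// instructions, so whenever it is smaller than kImm16Mask the encoded register
// is Register::from_code(0) and the emitted marker is simply
// "andi zero_reg, zero_reg, delta".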
67 class JumpPatchSite BASE_EMBEDDED {
68  public:
69   explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
70 #ifdef DEBUG
71     info_emitted_ = false;
72 #endif
73   }
74 
75   ~JumpPatchSite() {
76     ASSERT(patch_site_.is_bound() == info_emitted_);
77   }
78 
79   // When initially emitting this code, ensure that a jump is always generated to skip
80   // the inlined smi code.
81   void EmitJumpIfNotSmi(Register reg, Label* target) {
82     ASSERT(!patch_site_.is_bound() && !info_emitted_);
83     Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
84     __ bind(&patch_site_);
85     __ andi(at, reg, 0);
86     // Always taken before patched.
87     __ Branch(target, eq, at, Operand(zero_reg));
88   }
89 
90   // When initially emitting this code, ensure that a jump is never generated to skip
91   // the inlined smi code.
92   void EmitJumpIfSmi(Register reg, Label* target) {
93     Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
94     ASSERT(!patch_site_.is_bound() && !info_emitted_);
95     __ bind(&patch_site_);
96     __ andi(at, reg, 0);
97     // Never taken before patched.
98     __ Branch(target, ne, at, Operand(zero_reg));
99   }
100 
101   void EmitPatchInfo() {
102     if (patch_site_.is_bound()) {
103       int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
104       Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
105       __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
106 #ifdef DEBUG
107       info_emitted_ = true;
108 #endif
109     } else {
110       __ nop();  // Signals no inlined code.
111     }
112   }
113 
114  private:
115   MacroAssembler* masm_;
116   Label patch_site_;
117 #ifdef DEBUG
118   bool info_emitted_;
119 #endif
120 };
121 
122 
123 // TODO(jkummerow): Obsolete as soon as x64 is updated. Remove.
124 int FullCodeGenerator::self_optimization_header_size() {
125   UNREACHABLE();
126   return 10 * Instruction::kInstrSize;
127 }
128 
129 
130 // Generate code for a JS function.  On entry to the function the receiver
131 // and arguments have been pushed on the stack left to right.  The actual
132 // argument count matches the formal parameter count expected by the
133 // function.
134 //
135 // The live registers are:
136 //   o a1: the JS function object being called (i.e. ourselves)
137 //   o cp: our context
138 //   o fp: our caller's frame pointer
139 //   o sp: stack pointer
140 //   o ra: return address
141 //
142 // The function builds a JS frame.  Please see JavaScriptFrameConstants in
143 // frames-mips.h for its layout.
144 void FullCodeGenerator::Generate() {
145   CompilationInfo* info = info_;
146   handler_table_ =
147       isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
148   profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
149       Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
150   SetFunctionPosition(function());
151   Comment cmnt(masm_, "[ function compiled by full code generator");
152 
153 #ifdef DEBUG
154   if (strlen(FLAG_stop_at) > 0 &&
155       info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
156     __ stop("stop-at");
157   }
158 #endif
159 
160   // Strict mode functions and builtins need to replace the receiver
161   // with undefined when called as functions (without an explicit
162   // receiver object). t1 is zero for method calls and non-zero for
163   // function calls.
164   if (!info->is_classic_mode() || info->is_native()) {
165     Label ok;
166     __ Branch(&ok, eq, t1, Operand(zero_reg));
167     int receiver_offset = info->scope()->num_parameters() * kPointerSize;
168     __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
169     __ sw(a2, MemOperand(sp, receiver_offset));
170     __ bind(&ok);
171   }
172 
173   // Open a frame scope to indicate that there is a frame on the stack.  The
174   // MANUAL indicates that the scope shouldn't actually generate code to set up
175   // the frame (that is done below).
176   FrameScope frame_scope(masm_, StackFrame::MANUAL);
177 
178   int locals_count = info->scope()->num_stack_slots();
179 
180   __ Push(ra, fp, cp, a1);
181   if (locals_count > 0) {
182     // Load undefined value here, so the value is ready for the loop
183     // below.
184     __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
185   }
186   // Adjust fp to point to caller's fp.
187   __ Addu(fp, sp, Operand(2 * kPointerSize));
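  // At this point the frame holds, from high to low addresses: ra, the
  // caller's fp (which fp now points at), cp, and the function (a1); the
  // locals allocated below are pushed on top of that.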
188 
189   { Comment cmnt(masm_, "[ Allocate locals");
190     for (int i = 0; i < locals_count; i++) {
191       __ push(at);
192     }
193   }
194 
195   bool function_in_register = true;
196 
197   // Possibly allocate a local context.
198   int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
199   if (heap_slots > 0) {
200     Comment cmnt(masm_, "[ Allocate local context");
201     // Argument to NewContext is the function, which is in a1.
202     __ push(a1);
203     if (heap_slots <= FastNewContextStub::kMaximumSlots) {
204       FastNewContextStub stub(heap_slots);
205       __ CallStub(&stub);
206     } else {
207       __ CallRuntime(Runtime::kNewFunctionContext, 1);
208     }
209     function_in_register = false;
210     // Context is returned in both v0 and cp.  It replaces the context
211     // passed to us.  It's saved on the stack and kept live in cp.
212     __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
213     // Copy any necessary parameters into the context.
214     int num_parameters = info->scope()->num_parameters();
215     for (int i = 0; i < num_parameters; i++) {
216       Variable* var = scope()->parameter(i);
217       if (var->IsContextSlot()) {
218         int parameter_offset = StandardFrameConstants::kCallerSPOffset +
219                                  (num_parameters - 1 - i) * kPointerSize;
220         // Load parameter from stack.
221         __ lw(a0, MemOperand(fp, parameter_offset));
222         // Store it in the context.
223         MemOperand target = ContextOperand(cp, var->index());
224         __ sw(a0, target);
225 
226         // Update the write barrier.
227         __ RecordWriteContextSlot(
228             cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
229       }
230     }
231   }
232 
233   Variable* arguments = scope()->arguments();
234   if (arguments != NULL) {
235     // Function uses arguments object.
236     Comment cmnt(masm_, "[ Allocate arguments object");
237     if (!function_in_register) {
238       // Load this again, if it's used by the local context below.
239       __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
240     } else {
241       __ mov(a3, a1);
242     }
243     // Receiver is just before the parameters on the caller's stack.
244     int num_parameters = info->scope()->num_parameters();
245     int offset = num_parameters * kPointerSize;
246     __ Addu(a2, fp,
247            Operand(StandardFrameConstants::kCallerSPOffset + offset));
248     __ li(a1, Operand(Smi::FromInt(num_parameters)));
249     __ Push(a3, a2, a1);
250 
251     // Arguments to ArgumentsAccessStub:
252     //   function, receiver address, parameter count.
253     // The stub will rewrite receiver and parameter count if the previous
254     // stack frame was an arguments adapter frame.
255     ArgumentsAccessStub::Type type;
256     if (!is_classic_mode()) {
257       type = ArgumentsAccessStub::NEW_STRICT;
258     } else if (function()->has_duplicate_parameters()) {
259       type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
260     } else {
261       type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
262     }
263     ArgumentsAccessStub stub(type);
264     __ CallStub(&stub);
265 
266     SetVar(arguments, v0, a1, a2);
267   }
268 
269   if (FLAG_trace) {
270     __ CallRuntime(Runtime::kTraceEnter, 0);
271   }
272 
273   // Visit the declarations and body unless there is an illegal
274   // redeclaration.
275   if (scope()->HasIllegalRedeclaration()) {
276     Comment cmnt(masm_, "[ Declarations");
277     scope()->VisitIllegalRedeclaration(this);
278 
279   } else {
280     PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
281     { Comment cmnt(masm_, "[ Declarations");
282       // For named function expressions, declare the function name as a
283       // constant.
284       if (scope()->is_function_scope() && scope()->function() != NULL) {
285         VariableProxy* proxy = scope()->function();
286         ASSERT(proxy->var()->mode() == CONST ||
287                proxy->var()->mode() == CONST_HARMONY);
288         ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
289         EmitDeclaration(proxy, proxy->var()->mode(), NULL);
290       }
291       VisitDeclarations(scope()->declarations());
292     }
293 
294     { Comment cmnt(masm_, "[ Stack check");
295       PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
296       Label ok;
297       __ LoadRoot(t0, Heap::kStackLimitRootIndex);
298       __ Branch(&ok, hs, sp, Operand(t0));
299       StackCheckStub stub;
300       __ CallStub(&stub);
301       __ bind(&ok);
302     }
303 
304     { Comment cmnt(masm_, "[ Body");
305       ASSERT(loop_depth() == 0);
306       VisitStatements(function()->body());
307       ASSERT(loop_depth() == 0);
308     }
309   }
310 
311   // Always emit a 'return undefined' in case control fell off the end of
312   // the body.
313   { Comment cmnt(masm_, "[ return <undefined>;");
314     __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
315   }
316   EmitReturnSequence();
317 }
318 
319 
320 void FullCodeGenerator::ClearAccumulator() {
321   ASSERT(Smi::FromInt(0) == 0);
322   __ mov(v0, zero_reg);
323 }
324 
325 
326 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
327   __ li(a2, Operand(profiling_counter_));
328   __ lw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
329   __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
330   __ sw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
331 }
332 
333 
334 void FullCodeGenerator::EmitProfilingCounterReset() {
335   int reset_value = FLAG_interrupt_budget;
336   if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
337     // Self-optimization is a one-off thing: if it fails, don't try again.
338     reset_value = Smi::kMaxValue;
339   }
340   if (isolate()->IsDebuggerActive()) {
341     // Detect debug break requests as soon as possible.
342     reset_value = 10;
343   }
344   __ li(a2, Operand(profiling_counter_));
345   __ li(a3, Operand(Smi::FromInt(reset_value)));
346   __ sw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset));
347 }
348 
349 
350 static const int kMaxBackEdgeWeight = 127;
351 static const int kBackEdgeDistanceDivisor = 142;
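// Note on the constants below: when weighted back edges are enabled, back-edge
// and return-sequence checks decrement the profiling counter by
// distance / kBackEdgeDistanceDivisor, clamped to [1, kMaxBackEdgeWeight], so
// larger loops and functions consume the interrupt budget faster.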
352 
353 
354 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
355                                        Label* back_edge_target) {
356   // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
357   // to make sure it is constant. Branch may emit a skip-or-jump sequence
358   // instead of the normal Branch. It seems that the "skip" part of that
359   // sequence is about as long as this Branch would be so it is safe to ignore
360   // that.
361   Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
362   Comment cmnt(masm_, "[ Stack check");
363   Label ok;
364   if (FLAG_count_based_interrupts) {
365     int weight = 1;
366     if (FLAG_weighted_back_edges) {
367       ASSERT(back_edge_target->is_bound());
368       int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
369       weight = Min(kMaxBackEdgeWeight,
370                    Max(1, distance / kBackEdgeDistanceDivisor));
371     }
372     EmitProfilingCounterDecrement(weight);
373     __ slt(at, a3, zero_reg);
374     __ beq(at, zero_reg, &ok);
375     // CallStub will emit a li t9 first, so it is safe to use the delay slot.
376     InterruptStub stub;
377     __ CallStub(&stub);
378   } else {
379     __ LoadRoot(t0, Heap::kStackLimitRootIndex);
380     __ sltu(at, sp, t0);
381     __ beq(at, zero_reg, &ok);
382     // CallStub will emit a li t9 first, so it is safe to use the delay slot.
383     StackCheckStub stub;
384     __ CallStub(&stub);
385   }
386   // Record a mapping of this PC offset to the OSR id.  This is used to find
387   // the AST id from the unoptimized code in order to use it as a key into
388   // the deoptimization input data found in the optimized code.
389   RecordStackCheck(stmt->OsrEntryId());
390   if (FLAG_count_based_interrupts) {
391     EmitProfilingCounterReset();
392   }
393 
394   __ bind(&ok);
395   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
396   // Record a mapping of the OSR id to this PC.  This is used if the OSR
397   // entry becomes the target of a bailout.  We don't expect it to be, but
398   // we want it to work if it is.
399   PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
400 }
401 
402 
403 void FullCodeGenerator::EmitReturnSequence() {
404   Comment cmnt(masm_, "[ Return sequence");
405   if (return_label_.is_bound()) {
406     __ Branch(&return_label_);
407   } else {
408     __ bind(&return_label_);
409     if (FLAG_trace) {
410       // Push the return value on the stack as the parameter.
411       // Runtime::TraceExit returns its parameter in v0.
412       __ push(v0);
413       __ CallRuntime(Runtime::kTraceExit, 1);
414     }
415     if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
416       // Pretend that the exit is a backwards jump to the entry.
417       int weight = 1;
418       if (info_->ShouldSelfOptimize()) {
419         weight = FLAG_interrupt_budget / FLAG_self_opt_count;
420       } else if (FLAG_weighted_back_edges) {
421         int distance = masm_->pc_offset();
422         weight = Min(kMaxBackEdgeWeight,
423                      Max(1, distance / kBackEdgeDistanceDivisor));
424       }
425       EmitProfilingCounterDecrement(weight);
426       Label ok;
427       __ Branch(&ok, ge, a3, Operand(zero_reg));
428       __ push(v0);
429       if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
430         __ lw(a2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
431         __ push(a2);
432         __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
433       } else {
434         InterruptStub stub;
435         __ CallStub(&stub);
436       }
437       __ pop(v0);
438       EmitProfilingCounterReset();
439       __ bind(&ok);
440     }
441 
442 #ifdef DEBUG
443     // Add a label for checking the size of the code used for returning.
444     Label check_exit_codesize;
445     masm_->bind(&check_exit_codesize);
446 #endif
447     // Make sure that the constant pool is not emitted inside of the return
448     // sequence.
449     { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
450       // Here we use masm_-> instead of the __ macro to keep the code coverage
451       // tool from instrumenting this code, as we rely on the code size here.
452       int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
453       CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
454       __ RecordJSReturn();
455       masm_->mov(sp, fp);
456       masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
457       masm_->Addu(sp, sp, Operand(sp_delta));
458       masm_->Jump(ra);
459     }
460 
461 #ifdef DEBUG
462     // Check that the size of the code used for returning is large enough
463     // for the debugger's requirements.
464     ASSERT(Assembler::kJSReturnSequenceInstructions <=
465            masm_->InstructionsGeneratedSince(&check_exit_codesize));
466 #endif
467   }
468 }
469 
470 
471 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
472   ASSERT(var->IsStackAllocated() || var->IsContextSlot());
473 }
474 
475 
476 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
477   ASSERT(var->IsStackAllocated() || var->IsContextSlot());
478   codegen()->GetVar(result_register(), var);
479 }
480 
481 
482 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
483   ASSERT(var->IsStackAllocated() || var->IsContextSlot());
484   codegen()->GetVar(result_register(), var);
485   __ push(result_register());
486 }
487 
488 
489 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
490   // For simplicity we always test the accumulator register.
491   codegen()->GetVar(result_register(), var);
492   codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
493   codegen()->DoTest(this);
494 }
495 
496 
497 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
498 }
499 
500 
501 void FullCodeGenerator::AccumulatorValueContext::Plug(
502     Heap::RootListIndex index) const {
503   __ LoadRoot(result_register(), index);
504 }
505 
506 
507 void FullCodeGenerator::StackValueContext::Plug(
508     Heap::RootListIndex index) const {
509   __ LoadRoot(result_register(), index);
510   __ push(result_register());
511 }
512 
513 
514 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
515   codegen()->PrepareForBailoutBeforeSplit(condition(),
516                                           true,
517                                           true_label_,
518                                           false_label_);
519   if (index == Heap::kUndefinedValueRootIndex ||
520       index == Heap::kNullValueRootIndex ||
521       index == Heap::kFalseValueRootIndex) {
522     if (false_label_ != fall_through_) __ Branch(false_label_);
523   } else if (index == Heap::kTrueValueRootIndex) {
524     if (true_label_ != fall_through_) __ Branch(true_label_);
525   } else {
526     __ LoadRoot(result_register(), index);
527     codegen()->DoTest(this);
528   }
529 }
530 
531 
532 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
533 }
534 
535 
536 void FullCodeGenerator::AccumulatorValueContext::Plug(
537     Handle<Object> lit) const {
538   __ li(result_register(), Operand(lit));
539 }
540 
541 
542 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
543   // Immediates cannot be pushed directly.
544   __ li(result_register(), Operand(lit));
545   __ push(result_register());
546 }
547 
548 
549 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
550   codegen()->PrepareForBailoutBeforeSplit(condition(),
551                                           true,
552                                           true_label_,
553                                           false_label_);
554   ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
555   if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
556     if (false_label_ != fall_through_) __ Branch(false_label_);
557   } else if (lit->IsTrue() || lit->IsJSObject()) {
558     if (true_label_ != fall_through_) __ Branch(true_label_);
559   } else if (lit->IsString()) {
560     if (String::cast(*lit)->length() == 0) {
561       if (false_label_ != fall_through_) __ Branch(false_label_);
562     } else {
563       if (true_label_ != fall_through_) __ Branch(true_label_);
564     }
565   } else if (lit->IsSmi()) {
566     if (Smi::cast(*lit)->value() == 0) {
567       if (false_label_ != fall_through_) __ Branch(false_label_);
568     } else {
569       if (true_label_ != fall_through_) __ Branch(true_label_);
570     }
571   } else {
572     // For simplicity we always test the accumulator register.
573     __ li(result_register(), Operand(lit));
574     codegen()->DoTest(this);
575   }
576 }
577 
578 
579 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
580                                                    Register reg) const {
581   ASSERT(count > 0);
582   __ Drop(count);
583 }
584 
585 
586 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
587     int count,
588     Register reg) const {
589   ASSERT(count > 0);
590   __ Drop(count);
591   __ Move(result_register(), reg);
592 }
593 
594 
595 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
596                                                        Register reg) const {
597   ASSERT(count > 0);
598   if (count > 1) __ Drop(count - 1);
599   __ sw(reg, MemOperand(sp, 0));
600 }
601 
602 
603 void FullCodeGenerator::TestContext::DropAndPlug(int count,
604                                                  Register reg) const {
605   ASSERT(count > 0);
606   // For simplicity we always test the accumulator register.
607   __ Drop(count);
608   __ Move(result_register(), reg);
609   codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
610   codegen()->DoTest(this);
611 }
612 
613 
614 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
615                                             Label* materialize_false) const {
616   ASSERT(materialize_true == materialize_false);
617   __ bind(materialize_true);
618 }
619 
620 
621 void FullCodeGenerator::AccumulatorValueContext::Plug(
622     Label* materialize_true,
623     Label* materialize_false) const {
624   Label done;
625   __ bind(materialize_true);
626   __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
627   __ Branch(&done);
628   __ bind(materialize_false);
629   __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
630   __ bind(&done);
631 }
632 
633 
634 void FullCodeGenerator::StackValueContext::Plug(
635     Label* materialize_true,
636     Label* materialize_false) const {
637   Label done;
638   __ bind(materialize_true);
639   __ LoadRoot(at, Heap::kTrueValueRootIndex);
640   __ push(at);
641   __ Branch(&done);
642   __ bind(materialize_false);
643   __ LoadRoot(at, Heap::kFalseValueRootIndex);
644   __ push(at);
645   __ bind(&done);
646 }
647 
648 
649 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
650                                           Label* materialize_false) const {
651   ASSERT(materialize_true == true_label_);
652   ASSERT(materialize_false == false_label_);
653 }
654 
655 
656 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
657 }
658 
659 
660 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
661   Heap::RootListIndex value_root_index =
662       flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
663   __ LoadRoot(result_register(), value_root_index);
664 }
665 
666 
667 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
668   Heap::RootListIndex value_root_index =
669       flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
670   __ LoadRoot(at, value_root_index);
671   __ push(at);
672 }
673 
674 
675 void FullCodeGenerator::TestContext::Plug(bool flag) const {
676   codegen()->PrepareForBailoutBeforeSplit(condition(),
677                                           true,
678                                           true_label_,
679                                           false_label_);
680   if (flag) {
681     if (true_label_ != fall_through_) __ Branch(true_label_);
682   } else {
683     if (false_label_ != fall_through_) __ Branch(false_label_);
684   }
685 }
686 
687 
688 void FullCodeGenerator::DoTest(Expression* condition,
689                                Label* if_true,
690                                Label* if_false,
691                                Label* fall_through) {
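  // Both paths below leave the value to compare against in 'at': with FPU
  // support the ToBooleanStub is expected to leave zero in v0 for false and a
  // non-zero value for true (so we compare against zero_reg); otherwise
  // Runtime::kToBool returns a boolean object and we compare against the
  // false root value.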
692   if (CpuFeatures::IsSupported(FPU)) {
693     ToBooleanStub stub(result_register());
694     __ CallStub(&stub);
695     __ mov(at, zero_reg);
696   } else {
697     // Call the runtime to find the boolean value of the source and then
698     // translate it into control flow to the pair of labels.
699     __ push(result_register());
700     __ CallRuntime(Runtime::kToBool, 1);
701     __ LoadRoot(at, Heap::kFalseValueRootIndex);
702   }
703   Split(ne, v0, Operand(at), if_true, if_false, fall_through);
704 }
705 
706 
707 void FullCodeGenerator::Split(Condition cc,
708                               Register lhs,
709                               const Operand&  rhs,
710                               Label* if_true,
711                               Label* if_false,
712                               Label* fall_through) {
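  // Emit as few branches as possible: when one of the targets is the
  // fall-through, a single conditional branch suffices; otherwise emit a
  // conditional branch to if_true followed by an unconditional branch to
  // if_false.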
713   if (if_false == fall_through) {
714     __ Branch(if_true, cc, lhs, rhs);
715   } else if (if_true == fall_through) {
716     __ Branch(if_false, NegateCondition(cc), lhs, rhs);
717   } else {
718     __ Branch(if_true, cc, lhs, rhs);
719     __ Branch(if_false);
720   }
721 }
722 
723 
724 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
725   ASSERT(var->IsStackAllocated());
726   // Offset is negative because higher indexes are at lower addresses.
727   int offset = -var->index() * kPointerSize;
728   // Adjust by a (parameter or local) base offset.
729   if (var->IsParameter()) {
730     offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
731   } else {
732     offset += JavaScriptFrameConstants::kLocal0Offset;
733   }
734   return MemOperand(fp, offset);
735 }
736 
737 
738 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
739   ASSERT(var->IsContextSlot() || var->IsStackAllocated());
740   if (var->IsContextSlot()) {
741     int context_chain_length = scope()->ContextChainLength(var->scope());
742     __ LoadContext(scratch, context_chain_length);
743     return ContextOperand(scratch, var->index());
744   } else {
745     return StackOperand(var);
746   }
747 }
748 
749 
750 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
751   // Use destination as scratch.
752   MemOperand location = VarOperand(var, dest);
753   __ lw(dest, location);
754 }
755 
756 
757 void FullCodeGenerator::SetVar(Variable* var,
758                                Register src,
759                                Register scratch0,
760                                Register scratch1) {
761   ASSERT(var->IsContextSlot() || var->IsStackAllocated());
762   ASSERT(!scratch0.is(src));
763   ASSERT(!scratch0.is(scratch1));
764   ASSERT(!scratch1.is(src));
765   MemOperand location = VarOperand(var, scratch0);
766   __ sw(src, location);
767   // Emit the write barrier code if the location is in the heap.
768   if (var->IsContextSlot()) {
769     __ RecordWriteContextSlot(scratch0,
770                               location.offset(),
771                               src,
772                               scratch1,
773                               kRAHasBeenSaved,
774                               kDontSaveFPRegs);
775   }
776 }
777 
778 
779 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
780                                                      bool should_normalize,
781                                                      Label* if_true,
782                                                      Label* if_false) {
783   // Only prepare for bailouts before splits if we're in a test
784   // context. Otherwise, we let the Visit function deal with the
785   // preparation to avoid preparing with the same AST id twice.
786   if (!context()->IsTest() || !info_->IsOptimizable()) return;
787 
788   Label skip;
789   if (should_normalize) __ Branch(&skip);
790   PrepareForBailout(expr, TOS_REG);
791   if (should_normalize) {
792     __ LoadRoot(t0, Heap::kTrueValueRootIndex);
793     Split(eq, a0, Operand(t0), if_true, if_false, NULL);
794     __ bind(&skip);
795   }
796 }
797 
798 
799 void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
800                                         VariableMode mode,
801                                         FunctionLiteral* function) {
802   // If it was not possible to allocate the variable at compile time, we
803   // need to "declare" it at runtime to make sure it actually exists in the
804   // local context.
805   Variable* variable = proxy->var();
806   bool binding_needs_init = (function == NULL) &&
807       (mode == CONST || mode == CONST_HARMONY || mode == LET);
808   switch (variable->location()) {
809     case Variable::UNALLOCATED:
810       ++global_count_;
811       break;
812 
813     case Variable::PARAMETER:
814     case Variable::LOCAL:
815       if (function != NULL) {
816         Comment cmnt(masm_, "[ Declaration");
817         VisitForAccumulatorValue(function);
818         __ sw(result_register(), StackOperand(variable));
819       } else if (binding_needs_init) {
820           Comment cmnt(masm_, "[ Declaration");
821           __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
822           __ sw(t0, StackOperand(variable));
823       }
824       break;
825 
826       case Variable::CONTEXT:
827       // The variable in the decl always resides in the current function
828       // context.
829       ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
830       if (FLAG_debug_code) {
831         // Check that we're not inside a with or catch context.
832         __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
833         __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
834         __ Check(ne, "Declaration in with context.",
835                  a1, Operand(t0));
836         __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
837         __ Check(ne, "Declaration in catch context.",
838                  a1, Operand(t0));
839       }
840       if (function != NULL) {
841         Comment cmnt(masm_, "[ Declaration");
842         VisitForAccumulatorValue(function);
843         __ sw(result_register(), ContextOperand(cp, variable->index()));
844         int offset = Context::SlotOffset(variable->index());
845         // We know that we have written a function, which is not a smi.
846         __ RecordWriteContextSlot(cp,
847                                   offset,
848                                   result_register(),
849                                   a2,
850                                   kRAHasBeenSaved,
851                                   kDontSaveFPRegs,
852                                   EMIT_REMEMBERED_SET,
853                                   OMIT_SMI_CHECK);
854         PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
855       } else if (binding_needs_init) {
856           Comment cmnt(masm_, "[ Declaration");
857           __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
858           __ sw(at, ContextOperand(cp, variable->index()));
859           // No write barrier since the_hole_value is in old space.
860           PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
861       }
862       break;
863 
864     case Variable::LOOKUP: {
865       Comment cmnt(masm_, "[ Declaration");
866       __ li(a2, Operand(variable->name()));
867       // Declaration nodes are always introduced in one of four modes.
868       ASSERT(mode == VAR ||
869              mode == CONST ||
870              mode == CONST_HARMONY ||
871              mode == LET);
872       PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
873         ? READ_ONLY : NONE;
874       __ li(a1, Operand(Smi::FromInt(attr)));
875       // Push initial value, if any.
876       // Note: For variables we must not push an initial value (such as
877       // 'undefined') because we may have a (legal) redeclaration and we
878       // must not destroy the current value.
879       if (function != NULL) {
880         __ Push(cp, a2, a1);
881         // Push initial value for function declaration.
882         VisitForStackValue(function);
883       } else if (binding_needs_init) {
884           __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
885           __ Push(cp, a2, a1, a0);
886       } else {
887         ASSERT(Smi::FromInt(0) == 0);
888         __ mov(a0, zero_reg);  // Smi::FromInt(0) indicates no initial value.
889         __ Push(cp, a2, a1, a0);
890       }
891       __ CallRuntime(Runtime::kDeclareContextSlot, 4);
892       break;
893     }
894   }
895 }
896 
897 
898 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
899   // Call the runtime to declare the globals.
900   // The context is the first argument.
901   __ li(a1, Operand(pairs));
902   __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
903   __ Push(cp, a1, a0);
904   __ CallRuntime(Runtime::kDeclareGlobals, 3);
905   // Return value is ignored.
906 }
907 
908 
909 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
910   Comment cmnt(masm_, "[ SwitchStatement");
911   Breakable nested_statement(this, stmt);
912   SetStatementPosition(stmt);
913 
914   // Keep the switch value on the stack until a case matches.
915   VisitForStackValue(stmt->tag());
916   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
917 
918   ZoneList<CaseClause*>* clauses = stmt->cases();
919   CaseClause* default_clause = NULL;  // Can occur anywhere in the list.
920 
921   Label next_test;  // Recycled for each test.
922   // Compile all the tests with branches to their bodies.
923   for (int i = 0; i < clauses->length(); i++) {
924     CaseClause* clause = clauses->at(i);
925     clause->body_target()->Unuse();
926 
927     // The default is not a test, but remember it as final fall through.
928     if (clause->is_default()) {
929       default_clause = clause;
930       continue;
931     }
932 
933     Comment cmnt(masm_, "[ Case comparison");
934     __ bind(&next_test);
935     next_test.Unuse();
936 
937     // Compile the label expression.
938     VisitForAccumulatorValue(clause->label());
939     __ mov(a0, result_register());  // CompareStub requires args in a0, a1.
940 
941     // Perform the comparison as if via '==='.
942     __ lw(a1, MemOperand(sp, 0));  // Switch value.
943     bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
944     JumpPatchSite patch_site(masm_);
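    // The patch site marks the inlined smi comparison below; EmitPatchInfo()
    // after the CallIC records its location so the compare IC machinery can
    // later find and patch the inlined code (see JumpPatchSite above).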
945     if (inline_smi_code) {
946       Label slow_case;
947       __ or_(a2, a1, a0);
948       patch_site.EmitJumpIfNotSmi(a2, &slow_case);
949 
950       __ Branch(&next_test, ne, a1, Operand(a0));
951       __ Drop(1);  // Switch value is no longer needed.
952       __ Branch(clause->body_target());
953 
954       __ bind(&slow_case);
955     }
956 
957     // Record position before stub call for type feedback.
958     SetSourcePosition(clause->position());
959     Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
960     CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
961     patch_site.EmitPatchInfo();
962 
963     __ Branch(&next_test, ne, v0, Operand(zero_reg));
964     __ Drop(1);  // Switch value is no longer needed.
965     __ Branch(clause->body_target());
966   }
967 
968   // Discard the test value and jump to the default if present, otherwise to
969   // the end of the statement.
970   __ bind(&next_test);
971   __ Drop(1);  // Switch value is no longer needed.
972   if (default_clause == NULL) {
973     __ Branch(nested_statement.break_label());
974   } else {
975     __ Branch(default_clause->body_target());
976   }
977 
978   // Compile all the case bodies.
979   for (int i = 0; i < clauses->length(); i++) {
980     Comment cmnt(masm_, "[ Case body");
981     CaseClause* clause = clauses->at(i);
982     __ bind(clause->body_target());
983     PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
984     VisitStatements(clause->statements());
985   }
986 
987   __ bind(nested_statement.break_label());
988   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
989 }
990 
991 
992 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
993   Comment cmnt(masm_, "[ ForInStatement");
994   SetStatementPosition(stmt);
995 
996   Label loop, exit;
997   ForIn loop_statement(this, stmt);
998   increment_loop_depth();
999 
1000   // Get the object to enumerate over. Both SpiderMonkey and JSC
1001   // ignore null and undefined in contrast to the specification; see
1002   // ECMA-262 section 12.6.4.
1003   VisitForAccumulatorValue(stmt->enumerable());
1004   __ mov(a0, result_register());  // Result as param to InvokeBuiltin below.
1005   __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1006   __ Branch(&exit, eq, a0, Operand(at));
1007   Register null_value = t1;
1008   __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1009   __ Branch(&exit, eq, a0, Operand(null_value));
1010   PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1011   __ mov(a0, v0);
1012   // Convert the object to a JS object.
1013   Label convert, done_convert;
1014   __ JumpIfSmi(a0, &convert);
1015   __ GetObjectType(a0, a1, a1);
1016   __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1017   __ bind(&convert);
1018   __ push(a0);
1019   __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1020   __ mov(a0, v0);
1021   __ bind(&done_convert);
1022   __ push(a0);
1023 
1024   // Check for proxies.
1025   Label call_runtime;
1026   STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1027   __ GetObjectType(a0, a1, a1);
1028   __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));
1029 
1030   // Check cache validity in generated code. This is a fast case for
1031   // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1032   // guarantee cache validity, call the runtime system to check cache
1033   // validity or get the property names in a fixed array.
1034   __ CheckEnumCache(null_value, &call_runtime);
1035 
1036   // The enum cache is valid.  Load the map of the object being
1037   // iterated over and use the cache for the iteration.
1038   Label use_cache;
1039   __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
1040   __ Branch(&use_cache);
1041 
1042   // Get the set of properties to enumerate.
1043   __ bind(&call_runtime);
1044   __ push(a0);  // Duplicate the enumerable object on the stack.
1045   __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1046 
1047   // If we got a map from the runtime call, we can do a fast
1048   // modification check. Otherwise, we got a fixed array, and we have
1049   // to do a slow check.
1050   Label fixed_array;
1051   __ mov(a2, v0);
1052   __ lw(a1, FieldMemOperand(a2, HeapObject::kMapOffset));
1053   __ LoadRoot(at, Heap::kMetaMapRootIndex);
1054   __ Branch(&fixed_array, ne, a1, Operand(at));
1055 
1056   // We got a map in register v0. Get the enumeration cache from it.
1057   __ bind(&use_cache);
1058   __ LoadInstanceDescriptors(v0, a1);
1059   __ lw(a1, FieldMemOperand(a1, DescriptorArray::kEnumerationIndexOffset));
1060   __ lw(a2, FieldMemOperand(a1, DescriptorArray::kEnumCacheBridgeCacheOffset));
1061 
1062   // Set up the four remaining stack slots.
1063   __ push(v0);  // Map.
1064   __ lw(a1, FieldMemOperand(a2, FixedArray::kLengthOffset));
1065   __ li(a0, Operand(Smi::FromInt(0)));
1066   // Push enumeration cache, enumeration cache length (as smi) and zero.
1067   __ Push(a2, a1, a0);
1068   __ jmp(&loop);
1069 
1070   // We got a fixed array in register v0. Iterate through that.
1071   Label non_proxy;
1072   __ bind(&fixed_array);
1073 
1074   Handle<JSGlobalPropertyCell> cell =
1075       isolate()->factory()->NewJSGlobalPropertyCell(
1076           Handle<Object>(
1077               Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
1078   RecordTypeFeedbackCell(stmt->PrepareId(), cell);
1079   __ LoadHeapObject(a1, cell);
1080   __ li(a2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
1081   __ sw(a2, FieldMemOperand(a1, JSGlobalPropertyCell::kValueOffset));
1082 
1083   __ li(a1, Operand(Smi::FromInt(1)));  // Smi indicates slow check
1084   __ lw(a2, MemOperand(sp, 0 * kPointerSize));  // Get enumerated object
1085   STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1086   __ GetObjectType(a2, a3, a3);
1087   __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
1088   __ li(a1, Operand(Smi::FromInt(0)));  // Zero indicates proxy
1089   __ bind(&non_proxy);
1090   __ Push(a1, v0);  // Smi and array
1091   __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
1092   __ li(a0, Operand(Smi::FromInt(0)));
1093   __ Push(a1, a0);  // Fixed array length (as smi) and initial index.
1094 
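  // Both set-up paths above leave the same five values on the stack (from the
  // top): the current index (smi), the length (smi), the enum cache or fixed
  // array, the map (fast case) or a smi marker (slow/proxy case), and the
  // enumerable object itself; the loop below addresses them as
  // sp[0 * kPointerSize] through sp[4 * kPointerSize].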
1095   // Generate code for doing the condition check.
1096   PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1097   __ bind(&loop);
1098   // Load the current count to a0, load the length to a1.
1099   __ lw(a0, MemOperand(sp, 0 * kPointerSize));
1100   __ lw(a1, MemOperand(sp, 1 * kPointerSize));
1101   __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
1102 
1103   // Get the current entry of the array into register a3.
1104   __ lw(a2, MemOperand(sp, 2 * kPointerSize));
1105   __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1106   __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize);
1107   __ addu(t0, a2, t0);  // Array base + scaled (smi) index.
1108   __ lw(a3, MemOperand(t0));  // Current entry.
1109 
1110   // Get the expected map from the stack or a smi in the
1111   // permanent slow case into register a2.
1112   __ lw(a2, MemOperand(sp, 3 * kPointerSize));
1113 
1114   // Check if the expected map still matches that of the enumerable.
1115   // If not, we may have to filter the key.
1116   Label update_each;
1117   __ lw(a1, MemOperand(sp, 4 * kPointerSize));
1118   __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
1119   __ Branch(&update_each, eq, t0, Operand(a2));
1120 
1121   // For proxies, no filtering is done.
1122   // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1123   ASSERT_EQ(Smi::FromInt(0), 0);
1124   __ Branch(&update_each, eq, a2, Operand(zero_reg));
1125 
1126   // Convert the entry to a string or (smi) 0 if it isn't a property
1127   // any more. If the property has been removed while iterating, we
1128   // just skip it.
1129   __ push(a1);  // Enumerable.
1130   __ push(a3);  // Current entry.
1131   __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1132   __ mov(a3, result_register());
1133   __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));
1134 
1135   // Update the 'each' property or variable from the possibly filtered
1136   // entry in register a3.
1137   __ bind(&update_each);
1138   __ mov(result_register(), a3);
1139   // Perform the assignment as if via '='.
1140   { EffectContext context(this);
1141     EmitAssignment(stmt->each());
1142   }
1143 
1144   // Generate code for the body of the loop.
1145   Visit(stmt->body());
1146 
1147   // Generate code for going to the next element by incrementing
1148   // the index (smi) stored on top of the stack.
1149   __ bind(loop_statement.continue_label());
1150   __ pop(a0);
1151   __ Addu(a0, a0, Operand(Smi::FromInt(1)));
1152   __ push(a0);
1153 
1154   EmitStackCheck(stmt, &loop);
1155   __ Branch(&loop);
1156 
1157   // Remove the pointers stored on the stack.
1158   __ bind(loop_statement.break_label());
1159   __ Drop(5);
1160 
1161   // Exit and decrement the loop depth.
1162   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1163   __ bind(&exit);
1164   decrement_loop_depth();
1165 }
1166 
1167 
1168 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1169                                        bool pretenure) {
1170   // Use the fast case closure allocation code that allocates in new
1171   // space for nested functions that don't need literals cloning. If
1172   // we're running with the --always-opt or the --prepare-always-opt
1173   // flag, we need to use the runtime function so that the new function
1174   // we are creating here gets a chance to have its code optimized and
1175   // doesn't just get a copy of the existing unoptimized code.
1176   if (!FLAG_always_opt &&
1177       !FLAG_prepare_always_opt &&
1178       !pretenure &&
1179       scope()->is_function_scope() &&
1180       info->num_literals() == 0) {
1181     FastNewClosureStub stub(info->language_mode());
1182     __ li(a0, Operand(info));
1183     __ push(a0);
1184     __ CallStub(&stub);
1185   } else {
1186     __ li(a0, Operand(info));
1187     __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
1188                               : Heap::kFalseValueRootIndex);
1189     __ Push(cp, a0, a1);
1190     __ CallRuntime(Runtime::kNewClosure, 3);
1191   }
1192   context()->Plug(v0);
1193 }
1194 
1195 
1196 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1197   Comment cmnt(masm_, "[ VariableProxy");
1198   EmitVariableLoad(expr);
1199 }
1200 
1201 
1202 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1203                                                       TypeofState typeof_state,
1204                                                       Label* slow) {
1205   Register current = cp;
1206   Register next = a1;
1207   Register temp = a2;
1208 
1209   Scope* s = scope();
1210   while (s != NULL) {
1211     if (s->num_heap_slots() > 0) {
1212       if (s->calls_non_strict_eval()) {
1213         // Check that extension is NULL.
1214         __ lw(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1215         __ Branch(slow, ne, temp, Operand(zero_reg));
1216       }
1217       // Load next context in chain.
1218       __ lw(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1219       // Walk the rest of the chain without clobbering cp.
1220       current = next;
1221     }
1222     // If no outer scope calls eval, we do not need to check more
1223     // context extensions.
1224     if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
1225     s = s->outer_scope();
1226   }
1227 
1228   if (s->is_eval_scope()) {
1229     Label loop, fast;
1230     if (!current.is(next)) {
1231       __ Move(next, current);
1232     }
1233     __ bind(&loop);
1234     // Terminate at global context.
1235     __ lw(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1236     __ LoadRoot(t0, Heap::kGlobalContextMapRootIndex);
1237     __ Branch(&fast, eq, temp, Operand(t0));
1238     // Check that extension is NULL.
1239     __ lw(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1240     __ Branch(slow, ne, temp, Operand(zero_reg));
1241     // Load next context in chain.
1242     __ lw(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1243     __ Branch(&loop);
1244     __ bind(&fast);
1245   }
1246 
1247   __ lw(a0, GlobalObjectOperand());
1248   __ li(a2, Operand(var->name()));
1249   RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1250       ? RelocInfo::CODE_TARGET
1251       : RelocInfo::CODE_TARGET_CONTEXT;
1252   Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1253   CallIC(ic, mode);
1254 }
1255 
1256 
1257 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1258                                                                 Label* slow) {
1259   ASSERT(var->IsContextSlot());
1260   Register context = cp;
1261   Register next = a3;
1262   Register temp = t0;
1263 
1264   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1265     if (s->num_heap_slots() > 0) {
1266       if (s->calls_non_strict_eval()) {
1267         // Check that extension is NULL.
1268         __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1269         __ Branch(slow, ne, temp, Operand(zero_reg));
1270       }
1271       __ lw(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1272       // Walk the rest of the chain without clobbering cp.
1273       context = next;
1274     }
1275   }
1276   // Check that last extension is NULL.
1277   __ lw(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1278   __ Branch(slow, ne, temp, Operand(zero_reg));
1279 
1280   // This function is used only for loads, not stores, so it's safe to
1281   // return a cp-based operand (the write barrier cannot be allowed to
1282   // destroy the cp register).
1283   return ContextOperand(context, var->index());
1284 }
1285 
1286 
1287 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1288                                                   TypeofState typeof_state,
1289                                                   Label* slow,
1290                                                   Label* done) {
1291   // Generate fast-case code for variables that might be shadowed by
1292   // eval-introduced variables.  Eval is used a lot without
1293   // introducing variables.  In those cases, we do not want to
1294   // perform a runtime call for all variables in the scope
1295   // containing the eval.
1296   if (var->mode() == DYNAMIC_GLOBAL) {
1297     EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1298     __ Branch(done);
1299   } else if (var->mode() == DYNAMIC_LOCAL) {
1300     Variable* local = var->local_if_not_shadowed();
1301     __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
1302     if (local->mode() == CONST ||
1303         local->mode() == CONST_HARMONY ||
1304         local->mode() == LET) {
1305       __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1306       __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
1307       if (local->mode() == CONST) {
1308         __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1309         __ Movz(v0, a0, at);  // Conditional move: return Undefined if TheHole.
1310       } else {  // LET || CONST_HARMONY
1311         __ Branch(done, ne, at, Operand(zero_reg));
1312         __ li(a0, Operand(var->name()));
1313         __ push(a0);
1314         __ CallRuntime(Runtime::kThrowReferenceError, 1);
1315       }
1316     }
1317     __ Branch(done);
1318   }
1319 }
1320 
1321 
1322 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1323   // Record position before possible IC call.
1324   SetSourcePosition(proxy->position());
1325   Variable* var = proxy->var();
1326 
1327   // Three cases: global variables, lookup variables, and all other types of
1328   // variables.
1329   switch (var->location()) {
1330     case Variable::UNALLOCATED: {
1331       Comment cmnt(masm_, "Global variable");
1332       // Use inline caching. Variable name is passed in a2 and the global
1333       // object (receiver) in a0.
1334       __ lw(a0, GlobalObjectOperand());
1335       __ li(a2, Operand(var->name()));
1336       Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1337       CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1338       context()->Plug(v0);
1339       break;
1340     }
1341 
1342     case Variable::PARAMETER:
1343     case Variable::LOCAL:
1344     case Variable::CONTEXT: {
1345       Comment cmnt(masm_, var->IsContextSlot()
1346                               ? "Context variable"
1347                               : "Stack variable");
1348       if (var->binding_needs_init()) {
1349         // var->scope() may be NULL when the proxy is located in eval code and
1350         // refers to a potential outside binding. Currently those bindings are
1351         // always looked up dynamically, i.e. in that case
1352         //     var->location() == LOOKUP
1353         // always holds.
1354         ASSERT(var->scope() != NULL);
1355 
1356         // Check if the binding really needs an initialization check. The check
1357         // can be skipped in the following situation: we have a LET or CONST
1358         // binding in harmony mode, both the Variable and the VariableProxy have
1359         // the same declaration scope (i.e. they are both in global code, in the
1360         // same function or in the same eval code) and the VariableProxy is in
1361         // the source physically located after the initializer of the variable.
1362         //
1363         // We cannot skip any initialization checks for CONST in non-harmony
1364         // mode because const variables may be declared but never initialized:
1365         //   if (false) { const x; }; var y = x;
1366         //
1367         // The condition on the declaration scopes is a conservative check for
1368         // nested functions that access a binding and are called before the
1369         // binding is initialized:
1370         //   function() { f(); let x = 1; function f() { x = 2; } }
1371         //
1372         bool skip_init_check;
1373         if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1374           skip_init_check = false;
1375         } else {
1376           // Check that we always have valid source position.
1377           ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1378           ASSERT(proxy->position() != RelocInfo::kNoPosition);
1379           skip_init_check = var->mode() != CONST &&
1380               var->initializer_position() < proxy->position();
1381         }
1382 
1383         if (!skip_init_check) {
1384           // Let and const need a read barrier.
1385           GetVar(v0, var);
1386           __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1387           __ subu(at, v0, at);  // Sub as compare: at == 0 on eq.
1388           if (var->mode() == LET || var->mode() == CONST_HARMONY) {
1389             // Throw a reference error when using an uninitialized let/const
1390             // binding in harmony mode.
1391             Label done;
1392             __ Branch(&done, ne, at, Operand(zero_reg));
1393             __ li(a0, Operand(var->name()));
1394             __ push(a0);
1395             __ CallRuntime(Runtime::kThrowReferenceError, 1);
1396             __ bind(&done);
1397           } else {
1398             // Uninitialized const bindings outside of harmony mode are unholed.
1399             ASSERT(var->mode() == CONST);
1400             __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1401             __ Movz(v0, a0, at);  // Conditional move: Undefined if TheHole.
1402           }
1403           context()->Plug(v0);
1404           break;
1405         }
1406       }
1407       context()->Plug(var);
1408       break;
1409     }
1410 
1411     case Variable::LOOKUP: {
1412       Label done, slow;
1413       // Generate code for loading from variables potentially shadowed
1414       // by eval-introduced variables.
1415       EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1416       __ bind(&slow);
1417       Comment cmnt(masm_, "Lookup variable");
1418       __ li(a1, Operand(var->name()));
1419       __ Push(cp, a1);  // Context and name.
1420       __ CallRuntime(Runtime::kLoadContextSlot, 2);
1421       __ bind(&done);
1422       context()->Plug(v0);
1423     }
1424   }
1425 }
1426 
1427 
1428 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1429   Comment cmnt(masm_, "[ RegExpLiteral");
1430   Label materialized;
1431   // Registers will be used as follows:
1432   // t1 = materialized value (RegExp literal)
1433   // t0 = JS function, literals array
1434   // a3 = literal index
1435   // a2 = RegExp pattern
1436   // a1 = RegExp flags
1437   // a0 = RegExp literal clone
1438   __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1439   __ lw(t0, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
1440   int literal_offset =
1441       FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1442   __ lw(t1, FieldMemOperand(t0, literal_offset));
1443   __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1444   __ Branch(&materialized, ne, t1, Operand(at));
1445 
1446   // Create regexp literal using runtime function.
1447   // Result will be in v0.
1448   __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
1449   __ li(a2, Operand(expr->pattern()));
1450   __ li(a1, Operand(expr->flags()));
1451   __ Push(t0, a3, a2, a1);
1452   __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1453   __ mov(t1, v0);
1454 
1455   __ bind(&materialized);
1456   int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1457   Label allocated, runtime_allocate;
1458   __ AllocateInNewSpace(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
1459   __ jmp(&allocated);
1460 
1461   __ bind(&runtime_allocate);
1462   __ push(t1);
1463   __ li(a0, Operand(Smi::FromInt(size)));
1464   __ push(a0);
1465   __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1466   __ pop(t1);
1467 
1468   __ bind(&allocated);
1469 
1470   // After this, registers are used as follows:
1471   // v0: Newly allocated regexp.
1472   // t1: Materialized regexp.
1473   // a2: temp.
1474   __ CopyFields(v0, t1, a2.bit(), size / kPointerSize);
1475   context()->Plug(v0);
1476 }
1477 
1478 
1479 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1480   if (expression == NULL) {
1481     __ LoadRoot(a1, Heap::kNullValueRootIndex);
1482     __ push(a1);
1483   } else {
1484     VisitForStackValue(expression);
1485   }
1486 }
1487 
1488 
1489 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1490   Comment cmnt(masm_, "[ ObjectLiteral");
1491   Handle<FixedArray> constant_properties = expr->constant_properties();
1492   __ lw(a3, MemOperand(fp,  JavaScriptFrameConstants::kFunctionOffset));
1493   __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1494   __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1495   __ li(a1, Operand(constant_properties));
1496   int flags = expr->fast_elements()
1497       ? ObjectLiteral::kFastElements
1498       : ObjectLiteral::kNoFlags;
1499   flags |= expr->has_function()
1500       ? ObjectLiteral::kHasFunction
1501       : ObjectLiteral::kNoFlags;
1502   __ li(a0, Operand(Smi::FromInt(flags)));
1503   __ Push(a3, a2, a1, a0);
1504   int properties_count = constant_properties->length() / 2;
1505   if (expr->depth() > 1) {
1506     __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1507   } else if (flags != ObjectLiteral::kFastElements ||
1508       properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1509     __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
1510   } else {
1511     FastCloneShallowObjectStub stub(properties_count);
1512     __ CallStub(&stub);
1513   }
1514 
1515   // If result_saved is true the result is on top of the stack.  If
1516   // result_saved is false the result is in v0.
1517   bool result_saved = false;
1518 
1519   // Mark all computed expressions that are bound to a key that
1520   // is shadowed by a later occurrence of the same key. For the
1521   // marked expressions, no store code is emitted.
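  // For example (illustrative): in ({ x: f(), x: g() }) both f() and g()
  // are still evaluated, but only the store for the last occurrence of 'x'
  // is emitted.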
1522   expr->CalculateEmitStore();
1523 
1524   AccessorTable accessor_table(isolate()->zone());
1525   for (int i = 0; i < expr->properties()->length(); i++) {
1526     ObjectLiteral::Property* property = expr->properties()->at(i);
1527     if (property->IsCompileTimeValue()) continue;
1528 
1529     Literal* key = property->key();
1530     Expression* value = property->value();
1531     if (!result_saved) {
1532       __ push(v0);  // Save result on stack.
1533       result_saved = true;
1534     }
1535     switch (property->kind()) {
1536       case ObjectLiteral::Property::CONSTANT:
1537         UNREACHABLE();
1538       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1539         ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
1540         // Fall through.
1541       case ObjectLiteral::Property::COMPUTED:
1542         if (key->handle()->IsSymbol()) {
1543           if (property->emit_store()) {
1544             VisitForAccumulatorValue(value);
1545             __ mov(a0, result_register());
1546             __ li(a2, Operand(key->handle()));
1547             __ lw(a1, MemOperand(sp));
1548             Handle<Code> ic = is_classic_mode()
1549                 ? isolate()->builtins()->StoreIC_Initialize()
1550                 : isolate()->builtins()->StoreIC_Initialize_Strict();
1551             CallIC(ic, RelocInfo::CODE_TARGET, key->id());
1552             PrepareForBailoutForId(key->id(), NO_REGISTERS);
1553           } else {
1554             VisitForEffect(value);
1555           }
1556           break;
1557         }
1558         // Fall through.
1559       case ObjectLiteral::Property::PROTOTYPE:
1560         // Duplicate receiver on stack.
1561         __ lw(a0, MemOperand(sp));
1562         __ push(a0);
1563         VisitForStackValue(key);
1564         VisitForStackValue(value);
1565         if (property->emit_store()) {
1566           __ li(a0, Operand(Smi::FromInt(NONE)));  // PropertyAttributes.
1567           __ push(a0);
1568           __ CallRuntime(Runtime::kSetProperty, 4);
1569         } else {
1570           __ Drop(3);
1571         }
1572         break;
1573       case ObjectLiteral::Property::GETTER:
1574         accessor_table.lookup(key)->second->getter = value;
1575         break;
1576       case ObjectLiteral::Property::SETTER:
1577         accessor_table.lookup(key)->second->setter = value;
1578         break;
1579     }
1580   }
1581 
1582   // Emit code to define accessors, using only a single call to the runtime for
1583   // each pair of corresponding getters and setters.
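  // For example (illustrative): for ({ get p() { return 1; }, set p(v) {} })
  // the getter and setter were paired up in the accessor table above and are
  // defined together with a single runtime call.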
1584   for (AccessorTable::Iterator it = accessor_table.begin();
1585        it != accessor_table.end();
1586        ++it) {
1587     __ lw(a0, MemOperand(sp));  // Duplicate receiver.
1588     __ push(a0);
1589     VisitForStackValue(it->first);
1590     EmitAccessor(it->second->getter);
1591     EmitAccessor(it->second->setter);
1592     __ li(a0, Operand(Smi::FromInt(NONE)));
1593     __ push(a0);
1594     __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1595   }
1596 
1597   if (expr->has_function()) {
1598     ASSERT(result_saved);
1599     __ lw(a0, MemOperand(sp));
1600     __ push(a0);
1601     __ CallRuntime(Runtime::kToFastProperties, 1);
1602   }
1603 
1604   if (result_saved) {
1605     context()->PlugTOS();
1606   } else {
1607     context()->Plug(v0);
1608   }
1609 }
1610 
1611 
1612 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1613   Comment cmnt(masm_, "[ ArrayLiteral");
1614 
1615   ZoneList<Expression*>* subexprs = expr->values();
1616   int length = subexprs->length();
1617 
1618   Handle<FixedArray> constant_elements = expr->constant_elements();
1619   ASSERT_EQ(2, constant_elements->length());
1620   ElementsKind constant_elements_kind =
1621       static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1622   bool has_fast_elements = constant_elements_kind == FAST_ELEMENTS;
1623   Handle<FixedArrayBase> constant_elements_values(
1624       FixedArrayBase::cast(constant_elements->get(1)));
1625 
1626   __ mov(a0, result_register());
1627   __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1628   __ lw(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1629   __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1630   __ li(a1, Operand(constant_elements));
1631   __ Push(a3, a2, a1);
1632   if (has_fast_elements && constant_elements_values->map() ==
1633       isolate()->heap()->fixed_cow_array_map()) {
1634     FastCloneShallowArrayStub stub(
1635         FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
1636     __ CallStub(&stub);
1637     __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(),
1638         1, a1, a2);
1639   } else if (expr->depth() > 1) {
1640     __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1641   } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1642     __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1643   } else {
1644     ASSERT(constant_elements_kind == FAST_ELEMENTS ||
1645            constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
1646            FLAG_smi_only_arrays);
1647     FastCloneShallowArrayStub::Mode mode = has_fast_elements
1648       ? FastCloneShallowArrayStub::CLONE_ELEMENTS
1649       : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
1650     FastCloneShallowArrayStub stub(mode, length);
1651     __ CallStub(&stub);
1652   }
1653 
1654   bool result_saved = false;  // Is the result saved to the stack?
1655 
1656   // Emit code to evaluate all the non-constant subexpressions and to store
1657   // them into the newly cloned array.
1658   for (int i = 0; i < length; i++) {
1659     Expression* subexpr = subexprs->at(i);
1660     // If the subexpression is a literal or a simple materialized literal it
1661     // is already set in the cloned array.
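    // For example (illustrative): in [1, 2, foo()] the constant elements
    // 1 and 2 are already present in the boilerplate clone; only foo() needs
    // to be evaluated and stored below.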
1662     if (subexpr->AsLiteral() != NULL ||
1663         CompileTimeValue::IsCompileTimeValue(subexpr)) {
1664       continue;
1665     }
1666 
1667     if (!result_saved) {
1668       __ push(v0);
1669       result_saved = true;
1670     }
1671 
1672     VisitForAccumulatorValue(subexpr);
1673 
1674     if (constant_elements_kind == FAST_ELEMENTS) {
1675       int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1676       __ lw(t2, MemOperand(sp));  // Copy of array literal.
1677       __ lw(a1, FieldMemOperand(t2, JSObject::kElementsOffset));
1678       __ sw(result_register(), FieldMemOperand(a1, offset));
1679       // Update the write barrier for the array store.
1680       __ RecordWriteField(a1, offset, result_register(), a2,
1681                           kRAHasBeenSaved, kDontSaveFPRegs,
1682                           EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1683     } else {
1684       __ lw(a1, MemOperand(sp));  // Copy of array literal.
1685       __ lw(a2, FieldMemOperand(a1, JSObject::kMapOffset));
1686       __ li(a3, Operand(Smi::FromInt(i)));
1687       __ li(t0, Operand(Smi::FromInt(expr->literal_index())));
1688       __ mov(a0, result_register());
1689       StoreArrayLiteralElementStub stub;
1690       __ CallStub(&stub);
1691     }
1692 
1693     PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1694   }
1695   if (result_saved) {
1696     context()->PlugTOS();
1697   } else {
1698     context()->Plug(v0);
1699   }
1700 }
1701 
1702 
1703 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1704   Comment cmnt(masm_, "[ Assignment");
1705   // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1706   // on the left-hand side.
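  // For example (illustrative): in 'f() = 1' the target is not a valid
  // reference; the rewritten target throws the ReferenceError, so it only
  // needs to be visited for effect here.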
1707   if (!expr->target()->IsValidLeftHandSide()) {
1708     VisitForEffect(expr->target());
1709     return;
1710   }
1711 
1712   // Left-hand side can only be a property, a global or a (parameter or local)
1713   // slot.
1714   enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1715   LhsKind assign_type = VARIABLE;
1716   Property* property = expr->target()->AsProperty();
1717   if (property != NULL) {
1718     assign_type = (property->key()->IsPropertyName())
1719         ? NAMED_PROPERTY
1720         : KEYED_PROPERTY;
1721   }
1722 
1723   // Evaluate LHS expression.
1724   switch (assign_type) {
1725     case VARIABLE:
1726       // Nothing to do here.
1727       break;
1728     case NAMED_PROPERTY:
1729       if (expr->is_compound()) {
1730         // We need the receiver both on the stack and in the accumulator.
1731         VisitForAccumulatorValue(property->obj());
1732         __ push(result_register());
1733       } else {
1734         VisitForStackValue(property->obj());
1735       }
1736       break;
1737     case KEYED_PROPERTY:
1738       // We need the key and receiver on both the stack and in v0 and a1.
1739       if (expr->is_compound()) {
1740         VisitForStackValue(property->obj());
1741         VisitForAccumulatorValue(property->key());
1742         __ lw(a1, MemOperand(sp, 0));
1743         __ push(v0);
1744       } else {
1745         VisitForStackValue(property->obj());
1746         VisitForStackValue(property->key());
1747       }
1748       break;
1749   }
1750 
1751   // For compound assignments we need another deoptimization point after the
1752   // variable/property load.
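  // For example (illustrative): 'o.x += 1' first loads o.x; the extra
  // bailout point recorded below describes the state right after that load.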
1753   if (expr->is_compound()) {
1754     { AccumulatorValueContext context(this);
1755       switch (assign_type) {
1756         case VARIABLE:
1757           EmitVariableLoad(expr->target()->AsVariableProxy());
1758           PrepareForBailout(expr->target(), TOS_REG);
1759           break;
1760         case NAMED_PROPERTY:
1761           EmitNamedPropertyLoad(property);
1762           PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1763           break;
1764         case KEYED_PROPERTY:
1765           EmitKeyedPropertyLoad(property);
1766           PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1767           break;
1768       }
1769     }
1770 
1771     Token::Value op = expr->binary_op();
1772     __ push(v0);  // Left operand goes on the stack.
1773     VisitForAccumulatorValue(expr->value());
1774 
1775     OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1776         ? OVERWRITE_RIGHT
1777         : NO_OVERWRITE;
1778     SetSourcePosition(expr->position() + 1);
1779     AccumulatorValueContext context(this);
1780     if (ShouldInlineSmiCase(op)) {
1781       EmitInlineSmiBinaryOp(expr->binary_operation(),
1782                             op,
1783                             mode,
1784                             expr->target(),
1785                             expr->value());
1786     } else {
1787       EmitBinaryOp(expr->binary_operation(), op, mode);
1788     }
1789 
1790     // Deoptimization point in case the binary operation may have side effects.
1791     PrepareForBailout(expr->binary_operation(), TOS_REG);
1792   } else {
1793     VisitForAccumulatorValue(expr->value());
1794   }
1795 
1796   // Record source position before possible IC call.
1797   SetSourcePosition(expr->position());
1798 
1799   // Store the value.
1800   switch (assign_type) {
1801     case VARIABLE:
1802       EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1803                              expr->op());
1804       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1805       context()->Plug(v0);
1806       break;
1807     case NAMED_PROPERTY:
1808       EmitNamedPropertyAssignment(expr);
1809       break;
1810     case KEYED_PROPERTY:
1811       EmitKeyedPropertyAssignment(expr);
1812       break;
1813   }
1814 }
1815 
1816 
1817 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1818   SetSourcePosition(prop->position());
1819   Literal* key = prop->key()->AsLiteral();
1820   __ mov(a0, result_register());
1821   __ li(a2, Operand(key->handle()));
1822   // Call load IC. It has arguments receiver and property name in a0 and a2.
1823   Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1824   CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
1825 }
1826 
1827 
1828 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1829   SetSourcePosition(prop->position());
1830   __ mov(a0, result_register());
1831   // Call keyed load IC. It has arguments key and receiver in a0 and a1.
1832   Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1833   CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
1834 }
1835 
1836 
1837 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1838                                               Token::Value op,
1839                                               OverwriteMode mode,
1840                                               Expression* left_expr,
1841                                               Expression* right_expr) {
1842   Label done, smi_case, stub_call;
1843 
1844   Register scratch1 = a2;
1845   Register scratch2 = a3;
1846 
1847   // Get the arguments.
1848   Register left = a1;
1849   Register right = a0;
1850   __ pop(left);
1851   __ mov(a0, result_register());
1852 
1853   // Perform combined smi check on both operands.
1854   __ Or(scratch1, left, Operand(right));
1855   STATIC_ASSERT(kSmiTag == 0);
1856   JumpPatchSite patch_site(masm_);
1857   patch_site.EmitJumpIfSmi(scratch1, &smi_case);
1858 
1859   __ bind(&stub_call);
1860   BinaryOpStub stub(op, mode);
1861   CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1862   patch_site.EmitPatchInfo();
1863   __ jmp(&done);
1864 
1865   __ bind(&smi_case);
1866   // Smi case. This code works the same way as the smi-smi case in the type
1867   // recording binary operation stub, see
1868   // BinaryOpStub::GenerateSmiSmiOperation for comments.
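  // For example (illustrative): for 'a + b' with two smi operands the
  // addition is performed inline with an overflow check; a non-smi operand
  // or an overflow falls back to the stub call above.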
1869   switch (op) {
1870     case Token::SAR:
1871       __ Branch(&stub_call);
1872       __ GetLeastBitsFromSmi(scratch1, right, 5);
1873       __ srav(right, left, scratch1);
1874       __ And(v0, right, Operand(~kSmiTagMask));
1875       break;
1876     case Token::SHL: {
1877       __ Branch(&stub_call);
1878       __ SmiUntag(scratch1, left);
1879       __ GetLeastBitsFromSmi(scratch2, right, 5);
1880       __ sllv(scratch1, scratch1, scratch2);
1881       __ Addu(scratch2, scratch1, Operand(0x40000000));
1882       __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
1883       __ SmiTag(v0, scratch1);
1884       break;
1885     }
1886     case Token::SHR: {
1887       __ Branch(&stub_call);
1888       __ SmiUntag(scratch1, left);
1889       __ GetLeastBitsFromSmi(scratch2, right, 5);
1890       __ srlv(scratch1, scratch1, scratch2);
1891       __ And(scratch2, scratch1, 0xc0000000);
1892       __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
1893       __ SmiTag(v0, scratch1);
1894       break;
1895     }
1896     case Token::ADD:
1897       __ AdduAndCheckForOverflow(v0, left, right, scratch1);
1898       __ BranchOnOverflow(&stub_call, scratch1);
1899       break;
1900     case Token::SUB:
1901       __ SubuAndCheckForOverflow(v0, left, right, scratch1);
1902       __ BranchOnOverflow(&stub_call, scratch1);
1903       break;
1904     case Token::MUL: {
1905       __ SmiUntag(scratch1, right);
1906       __ Mult(left, scratch1);
1907       __ mflo(scratch1);
1908       __ mfhi(scratch2);
1909       __ sra(scratch1, scratch1, 31);
1910       __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
1911       __ mflo(v0);
1912       __ Branch(&done, ne, v0, Operand(zero_reg));
1913       __ Addu(scratch2, right, left);
1914       __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
1915       ASSERT(Smi::FromInt(0) == 0);
1916       __ mov(v0, zero_reg);
1917       break;
1918     }
1919     case Token::BIT_OR:
1920       __ Or(v0, left, Operand(right));
1921       break;
1922     case Token::BIT_AND:
1923       __ And(v0, left, Operand(right));
1924       break;
1925     case Token::BIT_XOR:
1926       __ Xor(v0, left, Operand(right));
1927       break;
1928     default:
1929       UNREACHABLE();
1930   }
1931 
1932   __ bind(&done);
1933   context()->Plug(v0);
1934 }
1935 
1936 
1937 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
1938                                      Token::Value op,
1939                                      OverwriteMode mode) {
1940   __ mov(a0, result_register());
1941   __ pop(a1);
1942   BinaryOpStub stub(op, mode);
1943   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
1944   CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1945   patch_site.EmitPatchInfo();
1946   context()->Plug(v0);
1947 }
1948 
1949 
1950 void FullCodeGenerator::EmitAssignment(Expression* expr) {
1951   // Invalid left-hand sides are rewritten to have a 'throw
1952   // ReferenceError' on the left-hand side.
1953   if (!expr->IsValidLeftHandSide()) {
1954     VisitForEffect(expr);
1955     return;
1956   }
1957 
1958   // Left-hand side can only be a property, a global or a (parameter or local)
1959   // slot.
1960   enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1961   LhsKind assign_type = VARIABLE;
1962   Property* prop = expr->AsProperty();
1963   if (prop != NULL) {
1964     assign_type = (prop->key()->IsPropertyName())
1965         ? NAMED_PROPERTY
1966         : KEYED_PROPERTY;
1967   }
1968 
1969   switch (assign_type) {
1970     case VARIABLE: {
1971       Variable* var = expr->AsVariableProxy()->var();
1972       EffectContext context(this);
1973       EmitVariableAssignment(var, Token::ASSIGN);
1974       break;
1975     }
1976     case NAMED_PROPERTY: {
1977       __ push(result_register());  // Preserve value.
1978       VisitForAccumulatorValue(prop->obj());
1979       __ mov(a1, result_register());
1980       __ pop(a0);  // Restore value.
1981       __ li(a2, Operand(prop->key()->AsLiteral()->handle()));
1982       Handle<Code> ic = is_classic_mode()
1983           ? isolate()->builtins()->StoreIC_Initialize()
1984           : isolate()->builtins()->StoreIC_Initialize_Strict();
1985       CallIC(ic);
1986       break;
1987     }
1988     case KEYED_PROPERTY: {
1989       __ push(result_register());  // Preserve value.
1990       VisitForStackValue(prop->obj());
1991       VisitForAccumulatorValue(prop->key());
1992       __ mov(a1, result_register());
1993       __ pop(a2);
1994       __ pop(a0);  // Restore value.
1995       Handle<Code> ic = is_classic_mode()
1996         ? isolate()->builtins()->KeyedStoreIC_Initialize()
1997         : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
1998       CallIC(ic);
1999       break;
2000     }
2001   }
2002   context()->Plug(v0);
2003 }
2004 
2005 
2006 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2007                                                Token::Value op) {
2008   if (var->IsUnallocated()) {
2009     // Global var, const, or let.
2010     __ mov(a0, result_register());
2011     __ li(a2, Operand(var->name()));
2012     __ lw(a1, GlobalObjectOperand());
2013     Handle<Code> ic = is_classic_mode()
2014         ? isolate()->builtins()->StoreIC_Initialize()
2015         : isolate()->builtins()->StoreIC_Initialize_Strict();
2016     CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
2017 
2018   } else if (op == Token::INIT_CONST) {
2019     // Const initializers need a write barrier.
2020     ASSERT(!var->IsParameter());  // No const parameters.
2021     if (var->IsStackLocal()) {
2022       Label skip;
2023       __ lw(a1, StackOperand(var));
2024       __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2025       __ Branch(&skip, ne, a1, Operand(t0));
2026       __ sw(result_register(), StackOperand(var));
2027       __ bind(&skip);
2028     } else {
2029       ASSERT(var->IsContextSlot() || var->IsLookupSlot());
2030       // Like var declarations, const declarations are hoisted to function
2031       // scope.  However, unlike var initializers, const initializers are
2032       // able to drill a hole to that function context, even from inside a
2033       // 'with' context.  We thus bypass the normal static scope lookup for
2034       // var->IsContextSlot().
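      // For example (illustrative):
      //   function f(o) { with (o) { const c = 1; } }
      // The initializer of 'c' must write into f's function context even
      // though it executes inside the 'with' scope, hence the runtime call.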
2035       __ push(v0);
2036       __ li(a0, Operand(var->name()));
2037       __ Push(cp, a0);  // Context and name.
2038       __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2039     }
2040 
2041   } else if (var->mode() == LET && op != Token::INIT_LET) {
2042     // Non-initializing assignment to let variable needs a write barrier.
2043     if (var->IsLookupSlot()) {
2044       __ push(v0);  // Value.
2045       __ li(a1, Operand(var->name()));
2046       __ li(a0, Operand(Smi::FromInt(language_mode())));
2047       __ Push(cp, a1, a0);  // Context, name, strict mode.
2048       __ CallRuntime(Runtime::kStoreContextSlot, 4);
2049     } else {
2050       ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2051       Label assign;
2052       MemOperand location = VarOperand(var, a1);
2053       __ lw(a3, location);
2054       __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2055       __ Branch(&assign, ne, a3, Operand(t0));
2056       __ li(a3, Operand(var->name()));
2057       __ push(a3);
2058       __ CallRuntime(Runtime::kThrowReferenceError, 1);
2059       // Perform the assignment.
2060       __ bind(&assign);
2061       __ sw(result_register(), location);
2062       if (var->IsContextSlot()) {
2063         // RecordWrite may destroy all its register arguments.
2064         __ mov(a3, result_register());
2065         int offset = Context::SlotOffset(var->index());
2066         __ RecordWriteContextSlot(
2067             a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2068       }
2069     }
2070 
2071   } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
2072     // Assignment to var or initializing assignment to let/const
2073     // in harmony mode.
2074     if (var->IsStackAllocated() || var->IsContextSlot()) {
2075       MemOperand location = VarOperand(var, a1);
2076       if (FLAG_debug_code && op == Token::INIT_LET) {
2077         // Check for an uninitialized let binding.
2078         __ lw(a2, location);
2079         __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2080         __ Check(eq, "Let binding re-initialization.", a2, Operand(t0));
2081       }
2082       // Perform the assignment.
2083       __ sw(v0, location);
2084       if (var->IsContextSlot()) {
2085         __ mov(a3, v0);
2086         int offset = Context::SlotOffset(var->index());
2087         __ RecordWriteContextSlot(
2088             a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2089       }
2090     } else {
2091       ASSERT(var->IsLookupSlot());
2092       __ push(v0);  // Value.
2093       __ li(a1, Operand(var->name()));
2094       __ li(a0, Operand(Smi::FromInt(language_mode())));
2095       __ Push(cp, a1, a0);  // Context, name, strict mode.
2096       __ CallRuntime(Runtime::kStoreContextSlot, 4);
2097     }
2098   }
2099     // Non-initializing assignments to consts are ignored.
2100 }
2101 
2102 
2103 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2104   // Assignment to a property, using a named store IC.
2105   Property* prop = expr->target()->AsProperty();
2106   ASSERT(prop != NULL);
2107   ASSERT(prop->key()->AsLiteral() != NULL);
2108 
2109   // If the assignment starts a block of assignments to the same object,
2110   // change to slow case to avoid the quadratic behavior of repeatedly
2111   // adding fast properties.
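  // For example (illustrative), a run of consecutive assignments such as
  //   this.a = 1; this.b = 2; this.c = 3;
  // forms such a block: the receiver is switched to slow properties here and
  // switched back to fast properties when the block ends (see below).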
2112   if (expr->starts_initialization_block()) {
2113     __ push(result_register());
2114     __ lw(t0, MemOperand(sp, kPointerSize));  // Receiver is now under value.
2115     __ push(t0);
2116     __ CallRuntime(Runtime::kToSlowProperties, 1);
2117     __ pop(result_register());
2118   }
2119 
2120   // Record source code position before IC call.
2121   SetSourcePosition(expr->position());
2122   __ mov(a0, result_register());  // Load the value.
2123   __ li(a2, Operand(prop->key()->AsLiteral()->handle()));
2124   // Load receiver to a1. Leave a copy on the stack if needed for turning the
2125   // receiver into fast case.
2126   if (expr->ends_initialization_block()) {
2127     __ lw(a1, MemOperand(sp));
2128   } else {
2129     __ pop(a1);
2130   }
2131 
2132   Handle<Code> ic = is_classic_mode()
2133         ? isolate()->builtins()->StoreIC_Initialize()
2134         : isolate()->builtins()->StoreIC_Initialize_Strict();
2135   CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2136 
2137   // If the assignment ends an initialization block, revert to fast case.
2138   if (expr->ends_initialization_block()) {
2139     __ push(v0);  // Result of assignment, saved even if not needed.
2140     // Receiver is under the result value.
2141     __ lw(t0, MemOperand(sp, kPointerSize));
2142     __ push(t0);
2143     __ CallRuntime(Runtime::kToFastProperties, 1);
2144     __ pop(v0);
2145     __ Drop(1);
2146   }
2147   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2148   context()->Plug(v0);
2149 }
2150 
2151 
2152 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2153   // Assignment to a property, using a keyed store IC.
2154 
2155   // If the assignment starts a block of assignments to the same object,
2156   // change to slow case to avoid the quadratic behavior of repeatedly
2157   // adding fast properties.
2158   if (expr->starts_initialization_block()) {
2159     __ push(result_register());
2160     // Receiver is now under the key and value.
2161     __ lw(t0, MemOperand(sp, 2 * kPointerSize));
2162     __ push(t0);
2163     __ CallRuntime(Runtime::kToSlowProperties, 1);
2164     __ pop(result_register());
2165   }
2166 
2167   // Record source code position before IC call.
2168   SetSourcePosition(expr->position());
2169   // Call keyed store IC.
2170   // The arguments are:
2171   // - a0 is the value,
2172   // - a1 is the key,
2173   // - a2 is the receiver.
2174   __ mov(a0, result_register());
2175   __ pop(a1);  // Key.
2176   // Load receiver to a2. Leave a copy on the stack if needed for turning the
2177   // receiver into fast case.
2178   if (expr->ends_initialization_block()) {
2179     __ lw(a2, MemOperand(sp));
2180   } else {
2181     __ pop(a2);
2182   }
2183 
2184   Handle<Code> ic = is_classic_mode()
2185       ? isolate()->builtins()->KeyedStoreIC_Initialize()
2186       : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2187   CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2188 
2189   // If the assignment ends an initialization block, revert to fast case.
2190   if (expr->ends_initialization_block()) {
2191     __ push(v0);  // Result of assignment, saved even if not needed.
2192     // Receiver is under the result value.
2193     __ lw(t0, MemOperand(sp, kPointerSize));
2194     __ push(t0);
2195     __ CallRuntime(Runtime::kToFastProperties, 1);
2196     __ pop(v0);
2197     __ Drop(1);
2198   }
2199   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2200   context()->Plug(v0);
2201 }
2202 
2203 
2204 void FullCodeGenerator::VisitProperty(Property* expr) {
2205   Comment cmnt(masm_, "[ Property");
2206   Expression* key = expr->key();
2207 
2208   if (key->IsPropertyName()) {
2209     VisitForAccumulatorValue(expr->obj());
2210     EmitNamedPropertyLoad(expr);
2211     context()->Plug(v0);
2212   } else {
2213     VisitForStackValue(expr->obj());
2214     VisitForAccumulatorValue(expr->key());
2215     __ pop(a1);
2216     EmitKeyedPropertyLoad(expr);
2217     context()->Plug(v0);
2218   }
2219 }
2220 
2221 
2222 void FullCodeGenerator::CallIC(Handle<Code> code,
2223                                RelocInfo::Mode rmode,
2224                                unsigned ast_id) {
2225   ic_total_count_++;
2226   __ Call(code, rmode, ast_id);
2227 }
2228 
2229 
2230 void FullCodeGenerator::EmitCallWithIC(Call* expr,
2231                                        Handle<Object> name,
2232                                        RelocInfo::Mode mode) {
2233   // Code common for calls using the IC.
2234   ZoneList<Expression*>* args = expr->arguments();
2235   int arg_count = args->length();
2236   { PreservePositionScope scope(masm()->positions_recorder());
2237     for (int i = 0; i < arg_count; i++) {
2238       VisitForStackValue(args->at(i));
2239     }
2240     __ li(a2, Operand(name));
2241   }
2242   // Record source position for debugger.
2243   SetSourcePosition(expr->position());
2244   // Call the IC initialization code.
2245   Handle<Code> ic =
2246       isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
2247   CallIC(ic, mode, expr->id());
2248   RecordJSReturnSite(expr);
2249   // Restore context register.
2250   __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2251   context()->Plug(v0);
2252 }
2253 
2254 
2255 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2256                                             Expression* key) {
2257   // Load the key.
2258   VisitForAccumulatorValue(key);
2259 
2260   // Swap the name of the function and the receiver on the stack to follow
2261   // the calling convention for call ICs.
2262   __ pop(a1);
2263   __ push(v0);
2264   __ push(a1);
2265 
2266   // Code common for calls using the IC.
2267   ZoneList<Expression*>* args = expr->arguments();
2268   int arg_count = args->length();
2269   { PreservePositionScope scope(masm()->positions_recorder());
2270     for (int i = 0; i < arg_count; i++) {
2271       VisitForStackValue(args->at(i));
2272     }
2273   }
2274   // Record source position for debugger.
2275   SetSourcePosition(expr->position());
2276   // Call the IC initialization code.
2277   Handle<Code> ic =
2278       isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
2279   __ lw(a2, MemOperand(sp, (arg_count + 1) * kPointerSize));  // Key.
2280   CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2281   RecordJSReturnSite(expr);
2282   // Restore context register.
2283   __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2284   context()->DropAndPlug(1, v0);  // Drop the key still on the stack.
2285 }
2286 
2287 
2288 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
2289   // Code common for calls using the call stub.
2290   ZoneList<Expression*>* args = expr->arguments();
2291   int arg_count = args->length();
2292   { PreservePositionScope scope(masm()->positions_recorder());
2293     for (int i = 0; i < arg_count; i++) {
2294       VisitForStackValue(args->at(i));
2295     }
2296   }
2297   // Record source position for debugger.
2298   SetSourcePosition(expr->position());
2299   CallFunctionStub stub(arg_count, flags);
2300   __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2301   __ CallStub(&stub);
2302   RecordJSReturnSite(expr);
2303   // Restore context register.
2304   __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2305   context()->DropAndPlug(1, v0);
2306 }
2307 
2308 
2309 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2310   // Push copy of the first argument or undefined if it doesn't exist.
2311   if (arg_count > 0) {
2312     __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2313   } else {
2314     __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2315   }
2316   __ push(a1);
2317 
2318   // Push the receiver of the enclosing function.
2319   int receiver_offset = 2 + info_->scope()->num_parameters();
2320   __ lw(a1, MemOperand(fp, receiver_offset * kPointerSize));
2321   __ push(a1);
2322   // Push the language mode.
2323   __ li(a1, Operand(Smi::FromInt(language_mode())));
2324   __ push(a1);
2325 
2326   // Push the start position of the scope the call resides in.
2327   __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
2328   __ push(a1);
2329 
2330   // Do the runtime call.
2331   __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2332 }
2333 
2334 
2335 void FullCodeGenerator::VisitCall(Call* expr) {
2336 #ifdef DEBUG
2337   // We want to verify that RecordJSReturnSite gets called on all paths
2338   // through this function.  Avoid early returns.
2339   expr->return_is_recorded_ = false;
2340 #endif
2341 
2342   Comment cmnt(masm_, "[ Call");
2343   Expression* callee = expr->expression();
2344   VariableProxy* proxy = callee->AsVariableProxy();
2345   Property* property = callee->AsProperty();
2346 
2347   if (proxy != NULL && proxy->var()->is_possibly_eval()) {
2348     // In a call to eval, we first call %ResolvePossiblyDirectEval to
2349     // resolve the function we need to call and the receiver of the
2350     // call.  Then we call the resolved function using the given
2351     // arguments.
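    // For example (illustrative): for a direct call 'eval(src)' the runtime
    // checks whether the callee really is the global eval and, if so,
    // compiles 'src' in the current scope; the call stub below then invokes
    // the resolved function with the resolved receiver.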
2352     ZoneList<Expression*>* args = expr->arguments();
2353     int arg_count = args->length();
2354 
2355     { PreservePositionScope pos_scope(masm()->positions_recorder());
2356       VisitForStackValue(callee);
2357       __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2358       __ push(a2);  // Reserved receiver slot.
2359 
2360       // Push the arguments.
2361       for (int i = 0; i < arg_count; i++) {
2362         VisitForStackValue(args->at(i));
2363       }
2364 
2365       // Push a copy of the function (found below the arguments) and
2366       // resolve eval.
2367       __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2368       __ push(a1);
2369       EmitResolvePossiblyDirectEval(arg_count);
2370 
2371       // The runtime call returns a pair of values in v0 (function) and
2372       // v1 (receiver). Touch up the stack with the right values.
2373       __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2374       __ sw(v1, MemOperand(sp, arg_count * kPointerSize));
2375     }
2376     // Record source position for debugger.
2377     SetSourcePosition(expr->position());
2378     CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
2379     __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2380     __ CallStub(&stub);
2381     RecordJSReturnSite(expr);
2382     // Restore context register.
2383     __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2384     context()->DropAndPlug(1, v0);
2385   } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
2386     // Push global object as receiver for the call IC.
2387     __ lw(a0, GlobalObjectOperand());
2388     __ push(a0);
2389     EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
2390   } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
2391     // Call to a lookup slot (dynamically introduced variable).
2392     Label slow, done;
2393 
2394     { PreservePositionScope scope(masm()->positions_recorder());
2395       // Generate code for loading from variables potentially shadowed
2396       // by eval-introduced variables.
2397       EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2398     }
2399 
2400     __ bind(&slow);
2401     // Call the runtime to find the function to call (returned in v0)
2402     // and the object holding it (returned in v1).
2403     __ push(context_register());
2404     __ li(a2, Operand(proxy->name()));
2405     __ push(a2);
2406     __ CallRuntime(Runtime::kLoadContextSlot, 2);
2407     __ Push(v0, v1);  // Function, receiver.
2408 
2409     // If fast case code has been generated, emit code to push the
2410     // function and receiver and have the slow path jump around this
2411     // code.
2412     if (done.is_linked()) {
2413       Label call;
2414       __ Branch(&call);
2415       __ bind(&done);
2416       // Push function.
2417       __ push(v0);
2418       // The receiver is implicitly the global receiver. Indicate this
2419       // by passing the hole to the call function stub.
2420       __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
2421       __ push(a1);
2422       __ bind(&call);
2423     }
2424 
2425     // The receiver is either the global receiver or an object found
2426     // by LoadContextSlot. That object could be the hole if the
2427     // receiver is implicitly the global object.
2428     EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
2429   } else if (property != NULL) {
2430     { PreservePositionScope scope(masm()->positions_recorder());
2431       VisitForStackValue(property->obj());
2432     }
2433     if (property->key()->IsPropertyName()) {
2434       EmitCallWithIC(expr,
2435                      property->key()->AsLiteral()->handle(),
2436                      RelocInfo::CODE_TARGET);
2437     } else {
2438       EmitKeyedCallWithIC(expr, property->key());
2439     }
2440   } else {
2441     // Call to an arbitrary expression not handled specially above.
2442     { PreservePositionScope scope(masm()->positions_recorder());
2443       VisitForStackValue(callee);
2444     }
2445     // Load global receiver object.
2446     __ lw(a1, GlobalObjectOperand());
2447     __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalReceiverOffset));
2448     __ push(a1);
2449     // Emit function call.
2450     EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2451   }
2452 
2453 #ifdef DEBUG
2454   // RecordJSReturnSite should have been called.
2455   ASSERT(expr->return_is_recorded_);
2456 #endif
2457 }
2458 
2459 
2460 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2461   Comment cmnt(masm_, "[ CallNew");
2462   // According to ECMA-262, section 11.2.2, page 44, the function
2463   // expression in new calls must be evaluated before the
2464   // arguments.
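  // For example (illustrative): in 'new f(g())' the constructor expression
  // 'f' is evaluated and pushed before the argument expression 'g()'.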
2465 
2466   // Push constructor on the stack.  If it's not a function it's used as
2467   // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2468   // ignored.
2469   VisitForStackValue(expr->expression());
2470 
2471   // Push the arguments ("left-to-right") on the stack.
2472   ZoneList<Expression*>* args = expr->arguments();
2473   int arg_count = args->length();
2474   for (int i = 0; i < arg_count; i++) {
2475     VisitForStackValue(args->at(i));
2476   }
2477 
2478   // Call the construct call builtin that handles allocation and
2479   // constructor invocation.
2480   SetSourcePosition(expr->position());
2481 
2482   // Load function and argument count into a1 and a0.
2483   __ li(a0, Operand(arg_count));
2484   __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2485 
2486   // Record call targets in unoptimized code, but not in the snapshot.
2487   CallFunctionFlags flags;
2488   if (!Serializer::enabled()) {
2489     flags = RECORD_CALL_TARGET;
2490     Handle<Object> uninitialized =
2491        TypeFeedbackCells::UninitializedSentinel(isolate());
2492     Handle<JSGlobalPropertyCell> cell =
2493         isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2494     RecordTypeFeedbackCell(expr->id(), cell);
2495     __ li(a2, Operand(cell));
2496   } else {
2497     flags = NO_CALL_FUNCTION_FLAGS;
2498   }
2499 
2500   CallConstructStub stub(flags);
2501   __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2502   PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2503   context()->Plug(v0);
2504 }
2505 
2506 
2507 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2508   ZoneList<Expression*>* args = expr->arguments();
2509   ASSERT(args->length() == 1);
2510 
2511   VisitForAccumulatorValue(args->at(0));
2512 
2513   Label materialize_true, materialize_false;
2514   Label* if_true = NULL;
2515   Label* if_false = NULL;
2516   Label* fall_through = NULL;
2517   context()->PrepareTest(&materialize_true, &materialize_false,
2518                          &if_true, &if_false, &fall_through);
2519 
2520   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2521   __ And(t0, v0, Operand(kSmiTagMask));
2522   Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);
2523 
2524   context()->Plug(if_true, if_false);
2525 }
2526 
2527 
2528 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2529   ZoneList<Expression*>* args = expr->arguments();
2530   ASSERT(args->length() == 1);
2531 
2532   VisitForAccumulatorValue(args->at(0));
2533 
2534   Label materialize_true, materialize_false;
2535   Label* if_true = NULL;
2536   Label* if_false = NULL;
2537   Label* fall_through = NULL;
2538   context()->PrepareTest(&materialize_true, &materialize_false,
2539                          &if_true, &if_false, &fall_through);
2540 
2541   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2542   __ And(at, v0, Operand(kSmiTagMask | 0x80000000));
2543   Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);
2544 
2545   context()->Plug(if_true, if_false);
2546 }
2547 
2548 
2549 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2550   ZoneList<Expression*>* args = expr->arguments();
2551   ASSERT(args->length() == 1);
2552 
2553   VisitForAccumulatorValue(args->at(0));
2554 
2555   Label materialize_true, materialize_false;
2556   Label* if_true = NULL;
2557   Label* if_false = NULL;
2558   Label* fall_through = NULL;
2559   context()->PrepareTest(&materialize_true, &materialize_false,
2560                          &if_true, &if_false, &fall_through);
2561 
2562   __ JumpIfSmi(v0, if_false);
2563   __ LoadRoot(at, Heap::kNullValueRootIndex);
2564   __ Branch(if_true, eq, v0, Operand(at));
2565   __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
2566   // Undetectable objects behave like undefined when tested with typeof.
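  // For example (illustrative): the host object 'document.all' in browsers
  // is marked undetectable, so 'typeof document.all' is "undefined" and it
  // must not be classified as an object here.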
2567   __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
2568   __ And(at, a1, Operand(1 << Map::kIsUndetectable));
2569   __ Branch(if_false, ne, at, Operand(zero_reg));
2570   __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
2571   __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2572   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2573   Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
2574         if_true, if_false, fall_through);
2575 
2576   context()->Plug(if_true, if_false);
2577 }
2578 
2579 
2580 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2581   ZoneList<Expression*>* args = expr->arguments();
2582   ASSERT(args->length() == 1);
2583 
2584   VisitForAccumulatorValue(args->at(0));
2585 
2586   Label materialize_true, materialize_false;
2587   Label* if_true = NULL;
2588   Label* if_false = NULL;
2589   Label* fall_through = NULL;
2590   context()->PrepareTest(&materialize_true, &materialize_false,
2591                          &if_true, &if_false, &fall_through);
2592 
2593   __ JumpIfSmi(v0, if_false);
2594   __ GetObjectType(v0, a1, a1);
2595   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2596   Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
2597         if_true, if_false, fall_through);
2598 
2599   context()->Plug(if_true, if_false);
2600 }
2601 
2602 
2603 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2604   ZoneList<Expression*>* args = expr->arguments();
2605   ASSERT(args->length() == 1);
2606 
2607   VisitForAccumulatorValue(args->at(0));
2608 
2609   Label materialize_true, materialize_false;
2610   Label* if_true = NULL;
2611   Label* if_false = NULL;
2612   Label* fall_through = NULL;
2613   context()->PrepareTest(&materialize_true, &materialize_false,
2614                          &if_true, &if_false, &fall_through);
2615 
2616   __ JumpIfSmi(v0, if_false);
2617   __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2618   __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
2619   __ And(at, a1, Operand(1 << Map::kIsUndetectable));
2620   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2621   Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);
2622 
2623   context()->Plug(if_true, if_false);
2624 }
2625 
2626 
2627 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2628     CallRuntime* expr) {
2629   ZoneList<Expression*>* args = expr->arguments();
2630   ASSERT(args->length() == 1);
2631 
2632   VisitForAccumulatorValue(args->at(0));
2633 
2634   Label materialize_true, materialize_false;
2635   Label* if_true = NULL;
2636   Label* if_false = NULL;
2637   Label* fall_through = NULL;
2638   context()->PrepareTest(&materialize_true, &materialize_false,
2639                          &if_true, &if_false, &fall_through);
2640 
2641   if (FLAG_debug_code) __ AbortIfSmi(v0);
2642 
2643   __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2644   __ lbu(t0, FieldMemOperand(a1, Map::kBitField2Offset));
2645   __ And(t0, t0, 1 << Map::kStringWrapperSafeForDefaultValueOf);
2646   __ Branch(if_true, ne, t0, Operand(zero_reg));
2647 
2648   // Check for fast case object. Generate false result for slow case object.
2649   __ lw(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2650   __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
2651   __ LoadRoot(t0, Heap::kHashTableMapRootIndex);
2652   __ Branch(if_false, eq, a2, Operand(t0));
2653 
2654   // Look for the valueOf symbol in the descriptor array; if it is found,
2655   // the result is false. The descriptor type is not checked, so a
2656   // transition entry produces a false negative.
2657   __ LoadInstanceDescriptors(a1, t0);
2658   __ lw(a3, FieldMemOperand(t0, FixedArray::kLengthOffset));
2659   // t0: descriptor array
2660   // a3: length of descriptor array
2661   // Calculate the end of the descriptor array.
2662   STATIC_ASSERT(kSmiTag == 0);
2663   STATIC_ASSERT(kSmiTagSize == 1);
2664   STATIC_ASSERT(kPointerSize == 4);
2665   __ Addu(a2, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2666   __ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize);
2667   __ Addu(a2, a2, t1);
2668 
2669   // Calculate location of the first key name.
2670   __ Addu(t0,
2671           t0,
2672           Operand(FixedArray::kHeaderSize - kHeapObjectTag +
2673                   DescriptorArray::kFirstIndex * kPointerSize));
2674   // Loop through all the keys in the descriptor array. If one of these is the
2675   // symbol valueOf the result is false.
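  // For example (illustrative): after
  //   var s = new String("x"); s.valueOf = function() { return "y"; };
  // the wrapper's map has an own 'valueOf' descriptor, so the default
  // valueOf is not safe and the result is false.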
2676   Label entry, loop;
2677   // The use of t2 to store the valueOf symbol assumes that it is not otherwise
2678   // used in the loop below.
2679   __ LoadRoot(t2, Heap::kvalue_of_symbolRootIndex);
2680   __ jmp(&entry);
2681   __ bind(&loop);
2682   __ lw(a3, MemOperand(t0, 0));
2683   __ Branch(if_false, eq, a3, Operand(t2));
2684   __ Addu(t0, t0, Operand(kPointerSize));
2685   __ bind(&entry);
2686   __ Branch(&loop, ne, t0, Operand(a2));
2687 
2688   // If a valueOf property is not found on the object, check that its
2689   // prototype is the unmodified String prototype. If not, the result is false.
2690   __ lw(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
2691   __ JumpIfSmi(a2, if_false);
2692   __ lw(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
2693   __ lw(a3, ContextOperand(cp, Context::GLOBAL_INDEX));
2694   __ lw(a3, FieldMemOperand(a3, GlobalObject::kGlobalContextOffset));
2695   __ lw(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2696   __ Branch(if_false, ne, a2, Operand(a3));
2697 
2698   // Set the bit in the map to indicate that it has been checked safe for
2699   // the default valueOf, and produce the true result.
2700   __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
2701   __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
2702   __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));
2703   __ jmp(if_true);
2704 
2705   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2706   context()->Plug(if_true, if_false);
2707 }
2708 
2709 
2710 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2711   ZoneList<Expression*>* args = expr->arguments();
2712   ASSERT(args->length() == 1);
2713 
2714   VisitForAccumulatorValue(args->at(0));
2715 
2716   Label materialize_true, materialize_false;
2717   Label* if_true = NULL;
2718   Label* if_false = NULL;
2719   Label* fall_through = NULL;
2720   context()->PrepareTest(&materialize_true, &materialize_false,
2721                          &if_true, &if_false, &fall_through);
2722 
2723   __ JumpIfSmi(v0, if_false);
2724   __ GetObjectType(v0, a1, a2);
2725   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2726   __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
2727   __ Branch(if_false);
2728 
2729   context()->Plug(if_true, if_false);
2730 }
2731 
2732 
2733 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2734   ZoneList<Expression*>* args = expr->arguments();
2735   ASSERT(args->length() == 1);
2736 
2737   VisitForAccumulatorValue(args->at(0));
2738 
2739   Label materialize_true, materialize_false;
2740   Label* if_true = NULL;
2741   Label* if_false = NULL;
2742   Label* fall_through = NULL;
2743   context()->PrepareTest(&materialize_true, &materialize_false,
2744                          &if_true, &if_false, &fall_through);
2745 
2746   __ JumpIfSmi(v0, if_false);
2747   __ GetObjectType(v0, a1, a1);
2748   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2749   Split(eq, a1, Operand(JS_ARRAY_TYPE),
2750         if_true, if_false, fall_through);
2751 
2752   context()->Plug(if_true, if_false);
2753 }
2754 
2755 
2756 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2757   ZoneList<Expression*>* args = expr->arguments();
2758   ASSERT(args->length() == 1);
2759 
2760   VisitForAccumulatorValue(args->at(0));
2761 
2762   Label materialize_true, materialize_false;
2763   Label* if_true = NULL;
2764   Label* if_false = NULL;
2765   Label* fall_through = NULL;
2766   context()->PrepareTest(&materialize_true, &materialize_false,
2767                          &if_true, &if_false, &fall_through);
2768 
2769   __ JumpIfSmi(v0, if_false);
2770   __ GetObjectType(v0, a1, a1);
2771   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2772   Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
2773 
2774   context()->Plug(if_true, if_false);
2775 }
2776 
2777 
2778 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
2779   ASSERT(expr->arguments()->length() == 0);
2780 
2781   Label materialize_true, materialize_false;
2782   Label* if_true = NULL;
2783   Label* if_false = NULL;
2784   Label* fall_through = NULL;
2785   context()->PrepareTest(&materialize_true, &materialize_false,
2786                          &if_true, &if_false, &fall_through);
2787 
2788   // Get the frame pointer for the calling frame.
2789   __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2790 
2791   // Skip the arguments adaptor frame if it exists.
2792   Label check_frame_marker;
2793   __ lw(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
2794   __ Branch(&check_frame_marker, ne,
2795             a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2796   __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
2797 
2798   // Check the marker in the calling frame.
2799   __ bind(&check_frame_marker);
2800   __ lw(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
2801   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2802   Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
2803         if_true, if_false, fall_through);
2804 
2805   context()->Plug(if_true, if_false);
2806 }
2807 
2808 
2809 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
2810   ZoneList<Expression*>* args = expr->arguments();
2811   ASSERT(args->length() == 2);
2812 
2813   // Load the two objects into registers and perform the comparison.
2814   VisitForStackValue(args->at(0));
2815   VisitForAccumulatorValue(args->at(1));
2816 
2817   Label materialize_true, materialize_false;
2818   Label* if_true = NULL;
2819   Label* if_false = NULL;
2820   Label* fall_through = NULL;
2821   context()->PrepareTest(&materialize_true, &materialize_false,
2822                          &if_true, &if_false, &fall_through);
2823 
2824   __ pop(a1);
2825   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2826   Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
2827 
2828   context()->Plug(if_true, if_false);
2829 }
2830 
2831 
2832 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
2833   ZoneList<Expression*>* args = expr->arguments();
2834   ASSERT(args->length() == 1);
2835 
2836   // ArgumentsAccessStub expects the key in a1 and the formal
2837   // parameter count in a0.
2838   VisitForAccumulatorValue(args->at(0));
2839   __ mov(a1, v0);
2840   __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
2841   ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2842   __ CallStub(&stub);
2843   context()->Plug(v0);
2844 }
2845 
2846 
2847 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
2848   ASSERT(expr->arguments()->length() == 0);
2849   Label exit;
2850   // Get the number of formal parameters.
2851   __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
2852 
2853   // Check if the calling frame is an arguments adaptor frame.
2854   __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2855   __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
2856   __ Branch(&exit, ne, a3,
2857             Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2858 
2859   // Arguments adaptor case: Read the arguments length from the
2860   // adaptor frame.
2861   __ lw(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
2862 
2863   __ bind(&exit);
2864   context()->Plug(v0);
2865 }
2866 
2867 
2868 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2869   ZoneList<Expression*>* args = expr->arguments();
2870   ASSERT(args->length() == 1);
2871   Label done, null, function, non_function_constructor;
2872 
2873   VisitForAccumulatorValue(args->at(0));
2874 
2875   // If the object is a smi, we return null.
2876   __ JumpIfSmi(v0, &null);
2877 
2878   // Check that the object is a JS object but take special care of JS
2879   // functions to make sure they have 'Function' as their class.
2880   // Assume that there are only two callable types, and one of them is at
2881   // either end of the type range for JS object types. Saves extra comparisons.
2882   STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
2883   __ GetObjectType(v0, v0, a1);  // Map is now in v0.
2884   __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
2885 
2886   STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2887                 FIRST_SPEC_OBJECT_TYPE + 1);
2888   __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
2889 
2890   STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2891                 LAST_SPEC_OBJECT_TYPE - 1);
2892   __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
2893   // Assume that there is no larger type.
2894   STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
2895 
2896   // Check if the constructor in the map is a JS function.
2897   __ lw(v0, FieldMemOperand(v0, Map::kConstructorOffset));
2898   __ GetObjectType(v0, a1, a1);
2899   __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE));
2900 
2901   // v0 now contains the constructor function. Grab the
2902   // instance class name from there.
2903   __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
2904   __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
2905   __ Branch(&done);
2906 
2907   // Functions have class 'Function'.
2908   __ bind(&function);
2909   __ LoadRoot(v0, Heap::kfunction_class_symbolRootIndex);
2910   __ jmp(&done);
2911 
2912   // Objects with a non-function constructor have class 'Object'.
2913   __ bind(&non_function_constructor);
2914   __ LoadRoot(v0, Heap::kObject_symbolRootIndex);
2915   __ jmp(&done);
2916 
2917   // Non-JS objects have class null.
2918   __ bind(&null);
2919   __ LoadRoot(v0, Heap::kNullValueRootIndex);
2920 
2921   // All done.
2922   __ bind(&done);
2923 
2924   context()->Plug(v0);
2925 }
2926 
2927 
2928 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
2929   // Conditionally generate a log call.
2930   // Args:
2931   //   0 (literal string): The type of logging (corresponds to the flags).
2932   //     This is used to determine whether or not to generate the log call.
2933   //   1 (string): Format string.  Access the string at argument index 2
2934   //     with '%2s' (see Logger::LogRuntime for all the formats).
2935   //   2 (array): Arguments to the format string.
2936   ZoneList<Expression*>* args = expr->arguments();
2937   ASSERT_EQ(args->length(), 3);
2938   if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
2939     VisitForStackValue(args->at(1));
2940     VisitForStackValue(args->at(2));
2941     __ CallRuntime(Runtime::kLog, 2);
2942   }
2943 
2944   // Finally, we're expected to leave a value on the top of the stack.
2945   __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
2946   context()->Plug(v0);
2947 }
2948 
2949 
2950 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
2951   ASSERT(expr->arguments()->length() == 0);
2952   Label slow_allocate_heapnumber;
2953   Label heapnumber_allocated;
2954 
2955   // Save the new heap number in callee-saved register s0, since
2956   // we call out to external C code below.
2957   __ LoadRoot(t6, Heap::kHeapNumberMapRootIndex);
2958   __ AllocateHeapNumber(s0, a1, a2, t6, &slow_allocate_heapnumber);
2959   __ jmp(&heapnumber_allocated);
2960 
2961   __ bind(&slow_allocate_heapnumber);
2962 
2963   // Allocate a heap number.
2964   __ CallRuntime(Runtime::kNumberAlloc, 0);
2965   __ mov(s0, v0);   // Save the result in s0 so it survives the C function call.
2966 
2967   __ bind(&heapnumber_allocated);
2968 
2969   // Convert 32 random bits in v0 to 0.(32 random bits) in a double
2970   // by computing:
2971   // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
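  // In other words, the difference equals (random bits) * 2^-32, i.e. a
  // uniformly distributed double in the range [0, 1).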
2972   if (CpuFeatures::IsSupported(FPU)) {
2973     __ PrepareCallCFunction(1, a0);
2974     __ lw(a0, ContextOperand(cp, Context::GLOBAL_INDEX));
2975     __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalContextOffset));
2976     __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
2977 
2978     CpuFeatures::Scope scope(FPU);
2979     // 0x41300000 is the top half of 1.0 x 2^20 as a double.
2980     __ li(a1, Operand(0x41300000));
2981     // Move 0x41300000xxxxxxxx (x = random bits in v0) to FPU.
2982     __ Move(f12, v0, a1);
2983     // Move 0x4130000000000000 to FPU.
2984     __ Move(f14, zero_reg, a1);
2985     // Subtract and store the result in the heap number.
2986     __ sub_d(f0, f12, f14);
2987     __ sdc1(f0, MemOperand(s0, HeapNumber::kValueOffset - kHeapObjectTag));
2988     __ mov(v0, s0);
2989   } else {
2990     __ PrepareCallCFunction(2, a0);
2991     __ mov(a0, s0);
2992     __ lw(a1, ContextOperand(cp, Context::GLOBAL_INDEX));
2993     __ lw(a1, FieldMemOperand(a1, GlobalObject::kGlobalContextOffset));
2994     __ CallCFunction(
2995         ExternalReference::fill_heap_number_with_random_function(isolate()), 2);
2996   }
2997 
2998   context()->Plug(v0);
2999 }
3000 
3001 
3002 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3003   // Load the arguments on the stack and call the stub.
3004   SubStringStub stub;
3005   ZoneList<Expression*>* args = expr->arguments();
3006   ASSERT(args->length() == 3);
3007   VisitForStackValue(args->at(0));
3008   VisitForStackValue(args->at(1));
3009   VisitForStackValue(args->at(2));
3010   __ CallStub(&stub);
3011   context()->Plug(v0);
3012 }
3013 
3014 
3015 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3016   // Load the arguments on the stack and call the stub.
3017   RegExpExecStub stub;
3018   ZoneList<Expression*>* args = expr->arguments();
3019   ASSERT(args->length() == 4);
3020   VisitForStackValue(args->at(0));
3021   VisitForStackValue(args->at(1));
3022   VisitForStackValue(args->at(2));
3023   VisitForStackValue(args->at(3));
3024   __ CallStub(&stub);
3025   context()->Plug(v0);
3026 }
3027 
3028 
3029 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3030   ZoneList<Expression*>* args = expr->arguments();
3031   ASSERT(args->length() == 1);
3032 
3033   VisitForAccumulatorValue(args->at(0));  // Load the object.
3034 
3035   Label done;
3036   // If the object is a smi, return the object.
3037   __ JumpIfSmi(v0, &done);
3038   // If the object is not a value type, return the object.
3039   __ GetObjectType(v0, a1, a1);
3040   __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3041 
3042   __ lw(v0, FieldMemOperand(v0, JSValue::kValueOffset));
3043 
3044   __ bind(&done);
3045   context()->Plug(v0);
3046 }
3047 
3048 
3049 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3050   ZoneList<Expression*>* args = expr->arguments();
3051   ASSERT(args->length() == 2);
3052   ASSERT_NE(NULL, args->at(1)->AsLiteral());
3053   Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
3054 
3055   VisitForAccumulatorValue(args->at(0));  // Load the object.
3056 
3057   Label runtime, done;
3058   Register object = v0;
3059   Register result = v0;
3060   Register scratch0 = t5;
3061   Register scratch1 = a1;
3062 
3063 #ifdef DEBUG
3064   __ AbortIfSmi(object);
3065   __ GetObjectType(object, scratch1, scratch1);
3066   __ Assert(eq, "Trying to get date field from non-date.",
3067       scratch1, Operand(JS_DATE_TYPE));
3068 #endif
3069 
3070   if (index->value() == 0) {
3071     __ lw(result, FieldMemOperand(object, JSDate::kValueOffset));
3072   } else {
3073     if (index->value() < JSDate::kFirstUncachedField) {
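      // Fields below kFirstUncachedField are cached on the JSDate object. The
      // cached copy is only valid while the object's cache stamp matches the
      // current date cache stamp; on a mismatch, fall through to the runtime.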
3074       ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3075       __ li(scratch1, Operand(stamp));
3076       __ lw(scratch1, MemOperand(scratch1));
3077       __ lw(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3078       __ Branch(&runtime, ne, scratch1, Operand(scratch0));
3079       __ lw(result, FieldMemOperand(object, JSDate::kValueOffset +
3080                                             kPointerSize * index->value()));
3081       __ jmp(&done);
3082     }
3083     __ bind(&runtime);
3084     __ PrepareCallCFunction(2, scratch1);
3085     __ li(a1, Operand(index));
3086     __ Move(a0, object);
3087     __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3088     __ bind(&done);
3089   }
3090 
3091   context()->Plug(v0);
3092 }
3093 
3094 
3095 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3096   // Load the arguments on the stack and call the runtime function.
3097   ZoneList<Expression*>* args = expr->arguments();
3098   ASSERT(args->length() == 2);
3099   VisitForStackValue(args->at(0));
3100   VisitForStackValue(args->at(1));
3101   if (CpuFeatures::IsSupported(FPU)) {
3102     MathPowStub stub(MathPowStub::ON_STACK);
3103     __ CallStub(&stub);
3104   } else {
3105     __ CallRuntime(Runtime::kMath_pow, 2);
3106   }
3107   context()->Plug(v0);
3108 }
3109 
3110 
3111 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3112   ZoneList<Expression*>* args = expr->arguments();
3113   ASSERT(args->length() == 2);
3114 
3115   VisitForStackValue(args->at(0));  // Load the object.
3116   VisitForAccumulatorValue(args->at(1));  // Load the value.
3117   __ pop(a1);  // v0 = value. a1 = object.
3118 
3119   Label done;
3120   // If the object is a smi, return the value.
3121   __ JumpIfSmi(a1, &done);
3122 
3123   // If the object is not a value type, return the value.
3124   __ GetObjectType(a1, a2, a2);
3125   __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));
3126 
3127   // Store the value.
3128   __ sw(v0, FieldMemOperand(a1, JSValue::kValueOffset));
3129   // Update the write barrier.  Save the value as it will be
3130   // overwritten by the write barrier code and is needed afterward.
3131   __ mov(a2, v0);
3132   __ RecordWriteField(
3133       a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
3134 
3135   __ bind(&done);
3136   context()->Plug(v0);
3137 }
3138 
3139 
3140 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3141   ZoneList<Expression*>* args = expr->arguments();
3142   ASSERT_EQ(args->length(), 1);
3143 
3144   // Load the argument on the stack and call the stub.
3145   VisitForStackValue(args->at(0));
3146 
3147   NumberToStringStub stub;
3148   __ CallStub(&stub);
3149   context()->Plug(v0);
3150 }
3151 
3152 
3153 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3154   ZoneList<Expression*>* args = expr->arguments();
3155   ASSERT(args->length() == 1);
3156 
3157   VisitForAccumulatorValue(args->at(0));
3158 
3159   Label done;
3160   StringCharFromCodeGenerator generator(v0, a1);
3161   generator.GenerateFast(masm_);
3162   __ jmp(&done);
3163 
3164   NopRuntimeCallHelper call_helper;
3165   generator.GenerateSlow(masm_, call_helper);
3166 
3167   __ bind(&done);
3168   context()->Plug(a1);
3169 }
3170 
3171 
3172 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3173   ZoneList<Expression*>* args = expr->arguments();
3174   ASSERT(args->length() == 2);
3175 
3176   VisitForStackValue(args->at(0));
3177   VisitForAccumulatorValue(args->at(1));
3178   __ mov(a0, result_register());
3179 
3180   Register object = a1;
3181   Register index = a0;
3182   Register result = v0;
3183 
3184   __ pop(object);
3185 
3186   Label need_conversion;
3187   Label index_out_of_range;
3188   Label done;
3189   StringCharCodeAtGenerator generator(object,
3190                                       index,
3191                                       result,
3192                                       &need_conversion,
3193                                       &need_conversion,
3194                                       &index_out_of_range,
3195                                       STRING_INDEX_IS_NUMBER);
3196   generator.GenerateFast(masm_);
3197   __ jmp(&done);
3198 
3199   __ bind(&index_out_of_range);
3200   // When the index is out of range, the spec requires us to return
3201   // NaN.
3202   __ LoadRoot(result, Heap::kNanValueRootIndex);
3203   __ jmp(&done);
3204 
3205   __ bind(&need_conversion);
3206   // Load the undefined value into the result register, which will
3207   // trigger conversion.
3208   __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3209   __ jmp(&done);
3210 
3211   NopRuntimeCallHelper call_helper;
3212   generator.GenerateSlow(masm_, call_helper);
3213 
3214   __ bind(&done);
3215   context()->Plug(result);
3216 }
3217 
3218 
3219 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3220   ZoneList<Expression*>* args = expr->arguments();
3221   ASSERT(args->length() == 2);
3222 
3223   VisitForStackValue(args->at(0));
3224   VisitForAccumulatorValue(args->at(1));
3225   __ mov(a0, result_register());
3226 
3227   Register object = a1;
3228   Register index = a0;
3229   Register scratch = a3;
3230   Register result = v0;
3231 
3232   __ pop(object);
3233 
3234   Label need_conversion;
3235   Label index_out_of_range;
3236   Label done;
3237   StringCharAtGenerator generator(object,
3238                                   index,
3239                                   scratch,
3240                                   result,
3241                                   &need_conversion,
3242                                   &need_conversion,
3243                                   &index_out_of_range,
3244                                   STRING_INDEX_IS_NUMBER);
3245   generator.GenerateFast(masm_);
3246   __ jmp(&done);
3247 
3248   __ bind(&index_out_of_range);
3249   // When the index is out of range, the spec requires us to return
3250   // the empty string.
3251   __ LoadRoot(result, Heap::kEmptyStringRootIndex);
3252   __ jmp(&done);
3253 
3254   __ bind(&need_conversion);
3255   // Move smi zero into the result register, which will trigger
3256   // conversion.
3257   __ li(result, Operand(Smi::FromInt(0)));
3258   __ jmp(&done);
3259 
3260   NopRuntimeCallHelper call_helper;
3261   generator.GenerateSlow(masm_, call_helper);
3262 
3263   __ bind(&done);
3264   context()->Plug(result);
3265 }
3266 
3267 
3268 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3269   ZoneList<Expression*>* args = expr->arguments();
3270   ASSERT_EQ(2, args->length());
3271   VisitForStackValue(args->at(0));
3272   VisitForStackValue(args->at(1));
3273 
3274   StringAddStub stub(NO_STRING_ADD_FLAGS);
3275   __ CallStub(&stub);
3276   context()->Plug(v0);
3277 }
3278 
3279 
3280 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3281   ZoneList<Expression*>* args = expr->arguments();
3282   ASSERT_EQ(2, args->length());
3283 
3284   VisitForStackValue(args->at(0));
3285   VisitForStackValue(args->at(1));
3286 
3287   StringCompareStub stub;
3288   __ CallStub(&stub);
3289   context()->Plug(v0);
3290 }
3291 
3292 
3293 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
3294   // Load the argument on the stack and call the stub.
3295   TranscendentalCacheStub stub(TranscendentalCache::SIN,
3296                                TranscendentalCacheStub::TAGGED);
3297   ZoneList<Expression*>* args = expr->arguments();
3298   ASSERT(args->length() == 1);
3299   VisitForStackValue(args->at(0));
3300   __ mov(a0, result_register());  // Stub requires parameter in a0 and on tos.
3301   __ CallStub(&stub);
3302   context()->Plug(v0);
3303 }
3304 
3305 
3306 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
3307   // Load the argument on the stack and call the stub.
3308   TranscendentalCacheStub stub(TranscendentalCache::COS,
3309                                TranscendentalCacheStub::TAGGED);
3310   ZoneList<Expression*>* args = expr->arguments();
3311   ASSERT(args->length() == 1);
3312   VisitForStackValue(args->at(0));
3313   __ mov(a0, result_register());  // Stub requires parameter in a0 and on tos.
3314   __ CallStub(&stub);
3315   context()->Plug(v0);
3316 }
3317 
3318 
3319 void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
3320   // Load the argument on the stack and call the stub.
3321   TranscendentalCacheStub stub(TranscendentalCache::TAN,
3322                                TranscendentalCacheStub::TAGGED);
3323   ZoneList<Expression*>* args = expr->arguments();
3324   ASSERT(args->length() == 1);
3325   VisitForStackValue(args->at(0));
3326   __ mov(a0, result_register());  // Stub requires parameter in a0 and on tos.
3327   __ CallStub(&stub);
3328   context()->Plug(v0);
3329 }
3330 
3331 
3332 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3333   // Load the argument on the stack and call the stub.
3334   TranscendentalCacheStub stub(TranscendentalCache::LOG,
3335                                TranscendentalCacheStub::TAGGED);
3336   ZoneList<Expression*>* args = expr->arguments();
3337   ASSERT(args->length() == 1);
3338   VisitForStackValue(args->at(0));
3339   __ mov(a0, result_register());  // Stub requires parameter in a0 and on tos.
3340   __ CallStub(&stub);
3341   context()->Plug(v0);
3342 }
3343 
3344 
3345 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3346   // Load the argument on the stack and call the runtime function.
3347   ZoneList<Expression*>* args = expr->arguments();
3348   ASSERT(args->length() == 1);
3349   VisitForStackValue(args->at(0));
3350   __ CallRuntime(Runtime::kMath_sqrt, 1);
3351   context()->Plug(v0);
3352 }
3353 
3354 
3355 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3356   ZoneList<Expression*>* args = expr->arguments();
3357   ASSERT(args->length() >= 2);
3358 
3359   int arg_count = args->length() - 2;  // 2 ~ receiver and function.
3360   for (int i = 0; i < arg_count + 1; i++) {
3361     VisitForStackValue(args->at(i));
3362   }
3363   VisitForAccumulatorValue(args->last());  // Function.
3364 
3365   // Check for proxy.
3366   Label proxy, done;
3367   __ GetObjectType(v0, a1, a1);
3368   __ Branch(&proxy, eq, a1, Operand(JS_FUNCTION_PROXY_TYPE));
3369 
3370   // InvokeFunction requires the function in a1. Move it in there.
3371   __ mov(a1, result_register());
3372   ParameterCount count(arg_count);
3373   __ InvokeFunction(a1, count, CALL_FUNCTION,
3374                     NullCallWrapper(), CALL_AS_METHOD);
3375   __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3376   __ jmp(&done);
3377 
3378   __ bind(&proxy);
3379   __ push(v0);
3380   __ CallRuntime(Runtime::kCall, args->length());
3381   __ bind(&done);
3382 
3383   context()->Plug(v0);
3384 }
3385 
3386 
3387 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3388   RegExpConstructResultStub stub;
3389   ZoneList<Expression*>* args = expr->arguments();
3390   ASSERT(args->length() == 3);
3391   VisitForStackValue(args->at(0));
3392   VisitForStackValue(args->at(1));
3393   VisitForStackValue(args->at(2));
3394   __ CallStub(&stub);
3395   context()->Plug(v0);
3396 }
3397 
3398 
3399 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3400   ZoneList<Expression*>* args = expr->arguments();
3401   ASSERT_EQ(2, args->length());
3402 
3403   ASSERT_NE(NULL, args->at(0)->AsLiteral());
3404   int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
3405 
3406   Handle<FixedArray> jsfunction_result_caches(
3407       isolate()->global_context()->jsfunction_result_caches());
3408   if (jsfunction_result_caches->length() <= cache_id) {
3409     __ Abort("Attempt to use undefined cache.");
3410     __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3411     context()->Plug(v0);
3412     return;
3413   }
3414 
3415   VisitForAccumulatorValue(args->at(1));
3416 
3417   Register key = v0;
3418   Register cache = a1;
3419   __ lw(cache, ContextOperand(cp, Context::GLOBAL_INDEX));
3420   __ lw(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset));
3421   __ lw(cache,
3422          ContextOperand(
3423              cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3424   __ lw(cache,
3425          FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3426 
3427 
3428   Label done, not_found;
3429   STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3430   __ lw(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3431   // a2 now holds finger offset as a smi.
3432   __ Addu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3433   // a3 now points to the start of fixed array elements.
3434   __ sll(at, a2, kPointerSizeLog2 - kSmiTagSize);
3435   __ addu(a3, a3, at);
3436   // a3 now points to key of indexed element of cache.
3437   __ lw(a2, MemOperand(a3));
3438   __ Branch(&not_found, ne, key, Operand(a2));
3439 
3440   __ lw(v0, MemOperand(a3, kPointerSize));
3441   __ Branch(&done);
3442 
3443   __ bind(&not_found);
3444   // Call runtime to perform the lookup.
3445   __ Push(cache, key);
3446   __ CallRuntime(Runtime::kGetFromCache, 2);
3447 
3448   __ bind(&done);
3449   context()->Plug(v0);
3450 }
3451 
3452 
3453 void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
3454   ZoneList<Expression*>* args = expr->arguments();
3455   ASSERT_EQ(2, args->length());
3456 
3457   Register right = v0;
3458   Register left = a1;
3459   Register tmp = a2;
3460   Register tmp2 = a3;
3461 
3462   VisitForStackValue(args->at(0));
3463   VisitForAccumulatorValue(args->at(1));  // Result (right) in v0.
3464   __ pop(left);
3465 
3466   Label done, fail, ok;
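  // Two regexps are considered equivalent here if they are the same object, or
  // if both are JSRegExps that share the same map and the same data array
  // (which holds the pattern and flags).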
3467   __ Branch(&ok, eq, left, Operand(right));
3468   // Fail if either is a non-HeapObject.
3469   __ And(tmp, left, Operand(right));
3470   __ JumpIfSmi(tmp, &fail);
3471   __ lw(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
3472   __ lbu(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
3473   __ Branch(&fail, ne, tmp2, Operand(JS_REGEXP_TYPE));
3474   __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3475   __ Branch(&fail, ne, tmp, Operand(tmp2));
3476   __ lw(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
3477   __ lw(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
3478   __ Branch(&ok, eq, tmp, Operand(tmp2));
3479   __ bind(&fail);
3480   __ LoadRoot(v0, Heap::kFalseValueRootIndex);
3481   __ jmp(&done);
3482   __ bind(&ok);
3483   __ LoadRoot(v0, Heap::kTrueValueRootIndex);
3484   __ bind(&done);
3485 
3486   context()->Plug(v0);
3487 }
3488 
3489 
3490 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3491   ZoneList<Expression*>* args = expr->arguments();
3492   VisitForAccumulatorValue(args->at(0));
3493 
3494   Label materialize_true, materialize_false;
3495   Label* if_true = NULL;
3496   Label* if_false = NULL;
3497   Label* fall_through = NULL;
3498   context()->PrepareTest(&materialize_true, &materialize_false,
3499                          &if_true, &if_false, &fall_through);
3500 
3501   __ lw(a0, FieldMemOperand(v0, String::kHashFieldOffset));
3502   __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
3503 
3504   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3505   Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
3506 
3507   context()->Plug(if_true, if_false);
3508 }
3509 
3510 
3511 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3512   ZoneList<Expression*>* args = expr->arguments();
3513   ASSERT(args->length() == 1);
3514   VisitForAccumulatorValue(args->at(0));
3515 
3516   if (FLAG_debug_code) {
3517     __ AbortIfNotString(v0);
3518   }
3519 
3520   __ lw(v0, FieldMemOperand(v0, String::kHashFieldOffset));
3521   __ IndexFromHash(v0, v0);
3522 
3523   context()->Plug(v0);
3524 }
3525 
3526 
3527 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3528   Label bailout, done, one_char_separator, long_separator,
3529       non_trivial_array, not_size_one_array, loop,
3530       empty_separator_loop, one_char_separator_loop,
3531       one_char_separator_loop_entry, long_separator_loop;
3532   ZoneList<Expression*>* args = expr->arguments();
3533   ASSERT(args->length() == 2);
3534   VisitForStackValue(args->at(1));
3535   VisitForAccumulatorValue(args->at(0));
3536 
3537   // All aliases of the same register have disjoint lifetimes.
3538   Register array = v0;
3539   Register elements = no_reg;  // Will be v0.
3540   Register result = no_reg;  // Will be v0.
3541   Register separator = a1;
3542   Register array_length = a2;
3543   Register result_pos = no_reg;  // Will be a2.
3544   Register string_length = a3;
3545   Register string = t0;
3546   Register element = t1;
3547   Register elements_end = t2;
3548   Register scratch1 = t3;
3549   Register scratch2 = t5;
3550   Register scratch3 = t4;
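  // Overall strategy: validate the array and its elements, accumulate the
  // total length of the result, allocate a single sequential ASCII string,
  // and copy the elements and separators into it. Any unexpected case bails
  // out and produces undefined as the result.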
3551 
3552   // Separator operand is on the stack.
3553   __ pop(separator);
3554 
3555   // Check that the array is a JSArray.
3556   __ JumpIfSmi(array, &bailout);
3557   __ GetObjectType(array, scratch1, scratch2);
3558   __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));
3559 
3560   // Check that the array has fast elements.
3561   __ CheckFastElements(scratch1, scratch2, &bailout);
3562 
3563   // If the array has length zero, return the empty string.
3564   __ lw(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
3565   __ SmiUntag(array_length);
3566   __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
3567   __ LoadRoot(v0, Heap::kEmptyStringRootIndex);
3568   __ Branch(&done);
3569 
3570   __ bind(&non_trivial_array);
3571 
3572   // Get the FixedArray containing array's elements.
3573   elements = array;
3574   __ lw(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3575   array = no_reg;  // End of array's live range.
3576 
3577   // Check that all array elements are sequential ASCII strings, and
3578   // accumulate the sum of their lengths, as a smi-encoded value.
3579   __ mov(string_length, zero_reg);
3580   __ Addu(element,
3581           elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3582   __ sll(elements_end, array_length, kPointerSizeLog2);
3583   __ Addu(elements_end, element, elements_end);
3584   // Loop condition: while (element < elements_end).
3585   // Live values in registers:
3586   //   elements: Fixed array of strings.
3587   //   array_length: Length of the fixed array of strings (not smi)
3588   //   separator: Separator string
3589   //   string_length: Accumulated sum of string lengths (smi).
3590   //   element: Current array element.
3591   //   elements_end: Array end.
3592   if (FLAG_debug_code) {
3593     __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin",
3594         array_length, Operand(zero_reg));
3595   }
3596   __ bind(&loop);
3597   __ lw(string, MemOperand(element));
3598   __ Addu(element, element, kPointerSize);
3599   __ JumpIfSmi(string, &bailout);
3600   __ lw(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3601   __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3602   __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3603   __ lw(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset));
3604   __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
3605   __ BranchOnOverflow(&bailout, scratch3);
3606   __ Branch(&loop, lt, element, Operand(elements_end));
3607 
3608   // If array_length is 1, return elements[0], a string.
3609   __ Branch(&not_size_one_array, ne, array_length, Operand(1));
3610   __ lw(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
3611   __ Branch(&done);
3612 
3613   __ bind(&not_size_one_array);
3614 
3615   // Live values in registers:
3616   //   separator: Separator string
3617   //   array_length: Length of the array.
3618   //   string_length: Sum of string lengths (smi).
3619   //   elements: FixedArray of strings.
3620 
3621   // Check that the separator is a flat ASCII string.
3622   __ JumpIfSmi(separator, &bailout);
3623   __ lw(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
3624   __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3625   __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3626 
3627   // Add (separator length times array_length) - separator length to the
3628   // string_length to get the length of the result string. array_length is not
3629   // smi but the other values are, so the result is a smi.
3630   __ lw(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
3631   __ Subu(string_length, string_length, Operand(scratch1));
3632   __ Mult(array_length, scratch1);
3633   // Check for smi overflow. There is no overflow if the higher 33 bits of the
3634   // 64-bit result are zero.
3635   __ mfhi(scratch2);
3636   __ Branch(&bailout, ne, scratch2, Operand(zero_reg));
3637   __ mflo(scratch2);
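  // The low word must also leave the sign bit clear so that the product can
  // still be added to string_length as a valid non-negative smi.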
3638   __ And(scratch3, scratch2, Operand(0x80000000));
3639   __ Branch(&bailout, ne, scratch3, Operand(zero_reg));
3640   __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
3641   __ BranchOnOverflow(&bailout, scratch3);
3642   __ SmiUntag(string_length);
3643 
3644   // Get first element in the array to free up the elements register to be used
3645   // for the result.
3646   __ Addu(element,
3647           elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3648   result = elements;  // End of live range for elements.
3649   elements = no_reg;
3650   // Live values in registers:
3651   //   element: First array element
3652   //   separator: Separator string
3653   //   string_length: Length of result string (not smi)
3654   //   array_length: Length of the array.
3655   __ AllocateAsciiString(result,
3656                          string_length,
3657                          scratch1,
3658                          scratch2,
3659                          elements_end,
3660                          &bailout);
3661   // Prepare for looping. Set up elements_end to point to the end of the array.
3662   // Set result_pos to the position in the result string where the first
3663   // character will be written.
3664   __ sll(elements_end, array_length, kPointerSizeLog2);
3665   __ Addu(elements_end, element, elements_end);
3666   result_pos = array_length;  // End of live range for array_length.
3667   array_length = no_reg;
3668   __ Addu(result_pos,
3669           result,
3670           Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3671 
3672   // Check the length of the separator.
3673   __ lw(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
3674   __ li(at, Operand(Smi::FromInt(1)));
3675   __ Branch(&one_char_separator, eq, scratch1, Operand(at));
3676   __ Branch(&long_separator, gt, scratch1, Operand(at));
3677 
3678   // Empty separator case.
3679   __ bind(&empty_separator_loop);
3680   // Live values in registers:
3681   //   result_pos: the position to which we are currently copying characters.
3682   //   element: Current array element.
3683   //   elements_end: Array end.
3684 
3685   // Copy next array element to the result.
3686   __ lw(string, MemOperand(element));
3687   __ Addu(element, element, kPointerSize);
3688   __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
3689   __ SmiUntag(string_length);
3690   __ Addu(string, string, SeqAsciiString::kHeaderSize - kHeapObjectTag);
3691   __ CopyBytes(string, result_pos, string_length, scratch1);
3692   // End while (element < elements_end).
3693   __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
3694   ASSERT(result.is(v0));
3695   __ Branch(&done);
3696 
3697   // One-character separator case.
3698   __ bind(&one_char_separator);
3699   // Replace separator with its ASCII character value.
3700   __ lbu(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize));
3701   // Jump into the loop after the code that copies the separator, so the first
3702   // element is not preceded by a separator.
3703   __ jmp(&one_char_separator_loop_entry);
3704 
3705   __ bind(&one_char_separator_loop);
3706   // Live values in registers:
3707   //   result_pos: the position to which we are currently copying characters.
3708   //   element: Current array element.
3709   //   elements_end: Array end.
3710   //   separator: Single separator ASCII char (in lower byte).
3711 
3712   // Copy the separator character to the result.
3713   __ sb(separator, MemOperand(result_pos));
3714   __ Addu(result_pos, result_pos, 1);
3715 
3716   // Copy next array element to the result.
3717   __ bind(&one_char_separator_loop_entry);
3718   __ lw(string, MemOperand(element));
3719   __ Addu(element, element, kPointerSize);
3720   __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
3721   __ SmiUntag(string_length);
3722   __ Addu(string, string, SeqAsciiString::kHeaderSize - kHeapObjectTag);
3723   __ CopyBytes(string, result_pos, string_length, scratch1);
3724   // End while (element < elements_end).
3725   __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
3726   ASSERT(result.is(v0));
3727   __ Branch(&done);
3728 
3729   // Long separator case (separator is more than one character). Entry is at the
3730   // label long_separator below.
3731   __ bind(&long_separator_loop);
3732   // Live values in registers:
3733   //   result_pos: the position to which we are currently copying characters.
3734   //   element: Current array element.
3735   //   elements_end: Array end.
3736   //   separator: Separator string.
3737 
3738   // Copy the separator to the result.
3739   __ lw(string_length, FieldMemOperand(separator, String::kLengthOffset));
3740   __ SmiUntag(string_length);
3741   __ Addu(string,
3742           separator,
3743           Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3744   __ CopyBytes(string, result_pos, string_length, scratch1);
3745 
3746   __ bind(&long_separator);
3747   __ lw(string, MemOperand(element));
3748   __ Addu(element, element, kPointerSize);
3749   __ lw(string_length, FieldMemOperand(string, String::kLengthOffset));
3750   __ SmiUntag(string_length);
3751   __ Addu(string, string, SeqAsciiString::kHeaderSize - kHeapObjectTag);
3752   __ CopyBytes(string, result_pos, string_length, scratch1);
3753   // End while (element < elements_end).
3754   __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
3755   ASSERT(result.is(v0));
3756   __ Branch(&done);
3757 
3758   __ bind(&bailout);
3759   __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3760   __ bind(&done);
3761   context()->Plug(v0);
3762 }
3763 
3764 
3765 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3766   Handle<String> name = expr->name();
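  // Runtime calls whose names start with an underscore are inline runtime
  // functions with dedicated code generators (the Emit* helpers above).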
3767   if (name->length() > 0 && name->Get(0) == '_') {
3768     Comment cmnt(masm_, "[ InlineRuntimeCall");
3769     EmitInlineRuntimeCall(expr);
3770     return;
3771   }
3772 
3773   Comment cmnt(masm_, "[ CallRuntime");
3774   ZoneList<Expression*>* args = expr->arguments();
3775 
3776   if (expr->is_jsruntime()) {
3777     // Prepare for calling JS runtime function.
3778     __ lw(a0, GlobalObjectOperand());
3779     __ lw(a0, FieldMemOperand(a0, GlobalObject::kBuiltinsOffset));
3780     __ push(a0);
3781   }
3782 
3783   // Push the arguments ("left-to-right").
3784   int arg_count = args->length();
3785   for (int i = 0; i < arg_count; i++) {
3786     VisitForStackValue(args->at(i));
3787   }
3788 
3789   if (expr->is_jsruntime()) {
3790     // Call the JS runtime function.
3791     __ li(a2, Operand(expr->name()));
3792     RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3793     Handle<Code> ic =
3794         isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
3795     CallIC(ic, mode, expr->id());
3796     // Restore context register.
3797     __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3798   } else {
3799     // Call the C runtime function.
3800     __ CallRuntime(expr->function(), arg_count);
3801   }
3802   context()->Plug(v0);
3803 }
3804 
3805 
3806 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3807   switch (expr->op()) {
3808     case Token::DELETE: {
3809       Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3810       Property* property = expr->expression()->AsProperty();
3811       VariableProxy* proxy = expr->expression()->AsVariableProxy();
3812 
3813       if (property != NULL) {
3814         VisitForStackValue(property->obj());
3815         VisitForStackValue(property->key());
3816         StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
3817             ? kNonStrictMode : kStrictMode;
3818         __ li(a1, Operand(Smi::FromInt(strict_mode_flag)));
3819         __ push(a1);
3820         __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3821         context()->Plug(v0);
3822       } else if (proxy != NULL) {
3823         Variable* var = proxy->var();
3824         // Delete of an unqualified identifier is disallowed in strict mode
3825         // but "delete this" is allowed.
3826         ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
3827         if (var->IsUnallocated()) {
3828           __ lw(a2, GlobalObjectOperand());
3829           __ li(a1, Operand(var->name()));
3830           __ li(a0, Operand(Smi::FromInt(kNonStrictMode)));
3831           __ Push(a2, a1, a0);
3832           __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3833           context()->Plug(v0);
3834         } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3835           // Result of deleting non-global, non-dynamic variables is false.
3836           // The subexpression does not have side effects.
3837           context()->Plug(var->is_this());
3838         } else {
3839           // Non-global variable.  Call the runtime to try to delete from the
3840           // context where the variable was introduced.
3841           __ push(context_register());
3842           __ li(a2, Operand(var->name()));
3843           __ push(a2);
3844           __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3845           context()->Plug(v0);
3846         }
3847       } else {
3848         // Result of deleting non-property, non-variable reference is true.
3849         // The subexpression may have side effects.
3850         VisitForEffect(expr->expression());
3851         context()->Plug(true);
3852       }
3853       break;
3854     }
3855 
3856     case Token::VOID: {
3857       Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3858       VisitForEffect(expr->expression());
3859       context()->Plug(Heap::kUndefinedValueRootIndex);
3860       break;
3861     }
3862 
3863     case Token::NOT: {
3864       Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3865       if (context()->IsEffect()) {
3866         // Unary NOT has no side effects so it's only necessary to visit the
3867         // subexpression.  Match the optimizing compiler by not branching.
3868         VisitForEffect(expr->expression());
3869       } else if (context()->IsTest()) {
3870         const TestContext* test = TestContext::cast(context());
3871         // The labels are swapped for the recursive call.
3872         VisitForControl(expr->expression(),
3873                         test->false_label(),
3874                         test->true_label(),
3875                         test->fall_through());
3876         context()->Plug(test->true_label(), test->false_label());
3877       } else {
3878         // We handle value contexts explicitly rather than simply visiting
3879         // for control and plugging the control flow into the context,
3880         // because we need to prepare a pair of extra administrative AST ids
3881         // for the optimizing compiler.
3882         ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
3883         Label materialize_true, materialize_false, done;
3884         VisitForControl(expr->expression(),
3885                         &materialize_false,
3886                         &materialize_true,
3887                         &materialize_true);
3888         __ bind(&materialize_true);
3889         PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3890         __ LoadRoot(v0, Heap::kTrueValueRootIndex);
3891         if (context()->IsStackValue()) __ push(v0);
3892         __ jmp(&done);
3893         __ bind(&materialize_false);
3894         PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3895         __ LoadRoot(v0, Heap::kFalseValueRootIndex);
3896         if (context()->IsStackValue()) __ push(v0);
3897         __ bind(&done);
3898       }
3899       break;
3900     }
3901 
3902     case Token::TYPEOF: {
3903       Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3904       { StackValueContext context(this);
3905         VisitForTypeofValue(expr->expression());
3906       }
3907       __ CallRuntime(Runtime::kTypeof, 1);
3908       context()->Plug(v0);
3909       break;
3910     }
3911 
3912     case Token::ADD: {
3913       Comment cmt(masm_, "[ UnaryOperation (ADD)");
3914       VisitForAccumulatorValue(expr->expression());
3915       Label no_conversion;
3916       __ JumpIfSmi(result_register(), &no_conversion);
3917       __ mov(a0, result_register());
3918       ToNumberStub convert_stub;
3919       __ CallStub(&convert_stub);
3920       __ bind(&no_conversion);
3921       context()->Plug(result_register());
3922       break;
3923     }
3924 
3925     case Token::SUB:
3926       EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
3927       break;
3928 
3929     case Token::BIT_NOT:
3930       EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
3931       break;
3932 
3933     default:
3934       UNREACHABLE();
3935   }
3936 }
3937 
3938 
3939 void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
3940                                            const char* comment) {
3941   // TODO(svenpanne): Allowing format strings in Comment would be nice here...
3942   Comment cmt(masm_, comment);
3943   bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
3944   UnaryOverwriteMode overwrite =
3945       can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
3946   UnaryOpStub stub(expr->op(), overwrite);
3947   // UnaryOpStub expects the argument to be in a0.
3948   VisitForAccumulatorValue(expr->expression());
3949   SetSourcePosition(expr->position());
3950   __ mov(a0, result_register());
3951   CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
3952   context()->Plug(v0);
3953 }
3954 
3955 
3956 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3957   Comment cmnt(masm_, "[ CountOperation");
3958   SetSourcePosition(expr->position());
3959 
3960   // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
3961   // as the left-hand side.
3962   if (!expr->expression()->IsValidLeftHandSide()) {
3963     VisitForEffect(expr->expression());
3964     return;
3965   }
3966 
3967   // Expression can only be a property, a global or a (parameter or local)
3968   // slot.
3969   enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
3970   LhsKind assign_type = VARIABLE;
3971   Property* prop = expr->expression()->AsProperty();
3972   // In case of a property we use the uninitialized expression context
3973   // of the key to detect a named property.
3974   if (prop != NULL) {
3975     assign_type =
3976         (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
3977   }
3978 
3979   // Evaluate expression and get value.
3980   if (assign_type == VARIABLE) {
3981     ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
3982     AccumulatorValueContext context(this);
3983     EmitVariableLoad(expr->expression()->AsVariableProxy());
3984   } else {
3985     // Reserve space for result of postfix operation.
3986     if (expr->is_postfix() && !context()->IsEffect()) {
3987       __ li(at, Operand(Smi::FromInt(0)));
3988       __ push(at);
3989     }
3990     if (assign_type == NAMED_PROPERTY) {
3991       // Put the object both on the stack and in the accumulator.
3992       VisitForAccumulatorValue(prop->obj());
3993       __ push(v0);
3994       EmitNamedPropertyLoad(prop);
3995     } else {
3996       VisitForStackValue(prop->obj());
3997       VisitForAccumulatorValue(prop->key());
3998       __ lw(a1, MemOperand(sp, 0));
3999       __ push(v0);
4000       EmitKeyedPropertyLoad(prop);
4001     }
4002   }
4003 
4004   // We need a second deoptimization point after loading the value
4005   // in case evaluating the property load may have a side effect.
4006   if (assign_type == VARIABLE) {
4007     PrepareForBailout(expr->expression(), TOS_REG);
4008   } else {
4009     PrepareForBailoutForId(expr->CountId(), TOS_REG);
4010   }
4011 
4012   // Call ToNumber only if operand is not a smi.
4013   Label no_conversion;
4014   __ JumpIfSmi(v0, &no_conversion);
4015   __ mov(a0, v0);
4016   ToNumberStub convert_stub;
4017   __ CallStub(&convert_stub);
4018   __ bind(&no_conversion);
4019 
4020   // Save result for postfix expressions.
4021   if (expr->is_postfix()) {
4022     if (!context()->IsEffect()) {
4023       // Save the result on the stack. If we have a named or keyed property
4024       // we store the result under the receiver that is currently on top
4025       // of the stack.
4026       switch (assign_type) {
4027         case VARIABLE:
4028           __ push(v0);
4029           break;
4030         case NAMED_PROPERTY:
4031           __ sw(v0, MemOperand(sp, kPointerSize));
4032           break;
4033         case KEYED_PROPERTY:
4034           __ sw(v0, MemOperand(sp, 2 * kPointerSize));
4035           break;
4036       }
4037     }
4038   }
4039   __ mov(a0, result_register());
4040 
4041   // Inline smi case if we are in a loop.
4042   Label stub_call, done;
4043   JumpPatchSite patch_site(masm_);
4044 
4045   int count_value = expr->op() == Token::INC ? 1 : -1;
4046   __ li(a1, Operand(Smi::FromInt(count_value)));
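  // Both increment and decrement are implemented as an addition of +1 or -1,
  // which lets the ADD binary op stub below handle the non-smi cases.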
4047 
4048   if (ShouldInlineSmiCase(expr->op())) {
4049     __ AdduAndCheckForOverflow(v0, a0, a1, t0);
4050     __ BranchOnOverflow(&stub_call, t0);  // Do stub on overflow.
4051 
4052     // We could eliminate this smi check if we split the code at
4053     // the first smi check before calling ToNumber.
4054     patch_site.EmitJumpIfSmi(v0, &done);
4055     __ bind(&stub_call);
4056   }
4057 
4058   // Record position before stub call.
4059   SetSourcePosition(expr->position());
4060 
4061   BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
4062   CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
4063   patch_site.EmitPatchInfo();
4064   __ bind(&done);
4065 
4066   // Store the value returned in v0.
4067   switch (assign_type) {
4068     case VARIABLE:
4069       if (expr->is_postfix()) {
4070         { EffectContext context(this);
4071           EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4072                                  Token::ASSIGN);
4073           PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4074           context.Plug(v0);
4075         }
4076         // For all contexts except the effect context we have the result on
4077         // top of the stack.
4078         if (!context()->IsEffect()) {
4079           context()->PlugTOS();
4080         }
4081       } else {
4082         EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4083                                Token::ASSIGN);
4084         PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4085         context()->Plug(v0);
4086       }
4087       break;
4088     case NAMED_PROPERTY: {
4089       __ mov(a0, result_register());  // Value.
4090       __ li(a2, Operand(prop->key()->AsLiteral()->handle()));  // Name.
4091       __ pop(a1);  // Receiver.
4092       Handle<Code> ic = is_classic_mode()
4093           ? isolate()->builtins()->StoreIC_Initialize()
4094           : isolate()->builtins()->StoreIC_Initialize_Strict();
4095       CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4096       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4097       if (expr->is_postfix()) {
4098         if (!context()->IsEffect()) {
4099           context()->PlugTOS();
4100         }
4101       } else {
4102         context()->Plug(v0);
4103       }
4104       break;
4105     }
4106     case KEYED_PROPERTY: {
4107       __ mov(a0, result_register());  // Value.
4108       __ pop(a1);  // Key.
4109       __ pop(a2);  // Receiver.
4110       Handle<Code> ic = is_classic_mode()
4111           ? isolate()->builtins()->KeyedStoreIC_Initialize()
4112           : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4113       CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4114       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4115       if (expr->is_postfix()) {
4116         if (!context()->IsEffect()) {
4117           context()->PlugTOS();
4118         }
4119       } else {
4120         context()->Plug(v0);
4121       }
4122       break;
4123     }
4124   }
4125 }
4126 
4127 
4128 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4129   ASSERT(!context()->IsEffect());
4130   ASSERT(!context()->IsTest());
4131   VariableProxy* proxy = expr->AsVariableProxy();
4132   if (proxy != NULL && proxy->var()->IsUnallocated()) {
4133     Comment cmnt(masm_, "Global variable");
4134     __ lw(a0, GlobalObjectOperand());
4135     __ li(a2, Operand(proxy->name()));
4136     Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
4137     // Use a regular load, not a contextual load, to avoid a reference
4138     // error.
4139     CallIC(ic);
4140     PrepareForBailout(expr, TOS_REG);
4141     context()->Plug(v0);
4142   } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4143     Label done, slow;
4144 
4145     // Generate code for loading from variables potentially shadowed
4146     // by eval-introduced variables.
4147     EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4148 
4149     __ bind(&slow);
4150     __ li(a0, Operand(proxy->name()));
4151     __ Push(cp, a0);
4152     __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4153     PrepareForBailout(expr, TOS_REG);
4154     __ bind(&done);
4155 
4156     context()->Plug(v0);
4157   } else {
4158     // This expression cannot throw a reference error at the top level.
4159     VisitInDuplicateContext(expr);
4160   }
4161 }
4162 
4163 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4164                                                  Expression* sub_expr,
4165                                                  Handle<String> check) {
4166   Label materialize_true, materialize_false;
4167   Label* if_true = NULL;
4168   Label* if_false = NULL;
4169   Label* fall_through = NULL;
4170   context()->PrepareTest(&materialize_true, &materialize_false,
4171                          &if_true, &if_false, &fall_through);
4172 
4173   { AccumulatorValueContext context(this);
4174     VisitForTypeofValue(sub_expr);
4175   }
4176   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4177 
4178   if (check->Equals(isolate()->heap()->number_symbol())) {
4179     __ JumpIfSmi(v0, if_true);
4180     __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
4181     __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4182     Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4183   } else if (check->Equals(isolate()->heap()->string_symbol())) {
4184     __ JumpIfSmi(v0, if_false);
4185     // Check for undetectable objects => false.
4186     __ GetObjectType(v0, v0, a1);
4187     __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
4188     __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4189     __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4190     Split(eq, a1, Operand(zero_reg),
4191           if_true, if_false, fall_through);
4192   } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
4193     __ LoadRoot(at, Heap::kTrueValueRootIndex);
4194     __ Branch(if_true, eq, v0, Operand(at));
4195     __ LoadRoot(at, Heap::kFalseValueRootIndex);
4196     Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4197   } else if (FLAG_harmony_typeof &&
4198              check->Equals(isolate()->heap()->null_symbol())) {
4199     __ LoadRoot(at, Heap::kNullValueRootIndex);
4200     Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4201   } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
4202     __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4203     __ Branch(if_true, eq, v0, Operand(at));
4204     __ JumpIfSmi(v0, if_false);
4205     // Check for undetectable objects => true.
4206     __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
4207     __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4208     __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4209     Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
4210   } else if (check->Equals(isolate()->heap()->function_symbol())) {
4211     __ JumpIfSmi(v0, if_false);
4212     STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
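    // Only JSFunction and JSFunctionProxy answer "function" to typeof; the
    // assert above guards that these are the only callable types.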
4213     __ GetObjectType(v0, v0, a1);
4214     __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
4215     Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
4216           if_true, if_false, fall_through);
4217   } else if (check->Equals(isolate()->heap()->object_symbol())) {
4218     __ JumpIfSmi(v0, if_false);
4219     if (!FLAG_harmony_typeof) {
4220       __ LoadRoot(at, Heap::kNullValueRootIndex);
4221       __ Branch(if_true, eq, v0, Operand(at));
4222     }
4223     // Check for JS objects => true.
4224     __ GetObjectType(v0, v0, a1);
4225     __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
4226     __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
4227     __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
4228     // Check for undetectable objects => false.
4229     __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4230     __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4231     Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
4232   } else {
4233     if (if_false != fall_through) __ jmp(if_false);
4234   }
4235   context()->Plug(if_true, if_false);
4236 }
4237 
4238 
4239 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4240   Comment cmnt(masm_, "[ CompareOperation");
4241   SetSourcePosition(expr->position());
4242 
4243   // First we try a fast inlined version of the compare when one of
4244   // the operands is a literal.
4245   if (TryLiteralCompare(expr)) return;
4246 
4247   // Always perform the comparison for its control flow.  Pack the result
4248   // into the expression's context after the comparison is performed.
4249   Label materialize_true, materialize_false;
4250   Label* if_true = NULL;
4251   Label* if_false = NULL;
4252   Label* fall_through = NULL;
4253   context()->PrepareTest(&materialize_true, &materialize_false,
4254                          &if_true, &if_false, &fall_through);
4255 
4256   Token::Value op = expr->op();
4257   VisitForStackValue(expr->left());
4258   switch (op) {
4259     case Token::IN:
4260       VisitForStackValue(expr->right());
4261       __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4262       PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4263       __ LoadRoot(t0, Heap::kTrueValueRootIndex);
4264       Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
4265       break;
4266 
4267     case Token::INSTANCEOF: {
4268       VisitForStackValue(expr->right());
4269       InstanceofStub stub(InstanceofStub::kNoFlags);
4270       __ CallStub(&stub);
4271       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4272       // The stub returns 0 for true.
4273       Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
4274       break;
4275     }
4276 
4277     default: {
4278       VisitForAccumulatorValue(expr->right());
4279       Condition cc = eq;
4280       switch (op) {
4281         case Token::EQ_STRICT:
4282         case Token::EQ:
4283           cc = eq;
4284           break;
4285         case Token::LT:
4286           cc = lt;
4287           break;
4288         case Token::GT:
4289           cc = gt;
4290           break;
4291         case Token::LTE:
4292           cc = le;
4293           break;
4294         case Token::GTE:
4295           cc = ge;
4296           break;
4297         case Token::IN:
4298         case Token::INSTANCEOF:
4299         default:
4300           UNREACHABLE();
4301       }
4302       __ mov(a0, result_register());
4303       __ pop(a1);
4304 
4305       bool inline_smi_code = ShouldInlineSmiCase(op);
4306       JumpPatchSite patch_site(masm_);
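      // The patch site marks the inlined smi check so it can later be patched
      // to take or bypass the fast path once the compare IC has type feedback.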
4307       if (inline_smi_code) {
4308         Label slow_case;
4309         __ Or(a2, a0, Operand(a1));
4310         patch_site.EmitJumpIfNotSmi(a2, &slow_case);
4311         Split(cc, a1, Operand(a0), if_true, if_false, NULL);
4312         __ bind(&slow_case);
4313       }
4314       // Record position and call the compare IC.
4315       SetSourcePosition(expr->position());
4316       Handle<Code> ic = CompareIC::GetUninitialized(op);
4317       CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4318       patch_site.EmitPatchInfo();
4319       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4320       Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
4321     }
4322   }
4323 
4324   // Convert the result of the comparison into one expected for this
4325   // expression's context.
4326   context()->Plug(if_true, if_false);
4327 }
4328 
4329 
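// Compares sub_expr against null or undefined.  Strict equality matches only
// the requested nil value; non-strict equality also matches the other nil
// value and undetectable objects.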
4330 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4331                                               Expression* sub_expr,
4332                                               NilValue nil) {
4333   Label materialize_true, materialize_false;
4334   Label* if_true = NULL;
4335   Label* if_false = NULL;
4336   Label* fall_through = NULL;
4337   context()->PrepareTest(&materialize_true, &materialize_false,
4338                          &if_true, &if_false, &fall_through);
4339 
4340   VisitForAccumulatorValue(sub_expr);
4341   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4342   Heap::RootListIndex nil_value = nil == kNullValue ?
4343       Heap::kNullValueRootIndex :
4344       Heap::kUndefinedValueRootIndex;
4345   __ mov(a0, result_register());
4346   __ LoadRoot(a1, nil_value);
4347   if (expr->op() == Token::EQ_STRICT) {
4348     Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
4349   } else {
4350     Heap::RootListIndex other_nil_value = nil == kNullValue ?
4351         Heap::kUndefinedValueRootIndex :
4352         Heap::kNullValueRootIndex;
4353     __ Branch(if_true, eq, a0, Operand(a1));
4354     __ LoadRoot(a1, other_nil_value);
4355     __ Branch(if_true, eq, a0, Operand(a1));
4356     __ JumpIfSmi(a0, if_false);
4357     // The value may be an undetectable object, which also compares equal.
4358     __ lw(a1, FieldMemOperand(a0, HeapObject::kMapOffset));
4359     __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
4360     __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4361     Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
4362   }
4363   context()->Plug(if_true, if_false);
4364 }
4365 
4366 
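// Loads the JSFunction of the current frame (the closure being executed)
// into the result register.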
4367 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4368   __ lw(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4369   context()->Plug(v0);
4370 }
4371 
4372 
4373 Register FullCodeGenerator::result_register() {
4374   return v0;
4375 }
4376 
4377 
4378 Register FullCodeGenerator::context_register() {
4379   return cp;
4380 }
4381 
4382 
4383 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4384   ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4385   __ sw(value, MemOperand(fp, frame_offset));
4386 }
4387 
4388 
4389 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4390   __ lw(dst, ContextOperand(cp, context_index));
4391 }
4392 
4393 
4394 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4395   Scope* declaration_scope = scope()->DeclarationScope();
4396   if (declaration_scope->is_global_scope()) {
4397     // Contexts nested in the global context have a canonical empty function
4398     // as their closure, not the anonymous closure containing the global
4399     // code.  Pass a smi sentinel and let the runtime look up the empty
4400     // function.
4401     __ li(at, Operand(Smi::FromInt(0)));
4402   } else if (declaration_scope->is_eval_scope()) {
4403     // Contexts created by a call to eval have the same closure as the
4404     // context calling eval, not the anonymous closure containing the eval
4405     // code.  Fetch it from the context.
4406     __ lw(at, ContextOperand(cp, Context::CLOSURE_INDEX));
4407   } else {
4408     ASSERT(declaration_scope->is_function_scope());
4409     __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4410   }
4411   __ push(at);
4412 }
4413 
4414 
4415 // ----------------------------------------------------------------------------
4416 // Non-local control flow support.
4417 
4418 void FullCodeGenerator::EnterFinallyBlock() {
4419   ASSERT(!result_register().is(a1));
4420   // Store result register while executing finally block.
4421   __ push(result_register());
4422   // Cook return address in link register to stack (smi encoded Code* delta).
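  // The delta is pushed as a smi so the slot holds a tagged value rather than
  // a raw code address while the finally block runs.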
4423   __ Subu(a1, ra, Operand(masm_->CodeObject()));
4424   ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
4425   STATIC_ASSERT(0 == kSmiTag);
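  // With a zero tag and a total shift of one bit (asserted above), adding the
  // delta to itself is equivalent to shifting left by one, i.e. smi encoding.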
4426   __ Addu(a1, a1, Operand(a1));  // Convert to smi.
4427   __ push(a1);
4428 }
4429 
4430 
4431 void FullCodeGenerator::ExitFinallyBlock() {
4432   ASSERT(!result_register().is(a1));
4433   // Restore result register from stack.
4434   __ pop(a1);
4435   // Uncook return address and return.
4436   __ pop(result_register());
4437   ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
4438   __ sra(a1, a1, 1);  // Un-smi-tag value.
4439   __ Addu(at, a1, Operand(masm_->CodeObject()));
4440   __ Jump(at);
4441 }
4442 
4443 
4444 #undef __
4445 
4446 #define __ ACCESS_MASM(masm())
4447 
4448 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4449     int* stack_depth,
4450     int* context_length) {
4451   // The macros used here must preserve the result register.
4452 
4453   // Because the handler block contains the context of the finally
4454   // code, we can restore it directly from there for the finally code
4455   // rather than iteratively unwinding contexts via their previous
4456   // links.
4457   __ Drop(*stack_depth);  // Down to the handler block.
4458   if (*context_length > 0) {
4459     // Restore the context to its dedicated register and the stack.
4460     __ lw(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
4461     __ sw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4462   }
4463   __ PopTryHandler();
4464   __ Call(finally_entry_);
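  // The finally code returns to the instruction after this call; unwinding
  // then continues with the enclosing statement (previous_).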
4465 
4466   *stack_depth = 0;
4467   *context_length = 0;
4468   return previous_;
4469 }
4470 
4471 
4472 #undef __
4473 
4474 } }  // namespace v8::internal
4475 
4476 #endif  // V8_TARGET_ARCH_MIPS
4477