1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #if V8_TARGET_ARCH_PPC
6 
7 #include "src/ast/scopes.h"
8 #include "src/code-factory.h"
9 #include "src/code-stubs.h"
10 #include "src/codegen.h"
11 #include "src/debug/debug.h"
12 #include "src/full-codegen/full-codegen.h"
13 #include "src/ic/ic.h"
14 #include "src/parsing/parser.h"
15 
16 #include "src/ppc/code-stubs-ppc.h"
17 #include "src/ppc/macro-assembler-ppc.h"
18 
19 namespace v8 {
20 namespace internal {
21 
22 #define __ ACCESS_MASM(masm_)
23 
24 // A patch site is a location in the code that can be patched. This
25 // class has a number of methods to emit the patchable code and the
26 // method EmitPatchInfo to record a marker back to the patchable code. This
27 // marker is a cmpi rx, #yyy instruction, where x * 0x0000ffff + yyy (the raw
28 // 16-bit immediate value) is the delta from the pc to the first instruction of
29 // the patchable code.
30 // See PatchInlinedSmiCode in ic-ppc.cc for the code that patches it.
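// Illustrative example (assuming kOff16Mask == 0xffff): a delta of 70000
// instructions would be recorded as "cmpi r1, #4465", since
// 70000 / 0xffff == 1 and 70000 % 0xffff == 4465; the patcher then recovers
// 1 * 0xffff + 4465 == 70000.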
31 class JumpPatchSite BASE_EMBEDDED {
32  public:
33   explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
34 #ifdef DEBUG
35     info_emitted_ = false;
36 #endif
37   }
38 
39   ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); }
40 
41   // When initially emitting this, ensure that a jump is always generated to skip
42   // the inlined smi code.
43   void EmitJumpIfNotSmi(Register reg, Label* target) {
44     DCHECK(!patch_site_.is_bound() && !info_emitted_);
45     Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
46     __ bind(&patch_site_);
47     __ cmp(reg, reg, cr0);
48     __ beq(target, cr0);  // Always taken before patched.
49   }
50 
51   // When initially emitting this, ensure that a jump is never generated to skip
52   // the inlined smi code.
53   void EmitJumpIfSmi(Register reg, Label* target) {
54     Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
55     DCHECK(!patch_site_.is_bound() && !info_emitted_);
56     __ bind(&patch_site_);
57     __ cmp(reg, reg, cr0);
58     __ bne(target, cr0);  // Never taken before patched.
59   }
60 
61   void EmitPatchInfo() {
62     if (patch_site_.is_bound()) {
63       int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
64       Register reg;
65       // I believe this is using reg as the high bits of the offset
66       reg.set_code(delta_to_patch_site / kOff16Mask);
67       __ cmpi(reg, Operand(delta_to_patch_site % kOff16Mask));
68 #ifdef DEBUG
69       info_emitted_ = true;
70 #endif
71     } else {
72       __ nop();  // Signals no inlined code.
73     }
74   }
75 
76  private:
77   MacroAssembler* masm_;
78   Label patch_site_;
79 #ifdef DEBUG
80   bool info_emitted_;
81 #endif
82 };
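// Typical usage (see VisitSwitchStatement below): EmitJumpIfNotSmi brackets the
// inlined smi comparison, and EmitPatchInfo is emitted right after the CompareIC
// call so that the IC can later locate and patch the inlined sequence.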
83 
84 
85 // Generate code for a JS function.  On entry to the function the receiver
86 // and arguments have been pushed on the stack left to right.  The actual
87 // argument count matches the formal parameter count expected by the
88 // function.
89 //
90 // The live registers are:
91 //   o r4: the JS function object being called (i.e., ourselves)
92 //   o r6: the new target value
93 //   o cp: our context
94 //   o fp: our caller's frame pointer (aka r31)
95 //   o sp: stack pointer
96 //   o lr: return address
97 //   o ip: our own function entry (required by the prologue)
98 //
99 // The function builds a JS frame.  Please see JavaScriptFrameConstants in
100 // frames-ppc.h for its layout.
101 void FullCodeGenerator::Generate() {
102   CompilationInfo* info = info_;
103   profiling_counter_ = isolate()->factory()->NewCell(
104       Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
105   SetFunctionPosition(literal());
106   Comment cmnt(masm_, "[ function compiled by full code generator");
107 
108   ProfileEntryHookStub::MaybeCallEntryHook(masm_);
109 
110 #ifdef DEBUG
111   if (strlen(FLAG_stop_at) > 0 &&
112       info->literal()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
113     __ stop("stop-at");
114   }
115 #endif
116 
117   if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
118     int receiver_offset = info->scope()->num_parameters() * kPointerSize;
119     __ LoadP(r5, MemOperand(sp, receiver_offset), r0);
120     __ AssertNotSmi(r5);
121     __ CompareObjectType(r5, r5, no_reg, FIRST_JS_RECEIVER_TYPE);
122     __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
123   }
124 
125   // Open a frame scope to indicate that there is a frame on the stack.  The
126   // MANUAL indicates that the scope shouldn't actually generate code to set up
127   // the frame (that is done below).
128   FrameScope frame_scope(masm_, StackFrame::MANUAL);
129   int prologue_offset = masm_->pc_offset();
130 
131   if (prologue_offset) {
132     // Prologue logic requires its starting address in ip and the
133     // corresponding offset from the function entry.
134     prologue_offset += Instruction::kInstrSize;
135     __ addi(ip, ip, Operand(prologue_offset));
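    // The Instruction::kInstrSize added above accounts for this addi itself,
    // so ip ends up pointing at the first instruction of the prologue.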
136   }
137   info->set_prologue_offset(prologue_offset);
138   __ Prologue(info->GeneratePreagedPrologue(), ip, prologue_offset);
139 
140   {
141     Comment cmnt(masm_, "[ Allocate locals");
142     int locals_count = info->scope()->num_stack_slots();
143     // Generators allocate locals, if any, in context slots.
144     DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
145     if (locals_count > 0) {
146       if (locals_count >= 128) {
147         Label ok;
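        // Compute what sp would be after allocating all locals and bail out to
        // the runtime if that would drop below the real stack limit.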
148         __ Add(ip, sp, -(locals_count * kPointerSize), r0);
149         __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
150         __ cmpl(ip, r5);
151         __ bc_short(ge, &ok);
152         __ CallRuntime(Runtime::kThrowStackOverflow);
153         __ bind(&ok);
154       }
155       __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
156       int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
157       if (locals_count >= kMaxPushes) {
158         int loop_iterations = locals_count / kMaxPushes;
159         __ mov(r5, Operand(loop_iterations));
160         __ mtctr(r5);
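        // CTR now holds the number of batched push iterations; bdnz below
        // decrements it and branches back until it reaches zero.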
161         Label loop_header;
162         __ bind(&loop_header);
163         // Do pushes.
164         for (int i = 0; i < kMaxPushes; i++) {
165           __ push(ip);
166         }
167         // Continue loop if not done.
168         __ bdnz(&loop_header);
169       }
170       int remaining = locals_count % kMaxPushes;
171       // Emit the remaining pushes.
172       for (int i = 0; i < remaining; i++) {
173         __ push(ip);
174       }
175     }
176   }
177 
178   bool function_in_register_r4 = true;
179 
180   // Possibly allocate a local context.
181   if (info->scope()->num_heap_slots() > 0) {
182     // Argument to NewContext is the function, which is still in r4.
183     Comment cmnt(masm_, "[ Allocate context");
184     bool need_write_barrier = true;
185     int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
186     if (info->scope()->is_script_scope()) {
187       __ push(r4);
188       __ Push(info->scope()->GetScopeInfo(info->isolate()));
189       __ CallRuntime(Runtime::kNewScriptContext);
190       PrepareForBailoutForId(BailoutId::ScriptContext(), TOS_REG);
191       // The new target value is not used, so clobbering is safe.
192       DCHECK_NULL(info->scope()->new_target_var());
193     } else {
194       if (info->scope()->new_target_var() != nullptr) {
195         __ push(r6);  // Preserve new target.
196       }
197       if (slots <= FastNewContextStub::kMaximumSlots) {
198         FastNewContextStub stub(isolate(), slots);
199         __ CallStub(&stub);
200         // Result of FastNewContextStub is always in new space.
201         need_write_barrier = false;
202       } else {
203         __ push(r4);
204         __ CallRuntime(Runtime::kNewFunctionContext);
205       }
206       if (info->scope()->new_target_var() != nullptr) {
207         __ pop(r6);  // Restore new target.
208       }
209     }
210     function_in_register_r4 = false;
211     // Context is returned in r3.  It replaces the context passed to us.
212     // It's saved in the stack and kept live in cp.
213     __ mr(cp, r3);
214     __ StoreP(r3, MemOperand(fp, StandardFrameConstants::kContextOffset));
215     // Copy any necessary parameters into the context.
216     int num_parameters = info->scope()->num_parameters();
217     int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
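    // Index -1 denotes the receiver ('this'); indices 0..num_parameters-1 are
    // the formal parameters.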
218     for (int i = first_parameter; i < num_parameters; i++) {
219       Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
220       if (var->IsContextSlot()) {
221         int parameter_offset = StandardFrameConstants::kCallerSPOffset +
222                                (num_parameters - 1 - i) * kPointerSize;
223         // Load parameter from stack.
224         __ LoadP(r3, MemOperand(fp, parameter_offset), r0);
225         // Store it in the context.
226         MemOperand target = ContextMemOperand(cp, var->index());
227         __ StoreP(r3, target, r0);
228 
229         // Update the write barrier.
230         if (need_write_barrier) {
231           __ RecordWriteContextSlot(cp, target.offset(), r3, r5,
232                                     kLRHasBeenSaved, kDontSaveFPRegs);
233         } else if (FLAG_debug_code) {
234           Label done;
235           __ JumpIfInNewSpace(cp, r3, &done);
236           __ Abort(kExpectedNewSpaceObject);
237           __ bind(&done);
238         }
239       }
240     }
241   }
242 
243   // The registers holding this function and the new target are both trashed if
244   // we bail out here. But since that can happen only when the new target is not
245   // used and we allocate a context, the value of |function_in_register| is correct.
246   PrepareForBailoutForId(BailoutId::FunctionContext(), NO_REGISTERS);
247 
248   // Possibly set up a local binding to the this function which is used in
249   // derived constructors with super calls.
250   Variable* this_function_var = scope()->this_function_var();
251   if (this_function_var != nullptr) {
252     Comment cmnt(masm_, "[ This function");
253     if (!function_in_register_r4) {
254       __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
255       // The write barrier clobbers the register again; keep it marked as such.
256     }
257     SetVar(this_function_var, r4, r3, r5);
258   }
259 
260   // Possibly set up a local binding to the new target value.
261   Variable* new_target_var = scope()->new_target_var();
262   if (new_target_var != nullptr) {
263     Comment cmnt(masm_, "[ new.target");
264     SetVar(new_target_var, r6, r3, r5);
265   }
266 
267   // Possibly allocate RestParameters
268   int rest_index;
269   Variable* rest_param = scope()->rest_parameter(&rest_index);
270   if (rest_param) {
271     Comment cmnt(masm_, "[ Allocate rest parameter array");
272 
273     int num_parameters = info->scope()->num_parameters();
274     int offset = num_parameters * kPointerSize;
275 
276     __ LoadSmiLiteral(RestParamAccessDescriptor::parameter_count(),
277                       Smi::FromInt(num_parameters));
278     __ addi(RestParamAccessDescriptor::parameter_pointer(), fp,
279             Operand(StandardFrameConstants::kCallerSPOffset + offset));
280     __ LoadSmiLiteral(RestParamAccessDescriptor::rest_parameter_index(),
281                       Smi::FromInt(rest_index));
282     function_in_register_r4 = false;
283 
284     RestParamAccessStub stub(isolate());
285     __ CallStub(&stub);
286 
287     SetVar(rest_param, r3, r4, r5);
288   }
289 
290   Variable* arguments = scope()->arguments();
291   if (arguments != NULL) {
292     // Function uses arguments object.
293     Comment cmnt(masm_, "[ Allocate arguments object");
294     DCHECK(r4.is(ArgumentsAccessNewDescriptor::function()));
295     if (!function_in_register_r4) {
296       // Load this again, if it's used by the local context below.
297       __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
298     }
299     // Receiver is just before the parameters on the caller's stack.
300     int num_parameters = info->scope()->num_parameters();
301     int offset = num_parameters * kPointerSize;
302     __ LoadSmiLiteral(ArgumentsAccessNewDescriptor::parameter_count(),
303                       Smi::FromInt(num_parameters));
304     __ addi(ArgumentsAccessNewDescriptor::parameter_pointer(), fp,
305             Operand(StandardFrameConstants::kCallerSPOffset + offset));
306 
307     // Arguments to ArgumentsAccessStub:
308     //   function, parameter pointer, parameter count.
309     // The stub will rewrite parameter pointer and parameter count if the
310     // previous stack frame was an arguments adapter frame.
311     bool is_unmapped = is_strict(language_mode()) || !has_simple_parameters();
312     ArgumentsAccessStub::Type type = ArgumentsAccessStub::ComputeType(
313         is_unmapped, literal()->has_duplicate_parameters());
314     ArgumentsAccessStub stub(isolate(), type);
315     __ CallStub(&stub);
316 
317     SetVar(arguments, r3, r4, r5);
318   }
319 
320   if (FLAG_trace) {
321     __ CallRuntime(Runtime::kTraceEnter);
322   }
323 
324   // Visit the declarations and body unless there is an illegal
325   // redeclaration.
326   if (scope()->HasIllegalRedeclaration()) {
327     Comment cmnt(masm_, "[ Declarations");
328     VisitForEffect(scope()->GetIllegalRedeclaration());
329 
330   } else {
331     PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
332     {
333       Comment cmnt(masm_, "[ Declarations");
334       VisitDeclarations(scope()->declarations());
335     }
336 
337     // Assert that the declarations do not use ICs. Otherwise the debugger
338     // won't be able to redirect a PC at an IC to the correct IC in newly
339     // recompiled code.
340     DCHECK_EQ(0, ic_total_count_);
341 
342     {
343       Comment cmnt(masm_, "[ Stack check");
344       PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
345       Label ok;
346       __ LoadRoot(ip, Heap::kStackLimitRootIndex);
347       __ cmpl(sp, ip);
348       __ bc_short(ge, &ok);
349       __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
350       __ bind(&ok);
351     }
352 
353     {
354       Comment cmnt(masm_, "[ Body");
355       DCHECK(loop_depth() == 0);
356       VisitStatements(literal()->body());
357       DCHECK(loop_depth() == 0);
358     }
359   }
360 
361   // Always emit a 'return undefined' in case control fell off the end of
362   // the body.
363   {
364     Comment cmnt(masm_, "[ return <undefined>;");
365     __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
366   }
367   EmitReturnSequence();
368 
369   if (HasStackOverflow()) {
370     masm_->AbortConstantPoolBuilding();
371   }
372 }
373 
374 
375 void FullCodeGenerator::ClearAccumulator() {
376   __ LoadSmiLiteral(r3, Smi::FromInt(0));
377 }
378 
379 
380 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
381   __ mov(r5, Operand(profiling_counter_));
382   __ LoadP(r6, FieldMemOperand(r5, Cell::kValueOffset));
383   __ SubSmiLiteral(r6, r6, Smi::FromInt(delta), r0);
384   __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
385 }
386 
387 
388 void FullCodeGenerator::EmitProfilingCounterReset() {
389   int reset_value = FLAG_interrupt_budget;
390   __ mov(r5, Operand(profiling_counter_));
391   __ LoadSmiLiteral(r6, Smi::FromInt(reset_value));
392   __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
393 }
394 
395 
396 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
397                                                 Label* back_edge_target) {
398   Comment cmnt(masm_, "[ Back edge bookkeeping");
399   Label ok;
400 
401   DCHECK(back_edge_target->is_bound());
402   int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) +
403                  kCodeSizeMultiplier / 2;
404   int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
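  // The weight grows with the amount of code generated since the back edge
  // target, clamped to [1, kMaxBackEdgeWeight], so larger loop bodies consume
  // the interrupt budget faster.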
405   EmitProfilingCounterDecrement(weight);
406   {
407     Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
408     Assembler::BlockConstantPoolEntrySharingScope prevent_entry_sharing(masm_);
409     // BackEdgeTable::PatchAt manipulates this sequence.
410     __ cmpi(r6, Operand::Zero());
411     __ bc_short(ge, &ok);
412     __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
413 
414     // Record a mapping of this PC offset to the OSR id.  This is used to find
415     // the AST id from the unoptimized code in order to use it as a key into
416     // the deoptimization input data found in the optimized code.
417     RecordBackEdge(stmt->OsrEntryId());
418   }
419   EmitProfilingCounterReset();
420 
421   __ bind(&ok);
422   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
423   // Record a mapping of the OSR id to this PC.  This is used if the OSR
424   // entry becomes the target of a bailout.  We don't expect it to be, but
425   // we want it to work if it is.
426   PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
427 }
428 
429 
430 void FullCodeGenerator::EmitReturnSequence() {
431   Comment cmnt(masm_, "[ Return sequence");
432   if (return_label_.is_bound()) {
433     __ b(&return_label_);
434   } else {
435     __ bind(&return_label_);
436     if (FLAG_trace) {
437       // Push the return value on the stack as the parameter.
438       // Runtime::TraceExit returns its parameter in r3.
439       __ push(r3);
440       __ CallRuntime(Runtime::kTraceExit);
441     }
442     // Pretend that the exit is a backwards jump to the entry.
443     int weight = 1;
444     if (info_->ShouldSelfOptimize()) {
445       weight = FLAG_interrupt_budget / FLAG_self_opt_count;
446     } else {
447       int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
448       weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
449     }
450     EmitProfilingCounterDecrement(weight);
451     Label ok;
452     __ cmpi(r6, Operand::Zero());
453     __ bge(&ok);
454     __ push(r3);
455     __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
456     __ pop(r3);
457     EmitProfilingCounterReset();
458     __ bind(&ok);
459 
460     // Make sure that the constant pool is not emitted inside of the return
461     // sequence.
462     {
463       Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
464       int32_t arg_count = info_->scope()->num_parameters() + 1;
465       int32_t sp_delta = arg_count * kPointerSize;
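      // sp_delta covers the formal parameters plus the receiver; LeaveFrame
      // below pops them together with the JS frame.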
466       SetReturnPosition(literal());
467       __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta);
468       __ blr();
469     }
470   }
471 }
472 
473 
474 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
475   DCHECK(var->IsStackAllocated() || var->IsContextSlot());
476   codegen()->GetVar(result_register(), var);
477   __ push(result_register());
478 }
479 
480 
481 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {}
482 
483 
484 void FullCodeGenerator::AccumulatorValueContext::Plug(
485     Heap::RootListIndex index) const {
486   __ LoadRoot(result_register(), index);
487 }
488 
489 
490 void FullCodeGenerator::StackValueContext::Plug(
491     Heap::RootListIndex index) const {
492   __ LoadRoot(result_register(), index);
493   __ push(result_register());
494 }
495 
496 
497 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
498   codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
499                                           false_label_);
500   if (index == Heap::kUndefinedValueRootIndex ||
501       index == Heap::kNullValueRootIndex ||
502       index == Heap::kFalseValueRootIndex) {
503     if (false_label_ != fall_through_) __ b(false_label_);
504   } else if (index == Heap::kTrueValueRootIndex) {
505     if (true_label_ != fall_through_) __ b(true_label_);
506   } else {
507     __ LoadRoot(result_register(), index);
508     codegen()->DoTest(this);
509   }
510 }
511 
512 
513 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {}
514 
515 
516 void FullCodeGenerator::AccumulatorValueContext::Plug(
517     Handle<Object> lit) const {
518   __ mov(result_register(), Operand(lit));
519 }
520 
521 
522 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
523   // Immediates cannot be pushed directly.
524   __ mov(result_register(), Operand(lit));
525   __ push(result_register());
526 }
527 
528 
529 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
530   codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
531                                           false_label_);
532   DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
533   if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
534     if (false_label_ != fall_through_) __ b(false_label_);
535   } else if (lit->IsTrue() || lit->IsJSObject()) {
536     if (true_label_ != fall_through_) __ b(true_label_);
537   } else if (lit->IsString()) {
538     if (String::cast(*lit)->length() == 0) {
539       if (false_label_ != fall_through_) __ b(false_label_);
540     } else {
541       if (true_label_ != fall_through_) __ b(true_label_);
542     }
543   } else if (lit->IsSmi()) {
544     if (Smi::cast(*lit)->value() == 0) {
545       if (false_label_ != fall_through_) __ b(false_label_);
546     } else {
547       if (true_label_ != fall_through_) __ b(true_label_);
548     }
549   } else {
550     // For simplicity we always test the accumulator register.
551     __ mov(result_register(), Operand(lit));
552     codegen()->DoTest(this);
553   }
554 }
555 
556 
557 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
558                                                    Register reg) const {
559   DCHECK(count > 0);
560   __ Drop(count);
561 }
562 
563 
564 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
565     int count, Register reg) const {
566   DCHECK(count > 0);
567   __ Drop(count);
568   __ Move(result_register(), reg);
569 }
570 
571 
572 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
573                                                        Register reg) const {
574   DCHECK(count > 0);
575   if (count > 1) __ Drop(count - 1);
576   __ StoreP(reg, MemOperand(sp, 0));
577 }
578 
579 
580 void FullCodeGenerator::TestContext::DropAndPlug(int count,
581                                                  Register reg) const {
582   DCHECK(count > 0);
583   // For simplicity we always test the accumulator register.
584   __ Drop(count);
585   __ Move(result_register(), reg);
586   codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
587   codegen()->DoTest(this);
588 }
589 
590 
591 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
592                                             Label* materialize_false) const {
593   DCHECK(materialize_true == materialize_false);
594   __ bind(materialize_true);
595 }
596 
597 
598 void FullCodeGenerator::AccumulatorValueContext::Plug(
599     Label* materialize_true, Label* materialize_false) const {
600   Label done;
601   __ bind(materialize_true);
602   __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
603   __ b(&done);
604   __ bind(materialize_false);
605   __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
606   __ bind(&done);
607 }
608 
609 
610 void FullCodeGenerator::StackValueContext::Plug(
611     Label* materialize_true, Label* materialize_false) const {
612   Label done;
613   __ bind(materialize_true);
614   __ LoadRoot(ip, Heap::kTrueValueRootIndex);
615   __ b(&done);
616   __ bind(materialize_false);
617   __ LoadRoot(ip, Heap::kFalseValueRootIndex);
618   __ bind(&done);
619   __ push(ip);
620 }
621 
622 
623 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
624                                           Label* materialize_false) const {
625   DCHECK(materialize_true == true_label_);
626   DCHECK(materialize_false == false_label_);
627 }
628 
629 
630 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
631   Heap::RootListIndex value_root_index =
632       flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
633   __ LoadRoot(result_register(), value_root_index);
634 }
635 
636 
637 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
638   Heap::RootListIndex value_root_index =
639       flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
640   __ LoadRoot(ip, value_root_index);
641   __ push(ip);
642 }
643 
644 
645 void FullCodeGenerator::TestContext::Plug(bool flag) const {
646   codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
647                                           false_label_);
648   if (flag) {
649     if (true_label_ != fall_through_) __ b(true_label_);
650   } else {
651     if (false_label_ != fall_through_) __ b(false_label_);
652   }
653 }
654 
655 
656 void FullCodeGenerator::DoTest(Expression* condition, Label* if_true,
657                                Label* if_false, Label* fall_through) {
658   Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
659   CallIC(ic, condition->test_id());
660   __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
661   Split(eq, if_true, if_false, fall_through);
662 }
663 
664 
665 void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false,
666                               Label* fall_through, CRegister cr) {
667   if (if_false == fall_through) {
668     __ b(cond, if_true, cr);
669   } else if (if_true == fall_through) {
670     __ b(NegateCondition(cond), if_false, cr);
671   } else {
672     __ b(cond, if_true, cr);
673     __ b(if_false);
674   }
675 }
676 
677 
678 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
679   DCHECK(var->IsStackAllocated());
680   // Offset is negative because higher indexes are at lower addresses.
681   int offset = -var->index() * kPointerSize;
682   // Adjust by a (parameter or local) base offset.
683   if (var->IsParameter()) {
684     offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
685   } else {
686     offset += JavaScriptFrameConstants::kLocal0Offset;
687   }
688   return MemOperand(fp, offset);
689 }
690 
691 
692 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
693   DCHECK(var->IsContextSlot() || var->IsStackAllocated());
694   if (var->IsContextSlot()) {
695     int context_chain_length = scope()->ContextChainLength(var->scope());
696     __ LoadContext(scratch, context_chain_length);
697     return ContextMemOperand(scratch, var->index());
698   } else {
699     return StackOperand(var);
700   }
701 }
702 
703 
704 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
705   // Use destination as scratch.
706   MemOperand location = VarOperand(var, dest);
707   __ LoadP(dest, location, r0);
708 }
709 
710 
711 void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0,
712                                Register scratch1) {
713   DCHECK(var->IsContextSlot() || var->IsStackAllocated());
714   DCHECK(!scratch0.is(src));
715   DCHECK(!scratch0.is(scratch1));
716   DCHECK(!scratch1.is(src));
717   MemOperand location = VarOperand(var, scratch0);
718   __ StoreP(src, location, r0);
719 
720   // Emit the write barrier code if the location is in the heap.
721   if (var->IsContextSlot()) {
722     __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
723                               kLRHasBeenSaved, kDontSaveFPRegs);
724   }
725 }
726 
727 
728 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
729                                                      bool should_normalize,
730                                                      Label* if_true,
731                                                      Label* if_false) {
732   // Only prepare for bailouts before splits if we're in a test
733   // context. Otherwise, we let the Visit function deal with the
734   // preparation to avoid preparing with the same AST id twice.
735   if (!context()->IsTest()) return;
736 
737   Label skip;
738   if (should_normalize) __ b(&skip);
739   PrepareForBailout(expr, TOS_REG);
740   if (should_normalize) {
741     __ LoadRoot(ip, Heap::kTrueValueRootIndex);
742     __ cmp(r3, ip);
743     Split(eq, if_true, if_false, NULL);
744     __ bind(&skip);
745   }
746 }
747 
748 
749 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
750   // The variable in the declaration always resides in the current function
751   // context.
752   DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
753   if (generate_debug_code_) {
754     // Check that we're not inside a with or catch context.
755     __ LoadP(r4, FieldMemOperand(cp, HeapObject::kMapOffset));
756     __ CompareRoot(r4, Heap::kWithContextMapRootIndex);
757     __ Check(ne, kDeclarationInWithContext);
758     __ CompareRoot(r4, Heap::kCatchContextMapRootIndex);
759     __ Check(ne, kDeclarationInCatchContext);
760   }
761 }
762 
763 
764 void FullCodeGenerator::VisitVariableDeclaration(
765     VariableDeclaration* declaration) {
766   // If it was not possible to allocate the variable at compile time, we
767   // need to "declare" it at runtime to make sure it actually exists in the
768   // local context.
769   VariableProxy* proxy = declaration->proxy();
770   VariableMode mode = declaration->mode();
771   Variable* variable = proxy->var();
772   bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
773   switch (variable->location()) {
774     case VariableLocation::GLOBAL:
775     case VariableLocation::UNALLOCATED:
776       globals_->Add(variable->name(), zone());
777       globals_->Add(variable->binding_needs_init()
778                         ? isolate()->factory()->the_hole_value()
779                         : isolate()->factory()->undefined_value(),
780                     zone());
781       break;
782 
783     case VariableLocation::PARAMETER:
784     case VariableLocation::LOCAL:
785       if (hole_init) {
786         Comment cmnt(masm_, "[ VariableDeclaration");
787         __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
788         __ StoreP(ip, StackOperand(variable));
789       }
790       break;
791 
792     case VariableLocation::CONTEXT:
793       if (hole_init) {
794         Comment cmnt(masm_, "[ VariableDeclaration");
795         EmitDebugCheckDeclarationContext(variable);
796         __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
797         __ StoreP(ip, ContextMemOperand(cp, variable->index()), r0);
798         // No write barrier since the_hole_value is in old space.
799         PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
800       }
801       break;
802 
803     case VariableLocation::LOOKUP: {
804       Comment cmnt(masm_, "[ VariableDeclaration");
805       __ mov(r5, Operand(variable->name()));
806       // Declaration nodes are always introduced in one of four modes.
807       DCHECK(IsDeclaredVariableMode(mode));
808       // Push initial value, if any.
809       // Note: For variables we must not push an initial value (such as
810       // 'undefined') because we may have a (legal) redeclaration and we
811       // must not destroy the current value.
812       if (hole_init) {
813         __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
814       } else {
815         __ LoadSmiLiteral(r3, Smi::FromInt(0));  // Indicates no initial value.
816       }
817       __ Push(r5, r3);
818       __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
819       __ CallRuntime(Runtime::kDeclareLookupSlot);
820       break;
821     }
822   }
823 }
824 
825 
826 void FullCodeGenerator::VisitFunctionDeclaration(
827     FunctionDeclaration* declaration) {
828   VariableProxy* proxy = declaration->proxy();
829   Variable* variable = proxy->var();
830   switch (variable->location()) {
831     case VariableLocation::GLOBAL:
832     case VariableLocation::UNALLOCATED: {
833       globals_->Add(variable->name(), zone());
834       Handle<SharedFunctionInfo> function =
835           Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
836       // Check for stack-overflow exception.
837       if (function.is_null()) return SetStackOverflow();
838       globals_->Add(function, zone());
839       break;
840     }
841 
842     case VariableLocation::PARAMETER:
843     case VariableLocation::LOCAL: {
844       Comment cmnt(masm_, "[ FunctionDeclaration");
845       VisitForAccumulatorValue(declaration->fun());
846       __ StoreP(result_register(), StackOperand(variable));
847       break;
848     }
849 
850     case VariableLocation::CONTEXT: {
851       Comment cmnt(masm_, "[ FunctionDeclaration");
852       EmitDebugCheckDeclarationContext(variable);
853       VisitForAccumulatorValue(declaration->fun());
854       __ StoreP(result_register(), ContextMemOperand(cp, variable->index()),
855                 r0);
856       int offset = Context::SlotOffset(variable->index());
857       // We know that we have written a function, which is not a smi.
858       __ RecordWriteContextSlot(cp, offset, result_register(), r5,
859                                 kLRHasBeenSaved, kDontSaveFPRegs,
860                                 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
861       PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
862       break;
863     }
864 
865     case VariableLocation::LOOKUP: {
866       Comment cmnt(masm_, "[ FunctionDeclaration");
867       __ mov(r5, Operand(variable->name()));
868       __ Push(r5);
869       // Push initial value for function declaration.
870       VisitForStackValue(declaration->fun());
871       __ Push(Smi::FromInt(variable->DeclarationPropertyAttributes()));
872       __ CallRuntime(Runtime::kDeclareLookupSlot);
873       break;
874     }
875   }
876 }
877 
878 
879 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
880   // Call the runtime to declare the globals.
881   __ mov(r4, Operand(pairs));
882   __ LoadSmiLiteral(r3, Smi::FromInt(DeclareGlobalsFlags()));
883   __ Push(r4, r3);
884   __ CallRuntime(Runtime::kDeclareGlobals);
885   // Return value is ignored.
886 }
887 
888 
889 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
890   // Call the runtime to declare the modules.
891   __ Push(descriptions);
892   __ CallRuntime(Runtime::kDeclareModules);
893   // Return value is ignored.
894 }
895 
896 
897 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
898   Comment cmnt(masm_, "[ SwitchStatement");
899   Breakable nested_statement(this, stmt);
900   SetStatementPosition(stmt);
901 
902   // Keep the switch value on the stack until a case matches.
903   VisitForStackValue(stmt->tag());
904   PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
905 
906   ZoneList<CaseClause*>* clauses = stmt->cases();
907   CaseClause* default_clause = NULL;  // Can occur anywhere in the list.
908 
909   Label next_test;  // Recycled for each test.
910   // Compile all the tests with branches to their bodies.
911   for (int i = 0; i < clauses->length(); i++) {
912     CaseClause* clause = clauses->at(i);
913     clause->body_target()->Unuse();
914 
915     // The default is not a test, but remember it as final fall through.
916     if (clause->is_default()) {
917       default_clause = clause;
918       continue;
919     }
920 
921     Comment cmnt(masm_, "[ Case comparison");
922     __ bind(&next_test);
923     next_test.Unuse();
924 
925     // Compile the label expression.
926     VisitForAccumulatorValue(clause->label());
927 
928     // Perform the comparison as if via '==='.
929     __ LoadP(r4, MemOperand(sp, 0));  // Switch value.
930     bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
931     JumpPatchSite patch_site(masm_);
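    // Fast path: compare the operands directly while both are smis.  The
    // CompareIC can later patch this inlined sequence via the marker emitted by
    // EmitPatchInfo below (see PatchInlinedSmiCode in ic-ppc.cc).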
932     if (inline_smi_code) {
933       Label slow_case;
934       __ orx(r5, r4, r3);
935       patch_site.EmitJumpIfNotSmi(r5, &slow_case);
936 
937       __ cmp(r4, r3);
938       __ bne(&next_test);
939       __ Drop(1);  // Switch value is no longer needed.
940       __ b(clause->body_target());
941       __ bind(&slow_case);
942     }
943 
944     // Record position before stub call for type feedback.
945     SetExpressionPosition(clause);
946     Handle<Code> ic = CodeFactory::CompareIC(isolate(), Token::EQ_STRICT,
947                                              strength(language_mode())).code();
948     CallIC(ic, clause->CompareId());
949     patch_site.EmitPatchInfo();
950 
951     Label skip;
952     __ b(&skip);
953     PrepareForBailout(clause, TOS_REG);
954     __ LoadRoot(ip, Heap::kTrueValueRootIndex);
955     __ cmp(r3, ip);
956     __ bne(&next_test);
957     __ Drop(1);
958     __ b(clause->body_target());
959     __ bind(&skip);
960 
961     __ cmpi(r3, Operand::Zero());
962     __ bne(&next_test);
963     __ Drop(1);  // Switch value is no longer needed.
964     __ b(clause->body_target());
965   }
966 
967   // Discard the test value and jump to the default if present, otherwise to
968   // the end of the statement.
969   __ bind(&next_test);
970   __ Drop(1);  // Switch value is no longer needed.
971   if (default_clause == NULL) {
972     __ b(nested_statement.break_label());
973   } else {
974     __ b(default_clause->body_target());
975   }
976 
977   // Compile all the case bodies.
978   for (int i = 0; i < clauses->length(); i++) {
979     Comment cmnt(masm_, "[ Case body");
980     CaseClause* clause = clauses->at(i);
981     __ bind(clause->body_target());
982     PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
983     VisitStatements(clause->statements());
984   }
985 
986   __ bind(nested_statement.break_label());
987   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
988 }
989 
990 
991 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
992   Comment cmnt(masm_, "[ ForInStatement");
993   SetStatementPosition(stmt, SKIP_BREAK);
994 
995   FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
996 
997   Label loop, exit;
998   ForIn loop_statement(this, stmt);
999   increment_loop_depth();
1000 
1001   // Get the object to enumerate over. If the object is null or undefined, skip
1002   // over the loop.  See ECMA-262 version 5, section 12.6.4.
1003   SetExpressionAsStatementPosition(stmt->enumerable());
1004   VisitForAccumulatorValue(stmt->enumerable());
1005   __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1006   __ cmp(r3, ip);
1007   __ beq(&exit);
1008   Register null_value = r7;
1009   __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1010   __ cmp(r3, null_value);
1011   __ beq(&exit);
1012 
1013   PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1014 
1015   // Convert the object to a JS object.
1016   Label convert, done_convert;
1017   __ JumpIfSmi(r3, &convert);
1018   __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
1019   __ bge(&done_convert);
1020   __ bind(&convert);
1021   ToObjectStub stub(isolate());
1022   __ CallStub(&stub);
1023   __ bind(&done_convert);
1024   PrepareForBailoutForId(stmt->ToObjectId(), TOS_REG);
1025   __ push(r3);
1026 
1027   // Check for proxies.
1028   Label call_runtime;
1029   __ CompareObjectType(r3, r4, r4, JS_PROXY_TYPE);
1030   __ beq(&call_runtime);
1031 
1032   // Check cache validity in generated code. This is a fast case for
1033   // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1034   // guarantee cache validity, call the runtime system to check cache
1035   // validity or get the property names in a fixed array.
1036   __ CheckEnumCache(null_value, &call_runtime);
1037 
1038   // The enum cache is valid.  Load the map of the object being
1039   // iterated over and use the cache for the iteration.
1040   Label use_cache;
1041   __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
1042   __ b(&use_cache);
1043 
1044   // Get the set of properties to enumerate.
1045   __ bind(&call_runtime);
1046   __ push(r3);  // Duplicate the enumerable object on the stack.
1047   __ CallRuntime(Runtime::kGetPropertyNamesFast);
1048   PrepareForBailoutForId(stmt->EnumId(), TOS_REG);
1049 
1050   // If we got a map from the runtime call, we can do a fast
1051   // modification check. Otherwise, we got a fixed array, and we have
1052   // to do a slow check.
1053   Label fixed_array;
1054   __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
1055   __ LoadRoot(ip, Heap::kMetaMapRootIndex);
1056   __ cmp(r5, ip);
1057   __ bne(&fixed_array);
1058 
1059   // We got a map in register r3. Get the enumeration cache from it.
1060   Label no_descriptors;
1061   __ bind(&use_cache);
1062 
1063   __ EnumLength(r4, r3);
1064   __ CmpSmiLiteral(r4, Smi::FromInt(0), r0);
1065   __ beq(&no_descriptors);
1066 
1067   __ LoadInstanceDescriptors(r3, r5);
1068   __ LoadP(r5, FieldMemOperand(r5, DescriptorArray::kEnumCacheOffset));
1069   __ LoadP(r5,
1070            FieldMemOperand(r5, DescriptorArray::kEnumCacheBridgeCacheOffset));
1071 
1072   // Set up the four remaining stack slots.
1073   __ push(r3);  // Map.
1074   __ LoadSmiLiteral(r3, Smi::FromInt(0));
1075   // Push enumeration cache, enumeration cache length (as smi) and zero.
1076   __ Push(r5, r4, r3);
1077   __ b(&loop);
1078 
1079   __ bind(&no_descriptors);
1080   __ Drop(1);
1081   __ b(&exit);
1082 
1083   // We got a fixed array in register r3. Iterate through that.
1084   __ bind(&fixed_array);
1085 
1086   __ EmitLoadTypeFeedbackVector(r4);
1087   __ mov(r5, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1088   int vector_index = SmiFromSlot(slot)->value();
1089   __ StoreP(
1090       r5, FieldMemOperand(r4, FixedArray::OffsetOfElementAt(vector_index)), r0);
1091   __ LoadSmiLiteral(r4, Smi::FromInt(1));  // Smi(1) indicates slow check
1092   __ Push(r4, r3);  // Smi and array
1093   __ LoadP(r4, FieldMemOperand(r3, FixedArray::kLengthOffset));
1094   __ LoadSmiLiteral(r3, Smi::FromInt(0));
1095   __ Push(r4, r3);  // Fixed array length (as smi) and initial index.
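  // From here on the loop operates on five stack slots (top to bottom): the
  // current index, the array length, the fixed array of keys, the expected map
  // (or Smi(1) in the slow case), and the enumerable object itself.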
1096 
1097   // Generate code for doing the condition check.
1098   __ bind(&loop);
1099   SetExpressionAsStatementPosition(stmt->each());
1100 
1101   // Load the current count to r3, load the length to r4.
1102   __ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
1103   __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
1104   __ cmpl(r3, r4);  // Compare to the array length.
1105   __ bge(loop_statement.break_label());
1106 
1107   // Get the current entry of the array into register r6.
1108   __ LoadP(r5, MemOperand(sp, 2 * kPointerSize));
1109   __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1110   __ SmiToPtrArrayOffset(r6, r3);
1111   __ LoadPX(r6, MemOperand(r6, r5));
1112 
1113   // Get the expected map from the stack or a smi in the
1114   // permanent slow case into register r5.
1115   __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));
1116 
1117   // Check if the expected map still matches that of the enumerable.
1118   // If not, we may have to filter the key.
1119   Label update_each;
1120   __ LoadP(r4, MemOperand(sp, 4 * kPointerSize));
1121   __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
1122   __ cmp(r7, r5);
1123   __ beq(&update_each);
1124 
1125   // Convert the entry to a string or (smi) 0 if it isn't a property
1126   // any more. If the property has been removed while iterating, we
1127   // just skip it.
1128   __ Push(r4, r6);  // Enumerable and current entry.
1129   __ CallRuntime(Runtime::kForInFilter);
1130   PrepareForBailoutForId(stmt->FilterId(), TOS_REG);
1131   __ mr(r6, r3);
1132   __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
1133   __ cmp(r3, r0);
1134   __ beq(loop_statement.continue_label());
1135 
1136   // Update the 'each' property or variable from the possibly filtered
1137   // entry in register r6.
1138   __ bind(&update_each);
1139   __ mr(result_register(), r6);
1140   // Perform the assignment as if via '='.
1141   {
1142     EffectContext context(this);
1143     EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1144     PrepareForBailoutForId(stmt->AssignmentId(), NO_REGISTERS);
1145   }
1146 
1147   // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1148   PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1149   // Generate code for the body of the loop.
1150   Visit(stmt->body());
1151 
1152   // Generate code for going to the next element by incrementing
1153   // the index (smi) stored on top of the stack.
1154   __ bind(loop_statement.continue_label());
1155   __ pop(r3);
1156   __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
1157   __ push(r3);
1158 
1159   EmitBackEdgeBookkeeping(stmt, &loop);
1160   __ b(&loop);
1161 
1162   // Remove the pointers stored on the stack.
1163   __ bind(loop_statement.break_label());
1164   __ Drop(5);
1165 
1166   // Exit and decrement the loop depth.
1167   PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1168   __ bind(&exit);
1169   decrement_loop_depth();
1170 }
1171 
1172 
1173 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1174                                        bool pretenure) {
1175   // Use the fast case closure allocation code that allocates in new
1176   // space for nested functions that don't need literals cloning. If
1177   // we're running with the --always-opt or the --prepare-always-opt
1178   // flag, we need to use the runtime function so that the new function
1179   // we are creating here gets a chance to have its code optimized and
1180   // doesn't just get a copy of the existing unoptimized code.
1181   if (!FLAG_always_opt && !FLAG_prepare_always_opt && !pretenure &&
1182       scope()->is_function_scope() && info->num_literals() == 0) {
1183     FastNewClosureStub stub(isolate(), info->language_mode(), info->kind());
1184     __ mov(r5, Operand(info));
1185     __ CallStub(&stub);
1186   } else {
1187     __ Push(info);
1188     __ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
1189                              : Runtime::kNewClosure);
1190   }
1191   context()->Plug(r3);
1192 }
1193 
1194 
1195 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1196                                           FeedbackVectorSlot slot) {
1197   DCHECK(NeedsHomeObject(initializer));
1198   __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1199   __ mov(StoreDescriptor::NameRegister(),
1200          Operand(isolate()->factory()->home_object_symbol()));
1201   __ LoadP(StoreDescriptor::ValueRegister(),
1202            MemOperand(sp, offset * kPointerSize));
1203   EmitLoadStoreICSlot(slot);
1204   CallStoreIC();
1205 }
1206 
1207 
1208 void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1209                                                      int offset,
1210                                                      FeedbackVectorSlot slot) {
1211   DCHECK(NeedsHomeObject(initializer));
1212   __ Move(StoreDescriptor::ReceiverRegister(), r3);
1213   __ mov(StoreDescriptor::NameRegister(),
1214          Operand(isolate()->factory()->home_object_symbol()));
1215   __ LoadP(StoreDescriptor::ValueRegister(),
1216            MemOperand(sp, offset * kPointerSize));
1217   EmitLoadStoreICSlot(slot);
1218   CallStoreIC();
1219 }
1220 
1221 
1222 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1223                                                       TypeofMode typeof_mode,
1224                                                       Label* slow) {
1225   Register current = cp;
1226   Register next = r4;
1227   Register temp = r5;
1228 
1229   Scope* s = scope();
1230   while (s != NULL) {
1231     if (s->num_heap_slots() > 0) {
1232       if (s->calls_sloppy_eval()) {
1233         // Check that extension is "the hole".
1234         __ LoadP(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1235         __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1236       }
1237       // Load next context in chain.
1238       __ LoadP(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1239       // Walk the rest of the chain without clobbering cp.
1240       current = next;
1241     }
1242     // If no outer scope calls eval, we do not need to check more
1243     // context extensions.
1244     if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1245     s = s->outer_scope();
1246   }
1247 
1248   if (s->is_eval_scope()) {
1249     Label loop, fast;
1250     if (!current.is(next)) {
1251       __ Move(next, current);
1252     }
1253     __ bind(&loop);
1254     // Terminate at native context.
1255     __ LoadP(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1256     __ LoadRoot(ip, Heap::kNativeContextMapRootIndex);
1257     __ cmp(temp, ip);
1258     __ beq(&fast);
1259     // Check that extension is "the hole".
1260     __ LoadP(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
1261     __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1262     // Load next context in chain.
1263     __ LoadP(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
1264     __ b(&loop);
1265     __ bind(&fast);
1266   }
1267 
1268   // All extension objects were empty and it is safe to use a normal global
1269   // load machinery.
1270   EmitGlobalVariableLoad(proxy, typeof_mode);
1271 }
1272 
1273 
1274 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1275                                                                 Label* slow) {
1276   DCHECK(var->IsContextSlot());
1277   Register context = cp;
1278   Register next = r6;
1279   Register temp = r7;
1280 
1281   for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1282     if (s->num_heap_slots() > 0) {
1283       if (s->calls_sloppy_eval()) {
1284         // Check that extension is "the hole".
1285         __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1286         __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1287       }
1288       __ LoadP(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1289       // Walk the rest of the chain without clobbering cp.
1290       context = next;
1291     }
1292   }
1293   // Check that last extension is "the hole".
1294   __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1295   __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1296 
1297   // This function is used only for loads, not stores, so it's safe to
1298   // return a cp-based operand (the write barrier cannot be allowed to
1299   // destroy the cp register).
1300   return ContextMemOperand(context, var->index());
1301 }
1302 
1303 
1304 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1305                                                   TypeofMode typeof_mode,
1306                                                   Label* slow, Label* done) {
1307   // Generate fast-case code for variables that might be shadowed by
1308   // eval-introduced variables.  Eval is used a lot without
1309   // introducing variables.  In those cases, we do not want to
1310   // perform a runtime call for all variables in the scope
1311   // containing the eval.
1312   Variable* var = proxy->var();
1313   if (var->mode() == DYNAMIC_GLOBAL) {
1314     EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1315     __ b(done);
1316   } else if (var->mode() == DYNAMIC_LOCAL) {
1317     Variable* local = var->local_if_not_shadowed();
1318     __ LoadP(r3, ContextSlotOperandCheckExtensions(local, slow));
1319     if (local->mode() == LET || local->mode() == CONST ||
1320         local->mode() == CONST_LEGACY) {
1321       __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
1322       __ bne(done);
1323       if (local->mode() == CONST_LEGACY) {
1324         __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1325       } else {  // LET || CONST
1326         __ mov(r3, Operand(var->name()));
1327         __ push(r3);
1328         __ CallRuntime(Runtime::kThrowReferenceError);
1329       }
1330     }
1331     __ b(done);
1332   }
1333 }
1334 
1335 
1336 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1337                                                TypeofMode typeof_mode) {
1338   Variable* var = proxy->var();
1339   DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1340          (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1341   __ LoadGlobalObject(LoadDescriptor::ReceiverRegister());
1342   __ mov(LoadDescriptor::NameRegister(), Operand(var->name()));
1343   __ mov(LoadDescriptor::SlotRegister(),
1344          Operand(SmiFromSlot(proxy->VariableFeedbackSlot())));
1345   CallLoadIC(typeof_mode);
1346 }
1347 
1348 
1349 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1350                                          TypeofMode typeof_mode) {
1351   // Record position before possible IC call.
1352   SetExpressionPosition(proxy);
1353   PrepareForBailoutForId(proxy->BeforeId(), NO_REGISTERS);
1354   Variable* var = proxy->var();
1355 
1356   // Three cases: global variables, lookup variables, and all other types of
1357   // variables.
1358   switch (var->location()) {
1359     case VariableLocation::GLOBAL:
1360     case VariableLocation::UNALLOCATED: {
1361       Comment cmnt(masm_, "[ Global variable");
1362       EmitGlobalVariableLoad(proxy, typeof_mode);
1363       context()->Plug(r3);
1364       break;
1365     }
1366 
1367     case VariableLocation::PARAMETER:
1368     case VariableLocation::LOCAL:
1369     case VariableLocation::CONTEXT: {
1370       DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1371       Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1372                                                : "[ Stack variable");
1373       if (NeedsHoleCheckForLoad(proxy)) {
1374         Label done;
1375         // Let and const need a read barrier.
1376         GetVar(r3, var);
1377         __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
1378         __ bne(&done);
1379         if (var->mode() == LET || var->mode() == CONST) {
1380           // Throw a reference error when using an uninitialized let/const
1381           // binding in harmony mode.
1382           __ mov(r3, Operand(var->name()));
1383           __ push(r3);
1384           __ CallRuntime(Runtime::kThrowReferenceError);
1385         } else {
1386           // Uninitialized legacy const bindings are unholed.
1387           DCHECK(var->mode() == CONST_LEGACY);
1388           __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1389         }
1390         __ bind(&done);
1391         context()->Plug(r3);
1392         break;
1393       }
1394       context()->Plug(var);
1395       break;
1396     }
1397 
1398     case VariableLocation::LOOKUP: {
1399       Comment cmnt(masm_, "[ Lookup variable");
1400       Label done, slow;
1401       // Generate code for loading from variables potentially shadowed
1402       // by eval-introduced variables.
1403       EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1404       __ bind(&slow);
1405       __ mov(r4, Operand(var->name()));
1406       __ Push(cp, r4);  // Context and name.
1407       Runtime::FunctionId function_id =
1408           typeof_mode == NOT_INSIDE_TYPEOF
1409               ? Runtime::kLoadLookupSlot
1410               : Runtime::kLoadLookupSlotNoReferenceError;
1411       __ CallRuntime(function_id);
1412       __ bind(&done);
1413       context()->Plug(r3);
1414     }
1415   }
1416 }
1417 
1418 
1419 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1420   Comment cmnt(masm_, "[ RegExpLiteral");
1421   __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1422   __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1423   __ mov(r4, Operand(expr->pattern()));
1424   __ LoadSmiLiteral(r3, Smi::FromInt(expr->flags()));
1425   FastCloneRegExpStub stub(isolate());
1426   __ CallStub(&stub);
1427   context()->Plug(r3);
1428 }
1429 
1430 
1431 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1432   Expression* expression = (property == NULL) ? NULL : property->value();
1433   if (expression == NULL) {
1434     __ LoadRoot(r4, Heap::kNullValueRootIndex);
1435     __ push(r4);
1436   } else {
1437     VisitForStackValue(expression);
1438     if (NeedsHomeObject(expression)) {
1439       DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1440              property->kind() == ObjectLiteral::Property::SETTER);
1441       int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1442       EmitSetHomeObject(expression, offset, property->GetSlot());
1443     }
1444   }
1445 }
1446 
1447 
1448 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1449   Comment cmnt(masm_, "[ ObjectLiteral");
1450 
1451   Handle<FixedArray> constant_properties = expr->constant_properties();
1452   __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1453   __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1454   __ mov(r4, Operand(constant_properties));
1455   int flags = expr->ComputeFlags();
1456   __ LoadSmiLiteral(r3, Smi::FromInt(flags));
1457   if (MustCreateObjectLiteralWithRuntime(expr)) {
1458     __ Push(r6, r5, r4, r3);
1459     __ CallRuntime(Runtime::kCreateObjectLiteral);
1460   } else {
1461     FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1462     __ CallStub(&stub);
1463   }
1464   PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1465 
1466   // If result_saved is true the result is on top of the stack.  If
1467   // result_saved is false the result is in r3.
1468   bool result_saved = false;
1469 
1470   AccessorTable accessor_table(zone());
1471   int property_index = 0;
1472   for (; property_index < expr->properties()->length(); property_index++) {
1473     ObjectLiteral::Property* property = expr->properties()->at(property_index);
1474     if (property->is_computed_name()) break;
1475     if (property->IsCompileTimeValue()) continue;
1476 
1477     Literal* key = property->key()->AsLiteral();
1478     Expression* value = property->value();
1479     if (!result_saved) {
1480       __ push(r3);  // Save result on stack
1481       result_saved = true;
1482     }
1483     switch (property->kind()) {
1484       case ObjectLiteral::Property::CONSTANT:
1485         UNREACHABLE();
1486       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1487         DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1488       // Fall through.
1489       case ObjectLiteral::Property::COMPUTED:
1490         // It is safe to use [[Put]] here because the boilerplate already
1491         // contains computed properties with an uninitialized value.
1492         if (key->value()->IsInternalizedString()) {
1493           if (property->emit_store()) {
1494             VisitForAccumulatorValue(value);
1495             DCHECK(StoreDescriptor::ValueRegister().is(r3));
1496             __ mov(StoreDescriptor::NameRegister(), Operand(key->value()));
1497             __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1498             EmitLoadStoreICSlot(property->GetSlot(0));
1499             CallStoreIC();
1500             PrepareForBailoutForId(key->id(), NO_REGISTERS);
1501 
1502             if (NeedsHomeObject(value)) {
1503               EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1504             }
1505           } else {
1506             VisitForEffect(value);
1507           }
1508           break;
1509         }
1510         // Duplicate receiver on stack.
1511         __ LoadP(r3, MemOperand(sp));
1512         __ push(r3);
1513         VisitForStackValue(key);
1514         VisitForStackValue(value);
1515         if (property->emit_store()) {
1516           if (NeedsHomeObject(value)) {
1517             EmitSetHomeObject(value, 2, property->GetSlot());
1518           }
1519           __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY));  // PropertyAttributes
1520           __ push(r3);
1521           __ CallRuntime(Runtime::kSetProperty);
1522         } else {
1523           __ Drop(3);
1524         }
1525         break;
1526       case ObjectLiteral::Property::PROTOTYPE:
1527         // Duplicate receiver on stack.
1528         __ LoadP(r3, MemOperand(sp));
1529         __ push(r3);
1530         VisitForStackValue(value);
1531         DCHECK(property->emit_store());
1532         __ CallRuntime(Runtime::kInternalSetPrototype);
1533         PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1534                                NO_REGISTERS);
1535         break;
1536       case ObjectLiteral::Property::GETTER:
1537         if (property->emit_store()) {
1538           accessor_table.lookup(key)->second->getter = property;
1539         }
1540         break;
1541       case ObjectLiteral::Property::SETTER:
1542         if (property->emit_store()) {
1543           accessor_table.lookup(key)->second->setter = property;
1544         }
1545         break;
1546     }
1547   }
1548 
1549   // Emit code to define accessors, using only a single call to the runtime for
1550   // each pair of corresponding getters and setters.
1551   for (AccessorTable::Iterator it = accessor_table.begin();
1552        it != accessor_table.end(); ++it) {
1553     __ LoadP(r3, MemOperand(sp));  // Duplicate receiver.
1554     __ push(r3);
1555     VisitForStackValue(it->first);
1556     EmitAccessor(it->second->getter);
1557     EmitAccessor(it->second->setter);
1558     __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
1559     __ push(r3);
1560     __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked);
1561   }
1562 
1563   // Object literals have two parts. The "static" part on the left contains no
1564   // computed property names, and so we can compute its map ahead of time; see
1565   // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1566   // starts with the first computed property name, and continues with all
1567   // properties to its right.  All the code from above initializes the static
1568   // component of the object literal, and arranges for the map of the result to
1569   // reflect the static order in which the keys appear. For the dynamic
1570   // properties, we compile them into a series of "SetOwnProperty" runtime
1571   // calls. This will preserve insertion order.
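       // Illustrative example (added): for { a: 1, b: 2, [k]: 3, c: 4 } the
       // properties 'a' and 'b' belong to the static part handled above, while
       // '[k]' and 'c' are defined by the per-property runtime calls below.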
1572   for (; property_index < expr->properties()->length(); property_index++) {
1573     ObjectLiteral::Property* property = expr->properties()->at(property_index);
1574 
1575     Expression* value = property->value();
1576     if (!result_saved) {
1577       __ push(r3);  // Save result on the stack
1578       result_saved = true;
1579     }
1580 
1581     __ LoadP(r3, MemOperand(sp));  // Duplicate receiver.
1582     __ push(r3);
1583 
1584     if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1585       DCHECK(!property->is_computed_name());
1586       VisitForStackValue(value);
1587       DCHECK(property->emit_store());
1588       __ CallRuntime(Runtime::kInternalSetPrototype);
1589       PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1590                              NO_REGISTERS);
1591     } else {
1592       EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1593       VisitForStackValue(value);
1594       if (NeedsHomeObject(value)) {
1595         EmitSetHomeObject(value, 2, property->GetSlot());
1596       }
1597 
1598       switch (property->kind()) {
1599         case ObjectLiteral::Property::CONSTANT:
1600         case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1601         case ObjectLiteral::Property::COMPUTED:
1602           if (property->emit_store()) {
1603             __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
1604             __ push(r3);
1605             __ CallRuntime(Runtime::kDefineDataPropertyUnchecked);
1606           } else {
1607             __ Drop(3);
1608           }
1609           break;
1610 
1611         case ObjectLiteral::Property::PROTOTYPE:
1612           UNREACHABLE();
1613           break;
1614 
1615         case ObjectLiteral::Property::GETTER:
1616           __ mov(r3, Operand(Smi::FromInt(NONE)));
1617           __ push(r3);
1618           __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked);
1619           break;
1620 
1621         case ObjectLiteral::Property::SETTER:
1622           __ mov(r3, Operand(Smi::FromInt(NONE)));
1623           __ push(r3);
1624           __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked);
1625           break;
1626       }
1627     }
1628   }
1629 
1630   if (expr->has_function()) {
1631     DCHECK(result_saved);
1632     __ LoadP(r3, MemOperand(sp));
1633     __ push(r3);
1634     __ CallRuntime(Runtime::kToFastProperties);
1635   }
1636 
1637   if (result_saved) {
1638     context()->PlugTOS();
1639   } else {
1640     context()->Plug(r3);
1641   }
1642 }
1643 
1644 
1645 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1646   Comment cmnt(masm_, "[ ArrayLiteral");
1647 
1648   Handle<FixedArray> constant_elements = expr->constant_elements();
1649   bool has_fast_elements =
1650       IsFastObjectElementsKind(expr->constant_elements_kind());
1651   Handle<FixedArrayBase> constant_elements_values(
1652       FixedArrayBase::cast(constant_elements->get(1)));
1653 
1654   AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1655   if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1656     // If the only customer of allocation sites is transitioning, then
1657     // we can turn it off if we don't have anywhere else to transition to.
1658     allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1659   }
1660 
1661   __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1662   __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1663   __ mov(r4, Operand(constant_elements));
1664   if (MustCreateArrayLiteralWithRuntime(expr)) {
1665     __ LoadSmiLiteral(r3, Smi::FromInt(expr->ComputeFlags()));
1666     __ Push(r6, r5, r4, r3);
1667     __ CallRuntime(Runtime::kCreateArrayLiteral);
1668   } else {
1669     FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1670     __ CallStub(&stub);
1671   }
1672   PrepareForBailoutForId(expr->CreateLiteralId(), TOS_REG);
1673 
1674   bool result_saved = false;  // Is the result saved to the stack?
1675   ZoneList<Expression*>* subexprs = expr->values();
1676   int length = subexprs->length();
1677 
1678   // Emit code to evaluate all the non-constant subexpressions and to store
1679   // them into the newly cloned array.
1680   int array_index = 0;
1681   for (; array_index < length; array_index++) {
1682     Expression* subexpr = subexprs->at(array_index);
1683     if (subexpr->IsSpread()) break;
1684     // If the subexpression is a literal or a simple materialized literal it
1685     // is already set in the cloned array.
1686     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1687 
1688     if (!result_saved) {
1689       __ push(r3);
1690       result_saved = true;
1691     }
1692     VisitForAccumulatorValue(subexpr);
1693 
1694     __ LoadSmiLiteral(StoreDescriptor::NameRegister(),
1695                       Smi::FromInt(array_index));
1696     __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1697     EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
1698     Handle<Code> ic =
1699         CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
1700     CallIC(ic);
1701 
1702     PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1703   }
1704 
1705   // If the array literal contains spread expressions, it has two parts. The
1706   // first part is the "static" array, which has a literal index and is handled
1707   // above. The second part starts at the first spread expression (inclusive),
1708   // and these elements get appended to the array. Note that the number of
1709   // elements an iterable produces is unknown ahead of time.
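       // Illustrative example (added): for [1, 2, ...it, 3] the elements 1 and
       // 2 are stored with keyed store ICs above, while ...it and 3 are
       // appended by the runtime and builtin calls below.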
1710   if (array_index < length && result_saved) {
1711     __ Pop(r3);
1712     result_saved = false;
1713   }
1714   for (; array_index < length; array_index++) {
1715     Expression* subexpr = subexprs->at(array_index);
1716 
1717     __ Push(r3);
1718     if (subexpr->IsSpread()) {
1719       VisitForStackValue(subexpr->AsSpread()->expression());
1720       __ InvokeBuiltin(Context::CONCAT_ITERABLE_TO_ARRAY_BUILTIN_INDEX,
1721                        CALL_FUNCTION);
1722     } else {
1723       VisitForStackValue(subexpr);
1724       __ CallRuntime(Runtime::kAppendElement);
1725     }
1726 
1727     PrepareForBailoutForId(expr->GetIdForElement(array_index), NO_REGISTERS);
1728   }
1729 
1730   if (result_saved) {
1731     context()->PlugTOS();
1732   } else {
1733     context()->Plug(r3);
1734   }
1735 }
1736 
1737 
1738 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1739   DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1740 
1741   Comment cmnt(masm_, "[ Assignment");
1742   SetExpressionPosition(expr, INSERT_BREAK);
1743 
1744   Property* property = expr->target()->AsProperty();
1745   LhsKind assign_type = Property::GetAssignType(property);
1746 
1747   // Evaluate LHS expression.
1748   switch (assign_type) {
1749     case VARIABLE:
1750       // Nothing to do here.
1751       break;
1752     case NAMED_PROPERTY:
1753       if (expr->is_compound()) {
1754         // We need the receiver both on the stack and in the register.
1755         VisitForStackValue(property->obj());
1756         __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1757       } else {
1758         VisitForStackValue(property->obj());
1759       }
1760       break;
1761     case NAMED_SUPER_PROPERTY:
1762       VisitForStackValue(
1763           property->obj()->AsSuperPropertyReference()->this_var());
1764       VisitForAccumulatorValue(
1765           property->obj()->AsSuperPropertyReference()->home_object());
1766       __ Push(result_register());
1767       if (expr->is_compound()) {
1768         const Register scratch = r4;
1769         __ LoadP(scratch, MemOperand(sp, kPointerSize));
1770         __ Push(scratch, result_register());
1771       }
1772       break;
1773     case KEYED_SUPER_PROPERTY: {
1774       const Register scratch = r4;
1775       VisitForStackValue(
1776           property->obj()->AsSuperPropertyReference()->this_var());
1777       VisitForAccumulatorValue(
1778           property->obj()->AsSuperPropertyReference()->home_object());
1779       __ mr(scratch, result_register());
1780       VisitForAccumulatorValue(property->key());
1781       __ Push(scratch, result_register());
1782       if (expr->is_compound()) {
1783         const Register scratch1 = r5;
1784         __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
1785         __ Push(scratch1, scratch, result_register());
1786       }
1787       break;
1788     }
1789     case KEYED_PROPERTY:
1790       if (expr->is_compound()) {
1791         VisitForStackValue(property->obj());
1792         VisitForStackValue(property->key());
1793         __ LoadP(LoadDescriptor::ReceiverRegister(),
1794                  MemOperand(sp, 1 * kPointerSize));
1795         __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1796       } else {
1797         VisitForStackValue(property->obj());
1798         VisitForStackValue(property->key());
1799       }
1800       break;
1801   }
1802 
1803   // For compound assignments we need another deoptimization point after the
1804   // variable/property load.
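       // Illustrative example (added): for 'o.x += 1' the current value of
       // 'o.x' is loaded first, and a bailout point is recorded for that load
       // separately from the one after the binary operation.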
1805   if (expr->is_compound()) {
1806     {
1807       AccumulatorValueContext context(this);
1808       switch (assign_type) {
1809         case VARIABLE:
1810           EmitVariableLoad(expr->target()->AsVariableProxy());
1811           PrepareForBailout(expr->target(), TOS_REG);
1812           break;
1813         case NAMED_PROPERTY:
1814           EmitNamedPropertyLoad(property);
1815           PrepareForBailoutForId(property->LoadId(), TOS_REG);
1816           break;
1817         case NAMED_SUPER_PROPERTY:
1818           EmitNamedSuperPropertyLoad(property);
1819           PrepareForBailoutForId(property->LoadId(), TOS_REG);
1820           break;
1821         case KEYED_SUPER_PROPERTY:
1822           EmitKeyedSuperPropertyLoad(property);
1823           PrepareForBailoutForId(property->LoadId(), TOS_REG);
1824           break;
1825         case KEYED_PROPERTY:
1826           EmitKeyedPropertyLoad(property);
1827           PrepareForBailoutForId(property->LoadId(), TOS_REG);
1828           break;
1829       }
1830     }
1831 
1832     Token::Value op = expr->binary_op();
1833     __ push(r3);  // Left operand goes on the stack.
1834     VisitForAccumulatorValue(expr->value());
1835 
1836     AccumulatorValueContext context(this);
1837     if (ShouldInlineSmiCase(op)) {
1838       EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(),
1839                             expr->value());
1840     } else {
1841       EmitBinaryOp(expr->binary_operation(), op);
1842     }
1843 
1844     // Deoptimization point in case the binary operation may have side effects.
1845     PrepareForBailout(expr->binary_operation(), TOS_REG);
1846   } else {
1847     VisitForAccumulatorValue(expr->value());
1848   }
1849 
1850   SetExpressionPosition(expr);
1851 
1852   // Store the value.
1853   switch (assign_type) {
1854     case VARIABLE:
1855       EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1856                              expr->op(), expr->AssignmentSlot());
1857       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1858       context()->Plug(r3);
1859       break;
1860     case NAMED_PROPERTY:
1861       EmitNamedPropertyAssignment(expr);
1862       break;
1863     case NAMED_SUPER_PROPERTY:
1864       EmitNamedSuperPropertyStore(property);
1865       context()->Plug(r3);
1866       break;
1867     case KEYED_SUPER_PROPERTY:
1868       EmitKeyedSuperPropertyStore(property);
1869       context()->Plug(r3);
1870       break;
1871     case KEYED_PROPERTY:
1872       EmitKeyedPropertyAssignment(expr);
1873       break;
1874   }
1875 }
1876 
1877 
1878 void FullCodeGenerator::VisitYield(Yield* expr) {
1879   Comment cmnt(masm_, "[ Yield");
1880   SetExpressionPosition(expr);
1881 
1882   // Evaluate yielded value first; the initial iterator definition depends on
1883   // this.  It stays on the stack while we update the iterator.
1884   VisitForStackValue(expr->expression());
1885 
1886   switch (expr->yield_kind()) {
1887     case Yield::kSuspend:
1888       // Pop value from top-of-stack slot; box result into result register.
1889       EmitCreateIteratorResult(false);
1890       __ push(result_register());
1891     // Fall through.
1892     case Yield::kInitial: {
1893       Label suspend, continuation, post_runtime, resume;
1894 
1895       __ b(&suspend);
1896       __ bind(&continuation);
1897       __ RecordGeneratorContinuation();
1898       __ b(&resume);
1899 
1900       __ bind(&suspend);
1901       VisitForAccumulatorValue(expr->generator_object());
1902       DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1903       __ LoadSmiLiteral(r4, Smi::FromInt(continuation.pos()));
1904       __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
1905                 r0);
1906       __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
1907       __ mr(r4, cp);
1908       __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
1909                           kLRHasBeenSaved, kDontSaveFPRegs);
1910       __ addi(r4, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1911       __ cmp(sp, r4);
1912       __ beq(&post_runtime);
1913       __ push(r3);  // generator object
1914       __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1915       __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1916       __ bind(&post_runtime);
1917       __ pop(result_register());
1918       EmitReturnSequence();
1919 
1920       __ bind(&resume);
1921       context()->Plug(result_register());
1922       break;
1923     }
1924 
1925     case Yield::kFinal: {
1926       VisitForAccumulatorValue(expr->generator_object());
1927       __ LoadSmiLiteral(r4, Smi::FromInt(JSGeneratorObject::kGeneratorClosed));
1928       __ StoreP(r4, FieldMemOperand(result_register(),
1929                                     JSGeneratorObject::kContinuationOffset),
1930                 r0);
1931       // Pop value from top-of-stack slot, box result into result register.
1932       EmitCreateIteratorResult(true);
1933       EmitUnwindBeforeReturn();
1934       EmitReturnSequence();
1935       break;
1936     }
1937 
1938     case Yield::kDelegating: {
1939       VisitForStackValue(expr->generator_object());
1940 
1941       // Initial stack layout is as follows:
1942       // [sp + 1 * kPointerSize] iter
1943       // [sp + 0 * kPointerSize] g
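           // Illustrative example (added): this implements 'yield* iter' by
           // repeatedly calling iter.next() (or iter.throw() on exception),
           // re-yielding each result until result.done is true.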
1944 
1945       Label l_catch, l_try, l_suspend, l_continuation, l_resume;
1946       Label l_next, l_call;
1947       Register load_receiver = LoadDescriptor::ReceiverRegister();
1948       Register load_name = LoadDescriptor::NameRegister();
1949 
1950       // Initial send value is undefined.
1951       __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1952       __ b(&l_next);
1953 
1954       // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
1955       __ bind(&l_catch);
1956       __ LoadRoot(load_name, Heap::kthrow_stringRootIndex);  // "throw"
1957       __ LoadP(r6, MemOperand(sp, 1 * kPointerSize));        // iter
1958       __ Push(load_name, r6, r3);  // "throw", iter, except
1959       __ b(&l_call);
1960 
1961       // try { received = %yield result }
1962       // Shuffle the received result above a try handler and yield it without
1963       // re-boxing.
1964       __ bind(&l_try);
1965       __ pop(r3);  // result
1966       int handler_index = NewHandlerTableEntry();
1967       EnterTryBlock(handler_index, &l_catch);
1968       const int try_block_size = TryCatch::kElementCount * kPointerSize;
1969       __ push(r3);  // result
1970 
1971       __ b(&l_suspend);
1972       __ bind(&l_continuation);
1973       __ RecordGeneratorContinuation();
1974       __ b(&l_resume);
1975 
1976       __ bind(&l_suspend);
1977       const int generator_object_depth = kPointerSize + try_block_size;
1978       __ LoadP(r3, MemOperand(sp, generator_object_depth));
1979       __ push(r3);  // g
1980       __ Push(Smi::FromInt(handler_index));  // handler-index
1981       DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
1982       __ LoadSmiLiteral(r4, Smi::FromInt(l_continuation.pos()));
1983       __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
1984                 r0);
1985       __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
1986       __ mr(r4, cp);
1987       __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
1988                           kLRHasBeenSaved, kDontSaveFPRegs);
1989       __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 2);
1990       __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1991       __ pop(r3);  // result
1992       EmitReturnSequence();
1993       __ bind(&l_resume);  // received in r3
1994       ExitTryBlock(handler_index);
1995 
1996       // receiver = iter; f = 'next'; arg = received;
1997       __ bind(&l_next);
1998 
1999       __ LoadRoot(load_name, Heap::knext_stringRootIndex);  // "next"
2000       __ LoadP(r6, MemOperand(sp, 1 * kPointerSize));       // iter
2001       __ Push(load_name, r6, r3);  // "next", iter, received
2002 
2003       // result = receiver[f](arg);
2004       __ bind(&l_call);
2005       __ LoadP(load_receiver, MemOperand(sp, kPointerSize));
2006       __ LoadP(load_name, MemOperand(sp, 2 * kPointerSize));
2007       __ mov(LoadDescriptor::SlotRegister(),
2008              Operand(SmiFromSlot(expr->KeyedLoadFeedbackSlot())));
2009       Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), SLOPPY).code();
2010       CallIC(ic, TypeFeedbackId::None());
2011       __ mr(r4, r3);
2012       __ StoreP(r4, MemOperand(sp, 2 * kPointerSize));
2013       SetCallPosition(expr);
2014       __ li(r3, Operand(1));
2015       __ Call(
2016           isolate()->builtins()->Call(ConvertReceiverMode::kNotNullOrUndefined),
2017           RelocInfo::CODE_TARGET);
2018 
2019       __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2020       __ Drop(1);  // The function is still on the stack; drop it.
2021 
2022       // if (!result.done) goto l_try;
2023       __ Move(load_receiver, r3);
2024 
2025       __ push(load_receiver);                               // save result
2026       __ LoadRoot(load_name, Heap::kdone_stringRootIndex);  // "done"
2027       __ mov(LoadDescriptor::SlotRegister(),
2028              Operand(SmiFromSlot(expr->DoneFeedbackSlot())));
2029       CallLoadIC(NOT_INSIDE_TYPEOF);  // r0=result.done
2030       Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2031       CallIC(bool_ic);
2032       __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
2033       __ bne(&l_try);
2034 
2035       // result.value
2036       __ pop(load_receiver);                                 // result
2037       __ LoadRoot(load_name, Heap::kvalue_stringRootIndex);  // "value"
2038       __ mov(LoadDescriptor::SlotRegister(),
2039              Operand(SmiFromSlot(expr->ValueFeedbackSlot())));
2040       CallLoadIC(NOT_INSIDE_TYPEOF);  // r3=result.value
2041       context()->DropAndPlug(2, r3);  // drop iter and g
2042       break;
2043     }
2044   }
2045 }
2046 
2047 
2048 void FullCodeGenerator::EmitGeneratorResume(
2049     Expression* generator, Expression* value,
2050     JSGeneratorObject::ResumeMode resume_mode) {
2051   // The value stays in r3, and is ultimately read by the resumed generator, as
2052   // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2053   // is read to throw the value when the resumed generator is already closed.
2054   // r4 will hold the generator object until the activation has been resumed.
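       // Illustrative example (added, assuming the resume modes are NEXT and
       // THROW): 'gen.next(v)' resumes with v as the sent value, while
       // 'gen.throw(v)' re-enters the generator and throws v at the suspended
       // yield point.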
2055   VisitForStackValue(generator);
2056   VisitForAccumulatorValue(value);
2057   __ pop(r4);
2058 
2059   // Load suspended function and context.
2060   __ LoadP(cp, FieldMemOperand(r4, JSGeneratorObject::kContextOffset));
2061   __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
2062 
2063   // Load receiver and store as the first argument.
2064   __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
2065   __ push(r5);
2066 
2067   // Push holes for the rest of the arguments to the generator function.
2068   __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
2069   __ LoadWordArith(
2070       r6, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
2071   __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
2072   Label argument_loop, push_frame;
2073 #if V8_TARGET_ARCH_PPC64
2074   __ cmpi(r6, Operand::Zero());
2075   __ beq(&push_frame);
2076 #else
2077   __ SmiUntag(r6, SetRC);
2078   __ beq(&push_frame, cr0);
2079 #endif
2080   __ mtctr(r6);
2081   __ bind(&argument_loop);
2082   __ push(r5);
2083   __ bdnz(&argument_loop);
2084 
2085   // Enter a new JavaScript frame, and initialize its slots as they were when
2086   // the generator was suspended.
2087   Label resume_frame, done;
2088   __ bind(&push_frame);
2089   __ b(&resume_frame, SetLK);
2090   __ b(&done);
2091   __ bind(&resume_frame);
2092   // lr = return address.
2093   // fp = caller's frame pointer.
2094   // cp = callee's context.
2095   // r7 = callee's JS function.
2096   __ PushFixedFrame(r7);
2097   // Adjust FP to point to saved FP.
2098   __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
2099 
2100   // Load the operand stack size.
2101   __ LoadP(r6, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset));
2102   __ LoadP(r6, FieldMemOperand(r6, FixedArray::kLengthOffset));
2103   __ SmiUntag(r6, SetRC);
2104 
2105   // If we are sending a value and there is no operand stack, we can jump back
2106   // in directly.
2107   Label call_resume;
2108   if (resume_mode == JSGeneratorObject::NEXT) {
2109     Label slow_resume;
2110     __ bne(&slow_resume, cr0);
2111     __ LoadP(ip, FieldMemOperand(r7, JSFunction::kCodeEntryOffset));
2112     {
2113       ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
2114       if (FLAG_enable_embedded_constant_pool) {
2115         __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(ip);
2116       }
2117       __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
2118       __ SmiUntag(r5);
2119       __ add(ip, ip, r5);
2120       __ LoadSmiLiteral(r5,
2121                         Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
2122       __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset),
2123                 r0);
2124       __ Jump(ip);
2125       __ bind(&slow_resume);
2126     }
2127   } else {
2128     __ beq(&call_resume, cr0);
2129   }
2130 
2131   // Otherwise, we push holes for the operand stack and call the runtime to fix
2132   // up the stack and the handlers.
2133   Label operand_loop;
2134   __ mtctr(r6);
2135   __ bind(&operand_loop);
2136   __ push(r5);
2137   __ bdnz(&operand_loop);
2138 
2139   __ bind(&call_resume);
2140   DCHECK(!result_register().is(r4));
2141   __ Push(r4, result_register());
2142   __ Push(Smi::FromInt(resume_mode));
2143   __ CallRuntime(Runtime::kResumeJSGeneratorObject);
2144   // Not reached: the runtime call returns elsewhere.
2145   __ stop("not-reached");
2146 
2147   __ bind(&done);
2148   context()->Plug(result_register());
2149 }
2150 
2151 
2152 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2153   Label allocate, done_allocate;
2154 
2155   __ Allocate(JSIteratorResult::kSize, r3, r5, r6, &allocate, TAG_OBJECT);
2156   __ b(&done_allocate);
2157 
2158   __ bind(&allocate);
2159   __ Push(Smi::FromInt(JSIteratorResult::kSize));
2160   __ CallRuntime(Runtime::kAllocateInNewSpace);
2161 
2162   __ bind(&done_allocate);
2163   __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r4);
2164   __ pop(r5);
2165   __ LoadRoot(r6,
2166               done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
2167   __ LoadRoot(r7, Heap::kEmptyFixedArrayRootIndex);
2168   __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
2169   __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
2170   __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
2171   __ StoreP(r5, FieldMemOperand(r3, JSIteratorResult::kValueOffset), r0);
2172   __ StoreP(r6, FieldMemOperand(r3, JSIteratorResult::kDoneOffset), r0);
2173 }
2174 
2175 
2176 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2177   SetExpressionPosition(prop);
2178   Literal* key = prop->key()->AsLiteral();
2179   DCHECK(!prop->IsSuperAccess());
2180 
2181   __ mov(LoadDescriptor::NameRegister(), Operand(key->value()));
2182   __ mov(LoadDescriptor::SlotRegister(),
2183          Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2184   CallLoadIC(NOT_INSIDE_TYPEOF, language_mode());
2185 }
2186 
2187 
2188 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2189   // Stack: receiver, home_object.
2190   SetExpressionPosition(prop);
2191   Literal* key = prop->key()->AsLiteral();
2192   DCHECK(!key->value()->IsSmi());
2193   DCHECK(prop->IsSuperAccess());
2194 
2195   __ Push(key->value());
2196   __ Push(Smi::FromInt(language_mode()));
2197   __ CallRuntime(Runtime::kLoadFromSuper);
2198 }
2199 
2200 
2201 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2202   SetExpressionPosition(prop);
2203   Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate(), language_mode()).code();
2204   __ mov(LoadDescriptor::SlotRegister(),
2205          Operand(SmiFromSlot(prop->PropertyFeedbackSlot())));
2206   CallIC(ic);
2207 }
2208 
2209 
2210 void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
2211   // Stack: receiver, home_object, key.
2212   SetExpressionPosition(prop);
2213   __ Push(Smi::FromInt(language_mode()));
2214   __ CallRuntime(Runtime::kLoadKeyedFromSuper);
2215 }
2216 
2217 
2218 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2219                                               Token::Value op,
2220                                               Expression* left_expr,
2221                                               Expression* right_expr) {
2222   Label done, smi_case, stub_call;
2223 
2224   Register scratch1 = r5;
2225   Register scratch2 = r6;
2226 
2227   // Get the arguments.
2228   Register left = r4;
2229   Register right = r3;
2230   __ pop(left);
2231 
2232   // Perform combined smi check on both operands.
2233   __ orx(scratch1, left, right);
2234   STATIC_ASSERT(kSmiTag == 0);
2235   JumpPatchSite patch_site(masm_);
2236   patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2237 
2238   __ bind(&stub_call);
2239   Handle<Code> code =
2240       CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2241   CallIC(code, expr->BinaryOperationFeedbackId());
2242   patch_site.EmitPatchInfo();
2243   __ b(&done);
2244 
2245   __ bind(&smi_case);
2246   // Smi case. This code works the same way as the smi-smi case in the type
2247   // recording binary operation stub.
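       // Illustrative example (added): '3 + 4' stays entirely in this inline
       // path; an overflowing result such as Smi::kMaxValue + 1 branches to
       // &stub_call and is handled by the BinaryOpIC stub.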
2248   switch (op) {
2249     case Token::SAR:
2250       __ GetLeastBitsFromSmi(scratch1, right, 5);
2251       __ ShiftRightArith(right, left, scratch1);
2252       __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize));
2253       break;
2254     case Token::SHL: {
2255       __ GetLeastBitsFromSmi(scratch2, right, 5);
2256 #if V8_TARGET_ARCH_PPC64
2257       __ ShiftLeft_(right, left, scratch2);
2258 #else
2259       __ SmiUntag(scratch1, left);
2260       __ ShiftLeft_(scratch1, scratch1, scratch2);
2261       // Check that the *signed* result fits in a smi
2262       __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call);
2263       __ SmiTag(right, scratch1);
2264 #endif
2265       break;
2266     }
2267     case Token::SHR: {
2268       __ SmiUntag(scratch1, left);
2269       __ GetLeastBitsFromSmi(scratch2, right, 5);
2270       __ srw(scratch1, scratch1, scratch2);
2271       // Unsigned shift is not allowed to produce a negative number.
2272       __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call);
2273       __ SmiTag(right, scratch1);
2274       break;
2275     }
2276     case Token::ADD: {
2277       __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2278       __ BranchOnOverflow(&stub_call);
2279       __ mr(right, scratch1);
2280       break;
2281     }
2282     case Token::SUB: {
2283       __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0);
2284       __ BranchOnOverflow(&stub_call);
2285       __ mr(right, scratch1);
2286       break;
2287     }
2288     case Token::MUL: {
2289       Label mul_zero;
2290 #if V8_TARGET_ARCH_PPC64
2291       // Remove tag from both operands.
2292       __ SmiUntag(ip, right);
2293       __ SmiUntag(r0, left);
2294       __ Mul(scratch1, r0, ip);
2295       // Check for overflowing the smi range - no overflow if higher 33 bits of
2296       // the result are identical.
2297       __ TestIfInt32(scratch1, r0);
2298       __ bne(&stub_call);
2299 #else
2300       __ SmiUntag(ip, right);
2301       __ mullw(scratch1, left, ip);
2302       __ mulhw(scratch2, left, ip);
2303       // Check for overflowing the smi range - no overflow if higher 33 bits of
2304       // the result are identical.
2305       __ TestIfInt32(scratch2, scratch1, ip);
2306       __ bne(&stub_call);
2307 #endif
2308       // Go slow on zero result to handle -0.
2309       __ cmpi(scratch1, Operand::Zero());
2310       __ beq(&mul_zero);
2311 #if V8_TARGET_ARCH_PPC64
2312       __ SmiTag(right, scratch1);
2313 #else
2314       __ mr(right, scratch1);
2315 #endif
2316       __ b(&done);
2317       // We need -0 if we were multiplying a negative number with 0 to get 0.
2318       // We know one of them was zero.
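           // Illustrative example (added): '-3 * 0' must produce -0, which is
           // not representable as a Smi, so that case falls back to the stub.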
2319       __ bind(&mul_zero);
2320       __ add(scratch2, right, left);
2321       __ cmpi(scratch2, Operand::Zero());
2322       __ blt(&stub_call);
2323       __ LoadSmiLiteral(right, Smi::FromInt(0));
2324       break;
2325     }
2326     case Token::BIT_OR:
2327       __ orx(right, left, right);
2328       break;
2329     case Token::BIT_AND:
2330       __ and_(right, left, right);
2331       break;
2332     case Token::BIT_XOR:
2333       __ xor_(right, left, right);
2334       break;
2335     default:
2336       UNREACHABLE();
2337   }
2338 
2339   __ bind(&done);
2340   context()->Plug(r3);
2341 }
2342 
2343 
2344 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
2345   // Constructor is in r3.
2346   DCHECK(lit != NULL);
2347   __ push(r3);
2348 
2349   // No access check is needed here since the constructor is created by the
2350   // class literal.
2351   Register scratch = r4;
2352   __ LoadP(scratch,
2353            FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
2354   __ push(scratch);
2355 
2356   for (int i = 0; i < lit->properties()->length(); i++) {
2357     ObjectLiteral::Property* property = lit->properties()->at(i);
2358     Expression* value = property->value();
2359 
2360     if (property->is_static()) {
2361       __ LoadP(scratch, MemOperand(sp, kPointerSize));  // constructor
2362     } else {
2363       __ LoadP(scratch, MemOperand(sp, 0));  // prototype
2364     }
2365     __ push(scratch);
2366     EmitPropertyKey(property, lit->GetIdForProperty(i));
2367 
2368     // The static prototype property is read-only. We handle the non-computed
2369     // property name case in the parser. Since this is the only case where we
2370     // need to check for an own read-only property, we special-case it here so
2371     // we do not need to do this check for every property.
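         // Illustrative example (added): 'class C { static [k]() {} }' needs a
         // runtime check because the computed key could evaluate to
         // "prototype", which is a read-only own property of the constructor.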
2372     if (property->is_static() && property->is_computed_name()) {
2373       __ CallRuntime(Runtime::kThrowIfStaticPrototype);
2374       __ push(r3);
2375     }
2376 
2377     VisitForStackValue(value);
2378     if (NeedsHomeObject(value)) {
2379       EmitSetHomeObject(value, 2, property->GetSlot());
2380     }
2381 
2382     switch (property->kind()) {
2383       case ObjectLiteral::Property::CONSTANT:
2384       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
2385       case ObjectLiteral::Property::PROTOTYPE:
2386         UNREACHABLE();
2387       case ObjectLiteral::Property::COMPUTED:
2388         __ CallRuntime(Runtime::kDefineClassMethod);
2389         break;
2390 
2391       case ObjectLiteral::Property::GETTER:
2392         __ mov(r3, Operand(Smi::FromInt(DONT_ENUM)));
2393         __ push(r3);
2394         __ CallRuntime(Runtime::kDefineGetterPropertyUnchecked);
2395         break;
2396 
2397       case ObjectLiteral::Property::SETTER:
2398         __ mov(r3, Operand(Smi::FromInt(DONT_ENUM)));
2399         __ push(r3);
2400         __ CallRuntime(Runtime::kDefineSetterPropertyUnchecked);
2401         break;
2402 
2403       default:
2404         UNREACHABLE();
2405     }
2406   }
2407 
2408   // Set both the prototype and constructor to have fast properties, and also
2409   // freeze them in strong mode.
2410   __ CallRuntime(Runtime::kFinalizeClassDefinition);
2411 }
2412 
2413 
2414 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2415   __ pop(r4);
2416   Handle<Code> code =
2417       CodeFactory::BinaryOpIC(isolate(), op, strength(language_mode())).code();
2418   JumpPatchSite patch_site(masm_);  // unbound, signals no inlined smi code.
2419   CallIC(code, expr->BinaryOperationFeedbackId());
2420   patch_site.EmitPatchInfo();
2421   context()->Plug(r3);
2422 }
2423 
2424 
2425 void FullCodeGenerator::EmitAssignment(Expression* expr,
2426                                        FeedbackVectorSlot slot) {
2427   DCHECK(expr->IsValidReferenceExpressionOrThis());
2428 
2429   Property* prop = expr->AsProperty();
2430   LhsKind assign_type = Property::GetAssignType(prop);
2431 
2432   switch (assign_type) {
2433     case VARIABLE: {
2434       Variable* var = expr->AsVariableProxy()->var();
2435       EffectContext context(this);
2436       EmitVariableAssignment(var, Token::ASSIGN, slot);
2437       break;
2438     }
2439     case NAMED_PROPERTY: {
2440       __ push(r3);  // Preserve value.
2441       VisitForAccumulatorValue(prop->obj());
2442       __ Move(StoreDescriptor::ReceiverRegister(), r3);
2443       __ pop(StoreDescriptor::ValueRegister());  // Restore value.
2444       __ mov(StoreDescriptor::NameRegister(),
2445              Operand(prop->key()->AsLiteral()->value()));
2446       EmitLoadStoreICSlot(slot);
2447       CallStoreIC();
2448       break;
2449     }
2450     case NAMED_SUPER_PROPERTY: {
2451       __ Push(r3);
2452       VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2453       VisitForAccumulatorValue(
2454           prop->obj()->AsSuperPropertyReference()->home_object());
2455       // stack: value, this; r3: home_object
2456       Register scratch = r5;
2457       Register scratch2 = r6;
2458       __ mr(scratch, result_register());                  // home_object
2459       __ LoadP(r3, MemOperand(sp, kPointerSize));         // value
2460       __ LoadP(scratch2, MemOperand(sp, 0));              // this
2461       __ StoreP(scratch2, MemOperand(sp, kPointerSize));  // this
2462       __ StoreP(scratch, MemOperand(sp, 0));              // home_object
2463       // stack: this, home_object; r3: value
2464       EmitNamedSuperPropertyStore(prop);
2465       break;
2466     }
2467     case KEYED_SUPER_PROPERTY: {
2468       __ Push(r3);
2469       VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2470       VisitForStackValue(
2471           prop->obj()->AsSuperPropertyReference()->home_object());
2472       VisitForAccumulatorValue(prop->key());
2473       Register scratch = r5;
2474       Register scratch2 = r6;
2475       __ LoadP(scratch2, MemOperand(sp, 2 * kPointerSize));  // value
2476       // stack: value, this, home_object; r3: key, r6: value
2477       __ LoadP(scratch, MemOperand(sp, kPointerSize));  // this
2478       __ StoreP(scratch, MemOperand(sp, 2 * kPointerSize));
2479       __ LoadP(scratch, MemOperand(sp, 0));  // home_object
2480       __ StoreP(scratch, MemOperand(sp, kPointerSize));
2481       __ StoreP(r3, MemOperand(sp, 0));
2482       __ Move(r3, scratch2);
2483       // stack: this, home_object, key; r3: value.
2484       EmitKeyedSuperPropertyStore(prop);
2485       break;
2486     }
2487     case KEYED_PROPERTY: {
2488       __ push(r3);  // Preserve value.
2489       VisitForStackValue(prop->obj());
2490       VisitForAccumulatorValue(prop->key());
2491       __ Move(StoreDescriptor::NameRegister(), r3);
2492       __ Pop(StoreDescriptor::ValueRegister(),
2493              StoreDescriptor::ReceiverRegister());
2494       EmitLoadStoreICSlot(slot);
2495       Handle<Code> ic =
2496           CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2497       CallIC(ic);
2498       break;
2499     }
2500   }
2501   context()->Plug(r3);
2502 }
2503 
2504 
2505 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2506     Variable* var, MemOperand location) {
2507   __ StoreP(result_register(), location, r0);
2508   if (var->IsContextSlot()) {
2509     // RecordWrite may destroy all its register arguments.
2510     __ mr(r6, result_register());
2511     int offset = Context::SlotOffset(var->index());
2512     __ RecordWriteContextSlot(r4, offset, r6, r5, kLRHasBeenSaved,
2513                               kDontSaveFPRegs);
2514   }
2515 }
2516 
2517 
2518 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2519                                                FeedbackVectorSlot slot) {
2520   if (var->IsUnallocated()) {
2521     // Global var, const, or let.
2522     __ mov(StoreDescriptor::NameRegister(), Operand(var->name()));
2523     __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2524     EmitLoadStoreICSlot(slot);
2525     CallStoreIC();
2526 
2527   } else if (var->mode() == LET && op != Token::INIT) {
2528     // Non-initializing assignment to let variable needs a write barrier.
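         // Illustrative example (added): '{ x = 1; let x; }' reaches this path
         // while 'x' still holds the hole, so a ReferenceError is thrown below.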
2529     DCHECK(!var->IsLookupSlot());
2530     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2531     Label assign;
2532     MemOperand location = VarOperand(var, r4);
2533     __ LoadP(r6, location);
2534     __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2535     __ bne(&assign);
2536     __ mov(r6, Operand(var->name()));
2537     __ push(r6);
2538     __ CallRuntime(Runtime::kThrowReferenceError);
2539     // Perform the assignment.
2540     __ bind(&assign);
2541     EmitStoreToStackLocalOrContextSlot(var, location);
2542 
2543   } else if (var->mode() == CONST && op != Token::INIT) {
2544     // Assignment to const variable needs a write barrier.
2545     DCHECK(!var->IsLookupSlot());
2546     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2547     Label const_error;
2548     MemOperand location = VarOperand(var, r4);
2549     __ LoadP(r6, location);
2550     __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2551     __ bne(&const_error);
2552     __ mov(r6, Operand(var->name()));
2553     __ push(r6);
2554     __ CallRuntime(Runtime::kThrowReferenceError);
2555     __ bind(&const_error);
2556     __ CallRuntime(Runtime::kThrowConstAssignError);
2557 
2558   } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2559     // Initializing assignment to const {this} needs a write barrier.
2560     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2561     Label uninitialized_this;
2562     MemOperand location = VarOperand(var, r4);
2563     __ LoadP(r6, location);
2564     __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2565     __ beq(&uninitialized_this);
2566     __ mov(r4, Operand(var->name()));
2567     __ push(r4);
2568     __ CallRuntime(Runtime::kThrowReferenceError);
2569     __ bind(&uninitialized_this);
2570     EmitStoreToStackLocalOrContextSlot(var, location);
2571 
2572   } else if (!var->is_const_mode() ||
2573              (var->mode() == CONST && op == Token::INIT)) {
2574     if (var->IsLookupSlot()) {
2575       // Assignment to var.
2576       __ push(r3);  // Value.
2577       __ mov(r4, Operand(var->name()));
2578       __ mov(r3, Operand(Smi::FromInt(language_mode())));
2579       __ Push(cp, r4, r3);  // Context, name, language mode.
2580       __ CallRuntime(Runtime::kStoreLookupSlot);
2581     } else {
2582       // Assignment to var or initializing assignment to let/const in harmony
2583       // mode.
2584       DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2585       MemOperand location = VarOperand(var, r4);
2586       if (generate_debug_code_ && var->mode() == LET && op == Token::INIT) {
2587         // Check for an uninitialized let binding.
2588         __ LoadP(r5, location);
2589         __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2590         __ Check(eq, kLetBindingReInitialization);
2591       }
2592       EmitStoreToStackLocalOrContextSlot(var, location);
2593     }
2594   } else if (var->mode() == CONST_LEGACY && op == Token::INIT) {
2595     // Const initializers need a write barrier.
2596     DCHECK(!var->IsParameter());  // No const parameters.
2597     if (var->IsLookupSlot()) {
2598       __ push(r3);
2599       __ mov(r3, Operand(var->name()));
2600       __ Push(cp, r3);  // Context and name.
2601       __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot);
2602     } else {
2603       DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2604       Label skip;
2605       MemOperand location = VarOperand(var, r4);
2606       __ LoadP(r5, location);
2607       __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2608       __ bne(&skip);
2609       EmitStoreToStackLocalOrContextSlot(var, location);
2610       __ bind(&skip);
2611     }
2612 
2613   } else {
2614     DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
2615     if (is_strict(language_mode())) {
2616       __ CallRuntime(Runtime::kThrowConstAssignError);
2617     }
2618     // Silently ignore store in sloppy mode.
2619   }
2620 }
2621 
2622 
2623 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2624   // Assignment to a property, using a named store IC.
2625   Property* prop = expr->target()->AsProperty();
2626   DCHECK(prop != NULL);
2627   DCHECK(prop->key()->IsLiteral());
2628 
2629   __ mov(StoreDescriptor::NameRegister(),
2630          Operand(prop->key()->AsLiteral()->value()));
2631   __ pop(StoreDescriptor::ReceiverRegister());
2632   EmitLoadStoreICSlot(expr->AssignmentSlot());
2633   CallStoreIC();
2634 
2635   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2636   context()->Plug(r3);
2637 }
2638 
2639 
2640 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2641   // Assignment to named property of super.
2642   // r3 : value
2643   // stack : receiver ('this'), home_object
2644   DCHECK(prop != NULL);
2645   Literal* key = prop->key()->AsLiteral();
2646   DCHECK(key != NULL);
2647 
2648   __ Push(key->value());
2649   __ Push(r3);
2650   __ CallRuntime((is_strict(language_mode()) ? Runtime::kStoreToSuper_Strict
2651                                              : Runtime::kStoreToSuper_Sloppy));
2652 }
2653 
2654 
2655 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2656   // Assignment to keyed property of super.
2657   // r3 : value
2658   // stack : receiver ('this'), home_object, key
2659   DCHECK(prop != NULL);
2660 
2661   __ Push(r3);
2662   __ CallRuntime((is_strict(language_mode())
2663                       ? Runtime::kStoreKeyedToSuper_Strict
2664                       : Runtime::kStoreKeyedToSuper_Sloppy));
2665 }
2666 
2667 
2668 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2669   // Assignment to a property, using a keyed store IC.
2670   __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2671   DCHECK(StoreDescriptor::ValueRegister().is(r3));
2672 
2673   Handle<Code> ic =
2674       CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
2675   EmitLoadStoreICSlot(expr->AssignmentSlot());
2676   CallIC(ic);
2677 
2678   PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2679   context()->Plug(r3);
2680 }
2681 
2682 
2683 void FullCodeGenerator::VisitProperty(Property* expr) {
2684   Comment cmnt(masm_, "[ Property");
2685   SetExpressionPosition(expr);
2686 
2687   Expression* key = expr->key();
2688 
2689   if (key->IsPropertyName()) {
2690     if (!expr->IsSuperAccess()) {
2691       VisitForAccumulatorValue(expr->obj());
2692       __ Move(LoadDescriptor::ReceiverRegister(), r3);
2693       EmitNamedPropertyLoad(expr);
2694     } else {
2695       VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2696       VisitForStackValue(
2697           expr->obj()->AsSuperPropertyReference()->home_object());
2698       EmitNamedSuperPropertyLoad(expr);
2699     }
2700   } else {
2701     if (!expr->IsSuperAccess()) {
2702       VisitForStackValue(expr->obj());
2703       VisitForAccumulatorValue(expr->key());
2704       __ Move(LoadDescriptor::NameRegister(), r3);
2705       __ pop(LoadDescriptor::ReceiverRegister());
2706       EmitKeyedPropertyLoad(expr);
2707     } else {
2708       VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
2709       VisitForStackValue(
2710           expr->obj()->AsSuperPropertyReference()->home_object());
2711       VisitForStackValue(expr->key());
2712       EmitKeyedSuperPropertyLoad(expr);
2713     }
2714   }
2715   PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2716   context()->Plug(r3);
2717 }
2718 
2719 
2720 void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) {
2721   ic_total_count_++;
2722   __ Call(code, RelocInfo::CODE_TARGET, ast_id);
2723 }
2724 
2725 
2726 // Code common for calls using the IC.
2727 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2728   Expression* callee = expr->expression();
2729 
2730   // Get the target function.
2731   ConvertReceiverMode convert_mode;
2732   if (callee->IsVariableProxy()) {
2733     {
2734       StackValueContext context(this);
2735       EmitVariableLoad(callee->AsVariableProxy());
2736       PrepareForBailout(callee, NO_REGISTERS);
2737     }
2738     // Push undefined as receiver. This is patched in the method prologue if it
2739     // is a sloppy mode method.
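         // Illustrative example (added): for a plain call 'f(x)' the undefined
         // receiver pushed here is replaced with the global proxy in the
         // callee's prologue when 'f' is a sloppy-mode function.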
2740     __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2741     __ push(r0);
2742     convert_mode = ConvertReceiverMode::kNullOrUndefined;
2743   } else {
2744     // Load the function from the receiver.
2745     DCHECK(callee->IsProperty());
2746     DCHECK(!callee->AsProperty()->IsSuperAccess());
2747     __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2748     EmitNamedPropertyLoad(callee->AsProperty());
2749     PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2750     // Push the target function under the receiver.
2751     __ LoadP(r0, MemOperand(sp, 0));
2752     __ push(r0);
2753     __ StoreP(r3, MemOperand(sp, kPointerSize));
2754     convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2755   }
2756 
2757   EmitCall(expr, convert_mode);
2758 }
2759 
2760 
2761 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2762   Expression* callee = expr->expression();
2763   DCHECK(callee->IsProperty());
2764   Property* prop = callee->AsProperty();
2765   DCHECK(prop->IsSuperAccess());
2766   SetExpressionPosition(prop);
2767 
2768   Literal* key = prop->key()->AsLiteral();
2769   DCHECK(!key->value()->IsSmi());
2770   // Load the function from the receiver.
2771   const Register scratch = r4;
2772   SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2773   VisitForAccumulatorValue(super_ref->home_object());
2774   __ mr(scratch, r3);
2775   VisitForAccumulatorValue(super_ref->this_var());
2776   __ Push(scratch, r3, r3, scratch);
2777   __ Push(key->value());
2778   __ Push(Smi::FromInt(language_mode()));
2779 
2780   // Stack here:
2781   //  - home_object
2782   //  - this (receiver)
2783   //  - this (receiver) <-- LoadFromSuper will pop here and below.
2784   //  - home_object
2785   //  - key
2786   //  - language_mode
2787   __ CallRuntime(Runtime::kLoadFromSuper);
2788 
2789   // Replace home_object with target function.
2790   __ StoreP(r3, MemOperand(sp, kPointerSize));
2791 
2792   // Stack here:
2793   // - target function
2794   // - this (receiver)
2795   EmitCall(expr);
2796 }
2797 
2798 
2799 // Code common for calls using the IC.
2800 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) {
2801   // Load the key.
2802   VisitForAccumulatorValue(key);
2803 
2804   Expression* callee = expr->expression();
2805 
2806   // Load the function from the receiver.
2807   DCHECK(callee->IsProperty());
2808   __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2809   __ Move(LoadDescriptor::NameRegister(), r3);
2810   EmitKeyedPropertyLoad(callee->AsProperty());
2811   PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2812 
2813   // Push the target function under the receiver.
2814   __ LoadP(ip, MemOperand(sp, 0));
2815   __ push(ip);
2816   __ StoreP(r3, MemOperand(sp, kPointerSize));
2817 
2818   EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2819 }
2820 
2821 
2822 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2823   Expression* callee = expr->expression();
2824   DCHECK(callee->IsProperty());
2825   Property* prop = callee->AsProperty();
2826   DCHECK(prop->IsSuperAccess());
2827 
2828   SetExpressionPosition(prop);
2829   // Load the function from the receiver.
2830   const Register scratch = r4;
2831   SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2832   VisitForAccumulatorValue(super_ref->home_object());
2833   __ mr(scratch, r3);
2834   VisitForAccumulatorValue(super_ref->this_var());
2835   __ Push(scratch, r3, r3, scratch);
2836   VisitForStackValue(prop->key());
2837   __ Push(Smi::FromInt(language_mode()));
2838 
2839   // Stack here:
2840   //  - home_object
2841   //  - this (receiver)
2842   //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2843   //  - home_object
2844   //  - key
2845   //  - language_mode
2846   __ CallRuntime(Runtime::kLoadKeyedFromSuper);
2847 
2848   // Replace home_object with target function.
2849   __ StoreP(r3, MemOperand(sp, kPointerSize));
2850 
2851   // Stack here:
2852   // - target function
2853   // - this (receiver)
2854   EmitCall(expr);
2855 }
2856 
2857 
2858 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2859   // Load the arguments.
2860   ZoneList<Expression*>* args = expr->arguments();
2861   int arg_count = args->length();
2862   for (int i = 0; i < arg_count; i++) {
2863     VisitForStackValue(args->at(i));
2864   }
2865 
2866   PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
2867   SetCallPosition(expr);
2868   Handle<Code> ic = CodeFactory::CallIC(isolate(), arg_count, mode).code();
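  // The callee sits just above the pushed arguments; hand it to the IC in r4
  // together with the feedback slot index (as a Smi) in r6, as set up below.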
2869   __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackICSlot()));
2870   __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2871   // Don't assign a type feedback id to the IC, since type feedback is provided
2872   // by the vector above.
2873   CallIC(ic);
2874 
2875   RecordJSReturnSite(expr);
2876   // Restore context register.
2877   __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2878   context()->DropAndPlug(1, r3);
2879 }
2880 
2881 
2882 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2883   // r7: copy of the first argument or undefined if it doesn't exist.
2884   if (arg_count > 0) {
2885     __ LoadP(r7, MemOperand(sp, arg_count * kPointerSize), r0);
2886   } else {
2887     __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
2888   }
2889 
2890   // r6: the receiver of the enclosing function.
2891   __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2892 
2893   // r5: language mode.
2894   __ LoadSmiLiteral(r5, Smi::FromInt(language_mode()));
2895 
2896   // r4: the start position of the scope the call resides in.
2897   __ LoadSmiLiteral(r4, Smi::FromInt(scope()->start_position()));
2898 
2899   // Do the runtime call.
2900   __ Push(r7, r6, r5, r4);
2901   __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2902 }
2903 
2904 
2905 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2906 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2907   VariableProxy* callee = expr->expression()->AsVariableProxy();
2908   if (callee->var()->IsLookupSlot()) {
2909     Label slow, done;
2910     SetExpressionPosition(callee);
2911     // Generate code for loading from variables potentially shadowed by
2912     // eval-introduced variables.
2913     EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2914 
2915     __ bind(&slow);
2916     // Call the runtime to find the function to call (returned in r3) and
2917     // the object holding it (returned in r4).
2918     DCHECK(!context_register().is(r5));
2919     __ mov(r5, Operand(callee->name()));
2920     __ Push(context_register(), r5);
2921     __ CallRuntime(Runtime::kLoadLookupSlot);
2922     __ Push(r3, r4);  // Function, receiver.
2923     PrepareForBailoutForId(expr->LookupId(), NO_REGISTERS);
2924 
2925     // If fast case code has been generated, emit code to push the function
2926     // and receiver and have the slow path jump around this code.
2927     if (done.is_linked()) {
2928       Label call;
2929       __ b(&call);
2930       __ bind(&done);
2931       // Push function.
2932       __ push(r3);
2933       // Pass undefined as the receiver, which is the WithBaseObject of a
2934       // non-object environment record.  If the callee is sloppy, it will patch
2935       // it up to be the global receiver.
2936       __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2937       __ push(r4);
2938       __ bind(&call);
2939     }
2940   } else {
2941     VisitForStackValue(callee);
2942     // refEnv.WithBaseObject()
2943     __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
2944     __ push(r5);  // Reserved receiver slot.
2945   }
2946 }
2947 
2948 
2949 void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
2950   // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval
2951   // to resolve the function we need to call.  Then we call the resolved
2952   // function using the given arguments.
2953   ZoneList<Expression*>* args = expr->arguments();
2954   int arg_count = args->length();
2955 
2956   PushCalleeAndWithBaseObject(expr);
2957 
2958   // Push the arguments.
2959   for (int i = 0; i < arg_count; i++) {
2960     VisitForStackValue(args->at(i));
2961   }
2962 
2963   // Push a copy of the function (found below the arguments) and
2964   // resolve eval.
2965   __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2966   __ push(r4);
2967   EmitResolvePossiblyDirectEval(arg_count);
2968 
2969   // Touch up the stack with the resolved function.
2970   __ StoreP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2971 
2972   PrepareForBailoutForId(expr->EvalId(), NO_REGISTERS);
2973 
2974   // Record source position for debugger.
2975   SetCallPosition(expr);
2976   __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2977   __ mov(r3, Operand(arg_count));
2978   __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2979   RecordJSReturnSite(expr);
2980   // Restore context register.
2981   __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2982   context()->DropAndPlug(1, r3);
2983 }
2984 
2985 
2986 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2987   Comment cmnt(masm_, "[ CallNew");
2988   // According to ECMA-262, section 11.2.2, page 44, the function
2989   // expression in new calls must be evaluated before the
2990   // arguments.
2991 
2992   // Push constructor on the stack.  If it's not a function it's used as
2993   // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2994   // ignored.
2995   DCHECK(!expr->expression()->IsSuperPropertyReference());
2996   VisitForStackValue(expr->expression());
2997 
2998   // Push the arguments ("left-to-right") on the stack.
2999   ZoneList<Expression*>* args = expr->arguments();
3000   int arg_count = args->length();
3001   for (int i = 0; i < arg_count; i++) {
3002     VisitForStackValue(args->at(i));
3003   }
3004 
3005   // Call the construct call builtin that handles allocation and
3006   // constructor invocation.
3007   SetConstructCallPosition(expr);
3008 
3009   // Load function and argument count into r4 and r3.
3010   __ mov(r3, Operand(arg_count));
3011   __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize), r0);
3012 
3013   // Record call targets in unoptimized code.
3014   __ EmitLoadTypeFeedbackVector(r5);
3015   __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallNewFeedbackSlot()));
3016 
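  // CallConstructStub consumes the registers set up above: r3 holds the
  // argument count, r4 the constructor, r5 the feedback vector and r6 the slot.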
3017   CallConstructStub stub(isolate());
3018   __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
3019   PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
3020   // Restore context register.
3021   __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3022   context()->Plug(r3);
3023 }
3024 
3025 
3026 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
3027   SuperCallReference* super_call_ref =
3028       expr->expression()->AsSuperCallReference();
3029   DCHECK_NOT_NULL(super_call_ref);
3030 
3031   // Push the super constructor target on the stack (may be null,
3032   // but the Construct builtin can deal with that properly).
3033   VisitForAccumulatorValue(super_call_ref->this_function_var());
3034   __ AssertFunction(result_register());
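  // The super constructor is the [[Prototype]] of the active function, so
  // fetch it through the function's map.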
3035   __ LoadP(result_register(),
3036            FieldMemOperand(result_register(), HeapObject::kMapOffset));
3037   __ LoadP(result_register(),
3038            FieldMemOperand(result_register(), Map::kPrototypeOffset));
3039   __ Push(result_register());
3040 
3041   // Push the arguments ("left-to-right") on the stack.
3042   ZoneList<Expression*>* args = expr->arguments();
3043   int arg_count = args->length();
3044   for (int i = 0; i < arg_count; i++) {
3045     VisitForStackValue(args->at(i));
3046   }
3047 
3048   // Call the construct call builtin that handles allocation and
3049   // constructor invocation.
3050   SetConstructCallPosition(expr);
3051 
3052   // Load new target into r6.
3053   VisitForAccumulatorValue(super_call_ref->new_target_var());
3054   __ mr(r6, result_register());
3055 
3056   // Load function and argument count into r4 and r3.
3057   __ mov(r3, Operand(arg_count));
3058   __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize));
3059 
3060   __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
3061 
3062   RecordJSReturnSite(expr);
3063 
3064   // Restore context register.
3065   __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3066   context()->Plug(r3);
3067 }
3068 
3069 
3070 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
3071   ZoneList<Expression*>* args = expr->arguments();
3072   DCHECK(args->length() == 1);
3073 
3074   VisitForAccumulatorValue(args->at(0));
3075 
3076   Label materialize_true, materialize_false;
3077   Label* if_true = NULL;
3078   Label* if_false = NULL;
3079   Label* fall_through = NULL;
3080   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3081                          &if_false, &fall_through);
3082 
3083   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3084   __ TestIfSmi(r3, r0);
3085   Split(eq, if_true, if_false, fall_through, cr0);
3086 
3087   context()->Plug(if_true, if_false);
3088 }
3089 
3090 
3091 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
3092   ZoneList<Expression*>* args = expr->arguments();
3093   DCHECK(args->length() == 1);
3094 
3095   VisitForAccumulatorValue(args->at(0));
3096 
3097   Label materialize_true, materialize_false;
3098   Label* if_true = NULL;
3099   Label* if_false = NULL;
3100   Label* fall_through = NULL;
3101   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3102                          &if_false, &fall_through);
3103 
3104   __ JumpIfSmi(r3, if_false);
3105   __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
3106   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3107   Split(ge, if_true, if_false, fall_through);
3108 
3109   context()->Plug(if_true, if_false);
3110 }
3111 
3112 
3113 void FullCodeGenerator::EmitIsSimdValue(CallRuntime* expr) {
3114   ZoneList<Expression*>* args = expr->arguments();
3115   DCHECK(args->length() == 1);
3116 
3117   VisitForAccumulatorValue(args->at(0));
3118 
3119   Label materialize_true, materialize_false;
3120   Label* if_true = NULL;
3121   Label* if_false = NULL;
3122   Label* fall_through = NULL;
3123   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3124                          &if_false, &fall_through);
3125 
3126   __ JumpIfSmi(r3, if_false);
3127   __ CompareObjectType(r3, r4, r4, SIMD128_VALUE_TYPE);
3128   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3129   Split(eq, if_true, if_false, fall_through);
3130 
3131   context()->Plug(if_true, if_false);
3132 }
3133 
3134 
3135 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3136   ZoneList<Expression*>* args = expr->arguments();
3137   DCHECK(args->length() == 1);
3138 
3139   VisitForAccumulatorValue(args->at(0));
3140 
3141   Label materialize_true, materialize_false;
3142   Label* if_true = NULL;
3143   Label* if_false = NULL;
3144   Label* fall_through = NULL;
3145   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3146                          &if_false, &fall_through);
3147 
3148   __ JumpIfSmi(r3, if_false);
3149   __ CompareObjectType(r3, r4, r5, FIRST_FUNCTION_TYPE);
3150   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3151   Split(ge, if_true, if_false, fall_through);
3152 
3153   context()->Plug(if_true, if_false);
3154 }
3155 
3156 
3157 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3158   ZoneList<Expression*>* args = expr->arguments();
3159   DCHECK(args->length() == 1);
3160 
3161   VisitForAccumulatorValue(args->at(0));
3162 
3163   Label materialize_true, materialize_false;
3164   Label* if_true = NULL;
3165   Label* if_false = NULL;
3166   Label* fall_through = NULL;
3167   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3168                          &if_false, &fall_through);
3169 
3170   __ CheckMap(r3, r4, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3171 #if V8_TARGET_ARCH_PPC64
3172   __ LoadP(r4, FieldMemOperand(r3, HeapNumber::kValueOffset));
3173   __ li(r5, Operand(1));
3174   __ rotrdi(r5, r5, 1);  // r5 = 0x80000000_00000000
3175   __ cmp(r4, r5);
3176 #else
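  // Without a 64-bit load, identify -0.0 by its sign/exponent word
  // (0x80000000) together with a zero mantissa word.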
3177   __ lwz(r5, FieldMemOperand(r3, HeapNumber::kExponentOffset));
3178   __ lwz(r4, FieldMemOperand(r3, HeapNumber::kMantissaOffset));
3179   Label skip;
3180   __ lis(r0, Operand(SIGN_EXT_IMM16(0x8000)));
3181   __ cmp(r5, r0);
3182   __ bne(&skip);
3183   __ cmpi(r4, Operand::Zero());
3184   __ bind(&skip);
3185 #endif
3186 
3187   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3188   Split(eq, if_true, if_false, fall_through);
3189 
3190   context()->Plug(if_true, if_false);
3191 }
3192 
3193 
3194 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3195   ZoneList<Expression*>* args = expr->arguments();
3196   DCHECK(args->length() == 1);
3197 
3198   VisitForAccumulatorValue(args->at(0));
3199 
3200   Label materialize_true, materialize_false;
3201   Label* if_true = NULL;
3202   Label* if_false = NULL;
3203   Label* fall_through = NULL;
3204   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3205                          &if_false, &fall_through);
3206 
3207   __ JumpIfSmi(r3, if_false);
3208   __ CompareObjectType(r3, r4, r4, JS_ARRAY_TYPE);
3209   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3210   Split(eq, if_true, if_false, fall_through);
3211 
3212   context()->Plug(if_true, if_false);
3213 }
3214 
3215 
3216 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
3217   ZoneList<Expression*>* args = expr->arguments();
3218   DCHECK(args->length() == 1);
3219 
3220   VisitForAccumulatorValue(args->at(0));
3221 
3222   Label materialize_true, materialize_false;
3223   Label* if_true = NULL;
3224   Label* if_false = NULL;
3225   Label* fall_through = NULL;
3226   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3227                          &if_false, &fall_through);
3228 
3229   __ JumpIfSmi(r3, if_false);
3230   __ CompareObjectType(r3, r4, r4, JS_TYPED_ARRAY_TYPE);
3231   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3232   Split(eq, if_true, if_false, fall_through);
3233 
3234   context()->Plug(if_true, if_false);
3235 }
3236 
3237 
3238 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3239   ZoneList<Expression*>* args = expr->arguments();
3240   DCHECK(args->length() == 1);
3241 
3242   VisitForAccumulatorValue(args->at(0));
3243 
3244   Label materialize_true, materialize_false;
3245   Label* if_true = NULL;
3246   Label* if_false = NULL;
3247   Label* fall_through = NULL;
3248   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3249                          &if_false, &fall_through);
3250 
3251   __ JumpIfSmi(r3, if_false);
3252   __ CompareObjectType(r3, r4, r4, JS_REGEXP_TYPE);
3253   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3254   Split(eq, if_true, if_false, fall_through);
3255 
3256   context()->Plug(if_true, if_false);
3257 }
3258 
3259 
3260 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
3261   ZoneList<Expression*>* args = expr->arguments();
3262   DCHECK(args->length() == 1);
3263 
3264   VisitForAccumulatorValue(args->at(0));
3265 
3266   Label materialize_true, materialize_false;
3267   Label* if_true = NULL;
3268   Label* if_false = NULL;
3269   Label* fall_through = NULL;
3270   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3271                          &if_false, &fall_through);
3272 
3273   __ JumpIfSmi(r3, if_false);
3274   __ CompareObjectType(r3, r4, r4, JS_PROXY_TYPE);
3275   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3276   Split(eq, if_true, if_false, fall_through);
3277 
3278   context()->Plug(if_true, if_false);
3279 }
3280 
3281 
3282 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3283   ZoneList<Expression*>* args = expr->arguments();
3284   DCHECK(args->length() == 2);
3285 
3286   // Load the two objects into registers and perform the comparison.
3287   VisitForStackValue(args->at(0));
3288   VisitForAccumulatorValue(args->at(1));
3289 
3290   Label materialize_true, materialize_false;
3291   Label* if_true = NULL;
3292   Label* if_false = NULL;
3293   Label* fall_through = NULL;
3294   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3295                          &if_false, &fall_through);
3296 
3297   __ pop(r4);
3298   __ cmp(r3, r4);
3299   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3300   Split(eq, if_true, if_false, fall_through);
3301 
3302   context()->Plug(if_true, if_false);
3303 }
3304 
3305 
3306 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3307   ZoneList<Expression*>* args = expr->arguments();
3308   DCHECK(args->length() == 1);
3309 
3310   // ArgumentsAccessStub expects the key in r4 and the formal
3311   // parameter count in r3.
3312   VisitForAccumulatorValue(args->at(0));
3313   __ mr(r4, r3);
3314   __ LoadSmiLiteral(r3, Smi::FromInt(info_->scope()->num_parameters()));
3315   ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3316   __ CallStub(&stub);
3317   context()->Plug(r3);
3318 }
3319 
3320 
3321 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3322   DCHECK(expr->arguments()->length() == 0);
3323   Label exit;
3324   // Get the number of formal parameters.
3325   __ LoadSmiLiteral(r3, Smi::FromInt(info_->scope()->num_parameters()));
3326 
3327   // Check if the calling frame is an arguments adaptor frame.
3328   __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3329   __ LoadP(r6, MemOperand(r5, StandardFrameConstants::kContextOffset));
3330   __ CmpSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
3331   __ bne(&exit);
3332 
3333   // Arguments adaptor case: Read the arguments length from the
3334   // adaptor frame.
3335   __ LoadP(r3, MemOperand(r5, ArgumentsAdaptorFrameConstants::kLengthOffset));
3336 
3337   __ bind(&exit);
3338   context()->Plug(r3);
3339 }
3340 
3341 
3342 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3343   ZoneList<Expression*>* args = expr->arguments();
3344   DCHECK(args->length() == 1);
3345   Label done, null, function, non_function_constructor;
3346 
3347   VisitForAccumulatorValue(args->at(0));
3348 
3349   // If the object is not a JSReceiver, we return null.
3350   __ JumpIfSmi(r3, &null);
3351   STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3352   __ CompareObjectType(r3, r3, r4, FIRST_JS_RECEIVER_TYPE);
3353   // Map is now in r3.
3354   __ blt(&null);
3355 
3356   // Return 'Function' for JSFunction objects.
3357   __ cmpi(r4, Operand(JS_FUNCTION_TYPE));
3358   __ beq(&function);
3359 
3360   // Check if the constructor in the map is a JS function.
3361   Register instance_type = r5;
3362   __ GetMapConstructor(r3, r3, r4, instance_type);
3363   __ cmpi(instance_type, Operand(JS_FUNCTION_TYPE));
3364   __ bne(&non_function_constructor);
3365 
3366   // r3 now contains the constructor function. Grab the
3367   // instance class name from there.
3368   __ LoadP(r3, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
3369   __ LoadP(r3,
3370            FieldMemOperand(r3, SharedFunctionInfo::kInstanceClassNameOffset));
3371   __ b(&done);
3372 
3373   // Functions have class 'Function'.
3374   __ bind(&function);
3375   __ LoadRoot(r3, Heap::kFunction_stringRootIndex);
3376   __ b(&done);
3377 
3378   // Objects with a non-function constructor have class 'Object'.
3379   __ bind(&non_function_constructor);
3380   __ LoadRoot(r3, Heap::kObject_stringRootIndex);
3381   __ b(&done);
3382 
3383   // Non-JS objects have class null.
3384   __ bind(&null);
3385   __ LoadRoot(r3, Heap::kNullValueRootIndex);
3386 
3387   // All done.
3388   __ bind(&done);
3389 
3390   context()->Plug(r3);
3391 }
3392 
3393 
3394 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3395   ZoneList<Expression*>* args = expr->arguments();
3396   DCHECK(args->length() == 1);
3397   VisitForAccumulatorValue(args->at(0));  // Load the object.
3398 
3399   Label done;
3400   // If the object is a smi return the object.
3401   __ JumpIfSmi(r3, &done);
3402   // If the object is not a value type, return the object.
3403   __ CompareObjectType(r3, r4, r4, JS_VALUE_TYPE);
3404   __ bne(&done);
3405   __ LoadP(r3, FieldMemOperand(r3, JSValue::kValueOffset));
3406 
3407   __ bind(&done);
3408   context()->Plug(r3);
3409 }
3410 
3411 
3412 void FullCodeGenerator::EmitIsDate(CallRuntime* expr) {
3413   ZoneList<Expression*>* args = expr->arguments();
3414   DCHECK_EQ(1, args->length());
3415 
3416   VisitForAccumulatorValue(args->at(0));
3417 
3418   Label materialize_true, materialize_false;
3419   Label* if_true = nullptr;
3420   Label* if_false = nullptr;
3421   Label* fall_through = nullptr;
3422   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3423                          &if_false, &fall_through);
3424 
3425   __ JumpIfSmi(r3, if_false);
3426   __ CompareObjectType(r3, r4, r4, JS_DATE_TYPE);
3427   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3428   Split(eq, if_true, if_false, fall_through);
3429 
3430   context()->Plug(if_true, if_false);
3431 }
3432 
3433 
3434 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3435   ZoneList<Expression*>* args = expr->arguments();
3436   DCHECK_EQ(3, args->length());
3437 
3438   Register string = r3;
3439   Register index = r4;
3440   Register value = r5;
3441 
3442   VisitForStackValue(args->at(0));        // index
3443   VisitForStackValue(args->at(1));        // value
3444   VisitForAccumulatorValue(args->at(2));  // string
3445   __ Pop(index, value);
3446 
3447   if (FLAG_debug_code) {
3448     __ TestIfSmi(value, r0);
3449     __ Check(eq, kNonSmiValue, cr0);
3450     __ TestIfSmi(index, r0);
3451     __ Check(eq, kNonSmiIndex, cr0);
3452     __ SmiUntag(index, index);
3453     static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3454     __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3455     __ SmiTag(index, index);
3456   }
3457 
3458   __ SmiUntag(value);
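  // Store the character: ip points at the first character of the string and
  // r0 holds the byte offset derived from the Smi index.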
3459   __ addi(ip, string, Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3460   __ SmiToByteArrayOffset(r0, index);
3461   __ stbx(value, MemOperand(ip, r0));
3462   context()->Plug(string);
3463 }
3464 
3465 
3466 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3467   ZoneList<Expression*>* args = expr->arguments();
3468   DCHECK_EQ(3, args->length());
3469 
3470   Register string = r3;
3471   Register index = r4;
3472   Register value = r5;
3473 
3474   VisitForStackValue(args->at(0));        // index
3475   VisitForStackValue(args->at(1));        // value
3476   VisitForAccumulatorValue(args->at(2));  // string
3477   __ Pop(index, value);
3478 
3479   if (FLAG_debug_code) {
3480     __ TestIfSmi(value, r0);
3481     __ Check(eq, kNonSmiValue, cr0);
3482     __ TestIfSmi(index, r0);
3483     __ Check(eq, kNonSmiIndex, cr0);
3484     __ SmiUntag(index, index);
3485     static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3486     __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3487     __ SmiTag(index, index);
3488   }
3489 
3490   __ SmiUntag(value);
3491   __ addi(ip, string, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3492   __ SmiToShortArrayOffset(r0, index);
3493   __ sthx(value, MemOperand(ip, r0));
3494   context()->Plug(string);
3495 }
3496 
3497 
3498 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3499   ZoneList<Expression*>* args = expr->arguments();
3500   DCHECK(args->length() == 2);
3501   VisitForStackValue(args->at(0));        // Load the object.
3502   VisitForAccumulatorValue(args->at(1));  // Load the value.
3503   __ pop(r4);                             // r3 = value. r4 = object.
3504 
3505   Label done;
3506   // If the object is a smi, return the value.
3507   __ JumpIfSmi(r4, &done);
3508 
3509   // If the object is not a value type, return the value.
3510   __ CompareObjectType(r4, r5, r5, JS_VALUE_TYPE);
3511   __ bne(&done);
3512 
3513   // Store the value.
3514   __ StoreP(r3, FieldMemOperand(r4, JSValue::kValueOffset), r0);
3515   // Update the write barrier.  Save the value as it will be
3516   // overwritten by the write barrier code and is needed afterward.
3517   __ mr(r5, r3);
3518   __ RecordWriteField(r4, JSValue::kValueOffset, r5, r6, kLRHasBeenSaved,
3519                       kDontSaveFPRegs);
3520 
3521   __ bind(&done);
3522   context()->Plug(r3);
3523 }
3524 
3525 
3526 void FullCodeGenerator::EmitToInteger(CallRuntime* expr) {
3527   ZoneList<Expression*>* args = expr->arguments();
3528   DCHECK_EQ(1, args->length());
3529 
3530   // Load the argument into r3 and convert it.
3531   VisitForAccumulatorValue(args->at(0));
3532 
3533   // Convert the object to an integer.
3534   Label done_convert;
3535   __ JumpIfSmi(r3, &done_convert);
3536   __ Push(r3);
3537   __ CallRuntime(Runtime::kToInteger);
3538   __ bind(&done_convert);
3539   context()->Plug(r3);
3540 }
3541 
3542 
3543 void FullCodeGenerator::EmitToName(CallRuntime* expr) {
3544   ZoneList<Expression*>* args = expr->arguments();
3545   DCHECK_EQ(1, args->length());
3546 
3547   // Load the argument into r3 and convert it.
3548   VisitForAccumulatorValue(args->at(0));
3549 
3550   Label convert, done_convert;
3551   __ JumpIfSmi(r3, &convert);
3552   STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
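  // Instance types up to LAST_NAME_TYPE are already Strings or Symbols and
  // need no conversion.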
3553   __ CompareObjectType(r3, r4, r4, LAST_NAME_TYPE);
3554   __ ble(&done_convert);
3555   __ bind(&convert);
3556   __ Push(r3);
3557   __ CallRuntime(Runtime::kToName);
3558   __ bind(&done_convert);
3559   context()->Plug(r3);
3560 }
3561 
3562 
3563 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3564   ZoneList<Expression*>* args = expr->arguments();
3565   DCHECK(args->length() == 1);
3566   VisitForAccumulatorValue(args->at(0));
3567 
3568   Label done;
3569   StringCharFromCodeGenerator generator(r3, r4);
3570   generator.GenerateFast(masm_);
3571   __ b(&done);
3572 
3573   NopRuntimeCallHelper call_helper;
3574   generator.GenerateSlow(masm_, call_helper);
3575 
3576   __ bind(&done);
3577   context()->Plug(r4);
3578 }
3579 
3580 
3581 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3582   ZoneList<Expression*>* args = expr->arguments();
3583   DCHECK(args->length() == 2);
3584   VisitForStackValue(args->at(0));
3585   VisitForAccumulatorValue(args->at(1));
3586 
3587   Register object = r4;
3588   Register index = r3;
3589   Register result = r6;
3590 
3591   __ pop(object);
3592 
3593   Label need_conversion;
3594   Label index_out_of_range;
3595   Label done;
3596   StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
3597                                       &need_conversion, &index_out_of_range,
3598                                       STRING_INDEX_IS_NUMBER);
3599   generator.GenerateFast(masm_);
3600   __ b(&done);
3601 
3602   __ bind(&index_out_of_range);
3603   // When the index is out of range, the spec requires us to return
3604   // NaN.
3605   __ LoadRoot(result, Heap::kNanValueRootIndex);
3606   __ b(&done);
3607 
3608   __ bind(&need_conversion);
3609   // Load the undefined value into the result register, which will
3610   // trigger conversion.
3611   __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3612   __ b(&done);
3613 
3614   NopRuntimeCallHelper call_helper;
3615   generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3616 
3617   __ bind(&done);
3618   context()->Plug(result);
3619 }
3620 
3621 
3622 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3623   ZoneList<Expression*>* args = expr->arguments();
3624   DCHECK(args->length() == 2);
3625   VisitForStackValue(args->at(0));
3626   VisitForAccumulatorValue(args->at(1));
3627 
3628   Register object = r4;
3629   Register index = r3;
3630   Register scratch = r6;
3631   Register result = r3;
3632 
3633   __ pop(object);
3634 
3635   Label need_conversion;
3636   Label index_out_of_range;
3637   Label done;
3638   StringCharAtGenerator generator(object, index, scratch, result,
3639                                   &need_conversion, &need_conversion,
3640                                   &index_out_of_range, STRING_INDEX_IS_NUMBER);
3641   generator.GenerateFast(masm_);
3642   __ b(&done);
3643 
3644   __ bind(&index_out_of_range);
3645   // When the index is out of range, the spec requires us to return
3646   // the empty string.
3647   __ LoadRoot(result, Heap::kempty_stringRootIndex);
3648   __ b(&done);
3649 
3650   __ bind(&need_conversion);
3651   // Move smi zero into the result register, which will trigger
3652   // conversion.
3653   __ LoadSmiLiteral(result, Smi::FromInt(0));
3654   __ b(&done);
3655 
3656   NopRuntimeCallHelper call_helper;
3657   generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
3658 
3659   __ bind(&done);
3660   context()->Plug(result);
3661 }
3662 
3663 
3664 void FullCodeGenerator::EmitCall(CallRuntime* expr) {
3665   ZoneList<Expression*>* args = expr->arguments();
3666   DCHECK_LE(2, args->length());
3667   // Push target, receiver and arguments onto the stack.
3668   for (Expression* const arg : *args) {
3669     VisitForStackValue(arg);
3670   }
3671   PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
3672   // Move target to r4.
3673   int const argc = args->length() - 2;
3674   __ LoadP(r4, MemOperand(sp, (argc + 1) * kPointerSize));
3675   // Call the target.
3676   __ mov(r3, Operand(argc));
3677   __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
3678   // Restore context register.
3679   __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3680   // Discard the function left on TOS.
3681   context()->DropAndPlug(1, r3);
3682 }
3683 
3684 
3685 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3686   ZoneList<Expression*>* args = expr->arguments();
3687   VisitForAccumulatorValue(args->at(0));
3688 
3689   Label materialize_true, materialize_false;
3690   Label* if_true = NULL;
3691   Label* if_false = NULL;
3692   Label* fall_through = NULL;
3693   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3694                          &if_false, &fall_through);
3695 
3696   __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
3697   // PPC - assume ip is free
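  // The masked hash-field bits are zero exactly when the string caches an
  // array index, so the eq condition below selects the true branch.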
3698   __ mov(ip, Operand(String::kContainsCachedArrayIndexMask));
3699   __ and_(r0, r3, ip, SetRC);
3700   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3701   Split(eq, if_true, if_false, fall_through, cr0);
3702 
3703   context()->Plug(if_true, if_false);
3704 }
3705 
3706 
3707 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3708   ZoneList<Expression*>* args = expr->arguments();
3709   DCHECK(args->length() == 1);
3710   VisitForAccumulatorValue(args->at(0));
3711 
3712   __ AssertString(r3);
3713 
3714   __ lwz(r3, FieldMemOperand(r3, String::kHashFieldOffset));
3715   __ IndexFromHash(r3, r3);
3716 
3717   context()->Plug(r3);
3718 }
3719 
3720 
3721 void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
3722   ZoneList<Expression*>* args = expr->arguments();
3723   DCHECK_EQ(1, args->length());
3724   VisitForAccumulatorValue(args->at(0));
3725   __ AssertFunction(r3);
3726   __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3727   __ LoadP(r3, FieldMemOperand(r3, Map::kPrototypeOffset));
3728   context()->Plug(r3);
3729 }
3730 
3731 
3732 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
3733   Label bailout, done, one_char_separator, long_separator, non_trivial_array,
3734       not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
3735       one_char_separator_loop_entry, long_separator_loop;
3736   ZoneList<Expression*>* args = expr->arguments();
3737   DCHECK(args->length() == 2);
3738   VisitForStackValue(args->at(1));
3739   VisitForAccumulatorValue(args->at(0));
3740 
3741   // All aliases of the same register have disjoint lifetimes.
3742   Register array = r3;
3743   Register elements = no_reg;  // Will be r3.
3744   Register result = no_reg;    // Will be r3.
3745   Register separator = r4;
3746   Register array_length = r5;
3747   Register result_pos = no_reg;  // Will be r5
3748   Register string_length = r6;
3749   Register string = r7;
3750   Register element = r8;
3751   Register elements_end = r9;
3752   Register scratch1 = r10;
3753   Register scratch2 = r11;
3754 
3755   // Separator operand is on the stack.
3756   __ pop(separator);
3757 
3758   // Check that the array is a JSArray.
3759   __ JumpIfSmi(array, &bailout);
3760   __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
3761   __ bne(&bailout);
3762 
3763   // Check that the array has fast elements.
3764   __ CheckFastElements(scratch1, scratch2, &bailout);
3765 
3766   // If the array has length zero, return the empty string.
3767   __ LoadP(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
3768   __ SmiUntag(array_length);
3769   __ cmpi(array_length, Operand::Zero());
3770   __ bne(&non_trivial_array);
3771   __ LoadRoot(r3, Heap::kempty_stringRootIndex);
3772   __ b(&done);
3773 
3774   __ bind(&non_trivial_array);
3775 
3776   // Get the FixedArray containing array's elements.
3777   elements = array;
3778   __ LoadP(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3779   array = no_reg;  // End of array's live range.
3780 
3781   // Check that all array elements are sequential one-byte strings, and
3782   // accumulate the sum of their lengths, as a smi-encoded value.
3783   __ li(string_length, Operand::Zero());
3784   __ addi(element, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3785   __ ShiftLeftImm(elements_end, array_length, Operand(kPointerSizeLog2));
3786   __ add(elements_end, element, elements_end);
3787   // Loop condition: while (element < elements_end).
3788   // Live values in registers:
3789   //   elements: Fixed array of strings.
3790   //   array_length: Length of the fixed array of strings (not smi)
3791   //   separator: Separator string
3792   //   string_length: Accumulated sum of string lengths (smi).
3793   //   element: Current array element.
3794   //   elements_end: Array end.
3795   if (generate_debug_code_) {
3796     __ cmpi(array_length, Operand::Zero());
3797     __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin);
3798   }
3799   __ bind(&loop);
3800   __ LoadP(string, MemOperand(element));
3801   __ addi(element, element, Operand(kPointerSize));
3802   __ JumpIfSmi(string, &bailout);
3803   __ LoadP(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3804   __ lbz(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3805   __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
3806   __ LoadP(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
3807 
3808   __ AddAndCheckForOverflow(string_length, string_length, scratch1, scratch2,
3809                             r0);
3810   __ BranchOnOverflow(&bailout);
3811 
3812   __ cmp(element, elements_end);
3813   __ blt(&loop);
3814 
3815   // If array_length is 1, return elements[0], a string.
3816   __ cmpi(array_length, Operand(1));
3817   __ bne(&not_size_one_array);
3818   __ LoadP(r3, FieldMemOperand(elements, FixedArray::kHeaderSize));
3819   __ b(&done);
3820 
3821   __ bind(&not_size_one_array);
3822 
3823   // Live values in registers:
3824   //   separator: Separator string
3825   //   array_length: Length of the array.
3826   //   string_length: Sum of string lengths (smi).
3827   //   elements: FixedArray of strings.
3828 
3829   // Check that the separator is a flat one-byte string.
3830   __ JumpIfSmi(separator, &bailout);
3831   __ LoadP(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
3832   __ lbz(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3833   __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
3834 
3835   // Add (separator length times array_length) - separator length to the
3836   // string_length to get the length of the result string.
3837   __ LoadP(scratch1,
3838            FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
3839   __ sub(string_length, string_length, scratch1);
3840 #if V8_TARGET_ARCH_PPC64
3841   __ SmiUntag(scratch1, scratch1);
3842   __ Mul(scratch2, array_length, scratch1);
3843   // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
3844   // zero.
3845   __ ShiftRightImm(ip, scratch2, Operand(31), SetRC);
3846   __ bne(&bailout, cr0);
3847   __ SmiTag(scratch2, scratch2);
3848 #else
3849   // array_length is not a smi but the other values are, so the result is a smi.
3850   __ mullw(scratch2, array_length, scratch1);
3851   __ mulhw(ip, array_length, scratch1);
3852   // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
3853   // zero.
3854   __ cmpi(ip, Operand::Zero());
3855   __ bne(&bailout);
3856   __ cmpwi(scratch2, Operand::Zero());
3857   __ blt(&bailout);
3858 #endif
3859 
3860   __ AddAndCheckForOverflow(string_length, string_length, scratch2, scratch1,
3861                             r0);
3862   __ BranchOnOverflow(&bailout);
3863   __ SmiUntag(string_length);
3864 
3865   // Bailout for large object allocations.
3866   __ Cmpi(string_length, Operand(Page::kMaxRegularHeapObjectSize), r0);
3867   __ bgt(&bailout);
3868 
3869   // Get first element in the array to free up the elements register to be used
3870   // for the result.
3871   __ addi(element, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3872   result = elements;  // End of live range for elements.
3873   elements = no_reg;
3874   // Live values in registers:
3875   //   element: First array element
3876   //   separator: Separator string
3877   //   string_length: Length of result string (not smi)
3878   //   array_length: Length of the array.
3879   __ AllocateOneByteString(result, string_length, scratch1, scratch2,
3880                            elements_end, &bailout);
3881   // Prepare for looping. Set up elements_end to end of the array. Set
3882   // result_pos to the position of the result where to write the first
3883   // character.
3884   __ ShiftLeftImm(elements_end, array_length, Operand(kPointerSizeLog2));
3885   __ add(elements_end, element, elements_end);
3886   result_pos = array_length;  // End of live range for array_length.
3887   array_length = no_reg;
3888   __ addi(result_pos, result,
3889           Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3890 
3891   // Check the length of the separator.
3892   __ LoadP(scratch1,
3893            FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
3894   __ CmpSmiLiteral(scratch1, Smi::FromInt(1), r0);
3895   __ beq(&one_char_separator);
3896   __ bgt(&long_separator);
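  // Fall through to the empty-separator copy loop; one-character and longer
  // separators are handled by the loops below.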
3897 
3898   // Empty separator case
3899   __ bind(&empty_separator_loop);
3900   // Live values in registers:
3901   //   result_pos: the position to which we are currently copying characters.
3902   //   element: Current array element.
3903   //   elements_end: Array end.
3904 
3905   // Copy next array element to the result.
3906   __ LoadP(string, MemOperand(element));
3907   __ addi(element, element, Operand(kPointerSize));
3908   __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
3909   __ SmiUntag(string_length);
3910   __ addi(string, string,
3911           Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3912   __ CopyBytes(string, result_pos, string_length, scratch1);
3913   __ cmp(element, elements_end);
3914   __ blt(&empty_separator_loop);  // End while (element < elements_end).
3915   DCHECK(result.is(r3));
3916   __ b(&done);
3917 
3918   // One-character separator case
3919   __ bind(&one_char_separator);
3920   // Replace separator with its one-byte character value.
3921   __ lbz(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
3922   // Jump into the loop after the code that copies the separator, so the first
3923   // element is not preceded by a separator
3924   __ b(&one_char_separator_loop_entry);
3925 
3926   __ bind(&one_char_separator_loop);
3927   // Live values in registers:
3928   //   result_pos: the position to which we are currently copying characters.
3929   //   element: Current array element.
3930   //   elements_end: Array end.
3931   //   separator: Single separator one-byte char (in lower byte).
3932 
3933   // Copy the separator character to the result.
3934   __ stb(separator, MemOperand(result_pos));
3935   __ addi(result_pos, result_pos, Operand(1));
3936 
3937   // Copy next array element to the result.
3938   __ bind(&one_char_separator_loop_entry);
3939   __ LoadP(string, MemOperand(element));
3940   __ addi(element, element, Operand(kPointerSize));
3941   __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
3942   __ SmiUntag(string_length);
3943   __ addi(string, string,
3944           Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3945   __ CopyBytes(string, result_pos, string_length, scratch1);
3946   __ cmpl(element, elements_end);
3947   __ blt(&one_char_separator_loop);  // End while (element < elements_end).
3948   DCHECK(result.is(r3));
3949   __ b(&done);
3950 
3951   // Long separator case (separator is more than one character). Entry is at the
3952   // label long_separator below.
3953   __ bind(&long_separator_loop);
3954   // Live values in registers:
3955   //   result_pos: the position to which we are currently copying characters.
3956   //   element: Current array element.
3957   //   elements_end: Array end.
3958   //   separator: Separator string.
3959 
3960   // Copy the separator to the result.
3961   __ LoadP(string_length, FieldMemOperand(separator, String::kLengthOffset));
3962   __ SmiUntag(string_length);
3963   __ addi(string, separator,
3964           Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3965   __ CopyBytes(string, result_pos, string_length, scratch1);
3966 
3967   __ bind(&long_separator);
3968   __ LoadP(string, MemOperand(element));
3969   __ addi(element, element, Operand(kPointerSize));
3970   __ LoadP(string_length, FieldMemOperand(string, String::kLengthOffset));
3971   __ SmiUntag(string_length);
3972   __ addi(string, string,
3973           Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3974   __ CopyBytes(string, result_pos, string_length, scratch1);
3975   __ cmpl(element, elements_end);
3976   __ blt(&long_separator_loop);  // End while (element < elements_end).
3977   DCHECK(result.is(r3));
3978   __ b(&done);
3979 
3980   __ bind(&bailout);
3981   __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
3982   __ bind(&done);
3983   context()->Plug(r3);
3984 }
3985 
3986 
3987 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
3988   DCHECK(expr->arguments()->length() == 0);
3989   ExternalReference debug_is_active =
3990       ExternalReference::debug_is_active_address(isolate());
3991   __ mov(ip, Operand(debug_is_active));
3992   __ lbz(r3, MemOperand(ip));
3993   __ SmiTag(r3);
3994   context()->Plug(r3);
3995 }
3996 
3997 
3998 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
3999   ZoneList<Expression*>* args = expr->arguments();
4000   DCHECK_EQ(2, args->length());
4001   VisitForStackValue(args->at(0));
4002   VisitForStackValue(args->at(1));
4003 
4004   Label runtime, done;
4005 
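  // Allocate and initialize the JSIteratorResult inline; fall back to the
  // runtime if inline allocation fails.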
4006   __ Allocate(JSIteratorResult::kSize, r3, r5, r6, &runtime, TAG_OBJECT);
4007   __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r4);
4008   __ Pop(r5, r6);
4009   __ LoadRoot(r7, Heap::kEmptyFixedArrayRootIndex);
4010   __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
4011   __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
4012   __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
4013   __ StoreP(r5, FieldMemOperand(r3, JSIteratorResult::kValueOffset), r0);
4014   __ StoreP(r6, FieldMemOperand(r3, JSIteratorResult::kDoneOffset), r0);
4015   STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
4016   __ b(&done);
4017 
4018   __ bind(&runtime);
4019   __ CallRuntime(Runtime::kCreateIterResultObject);
4020 
4021   __ bind(&done);
4022   context()->Plug(r3);
4023 }
4024 
4025 
4026 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
4027   // Push undefined as the receiver.
4028   __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
4029   __ push(r3);
4030 
4031   __ LoadNativeContextSlot(expr->context_index(), r3);
4032 }
4033 
4034 
4035 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
4036   ZoneList<Expression*>* args = expr->arguments();
4037   int arg_count = args->length();
4038 
4039   SetCallPosition(expr);
4040   __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
4041   __ mov(r3, Operand(arg_count));
4042   __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
4043           RelocInfo::CODE_TARGET);
4044 }
4045 
4046 
4047 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4048   ZoneList<Expression*>* args = expr->arguments();
4049   int arg_count = args->length();
4050 
4051   if (expr->is_jsruntime()) {
4052     Comment cmnt(masm_, "[ CallRuntime");
4053     EmitLoadJSRuntimeFunction(expr);
4054 
4055     // Push the target function under the receiver.
4056     __ LoadP(ip, MemOperand(sp, 0));
4057     __ push(ip);
4058     __ StoreP(r3, MemOperand(sp, kPointerSize));
4059 
4060     // Push the arguments ("left-to-right").
4061     for (int i = 0; i < arg_count; i++) {
4062       VisitForStackValue(args->at(i));
4063     }
4064 
4065     PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4066     EmitCallJSRuntimeFunction(expr);
4067 
4068     // Restore context register.
4069     __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4070 
4071     context()->DropAndPlug(1, r3);
4072 
4073   } else {
4074     const Runtime::Function* function = expr->function();
4075     switch (function->function_id) {
4076 #define CALL_INTRINSIC_GENERATOR(Name)     \
4077   case Runtime::kInline##Name: {           \
4078     Comment cmnt(masm_, "[ Inline" #Name); \
4079     return Emit##Name(expr);               \
4080   }
4081       FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
4082 #undef CALL_INTRINSIC_GENERATOR
4083       default: {
4084         Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
4085         // Push the arguments ("left-to-right").
4086         for (int i = 0; i < arg_count; i++) {
4087           VisitForStackValue(args->at(i));
4088         }
4089 
4090         // Call the C runtime function.
4091         PrepareForBailoutForId(expr->CallId(), NO_REGISTERS);
4092         __ CallRuntime(expr->function(), arg_count);
4093         context()->Plug(r3);
4094       }
4095     }
4096   }
4097 }
4098 
4099 
4100 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4101   switch (expr->op()) {
4102     case Token::DELETE: {
4103       Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4104       Property* property = expr->expression()->AsProperty();
4105       VariableProxy* proxy = expr->expression()->AsVariableProxy();
4106 
4107       if (property != NULL) {
4108         VisitForStackValue(property->obj());
4109         VisitForStackValue(property->key());
4110         __ CallRuntime(is_strict(language_mode())
4111                            ? Runtime::kDeleteProperty_Strict
4112                            : Runtime::kDeleteProperty_Sloppy);
4113         context()->Plug(r3);
4114       } else if (proxy != NULL) {
4115         Variable* var = proxy->var();
4116         // Delete of an unqualified identifier is disallowed in strict mode but
4117         // "delete this" is allowed.
4118         bool is_this = var->HasThisName(isolate());
4119         DCHECK(is_sloppy(language_mode()) || is_this);
4120         if (var->IsUnallocatedOrGlobalSlot()) {
4121           __ LoadGlobalObject(r5);
4122           __ mov(r4, Operand(var->name()));
4123           __ Push(r5, r4);
4124           __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
4125           context()->Plug(r3);
4126         } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4127           // Result of deleting non-global, non-dynamic variables is false.
4128           // The subexpression does not have side effects.
4129           context()->Plug(is_this);
4130         } else {
4131           // Non-global variable.  Call the runtime to try to delete from the
4132           // context where the variable was introduced.
4133           DCHECK(!context_register().is(r5));
4134           __ mov(r5, Operand(var->name()));
4135           __ Push(context_register(), r5);
4136           __ CallRuntime(Runtime::kDeleteLookupSlot);
4137           context()->Plug(r3);
4138         }
4139       } else {
4140         // Result of deleting non-property, non-variable reference is true.
4141         // The subexpression may have side effects.
4142         VisitForEffect(expr->expression());
4143         context()->Plug(true);
4144       }
4145       break;
4146     }
4147 
4148     case Token::VOID: {
4149       Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4150       VisitForEffect(expr->expression());
4151       context()->Plug(Heap::kUndefinedValueRootIndex);
4152       break;
4153     }
4154 
4155     case Token::NOT: {
4156       Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4157       if (context()->IsEffect()) {
4158         // Unary NOT has no side effects so it's only necessary to visit the
4159         // subexpression.  Match the optimizing compiler by not branching.
4160         VisitForEffect(expr->expression());
4161       } else if (context()->IsTest()) {
4162         const TestContext* test = TestContext::cast(context());
4163         // The labels are swapped for the recursive call.
4164         VisitForControl(expr->expression(), test->false_label(),
4165                         test->true_label(), test->fall_through());
4166         context()->Plug(test->true_label(), test->false_label());
4167       } else {
4168         // We handle value contexts explicitly rather than simply visiting
4169         // for control and plugging the control flow into the context,
4170         // because we need to prepare a pair of extra administrative AST ids
4171         // for the optimizing compiler.
4172         DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4173         Label materialize_true, materialize_false, done;
4174         VisitForControl(expr->expression(), &materialize_false,
4175                         &materialize_true, &materialize_true);
4176         __ bind(&materialize_true);
4177         PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4178         __ LoadRoot(r3, Heap::kTrueValueRootIndex);
4179         if (context()->IsStackValue()) __ push(r3);
4180         __ b(&done);
4181         __ bind(&materialize_false);
4182         PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4183         __ LoadRoot(r3, Heap::kFalseValueRootIndex);
4184         if (context()->IsStackValue()) __ push(r3);
4185         __ bind(&done);
4186       }
4187       break;
4188     }
4189 
4190     case Token::TYPEOF: {
4191       Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4192       {
4193         AccumulatorValueContext context(this);
4194         VisitForTypeofValue(expr->expression());
4195       }
4196       __ mr(r6, r3);
4197       TypeofStub typeof_stub(isolate());
4198       __ CallStub(&typeof_stub);
4199       context()->Plug(r3);
4200       break;
4201     }
4202 
4203     default:
4204       UNREACHABLE();
4205   }
4206 }
4207 
4208 
4209 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4210   DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
4211 
4212   Comment cmnt(masm_, "[ CountOperation");
4213 
4214   Property* prop = expr->expression()->AsProperty();
4215   LhsKind assign_type = Property::GetAssignType(prop);
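  // Illustrative target shapes (hypothetical sources):
  //   x++        -> VARIABLE
  //   o.x++      -> NAMED_PROPERTY
  //   o[k]++     -> KEYED_PROPERTY
  //   super.x++  -> NAMED_SUPER_PROPERTY
  //   super[k]++ -> KEYED_SUPER_PROPERTY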
4216 
4217   // Evaluate expression and get value.
4218   if (assign_type == VARIABLE) {
4219     DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4220     AccumulatorValueContext context(this);
4221     EmitVariableLoad(expr->expression()->AsVariableProxy());
4222   } else {
4223     // Reserve space for result of postfix operation.
4224     if (expr->is_postfix() && !context()->IsEffect()) {
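      // Push a placeholder smi; it is overwritten further down with the old
      // value once the operand has been loaded (see the StoreP calls below).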
4225       __ LoadSmiLiteral(ip, Smi::FromInt(0));
4226       __ push(ip);
4227     }
4228     switch (assign_type) {
4229       case NAMED_PROPERTY: {
4230         // Put the object both on the stack and in the register.
4231         VisitForStackValue(prop->obj());
4232         __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4233         EmitNamedPropertyLoad(prop);
4234         break;
4235       }
4236 
4237       case NAMED_SUPER_PROPERTY: {
4238         VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4239         VisitForAccumulatorValue(
4240             prop->obj()->AsSuperPropertyReference()->home_object());
4241         __ Push(result_register());
4242         const Register scratch = r4;
4243         __ LoadP(scratch, MemOperand(sp, kPointerSize));
4244         __ Push(scratch, result_register());
4245         EmitNamedSuperPropertyLoad(prop);
4246         break;
4247       }
4248 
4249       case KEYED_SUPER_PROPERTY: {
4250         VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
4251         VisitForAccumulatorValue(
4252             prop->obj()->AsSuperPropertyReference()->home_object());
4253         const Register scratch = r4;
4254         const Register scratch1 = r5;
4255         __ mr(scratch, result_register());
4256         VisitForAccumulatorValue(prop->key());
4257         __ Push(scratch, result_register());
4258         __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
4259         __ Push(scratch1, scratch, result_register());
4260         EmitKeyedSuperPropertyLoad(prop);
4261         break;
4262       }
4263 
4264       case KEYED_PROPERTY: {
4265         VisitForStackValue(prop->obj());
4266         VisitForStackValue(prop->key());
4267         __ LoadP(LoadDescriptor::ReceiverRegister(),
4268                  MemOperand(sp, 1 * kPointerSize));
4269         __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4270         EmitKeyedPropertyLoad(prop);
4271         break;
4272       }
4273 
4274       case VARIABLE:
4275         UNREACHABLE();
4276     }
4277   }
4278 
4279   // We need a second deoptimization point after loading the value,
4280   // because evaluating the property load may have side effects.
4281   if (assign_type == VARIABLE) {
4282     PrepareForBailout(expr->expression(), TOS_REG);
4283   } else {
4284     PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4285   }
4286 
4287   // Inline smi case if we are in a loop.
4288   Label stub_call, done;
4289   JumpPatchSite patch_site(masm_);
4290 
4291   int count_value = expr->op() == Token::INC ? 1 : -1;
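  // Both ++ and -- are emitted as an ADD of +1 or -1, so a single
  // BinaryOpIC(Token::ADD) handles the generic case below.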
4292   if (ShouldInlineSmiCase(expr->op())) {
4293     Label slow;
4294     patch_site.EmitJumpIfNotSmi(r3, &slow);
4295 
4296     // Save result for postfix expressions.
4297     if (expr->is_postfix()) {
4298       if (!context()->IsEffect()) {
4299         // Save the result on the stack. If we have a named or keyed property
4300         // we store the result under the receiver that is currently on top
4301         // of the stack.
4302         switch (assign_type) {
4303           case VARIABLE:
4304             __ push(r3);
4305             break;
4306           case NAMED_PROPERTY:
4307             __ StoreP(r3, MemOperand(sp, kPointerSize));
4308             break;
4309           case NAMED_SUPER_PROPERTY:
4310             __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4311             break;
4312           case KEYED_PROPERTY:
4313             __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4314             break;
4315           case KEYED_SUPER_PROPERTY:
4316             __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
4317             break;
4318         }
4319       }
4320     }
4321 
4322     Register scratch1 = r4;
4323     Register scratch2 = r5;
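    // Fast path: add the count value as a tagged smi and check for overflow.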
4324     __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value));
4325     __ AddAndCheckForOverflow(r3, r3, scratch1, scratch2, r0);
4326     __ BranchOnNoOverflow(&done);
4327     // On overflow, undo the inline addition and call the stub instead.
4328     __ sub(r3, r3, scratch1);
4329     __ b(&stub_call);
4330     __ bind(&slow);
4331   }
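  // Outside strong mode, coerce the operand to a number before the count
  // operation; in strong mode the operand is expected to already be a number.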
4332   if (!is_strong(language_mode())) {
4333     ToNumberStub convert_stub(isolate());
4334     __ CallStub(&convert_stub);
4335     PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4336   }
4337 
4338   // Save result for postfix expressions.
4339   if (expr->is_postfix()) {
4340     if (!context()->IsEffect()) {
4341       // Save the result on the stack. If we have a named or keyed property
4342       // we store the result under the receiver that is currently on top
4343       // of the stack.
4344       switch (assign_type) {
4345         case VARIABLE:
4346           __ push(r3);
4347           break;
4348         case NAMED_PROPERTY:
4349           __ StoreP(r3, MemOperand(sp, kPointerSize));
4350           break;
4351         case NAMED_SUPER_PROPERTY:
4352           __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4353           break;
4354         case KEYED_PROPERTY:
4355           __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
4356           break;
4357         case KEYED_SUPER_PROPERTY:
4358           __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
4359           break;
4360       }
4361     }
4362   }
4363 
4364   __ bind(&stub_call);
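  // Generic path: move the original value into r4 and load the count as a
  // smi into r3 for the BinaryOpIC call below.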
4365   __ mr(r4, r3);
4366   __ LoadSmiLiteral(r3, Smi::FromInt(count_value));
4367 
4368   SetExpressionPosition(expr);
4369 
4370   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD,
4371                                               strength(language_mode())).code();
4372   CallIC(code, expr->CountBinOpFeedbackId());
4373   patch_site.EmitPatchInfo();
4374   __ bind(&done);
4375 
4376   if (is_strong(language_mode())) {
4377     PrepareForBailoutForId(expr->ToNumberId(), TOS_REG);
4378   }
4379   // Store the value returned in r3.
4380   switch (assign_type) {
4381     case VARIABLE:
4382       if (expr->is_postfix()) {
4383         {
4384           EffectContext context(this);
4385           EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4386                                  Token::ASSIGN, expr->CountSlot());
4387           PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4388           context.Plug(r3);
4389         }
4390         // For all contexts except the effect context, the result is
4391         // already on top of the stack.
4392         if (!context()->IsEffect()) {
4393           context()->PlugTOS();
4394         }
4395       } else {
4396         EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4397                                Token::ASSIGN, expr->CountSlot());
4398         PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4399         context()->Plug(r3);
4400       }
4401       break;
4402     case NAMED_PROPERTY: {
4403       __ mov(StoreDescriptor::NameRegister(),
4404              Operand(prop->key()->AsLiteral()->value()));
4405       __ pop(StoreDescriptor::ReceiverRegister());
4406       EmitLoadStoreICSlot(expr->CountSlot());
4407       CallStoreIC();
4408       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4409       if (expr->is_postfix()) {
4410         if (!context()->IsEffect()) {
4411           context()->PlugTOS();
4412         }
4413       } else {
4414         context()->Plug(r3);
4415       }
4416       break;
4417     }
4418     case NAMED_SUPER_PROPERTY: {
4419       EmitNamedSuperPropertyStore(prop);
4420       if (expr->is_postfix()) {
4421         if (!context()->IsEffect()) {
4422           context()->PlugTOS();
4423         }
4424       } else {
4425         context()->Plug(r3);
4426       }
4427       break;
4428     }
4429     case KEYED_SUPER_PROPERTY: {
4430       EmitKeyedSuperPropertyStore(prop);
4431       if (expr->is_postfix()) {
4432         if (!context()->IsEffect()) {
4433           context()->PlugTOS();
4434         }
4435       } else {
4436         context()->Plug(r3);
4437       }
4438       break;
4439     }
4440     case KEYED_PROPERTY: {
4441       __ Pop(StoreDescriptor::ReceiverRegister(),
4442              StoreDescriptor::NameRegister());
4443       Handle<Code> ic =
4444           CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
4445       EmitLoadStoreICSlot(expr->CountSlot());
4446       CallIC(ic);
4447       PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4448       if (expr->is_postfix()) {
4449         if (!context()->IsEffect()) {
4450           context()->PlugTOS();
4451         }
4452       } else {
4453         context()->Plug(r3);
4454       }
4455       break;
4456     }
4457   }
4458 }
4459 
4460 
4461 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4462                                                  Expression* sub_expr,
4463                                                  Handle<String> check) {
4464   Label materialize_true, materialize_false;
4465   Label* if_true = NULL;
4466   Label* if_false = NULL;
4467   Label* fall_through = NULL;
4468   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
4469                          &if_false, &fall_through);
4470 
4471   {
4472     AccumulatorValueContext context(this);
4473     VisitForTypeofValue(sub_expr);
4474   }
4475   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4476 
4477   Factory* factory = isolate()->factory();
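  // Dispatch on the typeof string literal; each branch inspects the value's
  // map or instance type directly instead of materializing the typeof result.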
4478   if (String::Equals(check, factory->number_string())) {
4479     __ JumpIfSmi(r3, if_true);
4480     __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
4481     __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
4482     __ cmp(r3, ip);
4483     Split(eq, if_true, if_false, fall_through);
4484   } else if (String::Equals(check, factory->string_string())) {
4485     __ JumpIfSmi(r3, if_false);
4486     __ CompareObjectType(r3, r3, r4, FIRST_NONSTRING_TYPE);
4487     Split(lt, if_true, if_false, fall_through);
4488   } else if (String::Equals(check, factory->symbol_string())) {
4489     __ JumpIfSmi(r3, if_false);
4490     __ CompareObjectType(r3, r3, r4, SYMBOL_TYPE);
4491     Split(eq, if_true, if_false, fall_through);
4492   } else if (String::Equals(check, factory->boolean_string())) {
4493     __ CompareRoot(r3, Heap::kTrueValueRootIndex);
4494     __ beq(if_true);
4495     __ CompareRoot(r3, Heap::kFalseValueRootIndex);
4496     Split(eq, if_true, if_false, fall_through);
4497   } else if (String::Equals(check, factory->undefined_string())) {
4498     __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
4499     __ beq(if_true);
4500     __ JumpIfSmi(r3, if_false);
4501     // Check for undetectable objects => true.
4502     __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
4503     __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
4504     __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
4505     Split(ne, if_true, if_false, fall_through, cr0);
4506 
4507   } else if (String::Equals(check, factory->function_string())) {
4508     __ JumpIfSmi(r3, if_false);
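    // typeof is "function" only for maps that are callable and not
    // undetectable (both bits are masked, then compared against callable).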
4509     __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
4510     __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
4511     __ andi(r4, r4,
4512             Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
4513     __ cmpi(r4, Operand(1 << Map::kIsCallable));
4514     Split(eq, if_true, if_false, fall_through);
4515   } else if (String::Equals(check, factory->object_string())) {
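    // typeof null is "object"; other JS receivers qualify unless they are
    // callable or undetectable (checked below).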
4516     __ JumpIfSmi(r3, if_false);
4517     __ CompareRoot(r3, Heap::kNullValueRootIndex);
4518     __ beq(if_true);
4519     STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
4520     __ CompareObjectType(r3, r3, r4, FIRST_JS_RECEIVER_TYPE);
4521     __ blt(if_false);
4522     // Check for callable or undetectable objects => false.
4523     __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
4524     __ andi(r0, r4,
4525             Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
4526     Split(eq, if_true, if_false, fall_through, cr0);
4527 // clang-format off
4528 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)   \
4529   } else if (String::Equals(check, factory->type##_string())) { \
4530     __ JumpIfSmi(r3, if_false);                                 \
4531     __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));    \
4532     __ CompareRoot(r3, Heap::k##Type##MapRootIndex);            \
4533     Split(eq, if_true, if_false, fall_through);
4534   SIMD128_TYPES(SIMD128_TYPE)
4535 #undef SIMD128_TYPE
4536     // clang-format on
4537   } else {
4538     if (if_false != fall_through) __ b(if_false);
4539   }
4540   context()->Plug(if_true, if_false);
4541 }
4542 
4543 
4544 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4545   Comment cmnt(masm_, "[ CompareOperation");
4546   SetExpressionPosition(expr);
4547 
4548   // First we try a fast inlined version of the compare when one of
4549   // the operands is a literal.
4550   if (TryLiteralCompare(expr)) return;
4551 
4552   // Always perform the comparison for its control flow.  Pack the result
4553   // into the expression's context after the comparison is performed.
4554   Label materialize_true, materialize_false;
4555   Label* if_true = NULL;
4556   Label* if_false = NULL;
4557   Label* fall_through = NULL;
4558   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
4559                          &if_false, &fall_through);
4560 
4561   Token::Value op = expr->op();
4562   VisitForStackValue(expr->left());
4563   switch (op) {
4564     case Token::IN:
4565       VisitForStackValue(expr->right());
4566       __ CallRuntime(Runtime::kHasProperty);
4567       PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4568       __ CompareRoot(r3, Heap::kTrueValueRootIndex);
4569       Split(eq, if_true, if_false, fall_through);
4570       break;
4571 
4572     case Token::INSTANCEOF: {
4573       VisitForAccumulatorValue(expr->right());
4574       __ pop(r4);
4575       InstanceOfStub stub(isolate());
4576       __ CallStub(&stub);
4577       PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4578       __ CompareRoot(r3, Heap::kTrueValueRootIndex);
4579       Split(eq, if_true, if_false, fall_through);
4580       break;
4581     }
4582 
4583     default: {
4584       VisitForAccumulatorValue(expr->right());
4585       Condition cond = CompareIC::ComputeCondition(op);
4586       __ pop(r4);
4587 
4588       bool inline_smi_code = ShouldInlineSmiCase(op);
4589       JumpPatchSite patch_site(masm_);
4590       if (inline_smi_code) {
4591         Label slow_case;
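        // If both operands are smis (their bitwise OR still has a clear smi
        // tag bit), compare them directly without going through the IC.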
4592         __ orx(r5, r3, r4);
4593         patch_site.EmitJumpIfNotSmi(r5, &slow_case);
4594         __ cmp(r4, r3);
4595         Split(cond, if_true, if_false, NULL);
4596         __ bind(&slow_case);
4597       }
4598 
4599       Handle<Code> ic = CodeFactory::CompareIC(
4600                             isolate(), op, strength(language_mode())).code();
4601       CallIC(ic, expr->CompareOperationFeedbackId());
4602       patch_site.EmitPatchInfo();
4603       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4604       __ cmpi(r3, Operand::Zero());
4605       Split(cond, if_true, if_false, fall_through);
4606     }
4607   }
4608 
4609   // Convert the result of the comparison into one expected for this
4610   // expression's context.
4611   context()->Plug(if_true, if_false);
4612 }
4613 
4614 
4615 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4616                                               Expression* sub_expr,
4617                                               NilValue nil) {
4618   Label materialize_true, materialize_false;
4619   Label* if_true = NULL;
4620   Label* if_false = NULL;
4621   Label* fall_through = NULL;
4622   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
4623                          &if_false, &fall_through);
4624 
4625   VisitForAccumulatorValue(sub_expr);
4626   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
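  // Strict equality compares directly against the null or undefined root;
  // abstract equality goes through the CompareNilIC, which treats null and
  // undefined as equivalent.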
4627   if (expr->op() == Token::EQ_STRICT) {
4628     Heap::RootListIndex nil_value = nil == kNullValue
4629                                         ? Heap::kNullValueRootIndex
4630                                         : Heap::kUndefinedValueRootIndex;
4631     __ LoadRoot(r4, nil_value);
4632     __ cmp(r3, r4);
4633     Split(eq, if_true, if_false, fall_through);
4634   } else {
4635     Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
4636     CallIC(ic, expr->CompareOperationFeedbackId());
4637     __ CompareRoot(r3, Heap::kTrueValueRootIndex);
4638     Split(eq, if_true, if_false, fall_through);
4639   }
4640   context()->Plug(if_true, if_false);
4641 }
4642 
4643 
4644 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4645   __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4646   context()->Plug(r3);
4647 }
4648 
4649 
4650 Register FullCodeGenerator::result_register() { return r3; }
4651 
4652 
4653 Register FullCodeGenerator::context_register() { return cp; }
4654 
4655 
4656 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4657   DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
4658   __ StoreP(value, MemOperand(fp, frame_offset), r0);
4659 }
4660 
4661 
4662 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4663   __ LoadP(dst, ContextMemOperand(cp, context_index), r0);
4664 }
4665 
4666 
4667 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4668   Scope* closure_scope = scope()->ClosureScope();
4669   if (closure_scope->is_script_scope() ||
4670       closure_scope->is_module_scope()) {
4671     // Contexts nested in the native context have a canonical empty function
4672     // as their closure, not the anonymous closure containing the global
4673     // code.
4674     __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip);
4675   } else if (closure_scope->is_eval_scope()) {
4676     // Contexts created by a call to eval have the same closure as the
4677     // context calling eval, not the anonymous closure containing the eval
4678     // code.  Fetch it from the context.
4679     __ LoadP(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX));
4680   } else {
4681     DCHECK(closure_scope->is_function_scope());
4682     __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4683   }
4684   __ push(ip);
4685 }
4686 
4687 
4688 // ----------------------------------------------------------------------------
4689 // Non-local control flow support.
4690 
4691 void FullCodeGenerator::EnterFinallyBlock() {
4692   DCHECK(!result_register().is(r4));
4693   // Store result register while executing finally block.
4694   __ push(result_register());
4695   // Cook return address in link register to stack (smi encoded Code* delta)
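  // Storing the address as an smi-encoded offset from the code object keeps
  // the value on the stack GC-safe even if the code object moves.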
4696   __ mflr(r4);
4697   __ mov(ip, Operand(masm_->CodeObject()));
4698   __ sub(r4, r4, ip);
4699   __ SmiTag(r4);
4700 
4701   // Store the cooked return address while executing the finally block.
4702   __ push(r4);
4703 
4704   // Store pending message while executing finally block.
4705   ExternalReference pending_message_obj =
4706       ExternalReference::address_of_pending_message_obj(isolate());
4707   __ mov(ip, Operand(pending_message_obj));
4708   __ LoadP(r4, MemOperand(ip));
4709   __ push(r4);
4710 
4711   ClearPendingMessage();
4712 }
4713 
4714 
4715 void FullCodeGenerator::ExitFinallyBlock() {
4716   DCHECK(!result_register().is(r4));
4717   // Restore pending message from stack.
4718   __ pop(r4);
4719   ExternalReference pending_message_obj =
4720       ExternalReference::address_of_pending_message_obj(isolate());
4721   __ mov(ip, Operand(pending_message_obj));
4722   __ StoreP(r4, MemOperand(ip));
4723 
4724   // Restore result register from stack.
4725   __ pop(r4);
4726 
4727   // Uncook return address and return.
4728   __ pop(result_register());
4729   __ SmiUntag(r4);
4730   __ mov(ip, Operand(masm_->CodeObject()));
4731   __ add(ip, ip, r4);
4732   __ mtctr(ip);
4733   __ bctr();
4734 }
4735 
4736 
4737 void FullCodeGenerator::ClearPendingMessage() {
4738   DCHECK(!result_register().is(r4));
4739   ExternalReference pending_message_obj =
4740       ExternalReference::address_of_pending_message_obj(isolate());
4741   __ LoadRoot(r4, Heap::kTheHoleValueRootIndex);
4742   __ mov(ip, Operand(pending_message_obj));
4743   __ StoreP(r4, MemOperand(ip));
4744 }
4745 
4746 
4747 void FullCodeGenerator::EmitLoadStoreICSlot(FeedbackVectorSlot slot) {
4748   DCHECK(!slot.IsInvalid());
4749   __ mov(VectorStoreICTrampolineDescriptor::SlotRegister(),
4750          Operand(SmiFromSlot(slot)));
4751 }
4752 
4753 
4754 #undef __
4755 
4756 
4757 void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc,
4758                             BackEdgeState target_state,
4759                             Code* replacement_code) {
4760   Address mov_address = Assembler::target_address_from_return_address(pc);
4761   Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
4762   Isolate* isolate = unoptimized_code->GetIsolate();
4763   CodePatcher patcher(isolate, cmp_address, 1);
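  // Only the single instruction at cmp_address (cmpi vs. crset) is rewritten
  // here; the call target in the mov sequence is updated separately below.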
4764 
4765   switch (target_state) {
4766     case INTERRUPT: {
4767       //  <decrement profiling counter>
4768       //         cmpi    r6, 0
4769       //         bge     <ok>            ;; not changed
4770       //         mov     r12, <interrupt stub address>
4771       //         mtlr    r12
4772       //         blrl
4773       //  <reset profiling counter>
4774       //  ok-label
4775       patcher.masm()->cmpi(r6, Operand::Zero());
4776       break;
4777     }
4778     case ON_STACK_REPLACEMENT:
4779     case OSR_AFTER_STACK_CHECK:
4780       //  <decrement profiling counter>
4781       //         crset
4782       //         bge     <ok>            ;; not changed
4783       //         mov     r12, <on-stack replacement address>
4784       //         mtlr    r12
4785       //         blrl
4786       //  <reset profiling counter>
4787       //  ok-label ----- pc_after points here
4788 
4789       // Set the LT bit such that bge is a NOP
4790       patcher.masm()->crset(Assembler::encode_crbit(cr7, CR_LT));
4791       break;
4792   }
4793 
4794   // Replace the stack check address in the mov sequence with the
4795   // entry address of the replacement code.
4796   Assembler::set_target_address_at(isolate, mov_address, unoptimized_code,
4797                                    replacement_code->entry());
4798 
4799   unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
4800       unoptimized_code, mov_address, replacement_code);
4801 }
4802 
4803 
4804 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
4805     Isolate* isolate, Code* unoptimized_code, Address pc) {
4806   Address mov_address = Assembler::target_address_from_return_address(pc);
4807   Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
4808   Address interrupt_address =
4809       Assembler::target_address_at(mov_address, unoptimized_code);
4810 
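  // The patched instruction distinguishes INTERRUPT (cmpi) from the OSR
  // states (crset); the call target then disambiguates ON_STACK_REPLACEMENT
  // from OSR_AFTER_STACK_CHECK.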
4811   if (Assembler::IsCmpImmediate(Assembler::instr_at(cmp_address))) {
4812     DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry());
4813     return INTERRUPT;
4814   }
4815 
4816   DCHECK(Assembler::IsCrSet(Assembler::instr_at(cmp_address)));
4817 
4818   if (interrupt_address == isolate->builtins()->OnStackReplacement()->entry()) {
4819     return ON_STACK_REPLACEMENT;
4820   }
4821 
4822   DCHECK(interrupt_address ==
4823          isolate->builtins()->OsrAfterStackCheck()->entry());
4824   return OSR_AFTER_STACK_CHECK;
4825 }
4826 }  // namespace internal
4827 }  // namespace v8
4828 #endif  // V8_TARGET_ARCH_PPC
4829