// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM64

#include "src/ast/scopes.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/full-codegen/full-codegen.h"
#include "src/ic/ic.h"
#include "src/parsing/parser.h"

#include "src/arm64/code-stubs-arm64.h"
#include "src/arm64/frames-arm64.h"
#include "src/arm64/macro-assembler-arm64.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm), reg_(NoReg) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    if (patch_site_.is_bound()) {
      DCHECK(info_emitted_);
    } else {
      DCHECK(reg_.IsNone());
    }
  }

  void EmitJumpIfNotSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    DCHECK(!reg.Is(csp));
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbz(xzr, 0, target);   // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg, Label* target) {
    // This code will be patched by PatchInlinedSmiCode, in ic-arm64.cc.
    InstructionAccurateScope scope(masm_, 1);
    DCHECK(!info_emitted_);
    DCHECK(reg.Is64Bits());
    DCHECK(!reg.Is(csp));
    reg_ = reg;
    __ bind(&patch_site_);
    __ tbnz(xzr, 0, target);  // Never taken before patched.
  }
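
  // A note on the patch-site trick used by the two emitters above: V8 smis
  // have bit 0 clear, so a smi check is a single test-bit-and-branch on bit
  // 0. Until the IC patches the site, the instruction tests bit 0 of xzr
  // (always 0), i.e.
  //   tbz xzr, #0, target    // before patching: branch always taken
  //   tbz reg, #0, target    // after patching: branch only if reg is a smi
  // EmitPatchInfo() below records reg_ and the site's offset so
  // PatchInlinedSmiCode can rewrite the instruction later.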

  void EmitJumpIfEitherNotSmi(Register reg1, Register reg2, Label* target) {
    UseScratchRegisterScope temps(masm_);
    Register temp = temps.AcquireX();
    __ Orr(temp, reg1, reg2);
    EmitJumpIfNotSmi(temp, target);
  }

  void EmitPatchInfo() {
    Assembler::BlockPoolsScope scope(masm_);
    InlineSmiCheckInfo::Emit(masm_, reg_, &patch_site_);
#ifdef DEBUG
    info_emitted_ = true;
#endif
  }

 private:
  MacroAssembler* masm() { return masm_; }
  MacroAssembler* masm_;
  Label patch_site_;
  Register reg_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
//   - x1: the JS function object being called (i.e. ourselves).
//   - x3: the new target value
//   - cp: our context.
//   - fp: our caller's frame pointer.
//   - jssp: stack pointer.
//   - lr: return address.
//
// The function builds a JS frame. See JavaScriptFrameConstants in
// frames-arm64.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(literal());
  Comment cmnt(masm_, "[ Function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

  if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
    int receiver_offset = info->scope()->num_parameters() * kXRegSize;
    __ Peek(x10, receiver_offset);
    __ AssertNotSmi(x10);
    __ CompareObjectType(x10, x10, x11, FIRST_JS_RECEIVER_TYPE);
    __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
  }

  // Open a frame scope to indicate that there is a frame on the stack.
  // The MANUAL indicates that the scope shouldn't actually generate code
  // to set up the frame because we do it manually below.
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  // This call emits the following sequence in a way that can be patched for
  // code ageing support:
  //  Push(lr, fp, cp, x1);
  //  Add(fp, jssp, 2 * kPointerSize);
  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->GeneratePreagedPrologue());

  // Reserve space on the stack for locals.
  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
    OperandStackDepthIncrement(locals_count);
    if (locals_count > 0) {
      if (locals_count >= 128) {
        Label ok;
        DCHECK(jssp.Is(__ StackPointer()));
        __ Sub(x10, jssp, locals_count * kPointerSize);
        __ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
        __ B(hs, &ok);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ Bind(&ok);
      }
      __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
      if (FLAG_optimize_for_size) {
        __ PushMultipleTimes(x10, locals_count);
      } else {
        const int kMaxPushes = 32;
        if (locals_count >= kMaxPushes) {
          int loop_iterations = locals_count / kMaxPushes;
          __ Mov(x2, loop_iterations);
          Label loop_header;
          __ Bind(&loop_header);
          // Do pushes.
          __ PushMultipleTimes(x10, kMaxPushes);
          __ Subs(x2, x2, 1);
          __ B(ne, &loop_header);
        }
        int remaining = locals_count % kMaxPushes;
        // Emit the remaining pushes.
        __ PushMultipleTimes(x10, remaining);
      }
    }
  }
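
  // For example, with locals_count == 70 the non-size-optimized path above
  // emits a two-iteration loop (2 * kMaxPushes == 64 slots) followed by
  // 70 % 32 == 6 straight-line pushes, all storing the undefined root.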

  bool function_in_register_x1 = true;

  if (info->scope()->num_heap_slots() > 0) {
    // Argument to NewContext is the function, which is still in x1.
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
    if (info->scope()->is_script_scope()) {
      __ Mov(x10, Operand(info->scope()->GetScopeInfo(info->isolate())));
      __ Push(x1, x10);
      __ CallRuntime(Runtime::kNewScriptContext);
      PrepareForBailoutForId(BailoutId::ScriptContext(),
                             BailoutState::TOS_REGISTER);
      // The new target value is not used, clobbering is safe.
      DCHECK_NULL(info->scope()->new_target_var());
    } else {
      if (info->scope()->new_target_var() != nullptr) {
        __ Push(x3);  // Preserve new target.
      }
      if (slots <= FastNewContextStub::kMaximumSlots) {
        FastNewContextStub stub(isolate(), slots);
        __ CallStub(&stub);
        // Result of FastNewContextStub is always in new space.
        need_write_barrier = false;
      } else {
        __ Push(x1);
        __ CallRuntime(Runtime::kNewFunctionContext);
      }
      if (info->scope()->new_target_var() != nullptr) {
        __ Pop(x3);  // Restore new target.
      }
    }
    function_in_register_x1 = false;
    // Context is returned in x0.  It replaces the context passed to us.
    // It's saved in the stack and kept live in cp.
    __ Mov(cp, x0);
    __ Str(x0, MemOperand(fp, StandardFrameConstants::kContextOffset));
    // Copy any necessary parameters into the context.
    int num_parameters = info->scope()->num_parameters();
    int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
    for (int i = first_parameter; i < num_parameters; i++) {
      Variable* var = (i == -1) ? scope()->receiver() : scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ Ldr(x10, MemOperand(fp, parameter_offset));
        // Store it in the context.
        MemOperand target = ContextMemOperand(cp, var->index());
        __ Str(x10, target);

        // Update the write barrier.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(cp, static_cast<int>(target.offset()), x10,
                                    x11, kLRHasBeenSaved, kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(cp, &done);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  // Register holding this function and new target are both trashed in case we
  // bail out here. But since that can happen only when new target is not used
  // and we allocate a context, the value of |function_in_register_x1| is
  // correct.
  PrepareForBailoutForId(BailoutId::FunctionContext(),
                         BailoutState::NO_REGISTERS);

  // Possibly set up a local binding to the this function which is used in
  // derived constructors with super calls.
  Variable* this_function_var = scope()->this_function_var();
  if (this_function_var != nullptr) {
    Comment cmnt(masm_, "[ This function");
    if (!function_in_register_x1) {
      __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again; keep it marked as such.
    }
    SetVar(this_function_var, x1, x0, x2);
  }

  // Possibly set up a local binding to the new target value.
  Variable* new_target_var = scope()->new_target_var();
  if (new_target_var != nullptr) {
    Comment cmnt(masm_, "[ new.target");
    SetVar(new_target_var, x3, x0, x2);
  }

  // Possibly allocate RestParameters
  int rest_index;
  Variable* rest_param = scope()->rest_parameter(&rest_index);
  if (rest_param) {
    Comment cmnt(masm_, "[ Allocate rest parameter array");
    if (!function_in_register_x1) {
      __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    FastNewRestParameterStub stub(isolate());
    __ CallStub(&stub);
    function_in_register_x1 = false;
    SetVar(rest_param, x0, x1, x2);
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (!function_in_register_x1) {
      // Load this again, if it's used by the local context below.
      __ Ldr(x1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
    }
    if (is_strict(language_mode()) || !has_simple_parameters()) {
      FastNewStrictArgumentsStub stub(isolate());
      __ CallStub(&stub);
    } else if (literal()->has_duplicate_parameters()) {
      __ Push(x1);
      __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
    } else {
      FastNewSloppyArgumentsStub stub(isolate());
      __ CallStub(&stub);
    }

    SetVar(arguments, x0, x1, x2);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter);
  }

  // Visit the declarations and body.
  PrepareForBailoutForId(BailoutId::FunctionEntry(),
                         BailoutState::NO_REGISTERS);
  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope()->declarations());
  }

  // Assert that the declarations do not use ICs. Otherwise the debugger
  // won't be able to redirect a PC at an IC to the correct IC in newly
  // recompiled code.
  DCHECK_EQ(0, ic_total_count_);

  {
    Comment cmnt(masm_, "[ Stack check");
    PrepareForBailoutForId(BailoutId::Declarations(),
                           BailoutState::NO_REGISTERS);
    Label ok;
    DCHECK(jssp.Is(__ StackPointer()));
    __ CompareRoot(jssp, Heap::kStackLimitRootIndex);
    __ B(hs, &ok);
    PredictableCodeSizeScope predictable(masm_,
                                         Assembler::kCallSizeWithRelocation);
    __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
    __ Bind(&ok);
  }

  {
    Comment cmnt(masm_, "[ Body");
    DCHECK(loop_depth() == 0);
    VisitStatements(literal()->body());
    DCHECK(loop_depth() == 0);
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
  }
  EmitReturnSequence();

  // Force emission of the pools, so they don't get emitted in the middle
  // of the back edge table.
  masm()->CheckVeneerPool(true, false);
  masm()->CheckConstPool(true, false);
}


void FullCodeGenerator::ClearAccumulator() {
  __ Mov(x0, Smi::FromInt(0));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ Mov(x2, Operand(profiling_counter_));
  __ Ldr(x3, FieldMemOperand(x2, Cell::kValueOffset));
  __ Subs(x3, x3, Smi::FromInt(delta));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}
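
// The profiling counter is a heap Cell (allocated in Generate()) holding a
// smi. The difference of two smis is again a valid smi bit pattern, so Subs
// can operate directly on the tagged values; callers branch on the resulting
// flags (B(pl, ...)) to detect when the interrupt budget runs out.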


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ Mov(x2, Operand(profiling_counter_));
  __ Mov(x3, Smi::FromInt(reset_value));
  __ Str(x3, FieldMemOperand(x2, Cell::kValueOffset));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  DCHECK(jssp.Is(__ StackPointer()));
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  // Block literal pools whilst emitting back edge code.
  Assembler::BlockPoolsScope block_const_pool(masm_);
  Label ok;

  DCHECK(back_edge_target->is_bound());
  // We want to do a round rather than a floor of distance/kCodeSizeMultiplier
  // to reduce the absolute error due to the integer division. To do that,
  // we add kCodeSizeMultiplier/2 to the distance (equivalent to adding 0.5 to
  // the result).
  int distance =
      static_cast<int>(masm_->SizeOfCodeGeneratedSince(back_edge_target) +
                       kCodeSizeMultiplier / 2);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ B(pl, &ok);
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ Bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
}
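
// Worked example of the weight computation above: for a loop body of 1030
// bytes and a hypothetical kCodeSizeMultiplier of 100 (illustrative value
// only), distance = 1030 + 50 = 1080 and weight = min(kMaxBackEdgeWeight,
// max(1, 1080 / 100)) = 10. The budget is thus charged roughly one unit per
// kCodeSizeMultiplier bytes of code executed, clamped to
// [1, kMaxBackEdgeWeight].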

void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
  // Pretend that the exit is a backwards jump to the entry.
  int weight = 1;
  if (info_->ShouldSelfOptimize()) {
    weight = FLAG_interrupt_budget / FLAG_self_opt_count;
  } else {
    int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
    weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
  }
  EmitProfilingCounterDecrement(weight);
  Label ok;
  __ B(pl, &ok);
  // Don't need to save result register if we are going to do a tail call.
  if (!is_tail_call) {
    __ Push(x0);
  }
  __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
  if (!is_tail_call) {
    __ Pop(x0);
  }
  EmitProfilingCounterReset();
  __ Bind(&ok);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");

  if (return_label_.is_bound()) {
    __ B(&return_label_);

  } else {
    __ Bind(&return_label_);
    if (FLAG_trace) {
      // Push the return value on the stack as the parameter.
      // Runtime::TraceExit returns its parameter in x0.
      __ Push(result_register());
      __ CallRuntime(Runtime::kTraceExit);
      DCHECK(x0.Is(result_register()));
    }
    EmitProfilingCounterHandlingForReturnSequence(false);

    SetReturnPosition(literal());
    const Register& current_sp = __ StackPointer();
    // Nothing ensures 16 bytes alignment here.
    DCHECK(!current_sp.Is(csp));
    __ Mov(current_sp, fp);
    __ Ldp(fp, lr, MemOperand(current_sp, 2 * kXRegSize, PostIndex));
    // Drop the arguments and receiver and return.
    // TODO(all): This implementation is overkill as it supports 2**31+1
    // arguments, consider how to improve it without creating a security
    // hole.
    __ ldr_pcrel(ip0, (3 * kInstructionSize) >> kLoadLiteralScaleLog2);
    __ Add(current_sp, current_sp, ip0);
    __ Ret();
    int32_t arg_count = info_->scope()->num_parameters() + 1;
    __ dc64(kXRegSize * arg_count);
  }
}
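
// The unusual tail of EmitReturnSequence() above emits this pattern:
//
//   ldr ip0, [pc, #12]    ; load the 64-bit literal placed after the code
//   add jssp, jssp, ip0   ; drop the receiver and all arguments at once
//   ret
//   .quad (arg_count * kXRegSize)
//
// Storing the byte count to drop as data after the Ret lets the same
// fixed-size sequence handle any argument count (see the TODO above about
// this being overkill).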

void FullCodeGenerator::RestoreContext() {
  __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
}

void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  // Root values have no side effects.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  __ LoadRoot(result_register(), index);
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
                                          false_label_);
  if (index == Heap::kUndefinedValueRootIndex ||
      index == Heap::kNullValueRootIndex ||
      index == Heap::kFalseValueRootIndex) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (index == Heap::kTrueValueRootIndex) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else {
    __ LoadRoot(result_register(), index);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  __ Mov(result_register(), Operand(lit));
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  // Immediates cannot be pushed directly.
  __ Mov(result_register(), Operand(lit));
  codegen()->PushOperand(result_register());
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(lit->IsNull(isolate()) || lit->IsUndefined(isolate()) ||
         !lit->IsUndetectable());
  if (lit->IsUndefined(isolate()) || lit->IsNull(isolate()) ||
      lit->IsFalse(isolate())) {
    if (false_label_ != fall_through_) __ B(false_label_);
  } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ B(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ B(false_label_);
    } else {
      if (true_label_ != fall_through_) __ B(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ Mov(result_register(), Operand(lit));
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) codegen()->DropOperands(count - 1);
  __ Poke(reg, 0);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ Bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
  __ Bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ Bind(materialize_true);
  __ LoadRoot(x10, Heap::kTrueValueRootIndex);
  __ B(&done);
  __ Bind(materialize_false);
  __ LoadRoot(x10, Heap::kFalseValueRootIndex);
  __ Bind(&done);
  codegen()->PushOperand(x10);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(result_register(), value_root_index);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Heap::RootListIndex value_root_index =
      flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
  __ LoadRoot(x10, value_root_index);
  codegen()->PushOperand(x10);
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) {
      __ B(true_label_);
    }
  } else {
    if (false_label_ != fall_through_) {
      __ B(false_label_);
    }
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  Split(eq, if_true, if_false, fall_through);
}


// If (cond), branch to if_true.
// If (!cond), branch to if_false.
// fall_through is used as an optimization in cases where only one branch
// instruction is necessary.
void FullCodeGenerator::Split(Condition cond,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ B(cond, if_true);
  } else if (if_true == fall_through) {
    DCHECK(if_false != fall_through);
    __ B(NegateCondition(cond), if_false);
  } else {
    __ B(cond, if_true);
    __ B(if_false);
  }
}
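
// For example, when compiling "a === b" as the condition of an if whose
// false block immediately follows the comparison, if_false is the
// fall-through case, so Split emits a single "B(eq, if_true)" and simply
// falls through for the false case rather than emitting both a conditional
// and an unconditional branch.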


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kXRegSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return MemOperand(fp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextMemOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  // Use destination as scratch.
  MemOperand location = VarOperand(var, dest);
  __ Ldr(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!AreAliased(src, scratch0, scratch1));
  MemOperand location = VarOperand(var, scratch0);
  __ Str(src, location);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    // scratch0 contains the correct context.
    __ RecordWriteContextSlot(scratch0, static_cast<int>(location.offset()),
                              src, scratch1, kLRHasBeenSaved, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest()) return;

  // TODO(all): Investigate to see if there is something to work on here.
  Label skip;
  if (should_normalize) {
    __ B(&skip);
  }
  PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  if (should_normalize) {
    __ CompareRoot(x0, Heap::kTrueValueRootIndex);
    Split(eq, if_true, if_false, NULL);
    __ Bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ Ldr(x1, FieldMemOperand(cp, HeapObject::kMapOffset));
    __ CompareRoot(x1, Heap::kWithContextMapRootIndex);
    __ Check(ne, kDeclarationInWithContext);
    __ CompareRoot(x1, Heap::kCatchContextMapRootIndex);
    __ Check(ne, kDeclarationInCatchContext);
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST;

  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED:
      DCHECK(!variable->binding_needs_init());
      globals_->Add(variable->name(), zone());
      globals_->Add(isolate()->factory()->undefined_value(), zone());
      break;

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, StackOperand(variable));
      }
      break;

    case VariableLocation::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Str(x10, ContextMemOperand(cp, variable->index()));
        // No write barrier since the_hole_value is in old space.
        PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      }
      break;

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      DCHECK_EQ(VAR, mode);
      DCHECK(!hole_init);
      __ Mov(x2, Operand(variable->name()));
      __ Push(x2);
      __ CallRuntime(Runtime::kDeclareEvalVar);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case VariableLocation::GLOBAL:
    case VariableLocation::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case VariableLocation::PARAMETER:
    case VariableLocation::LOCAL: {
      Comment cmnt(masm_, "[ Function Declaration");
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), StackOperand(variable));
      break;
    }

    case VariableLocation::CONTEXT: {
      Comment cmnt(masm_, "[ Function Declaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ Str(result_register(), ContextMemOperand(cp, variable->index()));
      int offset = Context::SlotOffset(variable->index());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(cp,
                                offset,
                                result_register(),
                                x2,
                                kLRHasBeenSaved,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }

    case VariableLocation::LOOKUP: {
      Comment cmnt(masm_, "[ Function Declaration");
      __ Mov(x2, Operand(variable->name()));
      PushOperand(x2);
      // Push initial value for function declaration.
      VisitForStackValue(declaration->fun());
      CallRuntimeWithOperands(Runtime::kDeclareEvalFunction);
      PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ Mov(x11, Operand(pairs));
  Register flags = xzr;
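  // Defaulting flags to xzr avoids materializing Smi::FromInt(0): reading
  // xzr yields 0, which is exactly the tagged encoding of Smi zero, so the
  // Push below stores the correct value without an extra Mov.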
  if (Smi::FromInt(DeclareGlobalsFlags())) {
    flags = x10;
    __ Mov(flags, Smi::FromInt(DeclareGlobalsFlags()));
  }
  __ Push(x11, flags);
  __ CallRuntime(Runtime::kDeclareGlobals);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitSwitchStatement");
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ Bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ Peek(x1, 0);   // Switch value.

    JumpPatchSite patch_site(masm_);
    if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
      Label slow_case;
      patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
      __ Cmp(x1, x0);
      __ B(ne, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ B(clause->body_target());
      __ Bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetExpressionPosition(clause);
    Handle<Code> ic =
        CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ B(&skip);
    PrepareForBailout(clause, BailoutState::TOS_REGISTER);
    __ JumpIfNotRoot(x0, Heap::kTrueValueRootIndex, &next_test);
    __ Drop(1);
    __ B(clause->body_target());
    __ Bind(&skip);

    __ Cbnz(x0, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ B(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ Bind(&next_test);
  DropOperands(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ B(nested_statement.break_label());
  } else {
    __ B(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ Bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ Bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  ASM_LOCATION("FullCodeGenerator::VisitForInStatement");
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();

  // TODO(all): This visitor probably needs better comments and a revisit.

  // Get the object to enumerate over.
  SetExpressionAsStatementPosition(stmt->enumerable());
  VisitForAccumulatorValue(stmt->enumerable());
  OperandStackDepthIncrement(5);

  Label loop, exit;
  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // If the object is null or undefined, skip over the loop, otherwise convert
  // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
  Label convert, done_convert;
  __ JumpIfSmi(x0, &convert);
  __ JumpIfObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE, &done_convert, ge);
  __ JumpIfRoot(x0, Heap::kNullValueRootIndex, &exit);
  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex, &exit);
  __ Bind(&convert);
  ToObjectStub stub(isolate());
  __ CallStub(&stub);
  __ Bind(&done_convert);
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  __ Push(x0);

  // Check cache validity in generated code. If we cannot guarantee cache
  // validity, call the runtime system to check cache validity or get the
  // property names in a fixed array. Note: Proxies never have an enum cache,
  // so will always take the slow path.
  Label call_runtime;
  __ CheckEnumCache(x0, x15, x10, x11, x12, x13, &call_runtime);

  // The enum cache is valid.  Load the map of the object being
  // iterated over and use the cache for the iteration.
  Label use_cache;
  __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ B(&use_cache);

  // Get the set of properties to enumerate.
  __ Bind(&call_runtime);
  __ Push(x0);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kForInEnumerate);
  PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  Label fixed_array, no_descriptors;
  __ Ldr(x2, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ JumpIfNotRoot(x2, Heap::kMetaMapRootIndex, &fixed_array);

  // We got a map in register x0. Get the enumeration cache from it.
  __ Bind(&use_cache);

  __ EnumLengthUntagged(x1, x0);
  __ Cbz(x1, &no_descriptors);

  __ LoadInstanceDescriptors(x0, x2);
  __ Ldr(x2, FieldMemOperand(x2, DescriptorArray::kEnumCacheOffset));
  __ Ldr(x2,
         FieldMemOperand(x2, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ SmiTag(x1);
  // Map, enumeration cache, enum cache length, zero (the last two as smis).
  __ Push(x0, x2, x1, xzr);
  __ B(&loop);

  __ Bind(&no_descriptors);
  __ Drop(1);
  __ B(&exit);

  // We got a fixed array in register x0. Iterate through that.
  __ Bind(&fixed_array);

  __ Mov(x1, Smi::FromInt(1));  // Smi(1) indicates slow check.
  __ Ldr(x2, FieldMemOperand(x0, FixedArray::kLengthOffset));
  __ Push(x1, x0, x2);  // Smi and array, fixed array length (as smi).
  PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
  __ Push(xzr);  // Initial index.
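
  // At this point both paths have set up the five for-in stack slots that
  // the loop below addresses by Peek offsets:
  //   [jssp + 0 * kXRegSize]  current index (smi)
  //   [jssp + 1 * kXRegSize]  cache/array length (smi)
  //   [jssp + 2 * kXRegSize]  enum cache array or fixed array of keys
  //   [jssp + 3 * kXRegSize]  expected map, or Smi(1) for the slow path
  //   [jssp + 4 * kXRegSize]  the enumerable object itself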

  // Generate code for doing the condition check.
  __ Bind(&loop);
  SetExpressionAsStatementPosition(stmt->each());

  // Load the current count to x0, load the length to x1.
  __ PeekPair(x0, x1, 0);
  __ Cmp(x0, x1);  // Compare to the array length.
  __ B(hs, loop_statement.break_label());

  // Get the current entry of the array into register x3.
  __ Peek(x10, 2 * kXRegSize);
  __ Add(x10, x10, Operand::UntagSmiAndScale(x0, kPointerSizeLog2));
  __ Ldr(x3, MemOperand(x10, FixedArray::kHeaderSize - kHeapObjectTag));

  // Get the expected map from the stack, or a smi in the
  // permanent slow case, into register x2.
  __ Peek(x2, 3 * kXRegSize);

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ Peek(x1, 4 * kXRegSize);
  __ Ldr(x11, FieldMemOperand(x1, HeapObject::kMapOffset));
  __ Cmp(x11, x2);
  __ B(eq, &update_each);

  // We need to filter the key, record slow-path here.
  int const vector_index = SmiFromSlot(slot)->value();
  __ EmitLoadTypeFeedbackVector(x0);
  __ Mov(x10, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
  __ Str(x10, FieldMemOperand(x0, FixedArray::OffsetOfElementAt(vector_index)));

  // Convert the entry to a string or (smi) 0 if it isn't a property
  // any more. If the property has been removed while iterating, we
  // just skip it.
  __ Push(x1, x3);
  __ CallRuntime(Runtime::kForInFilter);
  PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
  __ Mov(x3, x0);
  __ JumpIfRoot(x0, Heap::kUndefinedValueRootIndex,
                loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register x3.
  __ Bind(&update_each);
  __ Mov(result_register(), x3);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
    PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
  }

  // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing
  // the index (smi) stored on top of the stack.
  __ Bind(loop_statement.continue_label());
  // TODO(all): We could use a callee saved register to avoid popping.
  __ Pop(x0);
  __ Add(x0, x0, Smi::FromInt(1));
  __ Push(x0);

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ B(&loop);

  // Remove the pointers stored on the stack.
  __ Bind(loop_statement.break_label());
  DropOperands(5);

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ Bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Peek(StoreDescriptor::ReceiverRegister(), 0);
  __ Mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
  DCHECK(NeedsHomeObject(initializer));
  __ Move(StoreDescriptor::ReceiverRegister(), x0);
  __ Mov(StoreDescriptor::NameRegister(),
         Operand(isolate()->factory()->home_object_symbol()));
  __ Peek(StoreDescriptor::ValueRegister(), offset * kPointerSize);
  EmitLoadStoreICSlot(slot);
  CallStoreIC();
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
  Register current = cp;
  Register next = x10;
  Register temp = x11;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ Ldr(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      // Load next context in chain.
      __ Ldr(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      current = next;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s->is_eval_scope()) {
    Label loop, fast;
    __ Mov(next, current);

    __ Bind(&loop);
    // Terminate at native context.
    __ Ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
    __ JumpIfRoot(temp, Heap::kNativeContextMapRootIndex, &fast);
    // Check that extension is "the hole".
    __ Ldr(temp, ContextMemOperand(next, Context::EXTENSION_INDEX));
    __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
    // Load next context in chain.
    __ Ldr(next, ContextMemOperand(next, Context::PREVIOUS_INDEX));
    __ B(&loop);
    __ Bind(&fast);
  }

  // All extension objects were empty and it is safe to use the normal global
  // load machinery.
  EmitGlobalVariableLoad(proxy, typeof_mode);
}
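
// The extension checks above exist because a sloppy-mode eval in an
// enclosing function can introduce a shadowing binding at runtime, e.g.
//
//   function outer(code) {
//     eval(code);  // may declare a variable named "x"
//     return function inner() { return x; };
//   }
//
// A load of x in inner() may therefore only take the fast global path after
// verifying that no intervening context has acquired an extension object.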


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = cp;
  Register next = x10;
  Register temp = x11;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is "the hole".
        __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
        __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
      }
      __ Ldr(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering cp.
      context = next;
    }
  }
  // Check that last extension is "the hole".
  __ Ldr(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
  __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
1248   // destroy the cp register).
1249   return ContextMemOperand(context, var->index());
1250 }
1251 
1252 
EmitDynamicLookupFastCase(VariableProxy * proxy,TypeofMode typeof_mode,Label * slow,Label * done)1253 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1254                                                   TypeofMode typeof_mode,
1255                                                   Label* slow, Label* done) {
1256   // Generate fast-case code for variables that might be shadowed by
1257   // eval-introduced variables.  Eval is used a lot without
1258   // introducing variables.  In those cases, we do not want to
1259   // perform a runtime call for all variables in the scope
1260   // containing the eval.
1261   Variable* var = proxy->var();
1262   if (var->mode() == DYNAMIC_GLOBAL) {
1263     EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1264     __ B(done);
1265   } else if (var->mode() == DYNAMIC_LOCAL) {
1266     Variable* local = var->local_if_not_shadowed();
1267     __ Ldr(x0, ContextSlotOperandCheckExtensions(local, slow));
1268     if (local->mode() == LET || local->mode() == CONST) {
1269       __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, done);
1270       __ Mov(x0, Operand(var->name()));
1271       __ Push(x0);
1272       __ CallRuntime(Runtime::kThrowReferenceError);
1273     }
1274     __ B(done);
1275   }
1276 }
1277 
1278 
EmitGlobalVariableLoad(VariableProxy * proxy,TypeofMode typeof_mode)1279 void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
1280                                                TypeofMode typeof_mode) {
1281 #ifdef DEBUG
1282   Variable* var = proxy->var();
1283   DCHECK(var->IsUnallocatedOrGlobalSlot() ||
1284          (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
1285 #endif
1286   __ Mov(LoadGlobalDescriptor::SlotRegister(),
1287          SmiFromSlot(proxy->VariableFeedbackSlot()));
1288   CallLoadGlobalIC(typeof_mode);
1289 }
1290 
1291 
EmitVariableLoad(VariableProxy * proxy,TypeofMode typeof_mode)1292 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1293                                          TypeofMode typeof_mode) {
1294   // Record position before possible IC call.
1295   SetExpressionPosition(proxy);
1296   PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
1297   Variable* var = proxy->var();
1298 
1299   // Three cases: global variables, lookup variables, and all other types of
1300   // variables.
1301   switch (var->location()) {
1302     case VariableLocation::GLOBAL:
1303     case VariableLocation::UNALLOCATED: {
1304       Comment cmnt(masm_, "Global variable");
1305       EmitGlobalVariableLoad(proxy, typeof_mode);
1306       context()->Plug(x0);
1307       break;
1308     }
1309 
1310     case VariableLocation::PARAMETER:
1311     case VariableLocation::LOCAL:
1312     case VariableLocation::CONTEXT: {
1313       DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1314       Comment cmnt(masm_, var->IsContextSlot()
1315                               ? "Context variable"
1316                               : "Stack variable");
1317       if (NeedsHoleCheckForLoad(proxy)) {
1318         // Let and const need a read barrier.
1319         GetVar(x0, var);
1320         Label done;
1321         __ JumpIfNotRoot(x0, Heap::kTheHoleValueRootIndex, &done);
1322         if (var->mode() == LET || var->mode() == CONST) {
1323           // Throw a reference error when using an uninitialized let/const
1324           // binding in harmony mode.
1325           __ Mov(x0, Operand(var->name()));
1326           __ Push(x0);
1327           __ CallRuntime(Runtime::kThrowReferenceError);
1328           __ Bind(&done);
1329         }
1330         context()->Plug(x0);
1331         break;
1332       }
1333       context()->Plug(var);
1334       break;
1335     }
1336 
1337     case VariableLocation::LOOKUP: {
1338       Label done, slow;
1339       // Generate code for loading from variables potentially shadowed by
1340       // eval-introduced variables.
1341       EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1342       __ Bind(&slow);
1343       Comment cmnt(masm_, "Lookup variable");
1344       __ Push(var->name());
1345       Runtime::FunctionId function_id =
1346           typeof_mode == NOT_INSIDE_TYPEOF
1347               ? Runtime::kLoadLookupSlot
1348               : Runtime::kLoadLookupSlotInsideTypeof;
1349       __ CallRuntime(function_id);
1350       __ Bind(&done);
1351       context()->Plug(x0);
1352       break;
1353     }
1354   }
1355 }
1356 
1357 
EmitAccessor(ObjectLiteralProperty * property)1358 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1359   Expression* expression = (property == NULL) ? NULL : property->value();
1360   if (expression == NULL) {
1361     __ LoadRoot(x10, Heap::kNullValueRootIndex);
1362     PushOperand(x10);
1363   } else {
1364     VisitForStackValue(expression);
1365     if (NeedsHomeObject(expression)) {
1366       DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1367              property->kind() == ObjectLiteral::Property::SETTER);
1368       int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1369       EmitSetHomeObject(expression, offset, property->GetSlot());
1370     }
1371   }
1372 }
1373 
1374 
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(constant_properties));
  int flags = expr->ComputeFlags();
  __ Mov(x0, Smi::FromInt(flags));
  if (MustCreateObjectLiteralWithRuntime(expr)) {
    __ Push(x3, x2, x1, x0);
    __ CallRuntime(Runtime::kCreateObjectLiteral);
  } else {
    FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
    __ CallStub(&stub);
    RestoreContext();
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in x0.
  bool result_saved = false;

  AccessorTable accessor_table(zone());
  int property_index = 0;
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);
    if (property->is_computed_name()) break;
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key()->AsLiteral();
    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(x0);  // Save result on stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        // It is safe to use [[Put]] here because the boilerplate already
        // contains computed properties with an uninitialized value.
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreDescriptor::ValueRegister().is(x0));
            __ Mov(StoreDescriptor::NameRegister(), Operand(key->value()));
            __ Peek(StoreDescriptor::ReceiverRegister(), 0);
            EmitLoadStoreICSlot(property->GetSlot(0));
            CallStoreIC();
            PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);

            if (NeedsHomeObject(value)) {
              EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
            }
          } else {
            VisitForEffect(value);
          }
          break;
        }
        __ Peek(x0, 0);
        PushOperand(x0);
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          if (NeedsHomeObject(value)) {
            EmitSetHomeObject(value, 2, property->GetSlot());
          }
          __ Mov(x0, Smi::FromInt(SLOPPY));  // Language mode
          PushOperand(x0);
          CallRuntimeWithOperands(Runtime::kSetProperty);
        } else {
          DropOperands(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        DCHECK(property->emit_store());
        // Duplicate receiver on stack.
        __ Peek(x0, 0);
        PushOperand(x0);
        VisitForStackValue(value);
        CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
        PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                               BailoutState::NO_REGISTERS);
        break;
      case ObjectLiteral::Property::GETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->getter = property;
        }
        break;
      case ObjectLiteral::Property::SETTER:
        if (property->emit_store()) {
          AccessorTable::Iterator it = accessor_table.lookup(key);
          it->second->bailout_id = expr->GetIdForPropertySet(property_index);
          it->second->setter = property;
        }
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ Peek(x10, 0);  // Duplicate receiver.
    PushOperand(x10);
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ Mov(x10, Smi::FromInt(NONE));
    PushOperand(x10);
    CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
    PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
  }

  // Object literals have two parts. The "static" part on the left contains no
  // computed property names, and so we can compute its map ahead of time; see
  // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
  // starts with the first computed property name, and continues with all
  // properties to its right.  All the code from above initializes the static
  // component of the object literal, and arranges for the map of the result to
  // reflect the static order in which the keys appear. For the dynamic
  // properties, we compile them into a series of "SetOwnProperty" runtime
  // calls. This will preserve insertion order.
  for (; property_index < expr->properties()->length(); property_index++) {
    ObjectLiteral::Property* property = expr->properties()->at(property_index);

    Expression* value = property->value();
    if (!result_saved) {
      PushOperand(x0);  // Save result on stack
      result_saved = true;
    }

    __ Peek(x10, 0);  // Duplicate receiver.
    PushOperand(x10);

    if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
      DCHECK(!property->is_computed_name());
      VisitForStackValue(value);
      DCHECK(property->emit_store());
      CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
      PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                             BailoutState::NO_REGISTERS);
    } else {
      EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
      VisitForStackValue(value);
      if (NeedsHomeObject(value)) {
        EmitSetHomeObject(value, 2, property->GetSlot());
      }

      switch (property->kind()) {
        case ObjectLiteral::Property::CONSTANT:
        case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        case ObjectLiteral::Property::COMPUTED:
          if (property->emit_store()) {
            PushOperand(Smi::FromInt(NONE));
            PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
            CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
            PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
                                   BailoutState::NO_REGISTERS);
          } else {
            DropOperands(3);
          }
          break;

        case ObjectLiteral::Property::PROTOTYPE:
          UNREACHABLE();
          break;

        case ObjectLiteral::Property::GETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
          break;

        case ObjectLiteral::Property::SETTER:
          PushOperand(Smi::FromInt(NONE));
          CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
          break;
      }
    }
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(x0);
  }
}


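// Array literals are likewise cloned from a boilerplate. In an illustrative
// literal such as
//
//   var a = [0, 1, f()];
//
// the compile-time constants 0 and 1 are already present in the clone; only
// the result of f() needs to be stored, via the keyed store IC.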
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  Handle<FixedArray> constant_elements = expr->constant_elements();
  bool has_fast_elements =
      IsFastObjectElementsKind(expr->constant_elements_kind());

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  __ Ldr(x3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  __ Mov(x2, Smi::FromInt(expr->literal_index()));
  __ Mov(x1, Operand(constant_elements));
  if (MustCreateArrayLiteralWithRuntime(expr)) {
    __ Mov(x0, Smi::FromInt(expr->ComputeFlags()));
    __ Push(x3, x2, x1, x0);
    __ CallRuntime(Runtime::kCreateArrayLiteral);
  } else {
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }
  PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);

  bool result_saved = false;  // Is the result saved to the stack?
  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  int array_index = 0;
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);
    DCHECK(!subexpr->IsSpread());

    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      PushOperand(x0);
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    __ Mov(StoreDescriptor::NameRegister(), Smi::FromInt(array_index));
    __ Peek(StoreDescriptor::ReceiverRegister(), 0);
    EmitLoadStoreICSlot(expr->LiteralFeedbackSlot());
    Handle<Code> ic =
        CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
    CallIC(ic);

    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  // If the array literal contains spread expressions, it has two parts. The
  // first part is the "static" array with a literal index and is handled
  // above. The second part starts at the first spread expression (inclusive);
  // its elements are appended to the array one by one. Note that the number
  // of elements an iterable produces is unknown ahead of time.
  if (array_index < length && result_saved) {
    PopOperand(x0);
    result_saved = false;
  }
  for (; array_index < length; array_index++) {
    Expression* subexpr = subexprs->at(array_index);

    PushOperand(x0);
    DCHECK(!subexpr->IsSpread());
    VisitForStackValue(subexpr);
    CallRuntimeWithOperands(Runtime::kAppendElement);

    PrepareForBailoutForId(expr->GetIdForElement(array_index),
                           BailoutState::NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(x0);
  }
}


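// Compiles every form of assignment. Illustrative source forms for the
// LhsKind cases handled below:
//
//   x = v;          // VARIABLE
//   o.x = v;        // NAMED_PROPERTY
//   o[k] = v;       // KEYED_PROPERTY
//   super.x = v;    // NAMED_SUPER_PROPERTY
//   super[k] = v;   // KEYED_SUPER_PROPERTY
//
// Compound assignments (e.g. x += v) additionally load the target and
// combine it with the right-hand side before the store.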
void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  DCHECK(expr->target()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ Assignment");

  Property* property = expr->target()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(property);

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the register.
        VisitForStackValue(property->obj());
        __ Peek(LoadDescriptor::ReceiverRegister(), 0);
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case NAMED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      PushOperand(result_register());
      if (expr->is_compound()) {
        const Register scratch = x10;
        __ Peek(scratch, kPointerSize);
        PushOperands(scratch, result_register());
      }
      break;
    case KEYED_SUPER_PROPERTY:
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          property->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(property->key());
      PushOperand(result_register());
      if (expr->is_compound()) {
        const Register scratch1 = x10;
        const Register scratch2 = x11;
        __ Peek(scratch1, 2 * kPointerSize);
        __ Peek(scratch2, kPointerSize);
        PushOperands(scratch1, scratch2, result_register());
      }
      break;
    case KEYED_PROPERTY:
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
        __ Peek(LoadDescriptor::NameRegister(), 0);
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    { AccumulatorValueContext context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case NAMED_SUPER_PROPERTY:
          EmitNamedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_SUPER_PROPERTY:
          EmitKeyedSuperPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(),
                                 BailoutState::TOS_REGISTER);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    PushOperand(x0);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    AccumulatorValueContext context(this);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  SetExpressionPosition(expr);

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op(), expr->AssignmentSlot());
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      context()->Plug(x0);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case NAMED_SUPER_PROPERTY:
      EmitNamedSuperPropertyStore(property);
      context()->Plug(x0);
      break;
    case KEYED_SUPER_PROPERTY:
      EmitKeyedSuperPropertyStore(property);
      context()->Plug(x0);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}


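// Inline fast path for binary operations when both operands are likely smis.
// On arm64, a smi keeps its 32-bit payload in the upper half of the 64-bit
// word (kSmiShift is half the register width, per the STATIC_ASSERT in the
// MUL case below). That is why the shift cases extract the shift amount with
// Ubfx starting at kSmiShift, and why ADD/SUB can use the flag-setting
// 64-bit forms directly, falling back to the BinaryOpIC stub on overflow.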
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              Expression* left_expr,
                                              Expression* right_expr) {
  Label done, both_smis, stub_call;

  // Get the arguments.
  Register left = x1;
  Register right = x0;
  Register result = x0;
  PopOperand(left);

  // Perform combined smi check on both operands.
  __ Orr(x10, left, right);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(x10, &both_smis);

  __ Bind(&stub_call);

  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  {
    Assembler::BlockPoolsScope scope(masm_);
    CallIC(code, expr->BinaryOperationFeedbackId());
    patch_site.EmitPatchInfo();
  }
  __ B(&done);

  __ Bind(&both_smis);
  // Smi case. This code works in the same way as the smi-smi case in the type
  // recording binary operation stub, see
  // BinaryOpStub::GenerateSmiSmiOperation for comments.
  // TODO(all): That doesn't exist any more. Where are the comments?
  //
  // The set of operations that needs to be supported here is controlled by
  // FullCodeGenerator::ShouldInlineSmiCase().
  switch (op) {
    case Token::SAR:
      __ Ubfx(right, right, kSmiShift, 5);
      __ Asr(result, left, right);
      __ Bic(result, result, kSmiShiftMask);
      break;
    case Token::SHL:
      __ Ubfx(right, right, kSmiShift, 5);
      __ Lsl(result, left, right);
      break;
    case Token::SHR:
      // If `left >>> right` >= 0x80000000, the result is not representable in
      // a signed 32-bit smi.
      __ Ubfx(right, right, kSmiShift, 5);
      __ Lsr(x10, left, right);
      __ Tbnz(x10, kXSignBit, &stub_call);
      __ Bic(result, x10, kSmiShiftMask);
      break;
    case Token::ADD:
      __ Adds(x10, left, right);
      __ B(vs, &stub_call);
      __ Mov(result, x10);
      break;
    case Token::SUB:
      __ Subs(x10, left, right);
      __ B(vs, &stub_call);
      __ Mov(result, x10);
      break;
    case Token::MUL: {
      Label not_minus_zero, done;
      STATIC_ASSERT(static_cast<unsigned>(kSmiShift) == (kXRegSizeInBits / 2));
      STATIC_ASSERT(kSmiTag == 0);
      __ Smulh(x10, left, right);
      __ Cbnz(x10, &not_minus_zero);
      __ Eor(x11, left, right);
      __ Tbnz(x11, kXSignBit, &stub_call);
      __ Mov(result, x10);
      __ B(&done);
      __ Bind(&not_minus_zero);
      __ Cls(x11, x10);
      __ Cmp(x11, kXRegSizeInBits - kSmiShift);
      __ B(lt, &stub_call);
      __ SmiTag(result, x10);
      __ Bind(&done);
      break;
    }
    case Token::BIT_OR:
      __ Orr(result, left, right);
      break;
    case Token::BIT_AND:
      __ And(result, left, right);
      break;
    case Token::BIT_XOR:
      __ Eor(result, left, right);
      break;
    default:
      UNREACHABLE();
  }

  __ Bind(&done);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
  PopOperand(x1);
  Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
  JumpPatchSite patch_site(masm_);    // Unbound, signals no inlined smi code.
  {
    Assembler::BlockPoolsScope scope(masm_);
    CallIC(code, expr->BinaryOperationFeedbackId());
    patch_site.EmitPatchInfo();
  }
  context()->Plug(x0);
}


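// Defines the members of a class literal on the constructor (static members)
// or on its prototype. Illustratively, for
//
//   class C { m() {} static s() {} get g() {} }
//
// m and g are installed on C.prototype and s on C itself, all as DONT_ENUM.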
void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
  for (int i = 0; i < lit->properties()->length(); i++) {
    ObjectLiteral::Property* property = lit->properties()->at(i);
    Expression* value = property->value();

    Register scratch = x1;
    if (property->is_static()) {
      __ Peek(scratch, kPointerSize);  // constructor
    } else {
      __ Peek(scratch, 0);  // prototype
    }
    PushOperand(scratch);
    EmitPropertyKey(property, lit->GetIdForProperty(i));

    // The static prototype property is read only. We handle the non computed
    // property name case in the parser. Since this is the only case where we
    // need to check for an own read only property we special case this so we
    // do not need to do this for every property.
    if (property->is_static() && property->is_computed_name()) {
      __ CallRuntime(Runtime::kThrowIfStaticPrototype);
      __ Push(x0);
    }

    VisitForStackValue(value);
    if (NeedsHomeObject(value)) {
      EmitSetHomeObject(value, 2, property->GetSlot());
    }

    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
      case ObjectLiteral::Property::PROTOTYPE:
        UNREACHABLE();
      case ObjectLiteral::Property::COMPUTED:
        PushOperand(Smi::FromInt(DONT_ENUM));
        PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
        CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
        break;

      case ObjectLiteral::Property::GETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
        break;

      case ObjectLiteral::Property::SETTER:
        PushOperand(Smi::FromInt(DONT_ENUM));
        CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
        break;

      default:
        UNREACHABLE();
    }
  }
}


void FullCodeGenerator::EmitAssignment(Expression* expr,
                                       FeedbackVectorSlot slot) {
  DCHECK(expr->IsValidReferenceExpressionOrThis());

  Property* prop = expr->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN, slot);
      break;
    }
    case NAMED_PROPERTY: {
      PushOperand(x0);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      // TODO(all): We could introduce a VisitForRegValue(reg, expr) to avoid
      // this copy.
      __ Mov(StoreDescriptor::ReceiverRegister(), x0);
      PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
      __ Mov(StoreDescriptor::NameRegister(),
             Operand(prop->key()->AsLiteral()->value()));
      EmitLoadStoreICSlot(slot);
      CallStoreIC();
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      PushOperand(x0);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForAccumulatorValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      // stack: value, this; x0: home_object
      Register scratch = x10;
      Register scratch2 = x11;
      __ mov(scratch, result_register());  // home_object
      __ Peek(x0, kPointerSize);           // value
      __ Peek(scratch2, 0);                // this
      __ Poke(scratch2, kPointerSize);     // this
      __ Poke(scratch, 0);                 // home_object
      // stack: this, home_object; x0: value
      EmitNamedSuperPropertyStore(prop);
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      PushOperand(x0);
      VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          prop->obj()->AsSuperPropertyReference()->home_object());
      VisitForAccumulatorValue(prop->key());
      Register scratch = x10;
      Register scratch2 = x11;
      __ Peek(scratch2, 2 * kPointerSize);  // value
      // stack: value, this, home_object; x0: key, x11: value
      __ Peek(scratch, kPointerSize);  // this
      __ Poke(scratch, 2 * kPointerSize);
      __ Peek(scratch, 0);  // home_object
      __ Poke(scratch, kPointerSize);
      __ Poke(x0, 0);
      __ Move(x0, scratch2);
      // stack: this, home_object, key; x0: value.
      EmitKeyedSuperPropertyStore(prop);
      break;
    }
    case KEYED_PROPERTY: {
      PushOperand(x0);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ Mov(StoreDescriptor::NameRegister(), x0);
      PopOperands(StoreDescriptor::ReceiverRegister(),
                  StoreDescriptor::ValueRegister());
      EmitLoadStoreICSlot(slot);
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(x0);
}


void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
    Variable* var, MemOperand location) {
  __ Str(result_register(), location);
  if (var->IsContextSlot()) {
    // RecordWrite may destroy all its register arguments.
    __ Mov(x10, result_register());
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(
        x1, offset, x10, x11, kLRHasBeenSaved, kDontSaveFPRegs);
  }
}


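// Stores the accumulator value into a variable, dispatching on where the
// variable lives, its mode, and the language mode. Illustrative cases:
//
//   x = v;               // unallocated: global store through the store IC
//   let y; y = v;        // hole check: ReferenceError if y is uninitialized
//   const z = 1; z = v;  // throws: ReferenceError if z is still the hole,
//                        // otherwise ConstAssignError
//
// Stores to legacy consts are silently ignored in sloppy mode and throw
// ConstAssignError in strict mode.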
void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
                                               FeedbackVectorSlot slot) {
  ASM_LOCATION("FullCodeGenerator::EmitVariableAssignment");
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ Mov(StoreDescriptor::NameRegister(), Operand(var->name()));
    __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
    EmitLoadStoreICSlot(slot);
    CallStoreIC();

  } else if (var->mode() == LET && op != Token::INIT) {
    // Non-initializing assignment to let variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label assign;
    MemOperand location = VarOperand(var, x1);
    __ Ldr(x10, location);
    __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &assign);
    __ Mov(x10, Operand(var->name()));
    __ Push(x10);
    __ CallRuntime(Runtime::kThrowReferenceError);
    // Perform the assignment.
    __ Bind(&assign);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (var->mode() == CONST && op != Token::INIT) {
    // Assignment to const variable needs a write barrier.
    DCHECK(!var->IsLookupSlot());
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label const_error;
    MemOperand location = VarOperand(var, x1);
    __ Ldr(x10, location);
    __ JumpIfNotRoot(x10, Heap::kTheHoleValueRootIndex, &const_error);
    __ Mov(x10, Operand(var->name()));
    __ Push(x10);
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ Bind(&const_error);
    __ CallRuntime(Runtime::kThrowConstAssignError);

  } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
    // Initializing assignment to const {this} needs a write barrier.
    DCHECK(var->IsStackAllocated() || var->IsContextSlot());
    Label uninitialized_this;
    MemOperand location = VarOperand(var, x1);
    __ Ldr(x10, location);
    __ JumpIfRoot(x10, Heap::kTheHoleValueRootIndex, &uninitialized_this);
    __ Mov(x0, Operand(var->name()));
    __ Push(x0);
    __ CallRuntime(Runtime::kThrowReferenceError);
    __ bind(&uninitialized_this);
    EmitStoreToStackLocalOrContextSlot(var, location);

  } else if (!var->is_const_mode() || op == Token::INIT) {
    if (var->IsLookupSlot()) {
      // Assignment to var.
      __ Push(var->name());
      __ Push(x0);
      __ CallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreLookupSlot_Strict
                         : Runtime::kStoreLookupSlot_Sloppy);
    } else {
      // Assignment to var or initializing assignment to let/const in harmony
      // mode.
      DCHECK(var->IsStackAllocated() || var->IsContextSlot());
      MemOperand location = VarOperand(var, x1);
      if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
        __ Ldr(x10, location);
        __ CompareRoot(x10, Heap::kTheHoleValueRootIndex);
        __ Check(eq, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }

  } else {
    DCHECK(var->mode() == CONST_LEGACY && op != Token::INIT);
    if (is_strict(language_mode())) {
      __ CallRuntime(Runtime::kThrowConstAssignError);
    }
    // Silently ignore store in sloppy mode.
  }
}


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitNamedPropertyAssignment");
  // Assignment to a property, using a named store IC.
  Property* prop = expr->target()->AsProperty();
  DCHECK(prop != NULL);
  DCHECK(prop->key()->IsLiteral());

  __ Mov(StoreDescriptor::NameRegister(),
         Operand(prop->key()->AsLiteral()->value()));
  PopOperand(StoreDescriptor::ReceiverRegister());
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallStoreIC();

  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(x0);
}


void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
  // Assignment to named property of super.
  // x0 : value
  // stack : receiver ('this'), home_object
  DCHECK(prop != NULL);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(key != NULL);

  PushOperand(key->value());
  PushOperand(x0);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreToSuper_Strict
                              : Runtime::kStoreToSuper_Sloppy);
}


void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
  // Assignment to keyed property of super.
  // x0 : value
  // stack : receiver ('this'), home_object, key
  DCHECK(prop != NULL);

  PushOperand(x0);
  CallRuntimeWithOperands(is_strict(language_mode())
                              ? Runtime::kStoreKeyedToSuper_Strict
                              : Runtime::kStoreKeyedToSuper_Sloppy);
}


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitKeyedPropertyAssignment");
  // Assignment to a property, using a keyed store IC.

  // TODO(all): Could we pass this in registers rather than on the stack?
  PopOperands(StoreDescriptor::NameRegister(),
              StoreDescriptor::ReceiverRegister());
  DCHECK(StoreDescriptor::ValueRegister().is(x0));

  Handle<Code> ic =
      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
  EmitLoadStoreICSlot(expr->AssignmentSlot());
  CallIC(ic);

  PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
  context()->Plug(x0);
}


void FullCodeGenerator::CallIC(Handle<Code> code,
                               TypeFeedbackId ast_id) {
  ic_total_count_++;
  // All calls must have a predictable size in full-codegen code to ensure that
  // the debugger can patch them correctly.
  __ Call(code, RelocInfo::CODE_TARGET, ast_id);
}


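// The call paths below differ mainly in how the target function and the
// receiver end up on the stack. Illustrative source forms:
//
//   f(x);          // EmitCallWithLoadIC, undefined receiver
//   o.f(x);        // EmitCallWithLoadIC, receiver is o
//   o[k](x);       // EmitKeyedCallWithLoadIC
//   super.f(x);    // EmitSuperCallWithLoadIC
//   super[k](x);   // EmitKeyedSuperCallWithLoadIC
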
// Code common for calls using the IC.
void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitCallWithLoadIC");
  Expression* callee = expr->expression();

  // Get the target function.
  ConvertReceiverMode convert_mode;
  if (callee->IsVariableProxy()) {
    { StackValueContext context(this);
      EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, BailoutState::NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
    // is a sloppy mode method.
    {
      UseScratchRegisterScope temps(masm_);
      Register temp = temps.AcquireX();
      __ LoadRoot(temp, Heap::kUndefinedValueRootIndex);
      PushOperand(temp);
    }
    convert_mode = ConvertReceiverMode::kNullOrUndefined;
  } else {
    // Load the function from the receiver.
    DCHECK(callee->IsProperty());
    DCHECK(!callee->AsProperty()->IsSuperAccess());
    __ Peek(LoadDescriptor::ReceiverRegister(), 0);
    EmitNamedPropertyLoad(callee->AsProperty());
    PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                           BailoutState::TOS_REGISTER);
    // Push the target function under the receiver.
    PopOperand(x10);
    PushOperands(x0, x10);
    convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
  }

  EmitCall(expr, convert_mode);
}


void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitSuperCallWithLoadIC");
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());
  SetExpressionPosition(prop);

  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());

  // Load the function from the receiver.
  const Register scratch = x10;
  SuperPropertyReference* super_ref =
      callee->AsProperty()->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  PushOperand(x0);
  __ Peek(scratch, kPointerSize);
  PushOperands(x0, scratch);
  PushOperand(key->value());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadFromSuper);
  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);

  // Replace home_object with target function.
  __ Poke(x0, kPointerSize);

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


// Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
                                                Expression* key) {
  ASM_LOCATION("FullCodeGenerator::EmitKeyedCallWithLoadIC");
  // Load the key.
  VisitForAccumulatorValue(key);

  Expression* callee = expr->expression();

  // Load the function from the receiver.
  DCHECK(callee->IsProperty());
  __ Peek(LoadDescriptor::ReceiverRegister(), 0);
  __ Move(LoadDescriptor::NameRegister(), x0);
  EmitKeyedPropertyLoad(callee->AsProperty());
  PrepareForBailoutForId(callee->AsProperty()->LoadId(),
                         BailoutState::TOS_REGISTER);

  // Push the target function under the receiver.
  PopOperand(x10);
  PushOperands(x0, x10);

  EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
}


void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitKeyedSuperCallWithLoadIC");
  Expression* callee = expr->expression();
  DCHECK(callee->IsProperty());
  Property* prop = callee->AsProperty();
  DCHECK(prop->IsSuperAccess());
  SetExpressionPosition(prop);

  // Load the function from the receiver.
  const Register scratch = x10;
  SuperPropertyReference* super_ref =
      callee->AsProperty()->obj()->AsSuperPropertyReference();
  VisitForStackValue(super_ref->home_object());
  VisitForAccumulatorValue(super_ref->this_var());
  PushOperand(x0);
  __ Peek(scratch, kPointerSize);
  PushOperands(x0, scratch);
  VisitForStackValue(prop->key());

  // Stack here:
  //  - home_object
  //  - this (receiver)
  //  - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
  //  - home_object
  //  - key
  CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
  PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);

  // Replace home_object with target function.
  __ Poke(x0, kPointerSize);

  // Stack here:
  // - target function
  // - this (receiver)
  EmitCall(expr);
}


void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
  ASM_LOCATION("FullCodeGenerator::EmitCall");
  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  SetCallPosition(expr, expr->tail_call_mode());
  if (expr->tail_call_mode() == TailCallMode::kAllow) {
    if (FLAG_trace) {
      __ CallRuntime(Runtime::kTraceTailCall);
    }
    // Update profiling counters before the tail call since we will
    // not return to this function.
    EmitProfilingCounterHandlingForReturnSequence(true);
  }
  Handle<Code> ic =
      CodeFactory::CallIC(isolate(), arg_count, mode, expr->tail_call_mode())
          .code();
  __ Mov(x3, SmiFromSlot(expr->CallFeedbackICSlot()));
  __ Peek(x1, (arg_count + 1) * kXRegSize);
  // Don't assign a type feedback id to the IC, since type feedback is provided
  // by the vector above.
  CallIC(ic);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->DropAndPlug(1, x0);
}

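// For a potential direct eval, e.g. eval("x + 1"), the resolver receives the
// first argument, the calling function, the language mode and two source
// positions, and returns the function that should actually be called.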
void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
  int arg_count = expr->arguments()->length();
  ASM_LOCATION("FullCodeGenerator::EmitResolvePossiblyDirectEval");
  // Prepare to push a copy of the first argument or undefined if it doesn't
  // exist.
  if (arg_count > 0) {
    __ Peek(x9, arg_count * kXRegSize);
  } else {
    __ LoadRoot(x9, Heap::kUndefinedValueRootIndex);
  }

  __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));

  // Prepare to push the language mode.
  __ Mov(x11, Smi::FromInt(language_mode()));
  // Prepare to push the start position of the scope the call resides in.
  __ Mov(x12, Smi::FromInt(scope()->start_position()));
  // Prepare to push the source position of the eval call.
  __ Mov(x13, Smi::FromInt(expr->position()));

  // Push.
  __ Push(x9, x10, x11, x12, x13);

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
}


// See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
  VariableProxy* callee = expr->expression()->AsVariableProxy();
  if (callee->var()->IsLookupSlot()) {
    Label slow, done;
    SetExpressionPosition(callee);
    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);

    __ Bind(&slow);
    // Call the runtime to find the function to call (returned in x0)
    // and the object holding it (returned in x1).
    __ Push(callee->name());
    __ CallRuntime(Runtime::kLoadLookupSlotForCall);
    PushOperands(x0, x1);  // Receiver, function.
    PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);

    // If fast case code has been generated, emit code to push the
    // function and receiver and have the slow path jump around this
    // code.
    if (done.is_linked()) {
      Label call;
      __ B(&call);
      __ Bind(&done);
      // Push function.
      // The receiver is implicitly the global receiver. Indicate this
      // by passing the undefined to the call function stub.
      __ LoadRoot(x1, Heap::kUndefinedValueRootIndex);
      __ Push(x0, x1);
      __ Bind(&call);
    }
  } else {
    VisitForStackValue(callee);
    // refEnv.WithBaseObject()
    __ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
    PushOperand(x10);  // Reserved receiver slot.
  }
}


void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitPossiblyEvalCall");
  // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval
  // to resolve the function we need to call.  Then we call the resolved
  // function using the given arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  PushCalleeAndWithBaseObject(expr);

  // Push the arguments.
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Push a copy of the function (found below the arguments) and
  // resolve eval.
  __ Peek(x10, (arg_count + 1) * kPointerSize);
  __ Push(x10);
  EmitResolvePossiblyDirectEval(expr);

  // Touch up the stack with the resolved function.
  __ Poke(x0, (arg_count + 1) * kPointerSize);

  PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);

  // Record source position for debugger.
  SetCallPosition(expr);

  // Call the evaluated function.
  __ Peek(x1, (arg_count + 1) * kXRegSize);
  __ Mov(x0, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                      expr->tail_call_mode()),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RecordJSReturnSite(expr);
  RestoreContext();
  context()->DropAndPlug(1, x0);
}


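// Compiles 'new' expressions, e.g. new F(a, b): the constructor and the
// arguments are pushed left to right, then CallConstructStub performs
// allocation and invocation, recording call targets for type feedback.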
void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  DCHECK(!expr->expression()->IsSuperPropertyReference());
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load function and argument count into x1 and x0.
  __ Mov(x0, arg_count);
  __ Peek(x1, arg_count * kXRegSize);

  // Record call targets in unoptimized code.
  __ EmitLoadTypeFeedbackVector(x2);
  __ Mov(x3, SmiFromSlot(expr->CallNewFeedbackSlot()));

  CallConstructStub stub(isolate());
  __ Call(stub.GetCode(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
  RestoreContext();
  context()->Plug(x0);
}


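// Compiles super(...) calls in derived-class constructors. The call target
// is the [[Prototype]] of the active function, i.e. the parent class
// constructor, and new.target is passed in x3.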
void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitSuperConstructorCall");
  SuperCallReference* super_call_ref =
      expr->expression()->AsSuperCallReference();
  DCHECK_NOT_NULL(super_call_ref);

  // Push the super constructor target on the stack (may be null,
  // but the Construct builtin can deal with that properly).
  VisitForAccumulatorValue(super_call_ref->this_function_var());
  __ AssertFunction(result_register());
  __ Ldr(result_register(),
         FieldMemOperand(result_register(), HeapObject::kMapOffset));
  __ Ldr(result_register(),
         FieldMemOperand(result_register(), Map::kPrototypeOffset));
  PushOperand(result_register());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetConstructCallPosition(expr);

  // Load new target into x3.
  VisitForAccumulatorValue(super_call_ref->new_target_var());
  __ Mov(x3, result_register());

  // Load function and argument count into x1 and x0.
  __ Mov(x0, arg_count);
  __ Peek(x1, arg_count * kXRegSize);

  __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);

  RecordJSReturnSite(expr);
  RestoreContext();
  context()->Plug(x0);
}


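// The Emit* intrinsics below share one shape: evaluate the single argument
// into x0, obtain true/false targets from the current test context, and
// Split on a type check, so that a result feeding a branch never needs to
// materialize a boolean value.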
void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ TestAndSplit(x0, kSmiTagMask, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(ge, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, JS_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, JS_TYPED_ARRAY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, JS_REGEXP_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
                         &if_false, &fall_through);

  __ JumpIfSmi(x0, if_false);
  __ CompareObjectType(x0, x10, x11, JS_PROXY_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


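// Computes the class name used by the %_ClassOf intrinsic: null for
// non-receivers, "Function" for functions, the constructor's instance class
// name when the map's constructor is a JS function, and "Object" otherwise.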
EmitClassOf(CallRuntime * expr)2724 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2725   ASM_LOCATION("FullCodeGenerator::EmitClassOf");
2726   ZoneList<Expression*>* args = expr->arguments();
2727   DCHECK(args->length() == 1);
2728   Label done, null, function, non_function_constructor;
2729 
2730   VisitForAccumulatorValue(args->at(0));
2731 
2732   // If the object is not a JSReceiver, we return null.
2733   __ JumpIfSmi(x0, &null);
2734   STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2735   __ CompareObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE);
2736   // x10: object's map.
2737   // x11: object's type.
2738   __ B(lt, &null);
2739 
2740   // Return 'Function' for JSFunction objects.
2741   __ Cmp(x11, FIRST_FUNCTION_TYPE);
2742   STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
2743   __ B(hs, &function);
2744 
2745   // Check if the constructor in the map is a JS function.
2746   Register instance_type = x14;
2747   __ GetMapConstructor(x12, x10, x13, instance_type);
2748   __ Cmp(instance_type, JS_FUNCTION_TYPE);
2749   __ B(ne, &non_function_constructor);
2750 
2751   // x12 now contains the constructor function. Grab the
2752   // instance class name from there.
2753   __ Ldr(x13, FieldMemOperand(x12, JSFunction::kSharedFunctionInfoOffset));
2754   __ Ldr(x0,
2755          FieldMemOperand(x13, SharedFunctionInfo::kInstanceClassNameOffset));
2756   __ B(&done);
2757 
2758   // Functions have class 'Function'.
2759   __ Bind(&function);
2760   __ LoadRoot(x0, Heap::kFunction_stringRootIndex);
2761   __ B(&done);
2762 
2763   // Objects with a non-function constructor have class 'Object'.
2764   __ Bind(&non_function_constructor);
2765   __ LoadRoot(x0, Heap::kObject_stringRootIndex);
2766   __ B(&done);
2767 
2768   // Non-JS objects have class null.
2769   __ Bind(&null);
2770   __ LoadRoot(x0, Heap::kNullValueRootIndex);
2771 
2772   // All done.
2773   __ Bind(&done);
2774 
2775   context()->Plug(x0);
2776 }
2777 
2778 
EmitValueOf(CallRuntime * expr)2779 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
2780   ASM_LOCATION("FullCodeGenerator::EmitValueOf");
2781   ZoneList<Expression*>* args = expr->arguments();
2782   DCHECK(args->length() == 1);
2783   VisitForAccumulatorValue(args->at(0));  // Load the object.
2784 
2785   Label done;
2786   // If the object is a smi return the object.
2787   __ JumpIfSmi(x0, &done);
2788   // If the object is not a value type, return the object.
2789   __ JumpIfNotObjectType(x0, x10, x11, JS_VALUE_TYPE, &done);
2790   __ Ldr(x0, FieldMemOperand(x0, JSValue::kValueOffset));
2791 
2792   __ Bind(&done);
2793   context()->Plug(x0);
2794 }
2795 
2796 
void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  Register code = x0;
  Register result = x1;

  StringCharFromCodeGenerator generator(code, result);
  generator.GenerateFast(masm_);
  __ B(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ Bind(&done);
  context()->Plug(result);
}

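// %_StringCharCodeAt intrinsic: loads the char code at the given index.
// Out-of-range indices produce NaN; receivers or indices that still need
// conversion produce undefined, which triggers conversion in the caller.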
void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = x1;
  Register index = x0;
  Register result = x3;

  PopOperand(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
                                      &need_conversion, &index_out_of_range);
  generator.GenerateFast(masm_);
  __ B(&done);

  __ Bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return NaN.
  __ LoadRoot(result, Heap::kNanValueRootIndex);
  __ B(&done);

  __ Bind(&need_conversion);
  // Load the undefined value into the result register, which will
  // trigger conversion.
  __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
  __ B(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);

  __ Bind(&done);
  context()->Plug(result);
}

void FullCodeGenerator::EmitCall(CallRuntime* expr) {
  ASM_LOCATION("FullCodeGenerator::EmitCall");
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_LE(2, args->length());
  // Push target, receiver and arguments onto the stack.
  for (Expression* const arg : *args) {
    VisitForStackValue(arg);
  }
  PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
  // Move target to x1.
  int const argc = args->length() - 2;
  __ Peek(x1, (argc + 1) * kXRegSize);
  // Call the target.
  __ Mov(x0, argc);
  __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(argc + 1);
  RestoreContext();
  // Discard the function left on TOS.
  context()->DropAndPlug(1, x0);
}

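// Strings that are valid array indices (e.g. "123") can cache the parsed
// index in their hash field; this tests the 'contains cached array index'
// bit of that field.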
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
  __ Tst(x10, String::kContainsCachedArrayIndexMask);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(eq, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

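// Extracts the array index previously cached in the string's hash field
// (see EmitHasCachedArrayIndex above).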
void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(x0);

  __ Ldr(x10, FieldMemOperand(x0, String::kHashFieldOffset));
  __ IndexFromHash(x10, x0);

  context()->Plug(x0);
}

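// The super constructor of a function is the prototype of its map, so two
// dependent loads (the map, then Map::kPrototypeOffset) suffice here.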
void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(1, args->length());
  VisitForAccumulatorValue(args->at(0));
  __ AssertFunction(x0);
  __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
  __ Ldr(x0, FieldMemOperand(x0, Map::kPrototypeOffset));
  context()->Plug(x0);
}

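// Reads the isolate's debug-is-active byte through an external reference
// and returns it smi-tagged.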
void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ Mov(x10, debug_is_active);
  __ Ldrb(x0, MemOperand(x10));
  __ SmiTag(x0);
  context()->Plug(x0);
}

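// %_CreateIterResultObject(value, done): the fast path allocates the
// iterator result (e.g. '{ value: 42, done: false }') in new space and
// initializes it with paired stp stores; if allocation fails we defer to
// the runtime.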
void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  Label runtime, done;

  Register result = x0;
  __ Allocate(JSIteratorResult::kSize, result, x10, x11, &runtime,
              NO_ALLOCATION_FLAGS);
  Register map_reg = x1;
  Register result_value = x2;
  Register boolean_done = x3;
  Register empty_fixed_array = x4;
  Register untagged_result = x5;
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, map_reg);
  __ Pop(boolean_done);
  __ Pop(result_value);
  __ LoadRoot(empty_fixed_array, Heap::kEmptyFixedArrayRootIndex);
  STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
                JSObject::kElementsOffset);
  STATIC_ASSERT(JSIteratorResult::kValueOffset + kPointerSize ==
                JSIteratorResult::kDoneOffset);
  __ ObjectUntag(untagged_result, result);
  __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
  __ Stp(empty_fixed_array, empty_fixed_array,
         MemOperand(untagged_result, JSObject::kPropertiesOffset));
  __ Stp(result_value, boolean_done,
         MemOperand(untagged_result, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
  __ B(&done);

  __ Bind(&runtime);
  CallRuntimeWithOperands(Runtime::kCreateIterResultObject);

  __ Bind(&done);
  context()->Plug(x0);
}

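// JS runtime functions are implemented as JS builtins: the callee is loaded
// from a slot in the native context and then invoked like an ordinary call
// with an undefined receiver.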
void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
  // Push function.
  __ LoadNativeContextSlot(expr->context_index(), x0);
  PushOperand(x0);

  // Push undefined as the receiver.
  __ LoadRoot(x0, Heap::kUndefinedValueRootIndex);
  PushOperand(x0);
}


void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  SetCallPosition(expr);
  __ Peek(x1, (arg_count + 1) * kPointerSize);
  __ Mov(x0, arg_count);
  __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
          RelocInfo::CODE_TARGET);
  OperandStackDepthDecrement(arg_count + 1);
  RestoreContext();
}

void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        CallRuntimeWithOperands(is_strict(language_mode())
                                    ? Runtime::kDeleteProperty_Strict
                                    : Runtime::kDeleteProperty_Sloppy);
        context()->Plug(x0);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode but
        // "delete this" is allowed.
        bool is_this = var->HasThisName(isolate());
        DCHECK(is_sloppy(language_mode()) || is_this);
        if (var->IsUnallocatedOrGlobalSlot()) {
          __ LoadGlobalObject(x12);
          __ Mov(x11, Operand(var->name()));
          __ Push(x12, x11);
          __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
          context()->Plug(x0);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(is_this);
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ Push(var->name());
          __ CallRuntime(Runtime::kDeleteLookupSlot);
          context()->Plug(x0);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }
    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(Heap::kUndefinedValueRootIndex);
      break;
    }
    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        // TODO(jbramley): This could be much more efficient using (for
        // example) the CSEL instruction.
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);

        __ Bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
        __ B(&done);

        __ Bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(),
                               BailoutState::NO_REGISTERS);
        __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
        __ B(&done);

        __ Bind(&done);
        if (context()->IsStackValue()) {
          __ Push(result_register());
        }
      }
      break;
    }
    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      {
        AccumulatorValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ Mov(x3, x0);
      TypeofStub typeof_stub(isolate());
      __ CallStub(&typeof_stub);
      context()->Plug(x0);
      break;
    }
    default:
      UNREACHABLE();
  }
}

void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());

  Comment cmnt(masm_, "[ CountOperation");

  Property* prop = expr->expression()->AsProperty();
  LhsKind assign_type = Property::GetAssignType(prop);

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      PushOperand(xzr);
    }
    switch (assign_type) {
      case NAMED_PROPERTY: {
        // Put the object both on the stack and in the register.
        VisitForStackValue(prop->obj());
        __ Peek(LoadDescriptor::ReceiverRegister(), 0);
        EmitNamedPropertyLoad(prop);
        break;
      }

      case NAMED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForAccumulatorValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        PushOperand(result_register());
        const Register scratch = x10;
        __ Peek(scratch, kPointerSize);
        PushOperands(scratch, result_register());
        EmitNamedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_SUPER_PROPERTY: {
        VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
        VisitForStackValue(
            prop->obj()->AsSuperPropertyReference()->home_object());
        VisitForAccumulatorValue(prop->key());
        PushOperand(result_register());
        const Register scratch1 = x10;
        const Register scratch2 = x11;
        __ Peek(scratch1, 2 * kPointerSize);
        __ Peek(scratch2, kPointerSize);
        PushOperands(scratch1, scratch2, result_register());
        EmitKeyedSuperPropertyLoad(prop);
        break;
      }

      case KEYED_PROPERTY: {
        VisitForStackValue(prop->obj());
        VisitForStackValue(prop->key());
        __ Peek(LoadDescriptor::ReceiverRegister(), 1 * kPointerSize);
        __ Peek(LoadDescriptor::NameRegister(), 0);
        EmitKeyedPropertyLoad(prop);
        break;
      }

      case VARIABLE:
        UNREACHABLE();
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
  } else {
    PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
  }

  // Inline smi case if we are in a loop.
  Label stub_call, done;
  JumpPatchSite patch_site(masm_);

  int count_value = expr->op() == Token::INC ? 1 : -1;
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(x0, &slow);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property we
        // store the result under the receiver that is currently on top of the
        // stack.
        switch (assign_type) {
          case VARIABLE:
            __ Push(x0);
            break;
          case NAMED_PROPERTY:
            __ Poke(x0, kPointerSize);
            break;
          case NAMED_SUPER_PROPERTY:
            __ Poke(x0, kPointerSize * 2);
            break;
          case KEYED_PROPERTY:
            __ Poke(x0, kPointerSize * 2);
            break;
          case KEYED_SUPER_PROPERTY:
            __ Poke(x0, kPointerSize * 3);
            break;
        }
      }
    }

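    // Inline fast case: add the delta directly to the tagged smi. 'vc'
    // (overflow clear) means the smi addition did not overflow, so x0
    // already holds the correct result.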
    __ Adds(x0, x0, Smi::FromInt(count_value));
    __ B(vc, &done);
    // Call stub. Undo operation first.
    __ Sub(x0, x0, Smi::FromInt(count_value));
    __ B(&stub_call);
    __ Bind(&slow);
  }

  // Convert old value into a number.
  __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
  PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          PushOperand(x0);
          break;
        case NAMED_PROPERTY:
          __ Poke(x0, kXRegSize);
          break;
        case NAMED_SUPER_PROPERTY:
          __ Poke(x0, 2 * kXRegSize);
          break;
        case KEYED_PROPERTY:
          __ Poke(x0, 2 * kXRegSize);
          break;
        case KEYED_SUPER_PROPERTY:
          __ Poke(x0, 3 * kXRegSize);
          break;
      }
    }
  }

  __ Bind(&stub_call);
  __ Mov(x1, x0);
  __ Mov(x0, Smi::FromInt(count_value));

  SetExpressionPosition(expr);

  {
    Assembler::BlockPoolsScope scope(masm_);
    Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
    CallIC(code, expr->CountBinOpFeedbackId());
    patch_site.EmitPatchInfo();
  }
  __ Bind(&done);

  // Store the value returned in x0.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN, expr->CountSlot());
          PrepareForBailoutForId(expr->AssignmentId(),
                                 BailoutState::TOS_REGISTER);
          context.Plug(x0);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN, expr->CountSlot());
        PrepareForBailoutForId(expr->AssignmentId(),
                               BailoutState::TOS_REGISTER);
        context()->Plug(x0);
      }
      break;
    case NAMED_PROPERTY: {
      __ Mov(StoreDescriptor::NameRegister(),
             Operand(prop->key()->AsLiteral()->value()));
      PopOperand(StoreDescriptor::ReceiverRegister());
      EmitLoadStoreICSlot(expr->CountSlot());
      CallStoreIC();
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
    case NAMED_SUPER_PROPERTY: {
      EmitNamedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
    case KEYED_SUPER_PROPERTY: {
      EmitKeyedSuperPropertyStore(prop);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
    case KEYED_PROPERTY: {
      PopOperand(StoreDescriptor::NameRegister());
      PopOperand(StoreDescriptor::ReceiverRegister());
      Handle<Code> ic =
          CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
      EmitLoadStoreICSlot(expr->CountSlot());
      CallIC(ic);
      PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(x0);
      }
      break;
    }
  }
}

void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof");
  Comment cmnt(masm_, "[ EmitLiteralCompareTypeof");
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof number_string");
    __ JumpIfSmi(x0, if_true);
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ CompareRoot(x0, Heap::kHeapNumberMapRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof string_string");
    __ JumpIfSmi(x0, if_false);
    __ CompareObjectType(x0, x0, x1, FIRST_NONSTRING_TYPE);
    Split(lt, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof symbol_string");
    __ JumpIfSmi(x0, if_false);
    __ CompareObjectType(x0, x0, x1, SYMBOL_TYPE);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof boolean_string");
    __ JumpIfRoot(x0, Heap::kTrueValueRootIndex, if_true);
    __ CompareRoot(x0, Heap::kFalseValueRootIndex);
    Split(eq, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    ASM_LOCATION(
        "FullCodeGenerator::EmitLiteralCompareTypeof undefined_string");
    __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_false);
    __ JumpIfSmi(x0, if_false);
    // Check for undetectable objects => true.
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
    __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
                    fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof function_string");
    __ JumpIfSmi(x0, if_false);
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
    __ And(x1, x1, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable));
    __ CompareAndSplit(x1, Operand(1 << Map::kIsCallable), eq, if_true,
                       if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof object_string");
    __ JumpIfSmi(x0, if_false);
    __ JumpIfRoot(x0, Heap::kNullValueRootIndex, if_true);
    STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
    __ JumpIfObjectType(x0, x10, x11, FIRST_JS_RECEIVER_TYPE, if_false, lt);
    // Check for callable or undetectable objects => false.
    __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset));
    __ TestAndSplit(x10, (1 << Map::kIsCallable) | (1 << Map::kIsUndetectable),
                    if_true, if_false, fall_through);
// clang-format off
#define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type)   \
  } else if (String::Equals(check, factory->type##_string())) { \
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof " \
                 #type "_string");                              \
    __ JumpIfSmi(x0, if_false);                                 \
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));    \
    __ CompareRoot(x0, Heap::k##Type##MapRootIndex);            \
    Split(eq, if_true, if_false, fall_through);
  SIMD128_TYPES(SIMD128_TYPE)
#undef SIMD128_TYPE
    // clang-format on
  } else {
    ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareTypeof other");
    if (if_false != fall_through) __ B(if_false);
  }
  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");

  // Try to generate an optimized comparison with a literal value.
  // TODO(jbramley): This only checks common values like NaN or undefined.
  // Should it also handle ARM64 immediate operands?
  if (TryLiteralCompare(expr)) {
    return;
  }

  // Assign labels according to context()->PrepareTest.
  Label materialize_true;
  Label materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      SetExpressionPosition(expr);
      EmitHasProperty();
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(x0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      PopOperand(x1);
      InstanceOfStub stub(isolate());
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ CompareRoot(x0, Heap::kTrueValueRootIndex);
      Split(eq, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      SetExpressionPosition(expr);
      Condition cond = CompareIC::ComputeCondition(op);

      // Pop the stack value.
      PopOperand(x1);

      JumpPatchSite patch_site(masm_);
      if (ShouldInlineSmiCase(op)) {
        Label slow_case;
        patch_site.EmitJumpIfEitherNotSmi(x0, x1, &slow_case);
        __ Cmp(x1, x0);
        Split(cond, if_true, if_false, NULL);
        __ Bind(&slow_case);
      }

      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ CompareAndSplit(x0, 0, cond, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  ASM_LOCATION("FullCodeGenerator::EmitLiteralCompareNil");
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ CompareRoot(x0, nil_value);
    Split(eq, if_true, if_false, fall_through);
  } else {
    __ JumpIfSmi(x0, if_false);
    __ Ldr(x0, FieldMemOperand(x0, HeapObject::kMapOffset));
    __ Ldrb(x1, FieldMemOperand(x0, Map::kBitFieldOffset));
    __ TestAndSplit(x1, 1 << Map::kIsUndetectable, if_false, if_true,
                    fall_through);
  }

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::VisitYield(Yield* expr) {
  Comment cmnt(masm_, "[ Yield");
  SetExpressionPosition(expr);

  // Evaluate yielded value first; the initial iterator definition depends on
  // this. It stays on the stack while we update the iterator.
  VisitForStackValue(expr->expression());

  // TODO(jbramley): Tidy this up once the merge is done, using named registers
  // and suchlike. The implementation changes a little by bleeding_edge so I
  // don't want to spend too much time on it now.

  Label suspend, continuation, post_runtime, resume, exception;

  __ B(&suspend);
  // TODO(jbramley): This label is bound here because the following code
  // looks at its pos(). Is it possible to do something more efficient here,
  // perhaps using Adr?
  __ Bind(&continuation);
  // When we arrive here, x0 holds the generator object.
  __ RecordGeneratorContinuation();
  __ Ldr(x1, FieldMemOperand(x0, JSGeneratorObject::kResumeModeOffset));
  __ Ldr(x0, FieldMemOperand(x0, JSGeneratorObject::kInputOrDebugPosOffset));
  STATIC_ASSERT(JSGeneratorObject::kNext < JSGeneratorObject::kReturn);
  STATIC_ASSERT(JSGeneratorObject::kThrow > JSGeneratorObject::kReturn);
  __ Cmp(x1, Operand(Smi::FromInt(JSGeneratorObject::kReturn)));
  __ B(lt, &resume);
  __ Push(result_register());
  __ B(gt, &exception);
  EmitCreateIteratorResult(true);
  EmitUnwindAndReturn();

  __ Bind(&exception);
  __ CallRuntime(Runtime::kThrow);

  __ Bind(&suspend);
  OperandStackDepthIncrement(1);  // Not popped on this path.
  VisitForAccumulatorValue(expr->generator_object());
  DCHECK((continuation.pos() > 0) && Smi::IsValid(continuation.pos()));
  __ Mov(x1, Smi::FromInt(continuation.pos()));
  __ Str(x1, FieldMemOperand(x0, JSGeneratorObject::kContinuationOffset));
  __ Str(cp, FieldMemOperand(x0, JSGeneratorObject::kContextOffset));
  __ Mov(x1, cp);
  __ RecordWriteField(x0, JSGeneratorObject::kContextOffset, x1, x2,
                      kLRHasBeenSaved, kDontSaveFPRegs);
  __ Add(x1, fp, StandardFrameConstants::kExpressionsOffset);
  __ Cmp(__ StackPointer(), x1);
  __ B(eq, &post_runtime);
  __ Push(x0);  // generator object
  __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
  RestoreContext();
  __ Bind(&post_runtime);
  PopOperand(result_register());
  EmitReturnSequence();

  __ Bind(&resume);
  context()->Plug(result_register());
}

void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
  OperandStackDepthIncrement(2);
  __ Push(reg1, reg2);
}

void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
                                     Register reg3) {
  OperandStackDepthIncrement(3);
  __ Push(reg1, reg2, reg3);
}

void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
  OperandStackDepthDecrement(2);
  __ Pop(reg1, reg2);
}

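// In debug code, verify that the tracked operand stack depth matches the
// actual frame: fp - jssp should equal the fixed frame size plus
// operand_stack_depth_ pointer-sized slots.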
void FullCodeGenerator::EmitOperandStackDepthCheck() {
  if (FLAG_debug_code) {
    int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
                        operand_stack_depth_ * kPointerSize;
    __ Sub(x0, fp, jssp);
    __ Cmp(x0, Operand(expected_diff));
    __ Assert(eq, kUnexpectedStackDepth);
  }
}

void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label allocate, done_allocate;

  // Allocate and populate an object with this form: { value: VAL, done: DONE }

  Register result = x0;
  __ Allocate(JSIteratorResult::kSize, result, x10, x11, &allocate,
              NO_ALLOCATION_FLAGS);
  __ B(&done_allocate);

  __ Bind(&allocate);
  __ Push(Smi::FromInt(JSIteratorResult::kSize));
  __ CallRuntime(Runtime::kAllocateInNewSpace);

  __ Bind(&done_allocate);
  Register map_reg = x1;
  Register result_value = x2;
  Register boolean_done = x3;
  Register empty_fixed_array = x4;
  Register untagged_result = x5;
  __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, map_reg);
  PopOperand(result_value);
  __ LoadRoot(boolean_done,
              done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
  __ LoadRoot(empty_fixed_array, Heap::kEmptyFixedArrayRootIndex);
  STATIC_ASSERT(JSObject::kPropertiesOffset + kPointerSize ==
                JSObject::kElementsOffset);
  STATIC_ASSERT(JSIteratorResult::kValueOffset + kPointerSize ==
                JSIteratorResult::kDoneOffset);
  __ ObjectUntag(untagged_result, result);
  __ Str(map_reg, MemOperand(untagged_result, HeapObject::kMapOffset));
  __ Stp(empty_fixed_array, empty_fixed_array,
         MemOperand(untagged_result, JSObject::kPropertiesOffset));
  __ Stp(result_value, boolean_done,
         MemOperand(untagged_result, JSIteratorResult::kValueOffset));
  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
}

// TODO(all): I don't like this method.
// It seems to me that in too many places x0 is used in place of this.
// Also, this function is not suitable for all places where x0 should be
// abstracted (eg. when used as an argument). But some places assume that the
// first argument register is x0, and use this function instead.
// Considering that most of the register allocation is hard-coded in the
// FullCodeGen, that it is unlikely we will need to change it extensively, and
// that abstracting the allocation through functions would not yield any
// performance benefit, I think the existence of this function is debatable.
Register FullCodeGenerator::result_register() {
  return x0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}

void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
  DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
  __ Ldr(value, MemOperand(fp, frame_offset));
}

void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK(POINTER_SIZE_ALIGN(frame_offset) == frame_offset);
  __ Str(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ Ldr(dst, ContextMemOperand(cp, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* closure_scope = scope()->ClosureScope();
  if (closure_scope->is_script_scope() ||
      closure_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.
    DCHECK(kSmiTag == 0);
    __ LoadNativeContextSlot(Context::CLOSURE_INDEX, x10);
  } else if (closure_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code.  Fetch it from the context.
    __ Ldr(x10, ContextMemOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(closure_scope->is_function_scope());
    __ Ldr(x10, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  PushOperand(x10);
}


void FullCodeGenerator::EnterFinallyBlock() {
  ASM_LOCATION("FullCodeGenerator::EnterFinallyBlock");
  DCHECK(!result_register().is(x10));
  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Mov(x10, pending_message_obj);
  __ Ldr(x10, MemOperand(x10));
  PushOperand(x10);

  ClearPendingMessage();
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASM_LOCATION("FullCodeGenerator::ExitFinallyBlock");
  DCHECK(!result_register().is(x10));

  // Restore pending message from stack.
  PopOperand(x10);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ Mov(x13, pending_message_obj);
  __ Str(x10, MemOperand(x13));
}


void FullCodeGenerator::ClearPendingMessage() {
  DCHECK(!result_register().is(x10));
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
  __ Mov(x13, pending_message_obj);
  __ Str(x10, MemOperand(x13));
}

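// Replays the commands (break, continue, return, throw) that were deferred
// while a finally block was on the stack. The token identifying the pending
// command arrives in x1; the accumulator is restored first.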
void FullCodeGenerator::DeferredCommands::EmitCommands() {
  __ Pop(result_register(), x1);  // Restore the accumulator and get the token.
  for (DeferredCommand cmd : commands_) {
    Label skip;
    __ Cmp(x1, Operand(Smi::FromInt(cmd.token)));
    __ B(ne, &skip);
    switch (cmd.command) {
      case kReturn:
        codegen_->EmitUnwindAndReturn();
        break;
      case kThrow:
        __ Push(result_register());
        __ CallRuntime(Runtime::kReThrow);
        break;
      case kContinue:
        codegen_->EmitContinue(cmd.target);
        break;
      case kBreak:
        codegen_->EmitBreak(cmd.target);
        break;
    }
    __ bind(&skip);
  }
}

#undef __

void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  // Turn the jump into a nop.
  Address branch_address = pc - 3 * kInstructionSize;
  Isolate* isolate = unoptimized_code->GetIsolate();
  PatchingAssembler patcher(isolate, branch_address, 1);

  DCHECK(Instruction::Cast(branch_address)
             ->IsNop(Assembler::INTERRUPT_CODE_NOP) ||
         (Instruction::Cast(branch_address)->IsCondBranchImm() &&
          Instruction::Cast(branch_address)->ImmPCOffset() ==
              6 * kInstructionSize));

  switch (target_state) {
    case INTERRUPT:
      //  <decrement profiling counter>
      //  .. .. .. ..       b.pl ok
      //  .. .. .. ..       ldr x16, pc+<interrupt stub address>
      //  .. .. .. ..       blr x16
      //  ... more instructions.
      //  ok-label
      // Jump offset is 6 instructions.
      patcher.b(6, pl);
      break;
    case ON_STACK_REPLACEMENT:
      //  <decrement profiling counter>
      //  .. .. .. ..       mov x0, x0 (NOP)
      //  .. .. .. ..       ldr x16, pc+<on-stack replacement address>
      //  .. .. .. ..       blr x16
      patcher.nop(Assembler::INTERRUPT_CODE_NOP);
      break;
  }

  // Replace the call address.
  Instruction* load = Instruction::Cast(pc)->preceding(2);
  Address interrupt_address_pointer =
      reinterpret_cast<Address>(load) + load->ImmPCOffset();
  DCHECK((Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(
              isolate->builtins()->OnStackReplacement()->entry())) ||
         (Memory::uint64_at(interrupt_address_pointer) ==
          reinterpret_cast<uint64_t>(
              isolate->builtins()->InterruptCheck()->entry())));
  Memory::uint64_at(interrupt_address_pointer) =
      reinterpret_cast<uint64_t>(replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, reinterpret_cast<Address>(load), replacement_code);
}

BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  // TODO(jbramley): There should be some extra assertions here (as in the ARM
  // back-end), but this function is gone in bleeding_edge so it might not
  // matter anyway.
  Instruction* jump_or_nop = Instruction::Cast(pc)->preceding(3);

  if (jump_or_nop->IsNop(Assembler::INTERRUPT_CODE_NOP)) {
    Instruction* load = Instruction::Cast(pc)->preceding(2);
    uint64_t entry = Memory::uint64_at(reinterpret_cast<Address>(load) +
                                       load->ImmPCOffset());
    if (entry == reinterpret_cast<uint64_t>(
        isolate->builtins()->OnStackReplacement()->entry())) {
      return ON_STACK_REPLACEMENT;
    } else {
      UNREACHABLE();
    }
  }

  return INTERRUPT;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM64