1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #if V8_TARGET_ARCH_ARM
6 
7 #include "src/ast/compile-time-value.h"
8 #include "src/ast/scopes.h"
9 #include "src/builtins/builtins-constructor.h"
10 #include "src/code-factory.h"
11 #include "src/code-stubs.h"
12 #include "src/codegen.h"
13 #include "src/compilation-info.h"
14 #include "src/compiler.h"
15 #include "src/debug/debug.h"
16 #include "src/full-codegen/full-codegen.h"
17 #include "src/ic/ic.h"
18 
19 #include "src/arm/code-stubs-arm.h"
20 #include "src/arm/macro-assembler-arm.h"
21 
22 namespace v8 {
23 namespace internal {
24 
25 #define __ ACCESS_MASM(masm())
26 
27 // A patch site is a location in the code that can be patched. This class
28 // has a number of methods to emit the patchable code and the method
29 // EmitPatchInfo to record a marker back to the patchable code. The marker
30 // is a cmp rx, #yyy instruction, where x * 0x00000fff + yyy (using the raw
31 // 12 bit immediate value) is the delta from the pc to the first instruction
32 // of the patchable code.
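//
// As a rough worked example (illustrative numbers only, not taken from any
// particular build): with kOff12Mask == 0xfff, a delta of 4660 bytes is
// recorded as "cmp r1, #565", because 4660 / 0xfff == 1 selects register r1
// and 4660 % 0xfff == 565 supplies the immediate; a patcher recovers the
// delta as 1 * 0xfff + 565 == 4660.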
33 class JumpPatchSite BASE_EMBEDDED {
34  public:
35   explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
36 #ifdef DEBUG
37     info_emitted_ = false;
38 #endif
39   }
40 
41   ~JumpPatchSite() {
42     DCHECK(patch_site_.is_bound() == info_emitted_);
43   }
44 
45   // When initially emitting this code, ensure that the jump is always taken,
46   // skipping the inlined smi code.
47   void EmitJumpIfNotSmi(Register reg, Label* target) {
48     DCHECK(!patch_site_.is_bound() && !info_emitted_);
49     Assembler::BlockConstPoolScope block_const_pool(masm_);
50     __ bind(&patch_site_);
51     __ cmp(reg, Operand(reg));
52     __ b(eq, target);  // Always taken before patched.
53   }
54 
55   // When initially emitting this code, ensure that the jump is never taken,
56   // so the inlined smi code is not skipped.
57   void EmitJumpIfSmi(Register reg, Label* target) {
58     DCHECK(!patch_site_.is_bound() && !info_emitted_);
59     Assembler::BlockConstPoolScope block_const_pool(masm_);
60     __ bind(&patch_site_);
61     __ cmp(reg, Operand(reg));
62     __ b(ne, target);  // Never taken before patched.
63   }
64 
65   void EmitPatchInfo() {
66     // Block literal pool emission whilst recording patch site information.
67     Assembler::BlockConstPoolScope block_const_pool(masm_);
68     if (patch_site_.is_bound()) {
69       int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
70       Register reg;
71       reg.set_code(delta_to_patch_site / kOff12Mask);
72       __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
73 #ifdef DEBUG
74       info_emitted_ = true;
75 #endif
76     } else {
77       __ nop();  // Signals no inlined code.
78     }
79   }
80 
81  private:
82   MacroAssembler* masm() { return masm_; }
83   MacroAssembler* masm_;
84   Label patch_site_;
85 #ifdef DEBUG
86   bool info_emitted_;
87 #endif
88 };
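
// Typical usage of JumpPatchSite in this file (see VisitSwitchStatement
// below): construct a patch site, guard the inlined smi comparison with
// EmitJumpIfNotSmi(), and call EmitPatchInfo() right after the CompareIC
// call so that the IC can later locate and patch the inlined code.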
89 
90 
91 // Generate code for a JS function.  On entry to the function the receiver
92 // and arguments have been pushed on the stack left to right.  The actual
93 // argument count matches the formal parameter count expected by the
94 // function.
95 //
96 // The live registers are:
97 //   o r1: the JS function object being called (i.e., ourselves)
98 //   o r3: the new target value
99 //   o cp: our context
100 //   o pp: our caller's constant pool pointer (if enabled)
101 //   o fp: our caller's frame pointer
102 //   o sp: stack pointer
103 //   o lr: return address
104 //
105 // The function builds a JS frame.  Please see JavaScriptFrameConstants in
106 // frames-arm.h for its layout.
107 void FullCodeGenerator::Generate() {
108   CompilationInfo* info = info_;
109   profiling_counter_ = isolate()->factory()->NewCell(
110       Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
111   SetFunctionPosition(literal());
112   Comment cmnt(masm_, "[ function compiled by full code generator");
113 
114   ProfileEntryHookStub::MaybeCallEntryHook(masm_);
115 
116   if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
117     int receiver_offset = info->scope()->num_parameters() * kPointerSize;
118     __ ldr(r2, MemOperand(sp, receiver_offset));
119     __ AssertNotSmi(r2);
120     __ CompareObjectType(r2, r2, no_reg, FIRST_JS_RECEIVER_TYPE);
121     __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
122   }
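
  // For illustration (hypothetical count): with two declared parameters,
  // receiver_offset above is 2 * kPointerSize == 8, so the receiver sits at
  // sp + 8 and the arguments, pushed after it from left to right, occupy
  // sp + 4 and sp, with the last argument on top.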
123 
124   // Open a frame scope to indicate that there is a frame on the stack.  The
125   // MANUAL indicates that the scope shouldn't actually generate code to set up
126   // the frame (that is done below).
127   FrameScope frame_scope(masm_, StackFrame::MANUAL);
128 
129   info->set_prologue_offset(masm_->pc_offset());
130   __ Prologue(info->GeneratePreagedPrologue());
131 
132   // Increment invocation count for the function.
133   {
134     Comment cmnt(masm_, "[ Increment invocation count");
135     __ ldr(r2, FieldMemOperand(r1, JSFunction::kFeedbackVectorOffset));
136     __ ldr(r2, FieldMemOperand(r2, Cell::kValueOffset));
137     __ ldr(r9, FieldMemOperand(
138                    r2, FeedbackVector::kInvocationCountIndex * kPointerSize +
139                            FeedbackVector::kHeaderSize));
140     __ add(r9, r9, Operand(Smi::FromInt(1)));
141     __ str(r9, FieldMemOperand(
142                    r2, FeedbackVector::kInvocationCountIndex * kPointerSize +
143                            FeedbackVector::kHeaderSize));
144   }
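
  // In effect the block above performs, roughly,
  //   closure->feedback_vector()->invocation_count += 1
  // with the count kept as a Smi inside the feedback vector.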
145 
146   { Comment cmnt(masm_, "[ Allocate locals");
147     int locals_count = info->scope()->num_stack_slots();
148     OperandStackDepthIncrement(locals_count);
149     if (locals_count > 0) {
150       if (locals_count >= 128) {
151         Label ok;
152         __ sub(r9, sp, Operand(locals_count * kPointerSize));
153         __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
154         __ cmp(r9, Operand(r2));
155         __ b(hs, &ok);
156         __ CallRuntime(Runtime::kThrowStackOverflow);
157         __ bind(&ok);
158       }
159       __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
160       int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
161       if (locals_count >= kMaxPushes) {
162         int loop_iterations = locals_count / kMaxPushes;
163         __ mov(r2, Operand(loop_iterations));
164         Label loop_header;
165         __ bind(&loop_header);
166         // Do pushes.
167         for (int i = 0; i < kMaxPushes; i++) {
168           __ push(r9);
169         }
170         // Continue loop if not done.
171         __ sub(r2, r2, Operand(1), SetCC);
172         __ b(&loop_header, ne);
173       }
174       int remaining = locals_count % kMaxPushes;
175       // Emit the remaining pushes.
176     for (int i = 0; i < remaining; i++) {
177         __ push(r9);
178       }
179     }
180   }
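
  // Worked example for the push loop above (illustrative count): with
  // locals_count == 70 and kMaxPushes == 32, the loop runs loop_iterations
  // == 2 times pushing 32 slots each, and the trailing unrolled pushes cover
  // the remaining 70 % 32 == 6 slots.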
181 
182   bool function_in_register_r1 = true;
183 
184   // Possibly allocate a local context.
185   if (info->scope()->NeedsContext()) {
186     // Argument to NewContext is the function, which is still in r1.
187     Comment cmnt(masm_, "[ Allocate context");
188     bool need_write_barrier = true;
189     int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
190     if (info->scope()->is_script_scope()) {
191       __ push(r1);
192       __ Push(info->scope()->scope_info());
193       __ CallRuntime(Runtime::kNewScriptContext);
194       PrepareForBailoutForId(BailoutId::ScriptContext(),
195                              BailoutState::TOS_REGISTER);
196       // The new target value is not used, clobbering is safe.
197       DCHECK_NULL(info->scope()->new_target_var());
198     } else {
199       if (info->scope()->new_target_var() != nullptr) {
200         __ push(r3);  // Preserve new target.
201       }
202       if (slots <=
203           ConstructorBuiltinsAssembler::MaximumFunctionContextSlots()) {
204         Callable callable = CodeFactory::FastNewFunctionContext(
205             isolate(), info->scope()->scope_type());
206         __ mov(FastNewFunctionContextDescriptor::SlotsRegister(),
207                Operand(slots));
208         __ Call(callable.code(), RelocInfo::CODE_TARGET);
209         // Result of the FastNewFunctionContext builtin is always in new space.
210         need_write_barrier = false;
211       } else {
212         __ push(r1);
213         __ Push(Smi::FromInt(info->scope()->scope_type()));
214         __ CallRuntime(Runtime::kNewFunctionContext);
215       }
216       if (info->scope()->new_target_var() != nullptr) {
217       __ pop(r3);  // Restore new target.
218       }
219     }
220     function_in_register_r1 = false;
221     // Context is returned in r0.  It replaces the context passed to us.
222     // It's saved in the stack and kept live in cp.
223     __ mov(cp, r0);
224     __ str(r0, MemOperand(fp, StandardFrameConstants::kContextOffset));
225     // Copy any necessary parameters into the context.
226     int num_parameters = info->scope()->num_parameters();
227     int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
228     for (int i = first_parameter; i < num_parameters; i++) {
229       Variable* var =
230           (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
231       if (var->IsContextSlot()) {
232         int parameter_offset = StandardFrameConstants::kCallerSPOffset +
233             (num_parameters - 1 - i) * kPointerSize;
234         // Load parameter from stack.
235         __ ldr(r0, MemOperand(fp, parameter_offset));
236         // Store it in the context.
237         MemOperand target = ContextMemOperand(cp, var->index());
238         __ str(r0, target);
239 
240         // Update the write barrier.
241         if (need_write_barrier) {
242           __ RecordWriteContextSlot(cp, target.offset(), r0, r2,
243                                     kLRHasBeenSaved, kDontSaveFPRegs);
244         } else if (FLAG_debug_code) {
245           Label done;
246           __ JumpIfInNewSpace(cp, r0, &done);
247           __ Abort(kExpectedNewSpaceObject);
248           __ bind(&done);
249         }
250       }
251     }
252   }
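
  // Illustration of the parameter copy above (hypothetical count): with
  // num_parameters == 2, the receiver (i == -1) is read from
  // kCallerSPOffset + 2 * kPointerSize, parameter 0 from
  // kCallerSPOffset + kPointerSize and parameter 1 from kCallerSPOffset,
  // matching the left-to-right push order on entry.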
253 
254   // The registers holding this function and the new target are both trashed if
255   // we bail out here. But since that can happen only when the new target is not
256   // used and we allocate a context, |function_in_register_r1| remains correct.
257   PrepareForBailoutForId(BailoutId::FunctionContext(),
258                          BailoutState::NO_REGISTERS);
259 
260   // We don't support new.target and rest parameters here.
261   DCHECK_NULL(info->scope()->new_target_var());
262   DCHECK_NULL(info->scope()->rest_parameter());
263   DCHECK_NULL(info->scope()->this_function_var());
264 
265   Variable* arguments = info->scope()->arguments();
266   if (arguments != NULL) {
267     // Function uses arguments object.
268     Comment cmnt(masm_, "[ Allocate arguments object");
269     if (!function_in_register_r1) {
270       // Load this again, if it's used by the local context below.
271       __ ldr(r1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
272     }
273     if (is_strict(language_mode()) || !has_simple_parameters()) {
274       Callable callable = CodeFactory::FastNewStrictArguments(isolate());
275       __ Call(callable.code(), RelocInfo::CODE_TARGET);
276       RestoreContext();
277     } else if (literal()->has_duplicate_parameters()) {
278       __ Push(r1);
279       __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
280     } else {
281       Callable callable = CodeFactory::FastNewSloppyArguments(isolate());
282       __ Call(callable.code(), RelocInfo::CODE_TARGET);
283       RestoreContext();
284     }
285 
286     SetVar(arguments, r0, r1, r2);
287   }
288 
289   if (FLAG_trace) {
290     __ CallRuntime(Runtime::kTraceEnter);
291   }
292 
293   // Visit the declarations and body.
294   PrepareForBailoutForId(BailoutId::FunctionEntry(),
295                          BailoutState::NO_REGISTERS);
296   {
297     Comment cmnt(masm_, "[ Declarations");
298     VisitDeclarations(scope()->declarations());
299   }
300 
301   // Assert that the declarations do not use ICs. Otherwise the debugger
302   // won't be able to redirect a PC at an IC to the correct IC in newly
303   // recompiled code.
304   DCHECK_EQ(0, ic_total_count_);
305 
306   {
307     Comment cmnt(masm_, "[ Stack check");
308     PrepareForBailoutForId(BailoutId::Declarations(),
309                            BailoutState::NO_REGISTERS);
310     Label ok;
311     __ LoadRoot(ip, Heap::kStackLimitRootIndex);
312     __ cmp(sp, Operand(ip));
313     __ b(hs, &ok);
314     Handle<Code> stack_check = isolate()->builtins()->StackCheck();
315     PredictableCodeSizeScope predictable(masm_);
316     predictable.ExpectSize(
317         masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
318     __ Call(stack_check, RelocInfo::CODE_TARGET);
319     __ bind(&ok);
320   }
321 
322   {
323     Comment cmnt(masm_, "[ Body");
324     DCHECK(loop_depth() == 0);
325     VisitStatements(literal()->body());
326     DCHECK(loop_depth() == 0);
327   }
328 
329   // Always emit a 'return undefined' in case control fell off the end of
330   // the body.
331   { Comment cmnt(masm_, "[ return <undefined>;");
332     __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
333   }
334   EmitReturnSequence();
335 
336   // Force emit the constant pool, so it doesn't get emitted in the middle
337   // of the back edge table.
338   masm()->CheckConstPool(true, false);
339 }
340 
341 void FullCodeGenerator::ClearAccumulator() { __ mov(r0, Operand(Smi::kZero)); }
342 
343 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
344   __ mov(r2, Operand(profiling_counter_));
345   __ ldr(r3, FieldMemOperand(r2, Cell::kValueOffset));
346   __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
347   __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
348 }
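
// Note on the decrement above: the counter is a Smi stored in a Cell, and the
// subtraction uses SetCC so callers can branch on "pl" (counter still
// non-negative) or fall through to an interrupt check once it goes negative,
// as EmitBackEdgeBookkeeping() and the return sequence do below.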
349 
350 
351 #ifdef CAN_USE_ARMV7_INSTRUCTIONS
352 static const int kProfileCounterResetSequenceLength = 5 * Assembler::kInstrSize;
353 #else
354 static const int kProfileCounterResetSequenceLength = 7 * Assembler::kInstrSize;
355 #endif
356 
357 
358 void FullCodeGenerator::EmitProfilingCounterReset() {
359   Assembler::BlockConstPoolScope block_const_pool(masm_);
360   PredictableCodeSizeScope predictable_code_size_scope(
361       masm_, kProfileCounterResetSequenceLength);
362   Label start;
363   __ bind(&start);
364   int reset_value = FLAG_interrupt_budget;
365   __ mov(r2, Operand(profiling_counter_));
366   // The mov instruction above can take from 1 to 3 instructions (for ARMv7) or
367   // 1 to 5 instructions (for ARMv6), depending on whether it uses an extended
368   // constant pool - insert nops to compensate.
369   int expected_instr_count =
370       (kProfileCounterResetSequenceLength / Assembler::kInstrSize) - 2;
371   DCHECK(masm_->InstructionsGeneratedSince(&start) <= expected_instr_count);
372   while (masm_->InstructionsGeneratedSince(&start) != expected_instr_count) {
373     __ nop();
374   }
375   __ mov(r3, Operand(Smi::FromInt(reset_value)));
376   __ str(r3, FieldMemOperand(r2, Cell::kValueOffset));
377 }
378 
379 
380 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
381                                                 Label* back_edge_target) {
382   Comment cmnt(masm_, "[ Back edge bookkeeping");
383   // Block literal pools whilst emitting back edge code.
384   Assembler::BlockConstPoolScope block_const_pool(masm_);
385   Label ok;
386 
387   DCHECK(back_edge_target->is_bound());
388   int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
389   int weight = Min(kMaxBackEdgeWeight,
390                    Max(1, distance / kCodeSizeMultiplier));
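  // For instance, a back edge spanning 10 * kCodeSizeMultiplier bytes of code
  // yields weight == 10 (clamped to kMaxBackEdgeWeight), so larger loop bodies
  // drain the interrupt budget faster.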
391   EmitProfilingCounterDecrement(weight);
392   __ b(pl, &ok);
393   __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
394 
395   // Record a mapping of this PC offset to the OSR id.  This is used to find
396   // the AST id from the unoptimized code in order to use it as a key into
397   // the deoptimization input data found in the optimized code.
398   RecordBackEdge(stmt->OsrEntryId());
399 
400   EmitProfilingCounterReset();
401 
402   __ bind(&ok);
403   PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
404   // Record a mapping of the OSR id to this PC.  This is used if the OSR
405   // entry becomes the target of a bailout.  We don't expect it to be, but
406   // we want it to work if it is.
407   PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
408 }
409 
410 void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
411     bool is_tail_call) {
412   // Pretend that the exit is a backwards jump to the entry.
413   int weight = 1;
414   if (info_->ShouldSelfOptimize()) {
415     weight = FLAG_interrupt_budget / FLAG_self_opt_count;
416   } else {
417     int distance = masm_->pc_offset();
418     weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
419   }
420   EmitProfilingCounterDecrement(weight);
421   Label ok;
422   __ b(pl, &ok);
423   // Don't need to save result register if we are going to do a tail call.
424   if (!is_tail_call) {
425     __ push(r0);
426   }
427   __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
428   if (!is_tail_call) {
429     __ pop(r0);
430   }
431   EmitProfilingCounterReset();
432   __ bind(&ok);
433 }
434 
435 void FullCodeGenerator::EmitReturnSequence() {
436   Comment cmnt(masm_, "[ Return sequence");
437   if (return_label_.is_bound()) {
438     __ b(&return_label_);
439   } else {
440     __ bind(&return_label_);
441     if (FLAG_trace) {
442       // Push the return value on the stack as the parameter.
443       // Runtime::TraceExit returns its parameter in r0.
444       __ push(r0);
445       __ CallRuntime(Runtime::kTraceExit);
446     }
447     EmitProfilingCounterHandlingForReturnSequence(false);
448 
449     // Make sure that the constant pool is not emitted inside of the return
450     // sequence.
451     { Assembler::BlockConstPoolScope block_const_pool(masm_);
452       int32_t arg_count = info_->scope()->num_parameters() + 1;
453       int32_t sp_delta = arg_count * kPointerSize;
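      // E.g. a function declared with two parameters gives arg_count == 3
      // (including the receiver) and sp_delta == 12 bytes on ARM.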
454       SetReturnPosition(literal());
455       // TODO(svenpanne) The code below is sometimes 4 words, sometimes 5!
456       PredictableCodeSizeScope predictable(masm_, -1);
457       __ LeaveFrame(StackFrame::JAVA_SCRIPT);
458       { ConstantPoolUnavailableScope constant_pool_unavailable(masm_);
459         __ add(sp, sp, Operand(sp_delta));
460         __ Jump(lr);
461       }
462     }
463   }
464 }
465 
466 void FullCodeGenerator::RestoreContext() {
467   __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
468 }
469 
470 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
471   DCHECK(var->IsStackAllocated() || var->IsContextSlot());
472   codegen()->GetVar(result_register(), var);
473   codegen()->PushOperand(result_register());
474 }
475 
476 
477 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
478 }
479 
480 
481 void FullCodeGenerator::AccumulatorValueContext::Plug(
482     Heap::RootListIndex index) const {
483   __ LoadRoot(result_register(), index);
484 }
485 
486 
487 void FullCodeGenerator::StackValueContext::Plug(
488     Heap::RootListIndex index) const {
489   __ LoadRoot(result_register(), index);
490   codegen()->PushOperand(result_register());
491 }
492 
493 
494 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
495   codegen()->PrepareForBailoutBeforeSplit(condition(),
496                                           true,
497                                           true_label_,
498                                           false_label_);
499   if (index == Heap::kUndefinedValueRootIndex ||
500       index == Heap::kNullValueRootIndex ||
501       index == Heap::kFalseValueRootIndex) {
502     if (false_label_ != fall_through_) __ b(false_label_);
503   } else if (index == Heap::kTrueValueRootIndex) {
504     if (true_label_ != fall_through_) __ b(true_label_);
505   } else {
506     __ LoadRoot(result_register(), index);
507     codegen()->DoTest(this);
508   }
509 }
510 
511 
512 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
513 }
514 
515 
516 void FullCodeGenerator::AccumulatorValueContext::Plug(
517     Handle<Object> lit) const {
518   __ mov(result_register(), Operand(lit));
519 }
520 
521 
522 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
523   // Immediates cannot be pushed directly.
524   __ mov(result_register(), Operand(lit));
525   codegen()->PushOperand(result_register());
526 }
527 
528 
529 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
530   codegen()->PrepareForBailoutBeforeSplit(condition(),
531                                           true,
532                                           true_label_,
533                                           false_label_);
534   DCHECK(lit->IsNullOrUndefined(isolate()) || !lit->IsUndetectable());
535   if (lit->IsNullOrUndefined(isolate()) || lit->IsFalse(isolate())) {
536     if (false_label_ != fall_through_) __ b(false_label_);
537   } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
538     if (true_label_ != fall_through_) __ b(true_label_);
539   } else if (lit->IsString()) {
540     if (String::cast(*lit)->length() == 0) {
541       if (false_label_ != fall_through_) __ b(false_label_);
542     } else {
543       if (true_label_ != fall_through_) __ b(true_label_);
544     }
545   } else if (lit->IsSmi()) {
546     if (Smi::cast(*lit)->value() == 0) {
547       if (false_label_ != fall_through_) __ b(false_label_);
548     } else {
549       if (true_label_ != fall_through_) __ b(true_label_);
550     }
551   } else {
552     // For simplicity we always test the accumulator register.
553     __ mov(result_register(), Operand(lit));
554     codegen()->DoTest(this);
555   }
556 }
557 
558 
559 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
560                                                        Register reg) const {
561   DCHECK(count > 0);
562   if (count > 1) codegen()->DropOperands(count - 1);
563   __ str(reg, MemOperand(sp, 0));
564 }
565 
566 
567 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
568                                             Label* materialize_false) const {
569   DCHECK(materialize_true == materialize_false);
570   __ bind(materialize_true);
571 }
572 
573 
574 void FullCodeGenerator::AccumulatorValueContext::Plug(
575     Label* materialize_true,
576     Label* materialize_false) const {
577   Label done;
578   __ bind(materialize_true);
579   __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
580   __ jmp(&done);
581   __ bind(materialize_false);
582   __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
583   __ bind(&done);
584 }
585 
586 
587 void FullCodeGenerator::StackValueContext::Plug(
588     Label* materialize_true,
589     Label* materialize_false) const {
590   Label done;
591   __ bind(materialize_true);
592   __ LoadRoot(ip, Heap::kTrueValueRootIndex);
593   __ jmp(&done);
594   __ bind(materialize_false);
595   __ LoadRoot(ip, Heap::kFalseValueRootIndex);
596   __ bind(&done);
597   codegen()->PushOperand(ip);
598 }
599 
600 
601 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
602                                           Label* materialize_false) const {
603   DCHECK(materialize_true == true_label_);
604   DCHECK(materialize_false == false_label_);
605 }
606 
607 
608 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
609   Heap::RootListIndex value_root_index =
610       flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
611   __ LoadRoot(result_register(), value_root_index);
612 }
613 
614 
615 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
616   Heap::RootListIndex value_root_index =
617       flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
618   __ LoadRoot(ip, value_root_index);
619   codegen()->PushOperand(ip);
620 }
621 
622 
623 void FullCodeGenerator::TestContext::Plug(bool flag) const {
624   codegen()->PrepareForBailoutBeforeSplit(condition(),
625                                           true,
626                                           true_label_,
627                                           false_label_);
628   if (flag) {
629     if (true_label_ != fall_through_) __ b(true_label_);
630   } else {
631     if (false_label_ != fall_through_) __ b(false_label_);
632   }
633 }
634 
635 
636 void FullCodeGenerator::DoTest(Expression* condition,
637                                Label* if_true,
638                                Label* if_false,
639                                Label* fall_through) {
640   Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
641   CallIC(ic, condition->test_id());
642   __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
643   Split(eq, if_true, if_false, fall_through);
644 }
645 
646 
647 void FullCodeGenerator::Split(Condition cond,
648                               Label* if_true,
649                               Label* if_false,
650                               Label* fall_through) {
651   if (if_false == fall_through) {
652     __ b(cond, if_true);
653   } else if (if_true == fall_through) {
654     __ b(NegateCondition(cond), if_false);
655   } else {
656     __ b(cond, if_true);
657     __ b(if_false);
658   }
659 }
660 
661 
662 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
663   DCHECK(var->IsStackAllocated());
664   // Offset is negative because higher indexes are at lower addresses.
665   int offset = -var->index() * kPointerSize;
666   // Adjust by a (parameter or local) base offset.
667   if (var->IsParameter()) {
668     offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
669   } else {
670     offset += JavaScriptFrameConstants::kLocal0Offset;
671   }
672   return MemOperand(fp, offset);
673 }
674 
675 
676 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
677   DCHECK(var->IsContextSlot() || var->IsStackAllocated());
678   if (var->IsContextSlot()) {
679     int context_chain_length = scope()->ContextChainLength(var->scope());
680     __ LoadContext(scratch, context_chain_length);
681     return ContextMemOperand(scratch, var->index());
682   } else {
683     return StackOperand(var);
684   }
685 }
686 
687 
688 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
689   // Use destination as scratch.
690   MemOperand location = VarOperand(var, dest);
691   __ ldr(dest, location);
692 }
693 
694 
695 void FullCodeGenerator::SetVar(Variable* var,
696                                Register src,
697                                Register scratch0,
698                                Register scratch1) {
699   DCHECK(var->IsContextSlot() || var->IsStackAllocated());
700   DCHECK(!scratch0.is(src));
701   DCHECK(!scratch0.is(scratch1));
702   DCHECK(!scratch1.is(src));
703   MemOperand location = VarOperand(var, scratch0);
704   __ str(src, location);
705 
706   // Emit the write barrier code if the location is in the heap.
707   if (var->IsContextSlot()) {
708     __ RecordWriteContextSlot(scratch0,
709                               location.offset(),
710                               src,
711                               scratch1,
712                               kLRHasBeenSaved,
713                               kDontSaveFPRegs);
714   }
715 }
716 
717 
718 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
719                                                      bool should_normalize,
720                                                      Label* if_true,
721                                                      Label* if_false) {
722   // Only prepare for bailouts before splits if we're in a test
723   // context. Otherwise, we let the Visit function deal with the
724   // preparation to avoid preparing with the same AST id twice.
725   if (!context()->IsTest()) return;
726 
727   Label skip;
728   if (should_normalize) __ b(&skip);
729   PrepareForBailout(expr, BailoutState::TOS_REGISTER);
730   if (should_normalize) {
731     __ LoadRoot(ip, Heap::kTrueValueRootIndex);
732     __ cmp(r0, ip);
733     Split(eq, if_true, if_false, NULL);
734     __ bind(&skip);
735   }
736 }
737 
738 
739 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
740   // The variable in the declaration always resides in the current function
741   // context.
742   DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
743   if (FLAG_debug_code) {
744     // Check that we're not inside a with or catch context.
745     __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
746     __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
747     __ Check(ne, kDeclarationInWithContext);
748     __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
749     __ Check(ne, kDeclarationInCatchContext);
750   }
751 }
752 
753 
754 void FullCodeGenerator::VisitVariableDeclaration(
755     VariableDeclaration* declaration) {
756   VariableProxy* proxy = declaration->proxy();
757   Variable* variable = proxy->var();
758   switch (variable->location()) {
759     case VariableLocation::UNALLOCATED: {
760       DCHECK(!variable->binding_needs_init());
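      // The globals_ list gets four entries per declaration: the name, the
      // feedback vector slot, and two undefined placeholders standing in for
      // the literal feedback slot and the initial value that function
      // declarations supply (see VisitFunctionDeclaration below).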
761       globals_->Add(variable->name(), zone());
762       FeedbackSlot slot = proxy->VariableFeedbackSlot();
763       DCHECK(!slot.IsInvalid());
764       globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
765       globals_->Add(isolate()->factory()->undefined_value(), zone());
766       globals_->Add(isolate()->factory()->undefined_value(), zone());
767       break;
768     }
769     case VariableLocation::PARAMETER:
770     case VariableLocation::LOCAL:
771       if (variable->binding_needs_init()) {
772         Comment cmnt(masm_, "[ VariableDeclaration");
773         __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
774         __ str(r0, StackOperand(variable));
775       }
776       break;
777 
778     case VariableLocation::CONTEXT:
779       if (variable->binding_needs_init()) {
780         Comment cmnt(masm_, "[ VariableDeclaration");
781         EmitDebugCheckDeclarationContext(variable);
782         __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
783         __ str(r0, ContextMemOperand(cp, variable->index()));
784         // No write barrier since the_hole_value is in old space.
785         PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
786       }
787       break;
788 
789     case VariableLocation::LOOKUP:
790     case VariableLocation::MODULE:
791       UNREACHABLE();
792   }
793 }
794 
795 
796 void FullCodeGenerator::VisitFunctionDeclaration(
797     FunctionDeclaration* declaration) {
798   VariableProxy* proxy = declaration->proxy();
799   Variable* variable = proxy->var();
800   switch (variable->location()) {
801     case VariableLocation::UNALLOCATED: {
802       globals_->Add(variable->name(), zone());
803       FeedbackSlot slot = proxy->VariableFeedbackSlot();
804       DCHECK(!slot.IsInvalid());
805       globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
806 
807       // We need the slot where the literals array lives, too.
808       slot = declaration->fun()->LiteralFeedbackSlot();
809       DCHECK(!slot.IsInvalid());
810       globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
811 
812       Handle<SharedFunctionInfo> function =
813           Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
814       // Check for stack-overflow exception.
815       if (function.is_null()) return SetStackOverflow();
816       globals_->Add(function, zone());
817       break;
818     }
819 
820     case VariableLocation::PARAMETER:
821     case VariableLocation::LOCAL: {
822       Comment cmnt(masm_, "[ FunctionDeclaration");
823       VisitForAccumulatorValue(declaration->fun());
824       __ str(result_register(), StackOperand(variable));
825       break;
826     }
827 
828     case VariableLocation::CONTEXT: {
829       Comment cmnt(masm_, "[ FunctionDeclaration");
830       EmitDebugCheckDeclarationContext(variable);
831       VisitForAccumulatorValue(declaration->fun());
832       __ str(result_register(), ContextMemOperand(cp, variable->index()));
833       int offset = Context::SlotOffset(variable->index());
834       // We know that we have written a function, which is not a smi.
835       __ RecordWriteContextSlot(cp,
836                                 offset,
837                                 result_register(),
838                                 r2,
839                                 kLRHasBeenSaved,
840                                 kDontSaveFPRegs,
841                                 EMIT_REMEMBERED_SET,
842                                 OMIT_SMI_CHECK);
843       PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
844       break;
845     }
846 
847     case VariableLocation::LOOKUP:
848     case VariableLocation::MODULE:
849       UNREACHABLE();
850   }
851 }
852 
853 
854 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
855   // Call the runtime to declare the globals.
856   __ mov(r1, Operand(pairs));
857   __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
858   __ EmitLoadFeedbackVector(r2);
859   __ Push(r1, r0, r2);
860   __ CallRuntime(Runtime::kDeclareGlobals);
861   // Return value is ignored.
862 }
863 
864 
865 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
866   Comment cmnt(masm_, "[ SwitchStatement");
867   Breakable nested_statement(this, stmt);
868   SetStatementPosition(stmt);
869 
870   // Keep the switch value on the stack until a case matches.
871   VisitForStackValue(stmt->tag());
872   PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
873 
874   ZoneList<CaseClause*>* clauses = stmt->cases();
875   CaseClause* default_clause = NULL;  // Can occur anywhere in the list.
876 
877   Label next_test;  // Recycled for each test.
878   // Compile all the tests with branches to their bodies.
879   for (int i = 0; i < clauses->length(); i++) {
880     CaseClause* clause = clauses->at(i);
881     clause->body_target()->Unuse();
882 
883     // The default is not a test, but remember it as final fall through.
884     if (clause->is_default()) {
885       default_clause = clause;
886       continue;
887     }
888 
889     Comment cmnt(masm_, "[ Case comparison");
890     __ bind(&next_test);
891     next_test.Unuse();
892 
893     // Compile the label expression.
894     VisitForAccumulatorValue(clause->label());
895 
896     // Perform the comparison as if via '==='.
897     __ ldr(r1, MemOperand(sp, 0));  // Switch value.
898     bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
899     JumpPatchSite patch_site(masm_);
900     if (inline_smi_code) {
901       Label slow_case;
902       __ orr(r2, r1, r0);
903       patch_site.EmitJumpIfNotSmi(r2, &slow_case);
904 
905       __ cmp(r1, r0);
906       __ b(ne, &next_test);
907       __ Drop(1);  // Switch value is no longer needed.
908       __ b(clause->body_target());
909       __ bind(&slow_case);
910     }
911 
912     // Record position before stub call for type feedback.
913     SetExpressionPosition(clause);
914     Handle<Code> ic =
915         CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
916     CallIC(ic, clause->CompareId());
917     patch_site.EmitPatchInfo();
918 
919     Label skip;
920     __ b(&skip);
921     PrepareForBailout(clause, BailoutState::TOS_REGISTER);
922     __ LoadRoot(ip, Heap::kTrueValueRootIndex);
923     __ cmp(r0, ip);
924     __ b(ne, &next_test);
925     __ Drop(1);
926     __ jmp(clause->body_target());
927     __ bind(&skip);
928 
929     __ cmp(r0, Operand::Zero());
930     __ b(ne, &next_test);
931     __ Drop(1);  // Switch value is no longer needed.
932     __ b(clause->body_target());
933   }
934 
935   // Discard the test value and jump to the default if present, otherwise to
936   // the end of the statement.
937   __ bind(&next_test);
938   DropOperands(1);  // Switch value is no longer needed.
939   if (default_clause == NULL) {
940     __ b(nested_statement.break_label());
941   } else {
942     __ b(default_clause->body_target());
943   }
944 
945   // Compile all the case bodies.
946   for (int i = 0; i < clauses->length(); i++) {
947     Comment cmnt(masm_, "[ Case body");
948     CaseClause* clause = clauses->at(i);
949     __ bind(clause->body_target());
950     PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
951     VisitStatements(clause->statements());
952   }
953 
954   __ bind(nested_statement.break_label());
955   PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
956 }
957 
958 
959 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
960   Comment cmnt(masm_, "[ ForInStatement");
961   SetStatementPosition(stmt, SKIP_BREAK);
962 
963   FeedbackSlot slot = stmt->ForInFeedbackSlot();
964 
965   // Get the object to enumerate over.
966   SetExpressionAsStatementPosition(stmt->enumerable());
967   VisitForAccumulatorValue(stmt->enumerable());
968   OperandStackDepthIncrement(5);
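  // The five operand-stack slots reserved above end up holding, from deepest
  // to shallowest: the enumerable object, its map (or Smi(1) on the slow
  // path), the enum cache or fixed array, the length, and the current index.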
969 
970   Label loop, exit;
971   Iteration loop_statement(this, stmt);
972   increment_loop_depth();
973 
974   // If the object is null or undefined, skip over the loop, otherwise convert
975   // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
976   Label convert, done_convert;
977   __ JumpIfSmi(r0, &convert);
978   __ CompareObjectType(r0, r1, r1, FIRST_JS_RECEIVER_TYPE);
979   __ b(ge, &done_convert);
980   __ CompareRoot(r0, Heap::kNullValueRootIndex);
981   __ b(eq, &exit);
982   __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
983   __ b(eq, &exit);
984   __ bind(&convert);
985   __ Call(isolate()->builtins()->ToObject(), RelocInfo::CODE_TARGET);
986   RestoreContext();
987   __ bind(&done_convert);
988   PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
989   __ push(r0);
990 
991   // Check cache validity in generated code. If we cannot guarantee cache
992   // validity, call the runtime system to check cache validity or get the
993   // property names in a fixed array. Note: Proxies never have an enum cache,
994   // so will always take the slow path.
995   Label call_runtime;
996   __ CheckEnumCache(&call_runtime);
997 
998   // The enum cache is valid.  Load the map of the object being
999   // iterated over and use the cache for the iteration.
1000   Label use_cache;
1001   __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
1002   __ b(&use_cache);
1003 
1004   // Get the set of properties to enumerate.
1005   __ bind(&call_runtime);
1006   __ push(r0);  // Duplicate the enumerable object on the stack.
1007   __ CallRuntime(Runtime::kForInEnumerate);
1008   PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
1009 
1010   // If we got a map from the runtime call, we can do a fast
1011   // modification check. Otherwise, we got a fixed array, and we have
1012   // to do a slow check.
1013   Label fixed_array;
1014   __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
1015   __ LoadRoot(ip, Heap::kMetaMapRootIndex);
1016   __ cmp(r2, ip);
1017   __ b(ne, &fixed_array);
1018 
1019   // We got a map in register r0. Get the enumeration cache from it.
1020   Label no_descriptors;
1021   __ bind(&use_cache);
1022 
1023   __ EnumLength(r1, r0);
1024   __ cmp(r1, Operand(Smi::kZero));
1025   __ b(eq, &no_descriptors);
1026 
1027   __ LoadInstanceDescriptors(r0, r2);
1028   __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheOffset));
1029   __ ldr(r2, FieldMemOperand(r2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1030 
1031   // Set up the four remaining stack slots.
1032   __ push(r0);  // Map.
1033   __ mov(r0, Operand(Smi::kZero));
1034   // Push enumeration cache, enumeration cache length (as smi) and zero.
1035   __ Push(r2, r1, r0);
1036   __ jmp(&loop);
1037 
1038   __ bind(&no_descriptors);
1039   __ Drop(1);
1040   __ jmp(&exit);
1041 
1042   // We got a fixed array in register r0. Iterate through that.
1043   __ bind(&fixed_array);
1044 
1045   __ mov(r1, Operand(Smi::FromInt(1)));  // Smi(1) indicates slow check
1046   __ Push(r1, r0);  // Smi and array
1047   __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
1048   __ Push(r1);  // Fixed array length (as smi).
1049   PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
1050   __ mov(r0, Operand(Smi::kZero));
1051   __ Push(r0);  // Initial index.
1052 
1053   // Generate code for doing the condition check.
1054   __ bind(&loop);
1055   SetExpressionAsStatementPosition(stmt->each());
1056 
1057   // Load the current count to r0, load the length to r1.
1058   __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
1059   __ cmp(r0, r1);  // Compare to the array length.
1060   __ b(hs, loop_statement.break_label());
1061 
1062   // Get the current entry of the array into register r0.
1063   __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
1064   __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1065   __ ldr(r0, MemOperand::PointerAddressFromSmiKey(r2, r0));
1066 
1067   // Get the expected map from the stack or a smi in the
1068   // permanent slow case into register r2.
1069   __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
1070 
1071   // Check if the expected map still matches that of the enumerable.
1072   // If not, we may have to filter the key.
1073   Label update_each;
1074   __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
1075   __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
1076   __ cmp(r4, Operand(r2));
1077   __ b(eq, &update_each);
1078 
1079   // We need to filter the key, record slow-path here.
1080   int const vector_index = SmiFromSlot(slot)->value();
1081   __ EmitLoadFeedbackVector(r3);
1082   __ mov(r2, Operand(FeedbackVector::MegamorphicSentinel(isolate())));
1083   __ str(r2, FieldMemOperand(r3, FixedArray::OffsetOfElementAt(vector_index)));
1084 
1085   // r0 contains the key. The receiver in r1 is the second argument to the
1086   // ForInFilter. ForInFilter returns undefined if the receiver doesn't
1087   // have the key or returns the name-converted key.
1088   __ Call(isolate()->builtins()->ForInFilter(), RelocInfo::CODE_TARGET);
1089   RestoreContext();
1090   PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
1091   __ CompareRoot(result_register(), Heap::kUndefinedValueRootIndex);
1092   __ b(eq, loop_statement.continue_label());
1093 
1094   // Update the 'each' property or variable from the possibly filtered
1095   // entry in register r0.
1096   __ bind(&update_each);
1097 
1098   // Perform the assignment as if via '='.
1099   { EffectContext context(this);
1100     EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1101     PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
1102   }
1103 
1104   // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1105   PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
1106   // Generate code for the body of the loop.
1107   Visit(stmt->body());
1108 
1109   // Generate code for going to the next element by incrementing
1110   // the index (smi) stored on top of the stack.
1111   __ bind(loop_statement.continue_label());
1112   PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
1113   __ pop(r0);
1114   __ add(r0, r0, Operand(Smi::FromInt(1)));
1115   __ push(r0);
1116 
1117   EmitBackEdgeBookkeeping(stmt, &loop);
1118   __ b(&loop);
1119 
1120   // Remove the pointers stored on the stack.
1121   __ bind(loop_statement.break_label());
1122   DropOperands(5);
1123 
1124   // Exit and decrement the loop depth.
1125   PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
1126   __ bind(&exit);
1127   decrement_loop_depth();
1128 }
1129 
1130 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1131                                           FeedbackSlot slot) {
1132   DCHECK(NeedsHomeObject(initializer));
1133   __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1134   __ ldr(StoreDescriptor::ValueRegister(),
1135          MemOperand(sp, offset * kPointerSize));
1136   CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1137 }
1138 
1139 void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1140                                                      int offset,
1141                                                      FeedbackSlot slot) {
1142   DCHECK(NeedsHomeObject(initializer));
1143   __ Move(StoreDescriptor::ReceiverRegister(), r0);
1144   __ ldr(StoreDescriptor::ValueRegister(),
1145          MemOperand(sp, offset * kPointerSize));
1146   CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1147 }
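
// In the two helpers above, |offset| is the depth (in operand stack slots) at
// which the object literal that becomes the [[HomeObject]] currently sits;
// the function or accessor whose home object is being set is either on top of
// the stack (EmitSetHomeObject) or already in the accumulator
// (EmitSetHomeObjectAccumulator).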
1148 
1149 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1150                                          TypeofMode typeof_mode) {
1151   // Record position before possible IC call.
1152   SetExpressionPosition(proxy);
1153   PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
1154   Variable* var = proxy->var();
1155 
1156   // Two cases: global variables and all other types of variables.
1157   switch (var->location()) {
1158     case VariableLocation::UNALLOCATED: {
1159       Comment cmnt(masm_, "[ Global variable");
1160       EmitGlobalVariableLoad(proxy, typeof_mode);
1161       context()->Plug(r0);
1162       break;
1163     }
1164 
1165     case VariableLocation::PARAMETER:
1166     case VariableLocation::LOCAL:
1167     case VariableLocation::CONTEXT: {
1168       DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1169       Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1170                                                : "[ Stack variable");
1171       if (proxy->hole_check_mode() == HoleCheckMode::kRequired) {
1172         // Throw a reference error when using an uninitialized let/const
1173         // binding in harmony mode.
1174         Label done;
1175         GetVar(r0, var);
1176         __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1177         __ b(ne, &done);
1178         __ mov(r0, Operand(var->name()));
1179         __ push(r0);
1180         __ CallRuntime(Runtime::kThrowReferenceError);
1181         __ bind(&done);
1182         context()->Plug(r0);
1183         break;
1184       }
1185       context()->Plug(var);
1186       break;
1187     }
1188 
1189     case VariableLocation::LOOKUP:
1190     case VariableLocation::MODULE:
1191       UNREACHABLE();
1192   }
1193 }
1194 
1195 
1196 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1197   Expression* expression = (property == NULL) ? NULL : property->value();
1198   if (expression == NULL) {
1199     __ LoadRoot(r1, Heap::kNullValueRootIndex);
1200     PushOperand(r1);
1201   } else {
1202     VisitForStackValue(expression);
1203     if (NeedsHomeObject(expression)) {
1204       DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1205              property->kind() == ObjectLiteral::Property::SETTER);
1206       int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1207       EmitSetHomeObject(expression, offset, property->GetSlot());
1208     }
1209   }
1210 }
1211 
1212 
1213 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1214   Comment cmnt(masm_, "[ ObjectLiteral");
1215 
1216   Handle<BoilerplateDescription> constant_properties =
1217       expr->GetOrBuildConstantProperties(isolate());
1218   __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1219   __ mov(r2, Operand(SmiFromSlot(expr->literal_slot())));
1220   __ mov(r1, Operand(constant_properties));
1221   int flags = expr->ComputeFlags();
1222   __ mov(r0, Operand(Smi::FromInt(flags)));
1223   if (MustCreateObjectLiteralWithRuntime(expr)) {
1224     __ Push(r3, r2, r1, r0);
1225     __ CallRuntime(Runtime::kCreateObjectLiteral);
1226   } else {
1227     Callable callable = CodeFactory::FastCloneShallowObject(
1228         isolate(), expr->properties_count());
1229     __ Call(callable.code(), RelocInfo::CODE_TARGET);
1230     RestoreContext();
1231   }
1232   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1233 
1234   // If result_saved is true the result is on top of the stack.  If
1235   // result_saved is false the result is in r0.
1236   bool result_saved = false;
1237 
1238   AccessorTable accessor_table(zone());
1239   for (int i = 0; i < expr->properties()->length(); i++) {
1240     ObjectLiteral::Property* property = expr->properties()->at(i);
1241     DCHECK(!property->is_computed_name());
1242     if (property->IsCompileTimeValue()) continue;
1243 
1244     Literal* key = property->key()->AsLiteral();
1245     Expression* value = property->value();
1246     if (!result_saved) {
1247       PushOperand(r0);  // Save result on stack
1248       result_saved = true;
1249     }
1250     switch (property->kind()) {
1251       case ObjectLiteral::Property::SPREAD:
1252       case ObjectLiteral::Property::CONSTANT:
1253         UNREACHABLE();
1254       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1255         DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1256         // Fall through.
1257       case ObjectLiteral::Property::COMPUTED:
1258         // It is safe to use [[Put]] here because the boilerplate already
1259         // contains computed properties with an uninitialized value.
1260         if (key->IsStringLiteral()) {
1261           DCHECK(key->IsPropertyName());
1262           if (property->emit_store()) {
1263             VisitForAccumulatorValue(value);
1264             DCHECK(StoreDescriptor::ValueRegister().is(r0));
1265             __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1266             CallStoreIC(property->GetSlot(0), key->value(), true);
1267             PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
1268 
1269             if (NeedsHomeObject(value)) {
1270               EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1271             }
1272           } else {
1273             VisitForEffect(value);
1274           }
1275           break;
1276         }
1277         // Duplicate receiver on stack.
1278         __ ldr(r0, MemOperand(sp));
1279         PushOperand(r0);
1280         VisitForStackValue(key);
1281         VisitForStackValue(value);
1282         if (property->emit_store()) {
1283           if (NeedsHomeObject(value)) {
1284             EmitSetHomeObject(value, 2, property->GetSlot());
1285           }
1286           __ mov(r0, Operand(Smi::FromInt(SLOPPY)));  // PropertyAttributes
1287           PushOperand(r0);
1288           CallRuntimeWithOperands(Runtime::kSetProperty);
1289         } else {
1290           DropOperands(3);
1291         }
1292         break;
1293       case ObjectLiteral::Property::PROTOTYPE:
1294         // Duplicate receiver on stack.
1295         __ ldr(r0, MemOperand(sp));
1296         PushOperand(r0);
1297         VisitForStackValue(value);
1298         DCHECK(property->emit_store());
1299         CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1300         PrepareForBailoutForId(expr->GetIdForPropertySet(i),
1301                                BailoutState::NO_REGISTERS);
1302         break;
1303 
1304       case ObjectLiteral::Property::GETTER:
1305         if (property->emit_store()) {
1306           AccessorTable::Iterator it = accessor_table.lookup(key);
1307           it->second->bailout_id = expr->GetIdForPropertySet(i);
1308           it->second->getter = property;
1309         }
1310         break;
1311       case ObjectLiteral::Property::SETTER:
1312         if (property->emit_store()) {
1313           AccessorTable::Iterator it = accessor_table.lookup(key);
1314           it->second->bailout_id = expr->GetIdForPropertySet(i);
1315           it->second->setter = property;
1316         }
1317         break;
1318     }
1319   }
1320 
1321   // Emit code to define accessors, using only a single call to the runtime for
1322   // each pair of corresponding getters and setters.
1323   for (AccessorTable::Iterator it = accessor_table.begin();
1324        it != accessor_table.end();
1325        ++it) {
1326     __ ldr(r0, MemOperand(sp));  // Duplicate receiver.
1327     PushOperand(r0);
1328     VisitForStackValue(it->first);
1329     EmitAccessor(it->second->getter);
1330     EmitAccessor(it->second->setter);
1331     __ mov(r0, Operand(Smi::FromInt(NONE)));
1332     PushOperand(r0);
1333     CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
1334     PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
1335   }
1336 
1337   if (result_saved) {
1338     context()->PlugTOS();
1339   } else {
1340     context()->Plug(r0);
1341   }
1342 }
1343 
1344 
1345 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1346   Comment cmnt(masm_, "[ ArrayLiteral");
1347 
1348   Handle<ConstantElementsPair> constant_elements =
1349       expr->GetOrBuildConstantElements(isolate());
1350 
1351   __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1352   __ mov(r2, Operand(SmiFromSlot(expr->literal_slot())));
1353   __ mov(r1, Operand(constant_elements));
1354   if (MustCreateArrayLiteralWithRuntime(expr)) {
1355     __ mov(r0, Operand(Smi::FromInt(expr->ComputeFlags())));
1356     __ Push(r3, r2, r1, r0);
1357     __ CallRuntime(Runtime::kCreateArrayLiteral);
1358   } else {
1359     Callable callable =
1360         CodeFactory::FastCloneShallowArray(isolate(), TRACK_ALLOCATION_SITE);
1361     __ Call(callable.code(), RelocInfo::CODE_TARGET);
1362     RestoreContext();
1363   }
1364   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1365 
1366   bool result_saved = false;  // Is the result saved to the stack?
1367   ZoneList<Expression*>* subexprs = expr->values();
1368   int length = subexprs->length();
1369 
1370   // Emit code to evaluate all the non-constant subexpressions and to store
1371   // them into the newly cloned array.
1372   for (int array_index = 0; array_index < length; array_index++) {
1373     Expression* subexpr = subexprs->at(array_index);
1374     DCHECK(!subexpr->IsSpread());
1375 
1376     // If the subexpression is a literal or a simple materialized literal it
1377     // is already set in the cloned array.
1378     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1379 
1380     if (!result_saved) {
1381       PushOperand(r0);
1382       result_saved = true;
1383     }
1384     VisitForAccumulatorValue(subexpr);
1385 
1386     __ mov(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
1387     __ ldr(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1388     CallKeyedStoreIC(expr->LiteralFeedbackSlot());
1389 
1390     PrepareForBailoutForId(expr->GetIdForElement(array_index),
1391                            BailoutState::NO_REGISTERS);
1392   }
1393 
1394   if (result_saved) {
1395     context()->PlugTOS();
1396   } else {
1397     context()->Plug(r0);
1398   }
1399 }
1400 
1401 
1402 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1403   DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1404 
1405   Comment cmnt(masm_, "[ Assignment");
1406 
1407   Property* property = expr->target()->AsProperty();
1408   LhsKind assign_type = Property::GetAssignType(property);
1409 
1410   // Evaluate LHS expression.
1411   switch (assign_type) {
1412     case VARIABLE:
1413       // Nothing to do here.
1414       break;
1415     case NAMED_PROPERTY:
1416       if (expr->is_compound()) {
1417         // We need the receiver both on the stack and in the register.
1418         VisitForStackValue(property->obj());
1419         __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1420       } else {
1421         VisitForStackValue(property->obj());
1422       }
1423       break;
1424     case KEYED_PROPERTY:
1425       if (expr->is_compound()) {
1426         VisitForStackValue(property->obj());
1427         VisitForStackValue(property->key());
1428         __ ldr(LoadDescriptor::ReceiverRegister(),
1429                MemOperand(sp, 1 * kPointerSize));
1430         __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1431       } else {
1432         VisitForStackValue(property->obj());
1433         VisitForStackValue(property->key());
1434       }
1435       break;
1436     case NAMED_SUPER_PROPERTY:
1437     case KEYED_SUPER_PROPERTY:
1438       UNREACHABLE();
1439       break;
1440   }
1441 
1442   // For compound assignments we need another deoptimization point after the
1443   // variable/property load.
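  // For example, "x += y" first loads x (below), pushes it as the left
  // operand, evaluates y, and only then performs the store; the extra
  // deoptimization point covers the state right after that load.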
1444   if (expr->is_compound()) {
1445     { AccumulatorValueContext context(this);
1446       switch (assign_type) {
1447         case VARIABLE:
1448           EmitVariableLoad(expr->target()->AsVariableProxy());
1449           PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
1450           break;
1451         case NAMED_PROPERTY:
1452           EmitNamedPropertyLoad(property);
1453           PrepareForBailoutForId(property->LoadId(),
1454                                  BailoutState::TOS_REGISTER);
1455           break;
1456         case KEYED_PROPERTY:
1457           EmitKeyedPropertyLoad(property);
1458           PrepareForBailoutForId(property->LoadId(),
1459                                  BailoutState::TOS_REGISTER);
1460           break;
1461         case NAMED_SUPER_PROPERTY:
1462         case KEYED_SUPER_PROPERTY:
1463           UNREACHABLE();
1464           break;
1465       }
1466     }
1467 
1468     Token::Value op = expr->binary_op();
1469     PushOperand(r0);  // Left operand goes on the stack.
1470     VisitForAccumulatorValue(expr->value());
1471 
1472     AccumulatorValueContext context(this);
1473     if (ShouldInlineSmiCase(op)) {
1474       EmitInlineSmiBinaryOp(expr->binary_operation(),
1475                             op,
1476                             expr->target(),
1477                             expr->value());
1478     } else {
1479       EmitBinaryOp(expr->binary_operation(), op);
1480     }
1481 
1482     // Deoptimization point in case the binary operation may have side effects.
1483     PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
1484   } else {
1485     VisitForAccumulatorValue(expr->value());
1486   }
1487 
1488   SetExpressionPosition(expr);
1489 
1490   // Store the value.
1491   switch (assign_type) {
1492     case VARIABLE: {
1493       VariableProxy* proxy = expr->target()->AsVariableProxy();
1494       EmitVariableAssignment(proxy->var(), expr->op(), expr->AssignmentSlot(),
1495                              proxy->hole_check_mode());
1496       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
1497       context()->Plug(r0);
1498       break;
1499     }
1500     case NAMED_PROPERTY:
1501       EmitNamedPropertyAssignment(expr);
1502       break;
1503     case KEYED_PROPERTY:
1504       EmitKeyedPropertyAssignment(expr);
1505       break;
1506     case NAMED_SUPER_PROPERTY:
1507     case KEYED_SUPER_PROPERTY:
1508       UNREACHABLE();
1509       break;
1510   }
1511 }
1512 
1513 
1514 void FullCodeGenerator::VisitYield(Yield* expr) {
1515   // Resumable functions are not supported.
1516   UNREACHABLE();
1517 }
1518 
1519 void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
1520   OperandStackDepthIncrement(2);
1521   __ Push(reg1, reg2);
1522 }
1523 
1524 void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
1525   OperandStackDepthDecrement(2);
1526   __ Pop(reg1, reg2);
1527 }
1528 
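// Debug-only consistency check: fp - sp must equal the fixed frame size plus
// the operand stack depth tracked by PushOperand/PopOperand; a mismatch means
// the bookkeeping has diverged from the pushes and pops actually emitted.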
1529 void FullCodeGenerator::EmitOperandStackDepthCheck() {
1530   if (FLAG_debug_code) {
1531     int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1532                         operand_stack_depth_ * kPointerSize;
1533     __ sub(r0, fp, sp);
1534     __ cmp(r0, Operand(expected_diff));
1535     __ Assert(eq, kUnexpectedStackDepth);
1536   }
1537 }
1538 
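// Builds a JSIteratorResult, i.e. roughly the object literal
// { value: <popped operand>, done: <done flag> }, allocating it inline in new
// space and falling back to Runtime::kAllocateInNewSpace when that fails.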
1539 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
1540   Label allocate, done_allocate;
1541 
1542   __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &allocate,
1543               NO_ALLOCATION_FLAGS);
1544   __ b(&done_allocate);
1545 
1546   __ bind(&allocate);
1547   __ Push(Smi::FromInt(JSIteratorResult::kSize));
1548   __ CallRuntime(Runtime::kAllocateInNewSpace);
1549 
1550   __ bind(&done_allocate);
1551   __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r1);
1552   PopOperand(r2);
1553   __ LoadRoot(r3,
1554               done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1555   __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
1556   __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
1557   __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
1558   __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
1559   __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset));
1560   __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset));
1561 }
1562 
1563 
1564 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1565                                               Token::Value op,
1566                                               Expression* left_expr,
1567                                               Expression* right_expr) {
1568   Label done, smi_case, stub_call;
1569 
1570   Register scratch1 = r2;
1571   Register scratch2 = r3;
1572 
1573   // Get the arguments.
1574   Register left = r1;
1575   Register right = r0;
1576   PopOperand(left);
1577 
1578   // Perform combined smi check on both operands.
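  // Since smis are tagged with a zero bit (kSmiTag == 0), or-ing both
  // operands and testing the tag bit of the result checks both values at
  // once.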
1579   __ orr(scratch1, left, Operand(right));
1580   STATIC_ASSERT(kSmiTag == 0);
1581   JumpPatchSite patch_site(masm_);
1582   patch_site.EmitJumpIfSmi(scratch1, &smi_case);
1583 
1584   __ bind(&stub_call);
1585   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1586   CallIC(code, expr->BinaryOperationFeedbackId());
1587   patch_site.EmitPatchInfo();
1588   __ jmp(&done);
1589 
1590   __ bind(&smi_case);
1591   // Smi case. This code works the same way as the smi-smi case in the type
1592   // recording binary operation stub.
1593   switch (op) {
1594     case Token::SAR:
1595       __ GetLeastBitsFromSmi(scratch1, right, 5);
1596       __ mov(right, Operand(left, ASR, scratch1));
1597       __ bic(right, right, Operand(kSmiTagMask));
1598       break;
1599     case Token::SHL: {
1600       __ SmiUntag(scratch1, left);
1601       __ GetLeastBitsFromSmi(scratch2, right, 5);
1602       __ mov(scratch1, Operand(scratch1, LSL, scratch2));
1603       __ TrySmiTag(right, scratch1, &stub_call);
1604       break;
1605     }
1606     case Token::SHR: {
1607       __ SmiUntag(scratch1, left);
1608       __ GetLeastBitsFromSmi(scratch2, right, 5);
1609       __ mov(scratch1, Operand(scratch1, LSR, scratch2));
1610       __ tst(scratch1, Operand(0xc0000000));
1611       __ b(ne, &stub_call);
1612       __ SmiTag(right, scratch1);
1613       break;
1614     }
1615     case Token::ADD:
1616       __ add(scratch1, left, Operand(right), SetCC);
1617       __ b(vs, &stub_call);
1618       __ mov(right, scratch1);
1619       break;
1620     case Token::SUB:
1621       __ sub(scratch1, left, Operand(right), SetCC);
1622       __ b(vs, &stub_call);
1623       __ mov(right, scratch1);
1624       break;
1625     case Token::MUL: {
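      // smull produces a 64-bit product; the result fits in a smi only if the
      // high word equals the sign extension of the low word. The extra checks
      // below distinguish +0 from -0: a product of zero with a negative
      // operand would be -0, which is not representable as a smi, so that
      // case goes to the stub.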
1626       __ SmiUntag(ip, right);
1627       __ smull(scratch1, scratch2, left, ip);
1628       __ mov(ip, Operand(scratch1, ASR, 31));
1629       __ cmp(ip, Operand(scratch2));
1630       __ b(ne, &stub_call);
1631       __ cmp(scratch1, Operand::Zero());
1632       __ mov(right, Operand(scratch1), LeaveCC, ne);
1633       __ b(ne, &done);
1634       __ add(scratch2, right, Operand(left), SetCC);
1635       __ mov(right, Operand(Smi::kZero), LeaveCC, pl);
1636       __ b(mi, &stub_call);
1637       break;
1638     }
1639     case Token::BIT_OR:
1640       __ orr(right, left, Operand(right));
1641       break;
1642     case Token::BIT_AND:
1643       __ and_(right, left, Operand(right));
1644       break;
1645     case Token::BIT_XOR:
1646       __ eor(right, left, Operand(right));
1647       break;
1648     default:
1649       UNREACHABLE();
1650   }
1651 
1652   __ bind(&done);
1653   context()->Plug(r0);
1654 }
1655 
1656 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
1657   PopOperand(r1);
1658   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1659   JumpPatchSite patch_site(masm_);    // unbound, signals no inlined smi code.
1660   CallIC(code, expr->BinaryOperationFeedbackId());
1661   patch_site.EmitPatchInfo();
1662   context()->Plug(r0);
1663 }
1664 
1665 void FullCodeGenerator::EmitAssignment(Expression* expr, FeedbackSlot slot) {
1666   DCHECK(expr->IsValidReferenceExpressionOrThis());
1667 
1668   Property* prop = expr->AsProperty();
1669   LhsKind assign_type = Property::GetAssignType(prop);
1670 
1671   switch (assign_type) {
1672     case VARIABLE: {
1673       VariableProxy* proxy = expr->AsVariableProxy();
1674       EffectContext context(this);
1675       EmitVariableAssignment(proxy->var(), Token::ASSIGN, slot,
1676                              proxy->hole_check_mode());
1677       break;
1678     }
1679     case NAMED_PROPERTY: {
1680       PushOperand(r0);  // Preserve value.
1681       VisitForAccumulatorValue(prop->obj());
1682       __ Move(StoreDescriptor::ReceiverRegister(), r0);
1683       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
1684       CallStoreIC(slot, prop->key()->AsLiteral()->value());
1685       break;
1686     }
1687     case KEYED_PROPERTY: {
1688       PushOperand(r0);  // Preserve value.
1689       VisitForStackValue(prop->obj());
1690       VisitForAccumulatorValue(prop->key());
1691       __ Move(StoreDescriptor::NameRegister(), r0);
1692       PopOperands(StoreDescriptor::ValueRegister(),
1693                   StoreDescriptor::ReceiverRegister());
1694       CallKeyedStoreIC(slot);
1695       break;
1696     }
1697     case NAMED_SUPER_PROPERTY:
1698     case KEYED_SUPER_PROPERTY:
1699       UNREACHABLE();
1700       break;
1701   }
1702   context()->Plug(r0);
1703 }
1704 
1705 
1706 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
1707     Variable* var, MemOperand location) {
1708   __ str(result_register(), location);
1709   if (var->IsContextSlot()) {
1710     // RecordWrite may destroy all its register arguments.
1711     __ mov(r3, result_register());
1712     int offset = Context::SlotOffset(var->index());
1713     __ RecordWriteContextSlot(
1714         r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
1715   }
1716 }
1717 
1718 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
1719                                                FeedbackSlot slot,
1720                                                HoleCheckMode hole_check_mode) {
1721   if (var->IsUnallocated()) {
1722     // Global var, const, or let.
1723     __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
1724     CallStoreIC(slot, var->name());
1725 
1726   } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
1727     DCHECK(!var->IsLookupSlot());
1728     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
1729     MemOperand location = VarOperand(var, r1);
1730     // Perform an initialization check for lexically declared variables.
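    // Lexically declared variables start out holding the hole value; if the
    // slot still holds the hole, the binding is in its temporal dead zone and
    // a ReferenceError is thrown instead of performing the store.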
1731     if (hole_check_mode == HoleCheckMode::kRequired) {
1732       Label assign;
1733       __ ldr(r3, location);
1734       __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
1735       __ b(ne, &assign);
1736       __ mov(r3, Operand(var->name()));
1737       __ push(r3);
1738       __ CallRuntime(Runtime::kThrowReferenceError);
1739       __ bind(&assign);
1740     }
1741     if (var->mode() != CONST) {
1742       EmitStoreToStackLocalOrContextSlot(var, location);
1743     } else if (var->throw_on_const_assignment(language_mode())) {
1744       __ CallRuntime(Runtime::kThrowConstAssignError);
1745     }
1746   } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
1747     // Initializing assignment to const {this} needs a write barrier.
1748     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
1749     Label uninitialized_this;
1750     MemOperand location = VarOperand(var, r1);
1751     __ ldr(r3, location);
1752     __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
1753     __ b(eq, &uninitialized_this);
1754     __ mov(r0, Operand(var->name()));
1755     __ Push(r0);
1756     __ CallRuntime(Runtime::kThrowReferenceError);
1757     __ bind(&uninitialized_this);
1758     EmitStoreToStackLocalOrContextSlot(var, location);
1759 
1760   } else {
1761     DCHECK(var->mode() != CONST || op == Token::INIT);
1762     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
1763     DCHECK(!var->IsLookupSlot());
1764     // Assignment to var or initializing assignment to let/const in harmony
1765     // mode.
1766     MemOperand location = VarOperand(var, r1);
1767     if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
1768       // Check for an uninitialized let binding.
1769       __ ldr(r2, location);
1770       __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
1771       __ Check(eq, kLetBindingReInitialization);
1772     }
1773     EmitStoreToStackLocalOrContextSlot(var, location);
1774   }
1775 }
1776 
1777 
1778 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
1779   // Assignment to a property, using a named store IC.
1780   Property* prop = expr->target()->AsProperty();
1781   DCHECK(prop != NULL);
1782   DCHECK(prop->key()->IsLiteral());
1783 
1784   PopOperand(StoreDescriptor::ReceiverRegister());
1785   CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
1786 
1787   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
1788   context()->Plug(r0);
1789 }
1790 
1791 
1792 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
1793   // Assignment to a property, using a keyed store IC.
1794   PopOperands(StoreDescriptor::ReceiverRegister(),
1795               StoreDescriptor::NameRegister());
1796   DCHECK(StoreDescriptor::ValueRegister().is(r0));
1797 
1798   CallKeyedStoreIC(expr->AssignmentSlot());
1799 
1800   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
1801   context()->Plug(r0);
1802 }
1803 
1804 // Code common for calls using the IC.
1805 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
1806   Expression* callee = expr->expression();
1807 
1808   // Get the target function.
1809   ConvertReceiverMode convert_mode;
1810   if (callee->IsVariableProxy()) {
1811     { StackValueContext context(this);
1812       EmitVariableLoad(callee->AsVariableProxy());
1813       PrepareForBailout(callee, BailoutState::NO_REGISTERS);
1814     }
1815     // Push undefined as receiver. This is patched in the method prologue if it
1816     // is a sloppy mode method.
1817     __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1818     PushOperand(ip);
1819     convert_mode = ConvertReceiverMode::kNullOrUndefined;
1820   } else {
1821     // Load the function from the receiver.
1822     DCHECK(callee->IsProperty());
1823     DCHECK(!callee->AsProperty()->IsSuperAccess());
1824     __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1825     EmitNamedPropertyLoad(callee->AsProperty());
1826     PrepareForBailoutForId(callee->AsProperty()->LoadId(),
1827                            BailoutState::TOS_REGISTER);
1828     // Push the target function under the receiver.
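    // Stack shuffle: the receiver is currently on top and the loaded function
    // is in r0. Re-push the receiver and overwrite its old slot with the
    // function, leaving the function just below the receiver.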
1829     __ ldr(ip, MemOperand(sp, 0));
1830     PushOperand(ip);
1831     __ str(r0, MemOperand(sp, kPointerSize));
1832     convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
1833   }
1834 
1835   EmitCall(expr, convert_mode);
1836 }
1837 
1838 
1839 // Code common for calls using the IC.
1840 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
1841                                                 Expression* key) {
1842   // Load the key.
1843   VisitForAccumulatorValue(key);
1844 
1845   Expression* callee = expr->expression();
1846 
1847   // Load the function from the receiver.
1848   DCHECK(callee->IsProperty());
1849   __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1850   __ Move(LoadDescriptor::NameRegister(), r0);
1851   EmitKeyedPropertyLoad(callee->AsProperty());
1852   PrepareForBailoutForId(callee->AsProperty()->LoadId(),
1853                          BailoutState::TOS_REGISTER);
1854 
1855   // Push the target function under the receiver.
1856   __ ldr(ip, MemOperand(sp, 0));
1857   PushOperand(ip);
1858   __ str(r0, MemOperand(sp, kPointerSize));
1859 
1860   EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
1861 }
1862 
1863 
1864 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
1865   // Load the arguments.
1866   ZoneList<Expression*>* args = expr->arguments();
1867   int arg_count = args->length();
1868   for (int i = 0; i < arg_count; i++) {
1869     VisitForStackValue(args->at(i));
1870   }
1871 
1872   PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
1873   SetCallPosition(expr, expr->tail_call_mode());
1874   if (expr->tail_call_mode() == TailCallMode::kAllow) {
1875     if (FLAG_trace) {
1876       __ CallRuntime(Runtime::kTraceTailCall);
1877     }
1878     // Update profiling counters before the tail call since we will
1879     // not return to this function.
1880     EmitProfilingCounterHandlingForReturnSequence(true);
1881   }
1882   Handle<Code> code =
1883       CodeFactory::CallICTrampoline(isolate(), mode, expr->tail_call_mode())
1884           .code();
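  // Register convention set up for the call IC trampoline below: r0 holds the
  // argument count, r1 the call target and r3 the feedback slot index.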
1885   __ mov(r3, Operand(IntFromSlot(expr->CallFeedbackICSlot())));
1886   __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
1887   __ mov(r0, Operand(arg_count));
1888   CallIC(code);
1889   OperandStackDepthDecrement(arg_count + 1);
1890 
1891   RecordJSReturnSite(expr);
1892   RestoreContext();
1893   context()->DropAndPlug(1, r0);
1894 }
1895 
1896 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
1897   Comment cmnt(masm_, "[ CallNew");
1898   // According to ECMA-262, section 11.2.2, page 44, the function
1899   // expression in new calls must be evaluated before the
1900   // arguments.
1901 
1902   // Push constructor on the stack.  If it's not a function it's used as
1903   // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
1904   // ignored.
1905   DCHECK(!expr->expression()->IsSuperPropertyReference());
1906   VisitForStackValue(expr->expression());
1907 
1908   // Push the arguments ("left-to-right") on the stack.
1909   ZoneList<Expression*>* args = expr->arguments();
1910   int arg_count = args->length();
1911   for (int i = 0; i < arg_count; i++) {
1912     VisitForStackValue(args->at(i));
1913   }
1914 
1915   // Call the construct call builtin that handles allocation and
1916   // constructor invocation.
1917   SetConstructCallPosition(expr);
1918 
1919   // Load function and argument count into r1 and r0.
1920   __ mov(r0, Operand(arg_count));
1921   __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
1922 
1923   // Record call targets in unoptimized code.
1924   __ EmitLoadFeedbackVector(r2);
1925   __ mov(r3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
1926 
1927   CallConstructStub stub(isolate());
1928   CallIC(stub.GetCode());
1929   OperandStackDepthDecrement(arg_count + 1);
1930   PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
1931   RestoreContext();
1932   context()->Plug(r0);
1933 }
1934 
1935 
1936 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
1937   ZoneList<Expression*>* args = expr->arguments();
1938   DCHECK(args->length() == 1);
1939 
1940   VisitForAccumulatorValue(args->at(0));
1941 
1942   Label materialize_true, materialize_false;
1943   Label* if_true = NULL;
1944   Label* if_false = NULL;
1945   Label* fall_through = NULL;
1946   context()->PrepareTest(&materialize_true, &materialize_false,
1947                          &if_true, &if_false, &fall_through);
1948 
1949   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
1950   __ SmiTst(r0);
1951   Split(eq, if_true, if_false, fall_through);
1952 
1953   context()->Plug(if_true, if_false);
1954 }
1955 
1956 
1957 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
1958   ZoneList<Expression*>* args = expr->arguments();
1959   DCHECK(args->length() == 1);
1960 
1961   VisitForAccumulatorValue(args->at(0));
1962 
1963   Label materialize_true, materialize_false;
1964   Label* if_true = NULL;
1965   Label* if_false = NULL;
1966   Label* fall_through = NULL;
1967   context()->PrepareTest(&materialize_true, &materialize_false,
1968                          &if_true, &if_false, &fall_through);
1969 
1970   __ JumpIfSmi(r0, if_false);
1971   __ CompareObjectType(r0, r1, r1, FIRST_JS_RECEIVER_TYPE);
1972   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
1973   Split(ge, if_true, if_false, fall_through);
1974 
1975   context()->Plug(if_true, if_false);
1976 }
1977 
1978 
1979 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
1980   ZoneList<Expression*>* args = expr->arguments();
1981   DCHECK(args->length() == 1);
1982 
1983   VisitForAccumulatorValue(args->at(0));
1984 
1985   Label materialize_true, materialize_false;
1986   Label* if_true = NULL;
1987   Label* if_false = NULL;
1988   Label* fall_through = NULL;
1989   context()->PrepareTest(&materialize_true, &materialize_false,
1990                          &if_true, &if_false, &fall_through);
1991 
1992   __ JumpIfSmi(r0, if_false);
1993   __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
1994   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
1995   Split(eq, if_true, if_false, fall_through);
1996 
1997   context()->Plug(if_true, if_false);
1998 }
1999 
2000 
2001 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2002   ZoneList<Expression*>* args = expr->arguments();
2003   DCHECK(args->length() == 1);
2004 
2005   VisitForAccumulatorValue(args->at(0));
2006 
2007   Label materialize_true, materialize_false;
2008   Label* if_true = NULL;
2009   Label* if_false = NULL;
2010   Label* fall_through = NULL;
2011   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2012                          &if_false, &fall_through);
2013 
2014   __ JumpIfSmi(r0, if_false);
2015   __ CompareObjectType(r0, r1, r1, JS_TYPED_ARRAY_TYPE);
2016   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2017   Split(eq, if_true, if_false, fall_through);
2018 
2019   context()->Plug(if_true, if_false);
2020 }
2021 
2022 
2023 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2024   ZoneList<Expression*>* args = expr->arguments();
2025   DCHECK(args->length() == 1);
2026 
2027   VisitForAccumulatorValue(args->at(0));
2028 
2029   Label materialize_true, materialize_false;
2030   Label* if_true = NULL;
2031   Label* if_false = NULL;
2032   Label* fall_through = NULL;
2033   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2034                          &if_false, &fall_through);
2035 
2036   __ JumpIfSmi(r0, if_false);
2037   __ CompareObjectType(r0, r1, r1, JS_PROXY_TYPE);
2038   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2039   Split(eq, if_true, if_false, fall_through);
2040 
2041   context()->Plug(if_true, if_false);
2042 }
2043 
2044 
2045 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2046   ZoneList<Expression*>* args = expr->arguments();
2047   DCHECK(args->length() == 1);
2048   Label done, null, function, non_function_constructor;
2049 
2050   VisitForAccumulatorValue(args->at(0));
2051 
2052   // If the object is not a JSReceiver, we return null.
2053   __ JumpIfSmi(r0, &null);
2054   STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2055   __ CompareObjectType(r0, r0, r1, FIRST_JS_RECEIVER_TYPE);
2056   // Map is now in r0.
2057   __ b(lt, &null);
2058 
2059   // Return 'Function' for JSFunction and JSBoundFunction objects.
2060   __ cmp(r1, Operand(FIRST_FUNCTION_TYPE));
2061   STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
2062   __ b(hs, &function);
2063 
2064   // Check if the constructor in the map is a JS function.
2065   Register instance_type = r2;
2066   __ GetMapConstructor(r0, r0, r1, instance_type);
2067   __ cmp(instance_type, Operand(JS_FUNCTION_TYPE));
2068   __ b(ne, &non_function_constructor);
2069 
2070   // r0 now contains the constructor function. Grab the
2071   // instance class name from there.
2072   __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
2073   __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
2074   __ b(&done);
2075 
2076   // Functions have class 'Function'.
2077   __ bind(&function);
2078   __ LoadRoot(r0, Heap::kFunction_stringRootIndex);
2079   __ jmp(&done);
2080 
2081   // Objects with a non-function constructor have class 'Object'.
2082   __ bind(&non_function_constructor);
2083   __ LoadRoot(r0, Heap::kObject_stringRootIndex);
2084   __ jmp(&done);
2085 
2086   // Non-JS objects have class null.
2087   __ bind(&null);
2088   __ LoadRoot(r0, Heap::kNullValueRootIndex);
2089 
2090   // All done.
2091   __ bind(&done);
2092 
2093   context()->Plug(r0);
2094 }
2095 
2096 
2097 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
2098   ZoneList<Expression*>* args = expr->arguments();
2099   DCHECK(args->length() == 2);
2100   VisitForStackValue(args->at(0));
2101   VisitForAccumulatorValue(args->at(1));
2102 
2103   Register object = r1;
2104   Register index = r0;
2105   Register result = r3;
2106 
2107   PopOperand(object);
2108 
2109   Label need_conversion;
2110   Label index_out_of_range;
2111   Label done;
2112   StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
2113                                       &need_conversion, &index_out_of_range);
2114   generator.GenerateFast(masm_);
2115   __ jmp(&done);
2116 
2117   __ bind(&index_out_of_range);
2118   // When the index is out of range, the spec requires us to return
2119   // NaN.
2120   __ LoadRoot(result, Heap::kNanValueRootIndex);
2121   __ jmp(&done);
2122 
2123   __ bind(&need_conversion);
2124   // Load the undefined value into the result register, which will
2125   // trigger conversion.
2126   __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2127   __ jmp(&done);
2128 
2129   NopRuntimeCallHelper call_helper;
2130   generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
2131 
2132   __ bind(&done);
2133   context()->Plug(result);
2134 }
2135 
2136 
2137 void FullCodeGenerator::EmitCall(CallRuntime* expr) {
2138   ZoneList<Expression*>* args = expr->arguments();
2139   DCHECK_LE(2, args->length());
2140   // Push target, receiver and arguments onto the stack.
2141   for (Expression* const arg : *args) {
2142     VisitForStackValue(arg);
2143   }
2144   PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
2145   // Move target to r1.
2146   int const argc = args->length() - 2;
2147   __ ldr(r1, MemOperand(sp, (argc + 1) * kPointerSize));
2148   // Call the target.
2149   __ mov(r0, Operand(argc));
2150   __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2151   OperandStackDepthDecrement(argc + 1);
2152   RestoreContext();
2153   // Discard the function left on TOS.
2154   context()->DropAndPlug(1, r0);
2155 }
2156 
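// The super constructor is the [[Prototype]] of the active function, so this
// loads the function's map and then the map's prototype slot.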
2157 void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
2158   ZoneList<Expression*>* args = expr->arguments();
2159   DCHECK_EQ(1, args->length());
2160   VisitForAccumulatorValue(args->at(0));
2161   __ AssertFunction(r0);
2162   __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
2163   __ ldr(r0, FieldMemOperand(r0, Map::kPrototypeOffset));
2164   context()->Plug(r0);
2165 }
2166 
2167 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
2168   DCHECK(expr->arguments()->length() == 0);
2169   ExternalReference debug_is_active =
2170       ExternalReference::debug_is_active_address(isolate());
2171   __ mov(ip, Operand(debug_is_active));
2172   __ ldrb(r0, MemOperand(ip));
2173   __ SmiTag(r0);
2174   context()->Plug(r0);
2175 }
2176 
2177 
2178 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
2179   ZoneList<Expression*>* args = expr->arguments();
2180   DCHECK_EQ(2, args->length());
2181   VisitForStackValue(args->at(0));
2182   VisitForStackValue(args->at(1));
2183 
2184   Label runtime, done;
2185 
2186   __ Allocate(JSIteratorResult::kSize, r0, r2, r3, &runtime,
2187               NO_ALLOCATION_FLAGS);
2188   __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r1);
2189   __ pop(r3);
2190   __ pop(r2);
2191   __ LoadRoot(r4, Heap::kEmptyFixedArrayRootIndex);
2192   __ str(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2193   __ str(r4, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2194   __ str(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
2195   __ str(r2, FieldMemOperand(r0, JSIteratorResult::kValueOffset));
2196   __ str(r3, FieldMemOperand(r0, JSIteratorResult::kDoneOffset));
2197   STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
2198   __ b(&done);
2199 
2200   __ bind(&runtime);
2201   CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
2202 
2203   __ bind(&done);
2204   context()->Plug(r0);
2205 }
2206 
2207 
2208 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
2209   // Push function.
2210   __ LoadNativeContextSlot(expr->context_index(), r0);
2211   PushOperand(r0);
2212 
2213   // Push undefined as the receiver.
2214   __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2215   PushOperand(r0);
2216 }
2217 
2218 
2219 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
2220   ZoneList<Expression*>* args = expr->arguments();
2221   int arg_count = args->length();
2222 
2223   SetCallPosition(expr);
2224   __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2225   __ mov(r0, Operand(arg_count));
2226   __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
2227           RelocInfo::CODE_TARGET);
2228   OperandStackDepthDecrement(arg_count + 1);
2229   RestoreContext();
2230 }
2231 
2232 
2233 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
2234   switch (expr->op()) {
2235     case Token::DELETE: {
2236       Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
2237       Property* property = expr->expression()->AsProperty();
2238       VariableProxy* proxy = expr->expression()->AsVariableProxy();
2239 
2240       if (property != NULL) {
2241         VisitForStackValue(property->obj());
2242         VisitForStackValue(property->key());
2243         CallRuntimeWithOperands(is_strict(language_mode())
2244                                     ? Runtime::kDeleteProperty_Strict
2245                                     : Runtime::kDeleteProperty_Sloppy);
2246         context()->Plug(r0);
2247       } else if (proxy != NULL) {
2248         Variable* var = proxy->var();
2249         // Delete of an unqualified identifier is disallowed in strict mode but
2250         // "delete this" is allowed.
2251         bool is_this = var->is_this();
2252         DCHECK(is_sloppy(language_mode()) || is_this);
2253         if (var->IsUnallocated()) {
2254           __ LoadGlobalObject(r2);
2255           __ mov(r1, Operand(var->name()));
2256           __ Push(r2, r1);
2257           __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
2258           context()->Plug(r0);
2259         } else {
2260           DCHECK(!var->IsLookupSlot());
2261           DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2262           // Result of deleting non-global, non-dynamic variables is false.
2263           // The subexpression does not have side effects.
2264           context()->Plug(is_this);
2265         }
2266       } else {
2267         // Result of deleting non-property, non-variable reference is true.
2268         // The subexpression may have side effects.
2269         VisitForEffect(expr->expression());
2270         context()->Plug(true);
2271       }
2272       break;
2273     }
2274 
2275     case Token::VOID: {
2276       Comment cmnt(masm_, "[ UnaryOperation (VOID)");
2277       VisitForEffect(expr->expression());
2278       context()->Plug(Heap::kUndefinedValueRootIndex);
2279       break;
2280     }
2281 
2282     case Token::NOT: {
2283       Comment cmnt(masm_, "[ UnaryOperation (NOT)");
2284       if (context()->IsEffect()) {
2285         // Unary NOT has no side effects so it's only necessary to visit the
2286         // subexpression.  Match the optimizing compiler by not branching.
2287         VisitForEffect(expr->expression());
2288       } else if (context()->IsTest()) {
2289         const TestContext* test = TestContext::cast(context());
2290         // The labels are swapped for the recursive call.
2291         VisitForControl(expr->expression(),
2292                         test->false_label(),
2293                         test->true_label(),
2294                         test->fall_through());
2295         context()->Plug(test->true_label(), test->false_label());
2296       } else {
2297         // We handle value contexts explicitly rather than simply visiting
2298         // for control and plugging the control flow into the context,
2299         // because we need to prepare a pair of extra administrative AST ids
2300         // for the optimizing compiler.
2301         DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
2302         Label materialize_true, materialize_false, done;
2303         VisitForControl(expr->expression(),
2304                         &materialize_false,
2305                         &materialize_true,
2306                         &materialize_true);
2307         if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
2308         __ bind(&materialize_true);
2309         PrepareForBailoutForId(expr->MaterializeTrueId(),
2310                                BailoutState::NO_REGISTERS);
2311         __ LoadRoot(r0, Heap::kTrueValueRootIndex);
2312         if (context()->IsStackValue()) __ push(r0);
2313         __ jmp(&done);
2314         __ bind(&materialize_false);
2315         PrepareForBailoutForId(expr->MaterializeFalseId(),
2316                                BailoutState::NO_REGISTERS);
2317         __ LoadRoot(r0, Heap::kFalseValueRootIndex);
2318         if (context()->IsStackValue()) __ push(r0);
2319         __ bind(&done);
2320       }
2321       break;
2322     }
2323 
2324     case Token::TYPEOF: {
2325       Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
2326       {
2327         AccumulatorValueContext context(this);
2328         VisitForTypeofValue(expr->expression());
2329       }
2330       __ mov(r3, r0);
2331       __ Call(isolate()->builtins()->Typeof(), RelocInfo::CODE_TARGET);
2332       context()->Plug(r0);
2333       break;
2334     }
2335 
2336     default:
2337       UNREACHABLE();
2338   }
2339 }
2340 
2341 
2342 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
2343   DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
2344 
2345   Comment cmnt(masm_, "[ CountOperation");
2346 
2347   Property* prop = expr->expression()->AsProperty();
2348   LhsKind assign_type = Property::GetAssignType(prop);
2349 
2350   // Evaluate expression and get value.
2351   if (assign_type == VARIABLE) {
2352     DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
2353     AccumulatorValueContext context(this);
2354     EmitVariableLoad(expr->expression()->AsVariableProxy());
2355   } else {
2356     // Reserve space for result of postfix operation.
2357     if (expr->is_postfix() && !context()->IsEffect()) {
2358       __ mov(ip, Operand(Smi::kZero));
2359       PushOperand(ip);
2360     }
2361     switch (assign_type) {
2362       case NAMED_PROPERTY: {
2363         // Put the object both on the stack and in the register.
2364         VisitForStackValue(prop->obj());
2365         __ ldr(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2366         EmitNamedPropertyLoad(prop);
2367         break;
2368       }
2369 
2370       case KEYED_PROPERTY: {
2371         VisitForStackValue(prop->obj());
2372         VisitForStackValue(prop->key());
2373         __ ldr(LoadDescriptor::ReceiverRegister(),
2374                MemOperand(sp, 1 * kPointerSize));
2375         __ ldr(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
2376         EmitKeyedPropertyLoad(prop);
2377         break;
2378       }
2379 
2380       case NAMED_SUPER_PROPERTY:
2381       case KEYED_SUPER_PROPERTY:
2382       case VARIABLE:
2383         UNREACHABLE();
2384     }
2385   }
2386 
2387   // We need a second deoptimization point after loading the value
2388   // in case evaluating the property load may have a side effect.
2389   if (assign_type == VARIABLE) {
2390     PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
2391   } else {
2392     PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2393   }
2394 
2395   // Inline smi case if we are in a loop.
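  // Fast path: add Smi(+1) or Smi(-1) directly with the flags set; on signed
  // overflow the addition is undone and control falls through to the
  // BinaryOpIC stub call below.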
2396   Label stub_call, done;
2397   JumpPatchSite patch_site(masm_);
2398 
2399   int count_value = expr->op() == Token::INC ? 1 : -1;
2400   if (ShouldInlineSmiCase(expr->op())) {
2401     Label slow;
2402     patch_site.EmitJumpIfNotSmi(r0, &slow);
2403 
2404     // Save result for postfix expressions.
2405     if (expr->is_postfix()) {
2406       if (!context()->IsEffect()) {
2407         // Save the result on the stack. If we have a named or keyed property
2408         // we store the result under the receiver that is currently on top
2409         // of the stack.
2410         switch (assign_type) {
2411           case VARIABLE:
2412             __ push(r0);
2413             break;
2414           case NAMED_PROPERTY:
2415             __ str(r0, MemOperand(sp, kPointerSize));
2416             break;
2417           case KEYED_PROPERTY:
2418             __ str(r0, MemOperand(sp, 2 * kPointerSize));
2419             break;
2420           case NAMED_SUPER_PROPERTY:
2421           case KEYED_SUPER_PROPERTY:
2422             UNREACHABLE();
2423             break;
2424         }
2425       }
2426     }
2427 
2428     __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
2429     __ b(vc, &done);
2430     // Call stub. Undo operation first.
2431     __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
2432     __ jmp(&stub_call);
2433     __ bind(&slow);
2434   }
2435 
2436   // Convert old value into a number.
2437   __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
2438   RestoreContext();
2439   PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);
2440 
2441   // Save result for postfix expressions.
2442   if (expr->is_postfix()) {
2443     if (!context()->IsEffect()) {
2444       // Save the result on the stack. If we have a named or keyed property
2445       // we store the result under the receiver that is currently on top
2446       // of the stack.
2447       switch (assign_type) {
2448         case VARIABLE:
2449           PushOperand(r0);
2450           break;
2451         case NAMED_PROPERTY:
2452           __ str(r0, MemOperand(sp, kPointerSize));
2453           break;
2454         case KEYED_PROPERTY:
2455           __ str(r0, MemOperand(sp, 2 * kPointerSize));
2456           break;
2457         case NAMED_SUPER_PROPERTY:
2458         case KEYED_SUPER_PROPERTY:
2459           UNREACHABLE();
2460           break;
2461       }
2462     }
2463   }
2464 
2465 
2466   __ bind(&stub_call);
2467   __ mov(r1, r0);
2468   __ mov(r0, Operand(Smi::FromInt(count_value)));
2469 
2470   SetExpressionPosition(expr);
2471 
2472   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
2473   CallIC(code, expr->CountBinOpFeedbackId());
2474   patch_site.EmitPatchInfo();
2475   __ bind(&done);
2476 
2477   // Store the value returned in r0.
2478   switch (assign_type) {
2479     case VARIABLE: {
2480       VariableProxy* proxy = expr->expression()->AsVariableProxy();
2481       if (expr->is_postfix()) {
2482         { EffectContext context(this);
2483           EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
2484                                  proxy->hole_check_mode());
2485           PrepareForBailoutForId(expr->AssignmentId(),
2486                                  BailoutState::TOS_REGISTER);
2487           context.Plug(r0);
2488         }
2489         // For all contexts except EffectContext we have the result on
2490         // top of the stack.
2491         if (!context()->IsEffect()) {
2492           context()->PlugTOS();
2493         }
2494       } else {
2495         EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
2496                                proxy->hole_check_mode());
2497         PrepareForBailoutForId(expr->AssignmentId(),
2498                                BailoutState::TOS_REGISTER);
2499         context()->Plug(r0);
2500       }
2501       break;
2502     }
2503     case NAMED_PROPERTY: {
2504       PopOperand(StoreDescriptor::ReceiverRegister());
2505       CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
2506       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2507       if (expr->is_postfix()) {
2508         if (!context()->IsEffect()) {
2509           context()->PlugTOS();
2510         }
2511       } else {
2512         context()->Plug(r0);
2513       }
2514       break;
2515     }
2516     case KEYED_PROPERTY: {
2517       PopOperands(StoreDescriptor::ReceiverRegister(),
2518                   StoreDescriptor::NameRegister());
2519       CallKeyedStoreIC(expr->CountSlot());
2520       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2521       if (expr->is_postfix()) {
2522         if (!context()->IsEffect()) {
2523           context()->PlugTOS();
2524         }
2525       } else {
2526         context()->Plug(r0);
2527       }
2528       break;
2529     }
2530     case NAMED_SUPER_PROPERTY:
2531     case KEYED_SUPER_PROPERTY:
2532       UNREACHABLE();
2533       break;
2534   }
2535 }
2536 
2537 
2538 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
2539                                                  Expression* sub_expr,
2540                                                  Handle<String> check) {
2541   Label materialize_true, materialize_false;
2542   Label* if_true = NULL;
2543   Label* if_false = NULL;
2544   Label* fall_through = NULL;
2545   context()->PrepareTest(&materialize_true, &materialize_false,
2546                          &if_true, &if_false, &fall_through);
2547 
2548   { AccumulatorValueContext context(this);
2549     VisitForTypeofValue(sub_expr);
2550   }
2551   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2552 
2553   Factory* factory = isolate()->factory();
2554   if (String::Equals(check, factory->number_string())) {
2555     __ JumpIfSmi(r0, if_true);
2556     __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
2557     __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
2558     __ cmp(r0, ip);
2559     Split(eq, if_true, if_false, fall_through);
2560   } else if (String::Equals(check, factory->string_string())) {
2561     __ JumpIfSmi(r0, if_false);
2562     __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
2563     Split(lt, if_true, if_false, fall_through);
2564   } else if (String::Equals(check, factory->symbol_string())) {
2565     __ JumpIfSmi(r0, if_false);
2566     __ CompareObjectType(r0, r0, r1, SYMBOL_TYPE);
2567     Split(eq, if_true, if_false, fall_through);
2568   } else if (String::Equals(check, factory->boolean_string())) {
2569     __ CompareRoot(r0, Heap::kTrueValueRootIndex);
2570     __ b(eq, if_true);
2571     __ CompareRoot(r0, Heap::kFalseValueRootIndex);
2572     Split(eq, if_true, if_false, fall_through);
2573   } else if (String::Equals(check, factory->undefined_string())) {
2574     __ CompareRoot(r0, Heap::kNullValueRootIndex);
2575     __ b(eq, if_false);
2576     __ JumpIfSmi(r0, if_false);
2577     // Check for undetectable objects => true.
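    // Undetectable objects (e.g. document.all in some embedders) deliberately
    // report typeof as "undefined".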
2578     __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
2579     __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
2580     __ tst(r1, Operand(1 << Map::kIsUndetectable));
2581     Split(ne, if_true, if_false, fall_through);
2582 
2583   } else if (String::Equals(check, factory->function_string())) {
2584     __ JumpIfSmi(r0, if_false);
2585     __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
2586     __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
2587     __ and_(r1, r1,
2588             Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
2589     __ cmp(r1, Operand(1 << Map::kIsCallable));
2590     Split(eq, if_true, if_false, fall_through);
2591   } else if (String::Equals(check, factory->object_string())) {
2592     __ JumpIfSmi(r0, if_false);
2593     __ CompareRoot(r0, Heap::kNullValueRootIndex);
2594     __ b(eq, if_true);
2595     STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2596     __ CompareObjectType(r0, r0, r1, FIRST_JS_RECEIVER_TYPE);
2597     __ b(lt, if_false);
2598     // Check for callable or undetectable objects => false.
2599     __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
2600     __ tst(r1, Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
2601     Split(eq, if_true, if_false, fall_through);
2602   } else {
2603     if (if_false != fall_through) __ jmp(if_false);
2604   }
2605   context()->Plug(if_true, if_false);
2606 }
2607 
2608 
2609 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
2610   Comment cmnt(masm_, "[ CompareOperation");
2611 
2612   // First we try a fast inlined version of the compare when one of
2613   // the operands is a literal.
2614   if (TryLiteralCompare(expr)) return;
2615 
2616   // Always perform the comparison for its control flow.  Pack the result
2617   // into the expression's context after the comparison is performed.
2618   Label materialize_true, materialize_false;
2619   Label* if_true = NULL;
2620   Label* if_false = NULL;
2621   Label* fall_through = NULL;
2622   context()->PrepareTest(&materialize_true, &materialize_false,
2623                          &if_true, &if_false, &fall_through);
2624 
2625   Token::Value op = expr->op();
2626   VisitForStackValue(expr->left());
2627   switch (op) {
2628     case Token::IN:
2629       VisitForStackValue(expr->right());
2630       SetExpressionPosition(expr);
2631       EmitHasProperty();
2632       PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
2633       __ CompareRoot(r0, Heap::kTrueValueRootIndex);
2634       Split(eq, if_true, if_false, fall_through);
2635       break;
2636 
2637     case Token::INSTANCEOF: {
2638       VisitForAccumulatorValue(expr->right());
2639       SetExpressionPosition(expr);
2640       PopOperand(r1);
2641       __ Call(isolate()->builtins()->InstanceOf(), RelocInfo::CODE_TARGET);
2642       RestoreContext();
2643       PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
2644       __ CompareRoot(r0, Heap::kTrueValueRootIndex);
2645       Split(eq, if_true, if_false, fall_through);
2646       break;
2647     }
2648 
2649     default: {
2650       VisitForAccumulatorValue(expr->right());
2651       SetExpressionPosition(expr);
2652       Condition cond = CompareIC::ComputeCondition(op);
2653       PopOperand(r1);
2654 
2655       bool inline_smi_code = ShouldInlineSmiCase(op);
2656       JumpPatchSite patch_site(masm_);
2657       if (inline_smi_code) {
2658         Label slow_case;
2659         __ orr(r2, r0, Operand(r1));
2660         patch_site.EmitJumpIfNotSmi(r2, &slow_case);
2661         __ cmp(r1, r0);
2662         Split(cond, if_true, if_false, NULL);
2663         __ bind(&slow_case);
2664       }
2665 
2666       Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
2667       CallIC(ic, expr->CompareOperationFeedbackId());
2668       patch_site.EmitPatchInfo();
2669       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2670       __ cmp(r0, Operand::Zero());
2671       Split(cond, if_true, if_false, fall_through);
2672     }
2673   }
2674 
2675   // Convert the result of the comparison into one expected for this
2676   // expression's context.
2677   context()->Plug(if_true, if_false);
2678 }
2679 
2680 
2681 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
2682                                               Expression* sub_expr,
2683                                               NilValue nil) {
2684   Label materialize_true, materialize_false;
2685   Label* if_true = NULL;
2686   Label* if_false = NULL;
2687   Label* fall_through = NULL;
2688   context()->PrepareTest(&materialize_true, &materialize_false,
2689                          &if_true, &if_false, &fall_through);
2690 
2691   VisitForAccumulatorValue(sub_expr);
2692   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
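  // For '===' the value must be exactly null or undefined (compared against
  // the corresponding root); for '==' null and undefined compare equal to
  // each other and to undetectable objects, hence the map bit test below.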
2693   if (expr->op() == Token::EQ_STRICT) {
2694     Heap::RootListIndex nil_value = nil == kNullValue ?
2695         Heap::kNullValueRootIndex :
2696         Heap::kUndefinedValueRootIndex;
2697     __ LoadRoot(r1, nil_value);
2698     __ cmp(r0, r1);
2699     Split(eq, if_true, if_false, fall_through);
2700   } else {
2701     __ JumpIfSmi(r0, if_false);
2702     __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
2703     __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
2704     __ tst(r1, Operand(1 << Map::kIsUndetectable));
2705     Split(ne, if_true, if_false, fall_through);
2706   }
2707   context()->Plug(if_true, if_false);
2708 }
2709 
2710 
2711 Register FullCodeGenerator::result_register() {
2712   return r0;
2713 }
2714 
2715 
2716 Register FullCodeGenerator::context_register() {
2717   return cp;
2718 }
2719 
2720 void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
2721   DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
2722   __ ldr(value, MemOperand(fp, frame_offset));
2723 }
2724 
2725 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
2726   DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
2727   __ str(value, MemOperand(fp, frame_offset));
2728 }
2729 
2730 
2731 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
2732   __ ldr(dst, ContextMemOperand(cp, context_index));
2733 }
2734 
2735 
2736 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
2737   DeclarationScope* closure_scope = scope()->GetClosureScope();
2738   if (closure_scope->is_script_scope() ||
2739       closure_scope->is_module_scope()) {
2740     // Contexts nested in the native context have a canonical empty function
2741     // as their closure, not the anonymous closure containing the global
2742     // code.
2743     __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip);
2744   } else if (closure_scope->is_eval_scope()) {
2745     // Contexts created by a call to eval have the same closure as the
2746     // context calling eval, not the anonymous closure containing the eval
2747     // code.  Fetch it from the context.
2748     __ ldr(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX));
2749   } else {
2750     DCHECK(closure_scope->is_function_scope());
2751     __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2752   }
2753   PushOperand(ip);
2754 }
2755 
2756 
2757 #undef __
2758 
2759 
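// Walks back from the return address of a back-edge call to find the start of
// the instruction sequence that loads the interrupt/OSR builtin address into
// ip. Depending on the constant pool mode and CPU, that is a single
// ldr-from-constant-pool, a movw/movt pair (ARMv7), or a mov/orr immediate
// sequence (pre-ARMv7).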
2760 static Address GetInterruptImmediateLoadAddress(Address pc) {
2761   Address load_address = pc - 2 * Assembler::kInstrSize;
2762   if (!FLAG_enable_embedded_constant_pool) {
2763     DCHECK(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(load_address)));
2764   } else if (Assembler::IsLdrPpRegOffset(Memory::int32_at(load_address))) {
2765     // This is an extended constant pool lookup.
2766     if (CpuFeatures::IsSupported(ARMv7)) {
2767       load_address -= 2 * Assembler::kInstrSize;
2768       DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
2769       DCHECK(Assembler::IsMovT(
2770           Memory::int32_at(load_address + Assembler::kInstrSize)));
2771     } else {
2772       load_address -= 4 * Assembler::kInstrSize;
2773       DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
2774       DCHECK(Assembler::IsOrrImmed(
2775           Memory::int32_at(load_address + Assembler::kInstrSize)));
2776       DCHECK(Assembler::IsOrrImmed(
2777           Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
2778       DCHECK(Assembler::IsOrrImmed(
2779           Memory::int32_at(load_address + 3 * Assembler::kInstrSize)));
2780     }
2781   } else if (CpuFeatures::IsSupported(ARMv7) &&
2782              Assembler::IsMovT(Memory::int32_at(load_address))) {
2783     // This is a movw / movt immediate load.
2784     load_address -= Assembler::kInstrSize;
2785     DCHECK(Assembler::IsMovW(Memory::int32_at(load_address)));
2786   } else if (!CpuFeatures::IsSupported(ARMv7) &&
2787              Assembler::IsOrrImmed(Memory::int32_at(load_address))) {
2788     // This is a mov / orr immediate load.
2789     load_address -= 3 * Assembler::kInstrSize;
2790     DCHECK(Assembler::IsMovImmed(Memory::int32_at(load_address)));
2791     DCHECK(Assembler::IsOrrImmed(
2792         Memory::int32_at(load_address + Assembler::kInstrSize)));
2793     DCHECK(Assembler::IsOrrImmed(
2794         Memory::int32_at(load_address + 2 * Assembler::kInstrSize)));
2795   } else {
2796     // This is a small constant pool lookup.
2797     DCHECK(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(load_address)));
2798   }
2799   return load_address;
2800 }
2801 
2802 
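// Patch the back-edge call whose return address is |pc|: for INTERRUPT the
// instruction guarding the call is a conditional branch (bpl) over it to the
// ok-label, for ON_STACK_REPLACEMENT it is a nop so the call is always taken.
// In both cases the call target loaded into ip is replaced with
// |replacement_code|.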
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
  Isolate* isolate = unoptimized_code->GetIsolate();
  CodePatcher patcher(isolate, branch_address, 1);
  switch (target_state) {
    case INTERRUPT:
    {
      //  <decrement profiling counter>
      //   bpl ok
      //   ; load interrupt stub address into ip - either of (for ARMv7):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   movw ip, #imm     |   movw ip, #imm
      //                          |   movt ip, #imm     |   movt ip, #imm
      //                          |   ldr  ip, [pp, ip]
      //   ; or (for ARMv6):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   mov ip, #imm      |   mov ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //   blx ip
      //  <reset profiling counter>
      //  ok-label

      // Calculate the branch offset to the ok-label: the ok-label lies
      // kProfileCounterResetSequenceLength bytes past |pc| (the return
      // address of <blx ip>), and the offset is taken relative to the
      // branch's pc read position (branch_address + Instruction::kPCReadOffset).
      int branch_offset = pc - Instruction::kPCReadOffset - branch_address +
                          kProfileCounterResetSequenceLength;
      patcher.masm()->b(branch_offset, pl);
      break;
    }
    case ON_STACK_REPLACEMENT:
      //  <decrement profiling counter>
      //   mov r0, r0 (NOP)
      //   ; load on-stack replacement address into ip - either of (for ARMv7):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   movw ip, #imm     |   movw ip, #imm
      //                          |   movt ip, #imm     |   movt ip, #imm
      //                          |   ldr  ip, [pp, ip]
      //   ; or (for ARMv6):
      //   ; <small cp load>      |  <extended cp load> |  <immediate load>
      //   ldr ip, [pc/pp, #imm]  |   mov ip, #imm      |   mov ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //                          |   orr ip, ip, #imm  |   orr ip, ip, #imm
      //   blx ip
      //  <reset profiling counter>
      //  ok-label
      patcher.masm()->nop();
      break;
  }

  // Replace the call address.
  Assembler::set_target_address_at(isolate, pc_immediate_load_address,
                                   unoptimized_code, replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}

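// Determine which state the back edge at |pc| is currently in by inspecting
// the instruction in front of the call-target load: PatchAt leaves the
// conditional branch (bpl ok) there for INTERRUPT and writes a nop for
// ON_STACK_REPLACEMENT.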
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  DCHECK(Assembler::IsBlxIp(Memory::int32_at(pc - Assembler::kInstrSize)));

  Address pc_immediate_load_address = GetInterruptImmediateLoadAddress(pc);
  Address branch_address = pc_immediate_load_address - Assembler::kInstrSize;
#ifdef DEBUG
  Address interrupt_address = Assembler::target_address_at(
      pc_immediate_load_address, unoptimized_code);
#endif

  if (Assembler::IsBranch(Assembler::instr_at(branch_address))) {
    DCHECK(interrupt_address ==
           isolate->builtins()->InterruptCheck()->entry());
    return INTERRUPT;
  }

  DCHECK(Assembler::IsNop(Assembler::instr_at(branch_address)));

  DCHECK(interrupt_address ==
         isolate->builtins()->OnStackReplacement()->entry());
  return ON_STACK_REPLACEMENT;
}


}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM