1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #if V8_TARGET_ARCH_S390
6 
7 #include "src/ast/compile-time-value.h"
8 #include "src/ast/scopes.h"
9 #include "src/builtins/builtins-constructor.h"
10 #include "src/code-factory.h"
11 #include "src/code-stubs.h"
12 #include "src/codegen.h"
13 #include "src/compilation-info.h"
14 #include "src/compiler.h"
15 #include "src/debug/debug.h"
16 #include "src/full-codegen/full-codegen.h"
17 #include "src/ic/ic.h"
18 
19 #include "src/s390/code-stubs-s390.h"
20 #include "src/s390/macro-assembler-s390.h"
21 
22 namespace v8 {
23 namespace internal {
24 
25 #define __ ACCESS_MASM(masm())
26 
27 // A patch site is a location in the code which it is possible to patch. This
28 // class has a number of methods to emit the code which is patchable and the
29 // method EmitPatchInfo to record a marker back to the patchable code. This
30 // marker is a cmpi rx, #yyy instruction, and x * 0x0000ffff + yyy (raw 16 bit
31 // immediate value is used) is the delta from the pc to the first instruction of
32 // the patchable code.
33 // See PatchInlinedSmiCode in ic-s390.cc for the code that patches it
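// For example: in this file the marker is emitted by EmitPatchInfo below as
// "chi r0, Operand(delta)", so the register component of the formula is zero
// and the 16-bit immediate alone encodes the delta back to the patchable code.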
34 class JumpPatchSite BASE_EMBEDDED {
35  public:
36   explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
37 #ifdef DEBUG
38     info_emitted_ = false;
39 #endif
40   }
41 
42   ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); }
43 
44   // When initially emitting this, ensure that a jump is always generated to skip
45   // the inlined smi code.
46   void EmitJumpIfNotSmi(Register reg, Label* target) {
47     DCHECK(!patch_site_.is_bound() && !info_emitted_);
48     __ bind(&patch_site_);
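    // Comparing the register with itself always sets the condition code to
    // "equal", so the beq below is taken unconditionally until
    // PatchInlinedSmiCode rewrites this sequence into a real smi test.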
49     __ CmpP(reg, reg);
50 // Emit a nop to leave extra room for patching on 31-bit,
51 // since the TestIfSmi sequence uses a 4-byte TMLL.
52 #ifndef V8_TARGET_ARCH_S390X
53     __ nop();
54 #endif
55     __ beq(target);  // Always taken before patched.
56   }
57 
58   // When initially emitting this, ensure that a jump is never generated to skip
59   // the inlined smi code.
60   void EmitJumpIfSmi(Register reg, Label* target) {
61     DCHECK(!patch_site_.is_bound() && !info_emitted_);
62     __ bind(&patch_site_);
63     __ CmpP(reg, reg);
64 // Emit a nop to leave extra room for patching on 31-bit,
65 // since the TestIfSmi sequence uses a 4-byte TMLL.
66 #ifndef V8_TARGET_ARCH_S390X
67     __ nop();
68 #endif
69     __ bne(target);  // Never taken before patched.
70   }
71 
72   void EmitPatchInfo() {
73     if (patch_site_.is_bound()) {
74       int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
75       DCHECK(is_int16(delta_to_patch_site));
76       __ chi(r0, Operand(delta_to_patch_site));
77 #ifdef DEBUG
78       info_emitted_ = true;
79 #endif
80     } else {
81       __ nop();
82       __ nop();
83     }
84   }
85 
86  private:
87   MacroAssembler* masm() { return masm_; }
88   MacroAssembler* masm_;
89   Label patch_site_;
90 #ifdef DEBUG
91   bool info_emitted_;
92 #endif
93 };
94 
95 // Generate code for a JS function.  On entry to the function the receiver
96 // and arguments have been pushed on the stack left to right.  The actual
97 // argument count matches the formal parameter count expected by the
98 // function.
99 //
100 // The live registers are:
101 //   o r3: the JS function object being called (i.e., ourselves)
102 //   o r5: the new target value
103 //   o cp: our context
104 //   o fp: our caller's frame pointer
105 //   o sp: stack pointer
106 //   o lr: return address
107 //   o ip: our own function entry (required by the prologue)
108 //
109 // The function builds a JS frame.  Please see JavaScriptFrameConstants in
110 // frames-s390.h for its layout.
111 void FullCodeGenerator::Generate() {
112   CompilationInfo* info = info_;
113   profiling_counter_ = isolate()->factory()->NewCell(
114       Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
115   SetFunctionPosition(literal());
116   Comment cmnt(masm_, "[ function compiled by full code generator");
117 
118   ProfileEntryHookStub::MaybeCallEntryHook(masm_);
119 
120   if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
121     int receiver_offset = info->scope()->num_parameters() * kPointerSize;
122     __ LoadP(r4, MemOperand(sp, receiver_offset), r0);
123     __ AssertNotSmi(r4);
124     __ CompareObjectType(r4, r4, no_reg, FIRST_JS_RECEIVER_TYPE);
125     __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
126   }
127 
128   // Open a frame scope to indicate that there is a frame on the stack.  The
129   // MANUAL indicates that the scope shouldn't actually generate code to set up
130   // the frame (that is done below).
131   FrameScope frame_scope(masm_, StackFrame::MANUAL);
132   int prologue_offset = masm_->pc_offset();
133 
134   info->set_prologue_offset(prologue_offset);
135   __ Prologue(info->GeneratePreagedPrologue(), ip, prologue_offset);
136 
137   // Increment invocation count for the function.
138   {
139     Comment cmnt(masm_, "[ Increment invocation count");
140     __ LoadP(r6, FieldMemOperand(r3, JSFunction::kFeedbackVectorOffset));
141     __ LoadP(r6, FieldMemOperand(r6, Cell::kValueOffset));
142     __ LoadP(r1, FieldMemOperand(
143                      r6, FeedbackVector::kInvocationCountIndex * kPointerSize +
144                              FeedbackVector::kHeaderSize));
145     __ AddSmiLiteral(r1, r1, Smi::FromInt(1), r0);
146     __ StoreP(r1, FieldMemOperand(
147                       r6, FeedbackVector::kInvocationCountIndex * kPointerSize +
148                               FeedbackVector::kHeaderSize));
149   }
150 
151   {
152     Comment cmnt(masm_, "[ Allocate locals");
153     int locals_count = info->scope()->num_stack_slots();
154     OperandStackDepthIncrement(locals_count);
155     if (locals_count > 0) {
156       if (locals_count >= 128) {
157         Label ok;
158         __ AddP(ip, sp, Operand(-(locals_count * kPointerSize)));
159         __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
160         __ CmpLogicalP(ip, r5);
161         __ bge(&ok, Label::kNear);
162         __ CallRuntime(Runtime::kThrowStackOverflow);
163         __ bind(&ok);
164       }
165       __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
166       int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
167       if (locals_count >= kMaxPushes) {
168         int loop_iterations = locals_count / kMaxPushes;
169         __ mov(r4, Operand(loop_iterations));
170         Label loop_header;
171         __ bind(&loop_header);
172         // Do pushes.
173         // TODO(joransiu): Use MVC for better performance
174         __ lay(sp, MemOperand(sp, -kMaxPushes * kPointerSize));
175         for (int i = 0; i < kMaxPushes; i++) {
176           __ StoreP(ip, MemOperand(sp, i * kPointerSize));
177         }
178         // Continue loop if not done.
179         __ BranchOnCount(r4, &loop_header);
180       }
181       int remaining = locals_count % kMaxPushes;
182       // Emit the remaining pushes.
183       // TODO(joransiu): Use MVC for better performance
184       if (remaining > 0) {
185         __ lay(sp, MemOperand(sp, -remaining * kPointerSize));
186         for (int i = 0; i < remaining; i++) {
187           __ StoreP(ip, MemOperand(sp, i * kPointerSize));
188         }
189       }
190     }
191   }
192 
193   bool function_in_register_r3 = true;
194 
195   // Possibly allocate a local context.
196   if (info->scope()->NeedsContext()) {
197     // Argument to NewContext is the function, which is still in r3.
198     Comment cmnt(masm_, "[ Allocate context");
199     bool need_write_barrier = true;
200     int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
201     if (info->scope()->is_script_scope()) {
202       __ push(r3);
203       __ Push(info->scope()->scope_info());
204       __ CallRuntime(Runtime::kNewScriptContext);
205       PrepareForBailoutForId(BailoutId::ScriptContext(),
206                              BailoutState::TOS_REGISTER);
207       // The new target value is not used, so clobbering it is safe.
208       DCHECK_NULL(info->scope()->new_target_var());
209     } else {
210       if (info->scope()->new_target_var() != nullptr) {
211         __ push(r5);  // Preserve new target.
212       }
213       if (slots <=
214           ConstructorBuiltinsAssembler::MaximumFunctionContextSlots()) {
215         Callable callable = CodeFactory::FastNewFunctionContext(
216             isolate(), info->scope()->scope_type());
217         __ mov(FastNewFunctionContextDescriptor::SlotsRegister(),
218                Operand(slots));
219         __ Call(callable.code(), RelocInfo::CODE_TARGET);
220         // Result of the FastNewFunctionContext builtin is always in new space.
221         need_write_barrier = false;
222       } else {
223         __ push(r3);
224         __ Push(Smi::FromInt(info->scope()->scope_type()));
225         __ CallRuntime(Runtime::kNewFunctionContext);
226       }
227       if (info->scope()->new_target_var() != nullptr) {
228         __ pop(r5);  // Restore new target.
229       }
230     }
231     function_in_register_r3 = false;
232     // Context is returned in r2.  It replaces the context passed to us.
233     // It's saved in the stack and kept live in cp.
234     __ LoadRR(cp, r2);
235     __ StoreP(r2, MemOperand(fp, StandardFrameConstants::kContextOffset));
236     // Copy any necessary parameters into the context.
237     int num_parameters = info->scope()->num_parameters();
238     int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
239     for (int i = first_parameter; i < num_parameters; i++) {
240       Variable* var =
241           (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
242       if (var->IsContextSlot()) {
243         int parameter_offset = StandardFrameConstants::kCallerSPOffset +
244                                (num_parameters - 1 - i) * kPointerSize;
245         // Load parameter from stack.
246         __ LoadP(r2, MemOperand(fp, parameter_offset), r0);
247         // Store it in the context.
248         MemOperand target = ContextMemOperand(cp, var->index());
249         __ StoreP(r2, target);
250 
251         // Update the write barrier.
252         if (need_write_barrier) {
253           __ RecordWriteContextSlot(cp, target.offset(), r2, r4,
254                                     kLRHasBeenSaved, kDontSaveFPRegs);
255         } else if (FLAG_debug_code) {
256           Label done;
257           __ JumpIfInNewSpace(cp, r2, &done);
258           __ Abort(kExpectedNewSpaceObject);
259           __ bind(&done);
260         }
261       }
262     }
263   }
264 
265   // The registers holding this function and the new target are both trashed if
266   // we bail out here. But since that can happen only when the new target is not
267   // used and we allocate a context, the value of |function_in_register| is correct.
268   PrepareForBailoutForId(BailoutId::FunctionContext(),
269                          BailoutState::NO_REGISTERS);
270 
271   // We don't support new.target and rest parameters here.
272   DCHECK_NULL(info->scope()->new_target_var());
273   DCHECK_NULL(info->scope()->rest_parameter());
274   DCHECK_NULL(info->scope()->this_function_var());
275 
276   Variable* arguments = info->scope()->arguments();
277   if (arguments != NULL) {
278     // Function uses arguments object.
279     Comment cmnt(masm_, "[ Allocate arguments object");
280     if (!function_in_register_r3) {
281       // Load this again, if it's used by the local context below.
282       __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
283     }
284     if (is_strict(language_mode()) || !has_simple_parameters()) {
285       Callable callable = CodeFactory::FastNewStrictArguments(isolate());
286       __ Call(callable.code(), RelocInfo::CODE_TARGET);
287       RestoreContext();
288     } else if (literal()->has_duplicate_parameters()) {
289       __ Push(r3);
290       __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
291     } else {
292       Callable callable = CodeFactory::FastNewSloppyArguments(isolate());
293       __ Call(callable.code(), RelocInfo::CODE_TARGET);
294       RestoreContext();
295     }
296 
297     SetVar(arguments, r2, r3, r4);
298   }
299 
300   if (FLAG_trace) {
301     __ CallRuntime(Runtime::kTraceEnter);
302   }
303 
304   // Visit the declarations and body.
305   PrepareForBailoutForId(BailoutId::FunctionEntry(),
306                          BailoutState::NO_REGISTERS);
307   {
308     Comment cmnt(masm_, "[ Declarations");
309     VisitDeclarations(scope()->declarations());
310   }
311 
312   // Assert that the declarations do not use ICs. Otherwise the debugger
313   // won't be able to redirect a PC at an IC to the correct IC in newly
314   // recompiled code.
315   DCHECK_EQ(0, ic_total_count_);
316 
317   {
318     Comment cmnt(masm_, "[ Stack check");
319     PrepareForBailoutForId(BailoutId::Declarations(),
320                            BailoutState::NO_REGISTERS);
321     Label ok;
322     __ LoadRoot(ip, Heap::kStackLimitRootIndex);
323     __ CmpLogicalP(sp, ip);
324     __ bge(&ok, Label::kNear);
325     __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
326     __ bind(&ok);
327   }
328 
329   {
330     Comment cmnt(masm_, "[ Body");
331     DCHECK(loop_depth() == 0);
332     VisitStatements(literal()->body());
333     DCHECK(loop_depth() == 0);
334   }
335 
336   // Always emit a 'return undefined' in case control fell off the end of
337   // the body.
338   {
339     Comment cmnt(masm_, "[ return <undefined>;");
340     __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
341   }
342   EmitReturnSequence();
343 }
344 
345 void FullCodeGenerator::ClearAccumulator() {
346   __ LoadSmiLiteral(r2, Smi::kZero);
347 }
348 
349 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
350   __ mov(r4, Operand(profiling_counter_));
351   intptr_t smi_delta = reinterpret_cast<intptr_t>(Smi::FromInt(delta));
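  // With the general-instruction-extension facility and a delta that fits in a
  // signed 8-bit immediate, the counter cell can be updated directly in memory
  // and then reloaded; otherwise fall back to a load/subtract/store sequence.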
352   if (CpuFeatures::IsSupported(GENERAL_INSTR_EXT) && is_int8(-smi_delta)) {
353     __ AddP(FieldMemOperand(r4, Cell::kValueOffset), Operand(-smi_delta));
354     __ LoadP(r5, FieldMemOperand(r4, Cell::kValueOffset));
355   } else {
356     __ LoadP(r5, FieldMemOperand(r4, Cell::kValueOffset));
357     __ SubSmiLiteral(r5, r5, Smi::FromInt(delta), r0);
358     __ StoreP(r5, FieldMemOperand(r4, Cell::kValueOffset));
359   }
360 }
361 
362 void FullCodeGenerator::EmitProfilingCounterReset() {
363   int reset_value = FLAG_interrupt_budget;
364   __ mov(r4, Operand(profiling_counter_));
365   __ LoadSmiLiteral(r5, Smi::FromInt(reset_value));
366   __ StoreP(r5, FieldMemOperand(r4, Cell::kValueOffset));
367 }
368 
369 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
370                                                 Label* back_edge_target) {
371   Comment cmnt(masm_, "[ Back edge bookkeeping");
372   Label ok;
373 
374   DCHECK(back_edge_target->is_bound());
375   int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) +
376                  kCodeSizeMultiplier / 2;
377   int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
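  // The decrement is proportional to the code size of the loop body (rounded
  // via the kCodeSizeMultiplier / 2 term above) and clamped to the range
  // [1, kMaxBackEdgeWeight], so larger loops drain the interrupt budget faster.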
378   EmitProfilingCounterDecrement(weight);
379   {
380     // BackEdgeTable::PatchAt manipulates this sequence.
381     __ bge(&ok, Label::kNear);
382     __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
383 
384     // Record a mapping of this PC offset to the OSR id.  This is used to find
385     // the AST id from the unoptimized code in order to use it as a key into
386     // the deoptimization input data found in the optimized code.
387     RecordBackEdge(stmt->OsrEntryId());
388   }
389   EmitProfilingCounterReset();
390 
391   __ bind(&ok);
392   PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
393   // Record a mapping of the OSR id to this PC.  This is used if the OSR
394   // entry becomes the target of a bailout.  We don't expect it to be, but
395   // we want it to work if it is.
396   PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
397 }
398 
399 void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
400     bool is_tail_call) {
401   // Pretend that the exit is a backwards jump to the entry.
402   int weight = 1;
403   if (info_->ShouldSelfOptimize()) {
404     weight = FLAG_interrupt_budget / FLAG_self_opt_count;
405   } else {
406     int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
407     weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
408   }
409   EmitProfilingCounterDecrement(weight);
410   Label ok;
411   __ CmpP(r5, Operand::Zero());
412   __ bge(&ok);
413   // Don't need to save result register if we are going to do a tail call.
414   if (!is_tail_call) {
415     __ push(r2);
416   }
417   __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
418   if (!is_tail_call) {
419     __ pop(r2);
420   }
421   EmitProfilingCounterReset();
422   __ bind(&ok);
423 }
424 
425 void FullCodeGenerator::EmitReturnSequence() {
426   Comment cmnt(masm_, "[ Return sequence");
427   if (return_label_.is_bound()) {
428     __ b(&return_label_);
429   } else {
430     __ bind(&return_label_);
431     if (FLAG_trace) {
432       // Push the return value on the stack as the parameter.
433       // Runtime::TraceExit returns its parameter in r2
434       __ push(r2);
435       __ CallRuntime(Runtime::kTraceExit);
436     }
437     EmitProfilingCounterHandlingForReturnSequence(false);
438 
439     // Make sure that the constant pool is not emitted inside of the return
440     // sequence.
441     {
442       int32_t arg_count = info_->scope()->num_parameters() + 1;
443       int32_t sp_delta = arg_count * kPointerSize;
444       SetReturnPosition(literal());
445       __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta);
446 
447       __ Ret();
448     }
449   }
450 }
451 
452 void FullCodeGenerator::RestoreContext() {
453   __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
454 }
455 
456 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
457   DCHECK(var->IsStackAllocated() || var->IsContextSlot());
458   codegen()->GetVar(result_register(), var);
459   codegen()->PushOperand(result_register());
460 }
461 
462 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {}
463 
464 void FullCodeGenerator::AccumulatorValueContext::Plug(
465     Heap::RootListIndex index) const {
466   __ LoadRoot(result_register(), index);
467 }
468 
469 void FullCodeGenerator::StackValueContext::Plug(
470     Heap::RootListIndex index) const {
471   __ LoadRoot(result_register(), index);
472   codegen()->PushOperand(result_register());
473 }
474 
475 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
476   codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
477                                           false_label_);
478   if (index == Heap::kUndefinedValueRootIndex ||
479       index == Heap::kNullValueRootIndex ||
480       index == Heap::kFalseValueRootIndex) {
481     if (false_label_ != fall_through_) __ b(false_label_);
482   } else if (index == Heap::kTrueValueRootIndex) {
483     if (true_label_ != fall_through_) __ b(true_label_);
484   } else {
485     __ LoadRoot(result_register(), index);
486     codegen()->DoTest(this);
487   }
488 }
489 
490 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {}
491 
492 void FullCodeGenerator::AccumulatorValueContext::Plug(
493     Handle<Object> lit) const {
494   __ mov(result_register(), Operand(lit));
495 }
496 
497 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
498   // Immediates cannot be pushed directly.
499   __ mov(result_register(), Operand(lit));
500   codegen()->PushOperand(result_register());
501 }
502 
503 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
504   codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
505                                           false_label_);
506   DCHECK(lit->IsNullOrUndefined(isolate()) || !lit->IsUndetectable());
507   if (lit->IsNullOrUndefined(isolate()) || lit->IsFalse(isolate())) {
508     if (false_label_ != fall_through_) __ b(false_label_);
509   } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
510     if (true_label_ != fall_through_) __ b(true_label_);
511   } else if (lit->IsString()) {
512     if (String::cast(*lit)->length() == 0) {
513       if (false_label_ != fall_through_) __ b(false_label_);
514     } else {
515       if (true_label_ != fall_through_) __ b(true_label_);
516     }
517   } else if (lit->IsSmi()) {
518     if (Smi::cast(*lit)->value() == 0) {
519       if (false_label_ != fall_through_) __ b(false_label_);
520     } else {
521       if (true_label_ != fall_through_) __ b(true_label_);
522     }
523   } else {
524     // For simplicity we always test the accumulator register.
525     __ mov(result_register(), Operand(lit));
526     codegen()->DoTest(this);
527   }
528 }
529 
530 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
531                                                        Register reg) const {
532   DCHECK(count > 0);
533   if (count > 1) codegen()->DropOperands(count - 1);
534   __ StoreP(reg, MemOperand(sp, 0));
535 }
536 
537 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
538                                             Label* materialize_false) const {
539   DCHECK(materialize_true == materialize_false);
540   __ bind(materialize_true);
541 }
542 
543 void FullCodeGenerator::AccumulatorValueContext::Plug(
544     Label* materialize_true, Label* materialize_false) const {
545   Label done;
546   __ bind(materialize_true);
547   __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
548   __ b(&done, Label::kNear);
549   __ bind(materialize_false);
550   __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
551   __ bind(&done);
552 }
553 
554 void FullCodeGenerator::StackValueContext::Plug(
555     Label* materialize_true, Label* materialize_false) const {
556   Label done;
557   __ bind(materialize_true);
558   __ LoadRoot(ip, Heap::kTrueValueRootIndex);
559   __ b(&done, Label::kNear);
560   __ bind(materialize_false);
561   __ LoadRoot(ip, Heap::kFalseValueRootIndex);
562   __ bind(&done);
563   codegen()->PushOperand(ip);
564 }
565 
566 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
567                                           Label* materialize_false) const {
568   DCHECK(materialize_true == true_label_);
569   DCHECK(materialize_false == false_label_);
570 }
571 
572 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
573   Heap::RootListIndex value_root_index =
574       flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
575   __ LoadRoot(result_register(), value_root_index);
576 }
577 
578 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
579   Heap::RootListIndex value_root_index =
580       flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
581   __ LoadRoot(ip, value_root_index);
582   codegen()->PushOperand(ip);
583 }
584 
585 void FullCodeGenerator::TestContext::Plug(bool flag) const {
586   codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
587                                           false_label_);
588   if (flag) {
589     if (true_label_ != fall_through_) __ b(true_label_);
590   } else {
591     if (false_label_ != fall_through_) __ b(false_label_);
592   }
593 }
594 
595 void FullCodeGenerator::DoTest(Expression* condition, Label* if_true,
596                                Label* if_false, Label* fall_through) {
597   Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
598   CallIC(ic, condition->test_id());
599   __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
600   Split(eq, if_true, if_false, fall_through);
601 }
602 
603 void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false,
604                               Label* fall_through) {
605   if (if_false == fall_through) {
606     __ b(cond, if_true);
607   } else if (if_true == fall_through) {
608     __ b(NegateCondition(cond), if_false);
609   } else {
610     __ b(cond, if_true);
611     __ b(if_false);
612   }
613 }
614 
615 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
616   DCHECK(var->IsStackAllocated());
617   // Offset is negative because higher indexes are at lower addresses.
618   int offset = -var->index() * kPointerSize;
619   // Adjust by a (parameter or local) base offset.
620   if (var->IsParameter()) {
621     offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
622   } else {
623     offset += JavaScriptFrameConstants::kLocal0Offset;
624   }
625   return MemOperand(fp, offset);
626 }
627 
628 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
629   DCHECK(var->IsContextSlot() || var->IsStackAllocated());
630   if (var->IsContextSlot()) {
631     int context_chain_length = scope()->ContextChainLength(var->scope());
632     __ LoadContext(scratch, context_chain_length);
633     return ContextMemOperand(scratch, var->index());
634   } else {
635     return StackOperand(var);
636   }
637 }
638 
639 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
640   // Use destination as scratch.
641   MemOperand location = VarOperand(var, dest);
642   __ LoadP(dest, location, r0);
643 }
644 
645 void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0,
646                                Register scratch1) {
647   DCHECK(var->IsContextSlot() || var->IsStackAllocated());
648   DCHECK(!scratch0.is(src));
649   DCHECK(!scratch0.is(scratch1));
650   DCHECK(!scratch1.is(src));
651   MemOperand location = VarOperand(var, scratch0);
652   __ StoreP(src, location);
653 
654   // Emit the write barrier code if the location is in the heap.
655   if (var->IsContextSlot()) {
656     __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
657                               kLRHasBeenSaved, kDontSaveFPRegs);
658   }
659 }
660 
661 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
662                                                      bool should_normalize,
663                                                      Label* if_true,
664                                                      Label* if_false) {
665   // Only prepare for bailouts before splits if we're in a test
666   // context. Otherwise, we let the Visit function deal with the
667   // preparation to avoid preparing with the same AST id twice.
668   if (!context()->IsTest()) return;
669 
670   Label skip;
671   if (should_normalize) __ b(&skip);
672   PrepareForBailout(expr, BailoutState::TOS_REGISTER);
673   if (should_normalize) {
674     __ CompareRoot(r2, Heap::kTrueValueRootIndex);
675     Split(eq, if_true, if_false, NULL);
676     __ bind(&skip);
677   }
678 }
679 
680 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
681   // The variable in the declaration always resides in the current function
682   // context.
683   DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
684   if (FLAG_debug_code) {
685     // Check that we're not inside a with or catch context.
686     __ LoadP(r3, FieldMemOperand(cp, HeapObject::kMapOffset));
687     __ CompareRoot(r3, Heap::kWithContextMapRootIndex);
688     __ Check(ne, kDeclarationInWithContext);
689     __ CompareRoot(r3, Heap::kCatchContextMapRootIndex);
690     __ Check(ne, kDeclarationInCatchContext);
691   }
692 }
693 
694 void FullCodeGenerator::VisitVariableDeclaration(
695     VariableDeclaration* declaration) {
696   VariableProxy* proxy = declaration->proxy();
697   Variable* variable = proxy->var();
698   switch (variable->location()) {
699     case VariableLocation::UNALLOCATED: {
700       DCHECK(!variable->binding_needs_init());
701       globals_->Add(variable->name(), zone());
702       FeedbackSlot slot = proxy->VariableFeedbackSlot();
703       DCHECK(!slot.IsInvalid());
704       globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
705       globals_->Add(isolate()->factory()->undefined_value(), zone());
706       globals_->Add(isolate()->factory()->undefined_value(), zone());
707       break;
708     }
709     case VariableLocation::PARAMETER:
710     case VariableLocation::LOCAL:
711       if (variable->binding_needs_init()) {
712         Comment cmnt(masm_, "[ VariableDeclaration");
713         __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
714         __ StoreP(ip, StackOperand(variable));
715       }
716       break;
717 
718     case VariableLocation::CONTEXT:
719       if (variable->binding_needs_init()) {
720         Comment cmnt(masm_, "[ VariableDeclaration");
721         EmitDebugCheckDeclarationContext(variable);
722         __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
723         __ StoreP(ip, ContextMemOperand(cp, variable->index()));
724         // No write barrier since the_hole_value is in old space.
725         PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
726       }
727       break;
728 
729     case VariableLocation::LOOKUP:
730     case VariableLocation::MODULE:
731       UNREACHABLE();
732   }
733 }
734 
735 void FullCodeGenerator::VisitFunctionDeclaration(
736     FunctionDeclaration* declaration) {
737   VariableProxy* proxy = declaration->proxy();
738   Variable* variable = proxy->var();
739   switch (variable->location()) {
740     case VariableLocation::UNALLOCATED: {
741       globals_->Add(variable->name(), zone());
742       FeedbackSlot slot = proxy->VariableFeedbackSlot();
743       DCHECK(!slot.IsInvalid());
744       globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
745 
746       // We need the slot where the literals array lives, too.
747       slot = declaration->fun()->LiteralFeedbackSlot();
748       DCHECK(!slot.IsInvalid());
749       globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
750 
751       Handle<SharedFunctionInfo> function =
752           Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
753       // Check for stack-overflow exception.
754       if (function.is_null()) return SetStackOverflow();
755       globals_->Add(function, zone());
756       break;
757     }
758 
759     case VariableLocation::PARAMETER:
760     case VariableLocation::LOCAL: {
761       Comment cmnt(masm_, "[ FunctionDeclaration");
762       VisitForAccumulatorValue(declaration->fun());
763       __ StoreP(result_register(), StackOperand(variable));
764       break;
765     }
766 
767     case VariableLocation::CONTEXT: {
768       Comment cmnt(masm_, "[ FunctionDeclaration");
769       EmitDebugCheckDeclarationContext(variable);
770       VisitForAccumulatorValue(declaration->fun());
771       __ StoreP(result_register(), ContextMemOperand(cp, variable->index()));
772       int offset = Context::SlotOffset(variable->index());
773       // We know that we have written a function, which is not a smi.
774       __ RecordWriteContextSlot(cp, offset, result_register(), r4,
775                                 kLRHasBeenSaved, kDontSaveFPRegs,
776                                 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
777       PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
778       break;
779     }
780 
781     case VariableLocation::LOOKUP:
782     case VariableLocation::MODULE:
783       UNREACHABLE();
784   }
785 }
786 
787 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
788   // Call the runtime to declare the globals.
789   __ mov(r3, Operand(pairs));
790   __ LoadSmiLiteral(r2, Smi::FromInt(DeclareGlobalsFlags()));
791   __ EmitLoadFeedbackVector(r4);
792   __ Push(r3, r2, r4);
793   __ CallRuntime(Runtime::kDeclareGlobals);
794   // Return value is ignored.
795 }
796 
797 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
798   Comment cmnt(masm_, "[ SwitchStatement");
799   Breakable nested_statement(this, stmt);
800   SetStatementPosition(stmt);
801 
802   // Keep the switch value on the stack until a case matches.
803   VisitForStackValue(stmt->tag());
804   PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
805 
806   ZoneList<CaseClause*>* clauses = stmt->cases();
807   CaseClause* default_clause = NULL;  // Can occur anywhere in the list.
808 
809   Label next_test;  // Recycled for each test.
810   // Compile all the tests with branches to their bodies.
811   for (int i = 0; i < clauses->length(); i++) {
812     CaseClause* clause = clauses->at(i);
813     clause->body_target()->Unuse();
814 
815     // The default is not a test, but remember it as final fall through.
816     if (clause->is_default()) {
817       default_clause = clause;
818       continue;
819     }
820 
821     Comment cmnt(masm_, "[ Case comparison");
822     __ bind(&next_test);
823     next_test.Unuse();
824 
825     // Compile the label expression.
826     VisitForAccumulatorValue(clause->label());
827 
828     // Perform the comparison as if via '==='.
829     __ LoadP(r3, MemOperand(sp, 0));  // Switch value.
830     bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
831     JumpPatchSite patch_site(masm_);
832     if (inline_smi_code) {
833       Label slow_case;
834       __ LoadRR(r4, r2);
835       __ OrP(r4, r3);
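      // Since smis have a zero tag bit, r4 = r2 | r3 is a smi only if both the
      // switch value and the label value are smis, so a single test below
      // covers both operands.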
836       patch_site.EmitJumpIfNotSmi(r4, &slow_case);
837 
838       __ CmpP(r3, r2);
839       __ bne(&next_test);
840       __ Drop(1);  // Switch value is no longer needed.
841       __ b(clause->body_target());
842       __ bind(&slow_case);
843     }
844 
845     // Record position before stub call for type feedback.
846     SetExpressionPosition(clause);
847     Handle<Code> ic =
848         CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
849     CallIC(ic, clause->CompareId());
850     patch_site.EmitPatchInfo();
851 
852     Label skip;
853     __ b(&skip);
854     PrepareForBailout(clause, BailoutState::TOS_REGISTER);
855     __ CompareRoot(r2, Heap::kTrueValueRootIndex);
856     __ bne(&next_test);
857     __ Drop(1);
858     __ b(clause->body_target());
859     __ bind(&skip);
860 
861     __ CmpP(r2, Operand::Zero());
862     __ bne(&next_test);
863     __ Drop(1);  // Switch value is no longer needed.
864     __ b(clause->body_target());
865   }
866 
867   // Discard the test value and jump to the default if present, otherwise to
868   // the end of the statement.
869   __ bind(&next_test);
870   DropOperands(1);  // Switch value is no longer needed.
871   if (default_clause == NULL) {
872     __ b(nested_statement.break_label());
873   } else {
874     __ b(default_clause->body_target());
875   }
876 
877   // Compile all the case bodies.
878   for (int i = 0; i < clauses->length(); i++) {
879     Comment cmnt(masm_, "[ Case body");
880     CaseClause* clause = clauses->at(i);
881     __ bind(clause->body_target());
882     PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
883     VisitStatements(clause->statements());
884   }
885 
886   __ bind(nested_statement.break_label());
887   PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
888 }
889 
890 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
891   Comment cmnt(masm_, "[ ForInStatement");
892   SetStatementPosition(stmt, SKIP_BREAK);
893 
894   FeedbackSlot slot = stmt->ForInFeedbackSlot();
895 
896   // Get the object to enumerate over.
897   SetExpressionAsStatementPosition(stmt->enumerable());
898   VisitForAccumulatorValue(stmt->enumerable());
899   OperandStackDepthIncrement(5);
900 
901   Label loop, exit;
902   Iteration loop_statement(this, stmt);
903   increment_loop_depth();
904 
905   // If the object is null or undefined, skip over the loop, otherwise convert
906   // it to a JS receiver.  See ECMA-262 version 5, section 12.6.4.
907   Label convert, done_convert;
908   __ JumpIfSmi(r2, &convert);
909   __ CompareObjectType(r2, r3, r3, FIRST_JS_RECEIVER_TYPE);
910   __ bge(&done_convert);
911   __ CompareRoot(r2, Heap::kNullValueRootIndex);
912   __ beq(&exit);
913   __ CompareRoot(r2, Heap::kUndefinedValueRootIndex);
914   __ beq(&exit);
915   __ bind(&convert);
916   __ Call(isolate()->builtins()->ToObject(), RelocInfo::CODE_TARGET);
917   RestoreContext();
918   __ bind(&done_convert);
919   PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
920   __ push(r2);
921 
922   // Check cache validity in generated code. If we cannot guarantee cache
923   // validity, call the runtime system to check cache validity or get the
924   // property names in a fixed array. Note: Proxies never have an enum cache,
925 // so they will always take the slow path.
926   Label call_runtime;
927   __ CheckEnumCache(&call_runtime);
928 
929   // The enum cache is valid.  Load the map of the object being
930   // iterated over and use the cache for the iteration.
931   Label use_cache;
932   __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
933   __ b(&use_cache);
934 
935   // Get the set of properties to enumerate.
936   __ bind(&call_runtime);
937   __ push(r2);  // Duplicate the enumerable object on the stack.
938   __ CallRuntime(Runtime::kForInEnumerate);
939   PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
940 
941   // If we got a map from the runtime call, we can do a fast
942   // modification check. Otherwise, we got a fixed array, and we have
943   // to do a slow check.
944   Label fixed_array;
945   __ LoadP(r4, FieldMemOperand(r2, HeapObject::kMapOffset));
946   __ CompareRoot(r4, Heap::kMetaMapRootIndex);
947   __ bne(&fixed_array);
948 
949   // We got a map in register r2. Get the enumeration cache from it.
950   Label no_descriptors;
951   __ bind(&use_cache);
952 
953   __ EnumLength(r3, r2);
954   __ CmpSmiLiteral(r3, Smi::kZero, r0);
955   __ beq(&no_descriptors, Label::kNear);
956 
957   __ LoadInstanceDescriptors(r2, r4);
958   __ LoadP(r4, FieldMemOperand(r4, DescriptorArray::kEnumCacheOffset));
959   __ LoadP(r4,
960            FieldMemOperand(r4, DescriptorArray::kEnumCacheBridgeCacheOffset));
961 
962   // Set up the four remaining stack slots.
963   __ push(r2);  // Map.
964   __ LoadSmiLiteral(r2, Smi::kZero);
965   // Push enumeration cache, enumeration cache length (as smi) and zero.
966   __ Push(r4, r3, r2);
967   __ b(&loop);
968 
969   __ bind(&no_descriptors);
970   __ Drop(1);
971   __ b(&exit);
972 
973   // We got a fixed array in register r2. Iterate through that.
974   __ bind(&fixed_array);
975 
976   __ LoadSmiLiteral(r3, Smi::FromInt(1));  // Smi(1) indicates slow check
977   __ Push(r3, r2);                         // Smi and array
978   __ LoadP(r3, FieldMemOperand(r2, FixedArray::kLengthOffset));
979   __ Push(r3);  // Fixed array length (as smi).
980   PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
981   __ LoadSmiLiteral(r2, Smi::kZero);
982   __ Push(r2);  // Initial index.
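  // At this point the for-in loop state occupies five stack slots:
  //   sp[0] : current index (smi)
  //   sp[1] : length of the enum cache or fixed array (smi)
  //   sp[2] : enum cache array or fixed array of keys
  //   sp[3] : map of the enumerable (fast path) or Smi(1) (slow path)
  //   sp[4] : the enumerable object itself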
983 
984   // Generate code for doing the condition check.
985   __ bind(&loop);
986   SetExpressionAsStatementPosition(stmt->each());
987 
988   // Load the current count to r2, load the length to r3.
989   __ LoadP(r2, MemOperand(sp, 0 * kPointerSize));
990   __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
991   __ CmpLogicalP(r2, r3);  // Compare to the array length.
992   __ bge(loop_statement.break_label());
993 
994   // Get the current entry of the array into register r5.
995   __ LoadP(r4, MemOperand(sp, 2 * kPointerSize));
996   __ AddP(r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
997   __ SmiToPtrArrayOffset(r5, r2);
998   __ LoadP(r5, MemOperand(r5, r4));
999 
1000   // Get the expected map from the stack or a smi in the
1001   // permanent slow case into register r4.
1002   __ LoadP(r4, MemOperand(sp, 3 * kPointerSize));
1003 
1004   // Check if the expected map still matches that of the enumerable.
1005   // If not, we may have to filter the key.
1006   Label update_each;
1007   __ LoadP(r3, MemOperand(sp, 4 * kPointerSize));
1008   __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
1009   __ CmpP(r6, r4);
1010   __ beq(&update_each);
1011 
1012   // We need to filter the key, so record the slow path here.
1013   int const vector_index = SmiFromSlot(slot)->value();
1014   __ EmitLoadFeedbackVector(r2);
1015   __ mov(r4, Operand(FeedbackVector::MegamorphicSentinel(isolate())));
1016   __ StoreP(
1017       r4, FieldMemOperand(r2, FixedArray::OffsetOfElementAt(vector_index)), r0);
1018 
1019   // Convert the entry to a string or (smi) 0 if it isn't a property
1020   // any more. If the property has been removed while iterating, we
1021   // just skip it.
1022   __ Push(r3, r5);  // Enumerable and current entry.
1023   __ CallRuntime(Runtime::kForInFilter);
1024   PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
1025   __ LoadRR(r5, r2);
1026   __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
1027   __ CmpP(r2, r0);
1028   __ beq(loop_statement.continue_label());
1029 
1030   // Update the 'each' property or variable from the possibly filtered
1031   // entry in register r5.
1032   __ bind(&update_each);
1033   __ LoadRR(result_register(), r5);
1034   // Perform the assignment as if via '='.
1035   {
1036     EffectContext context(this);
1037     EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1038     PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
1039   }
1040 
1041   // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1042   PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
1043   // Generate code for the body of the loop.
1044   Visit(stmt->body());
1045 
1046   // Generate code for going to the next element by incrementing
1047   // the index (smi) stored on top of the stack.
1048   __ bind(loop_statement.continue_label());
1049   PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
1050   __ pop(r2);
1051   __ AddSmiLiteral(r2, r2, Smi::FromInt(1), r0);
1052   __ push(r2);
1053 
1054   EmitBackEdgeBookkeeping(stmt, &loop);
1055   __ b(&loop);
1056 
1057   // Remove the pointers stored on the stack.
1058   __ bind(loop_statement.break_label());
1059   DropOperands(5);
1060 
1061   // Exit and decrement the loop depth.
1062   PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
1063   __ bind(&exit);
1064   decrement_loop_depth();
1065 }
1066 
1067 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1068                                           FeedbackSlot slot) {
1069   DCHECK(NeedsHomeObject(initializer));
1070   __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1071   __ LoadP(StoreDescriptor::ValueRegister(),
1072            MemOperand(sp, offset * kPointerSize));
1073   CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1074 }
1075 
1076 void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1077                                                      int offset,
1078                                                      FeedbackSlot slot) {
1079   DCHECK(NeedsHomeObject(initializer));
1080   __ Move(StoreDescriptor::ReceiverRegister(), r2);
1081   __ LoadP(StoreDescriptor::ValueRegister(),
1082            MemOperand(sp, offset * kPointerSize));
1083   CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1084 }
1085 
1086 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1087                                          TypeofMode typeof_mode) {
1088   // Record position before possible IC call.
1089   SetExpressionPosition(proxy);
1090   PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
1091   Variable* var = proxy->var();
1092 
1093   // Two cases: global variables and all other types of variables.
1094   switch (var->location()) {
1095     case VariableLocation::UNALLOCATED: {
1096       Comment cmnt(masm_, "[ Global variable");
1097       EmitGlobalVariableLoad(proxy, typeof_mode);
1098       context()->Plug(r2);
1099       break;
1100     }
1101 
1102     case VariableLocation::PARAMETER:
1103     case VariableLocation::LOCAL:
1104     case VariableLocation::CONTEXT: {
1105       DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1106       Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1107                                                : "[ Stack variable");
1108       if (proxy->hole_check_mode() == HoleCheckMode::kRequired) {
1109         // Throw a reference error when using an uninitialized let/const
1110         // binding in harmony mode.
1111         Label done;
1112         GetVar(r2, var);
1113         __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
1114         __ bne(&done);
1115         __ mov(r2, Operand(var->name()));
1116         __ push(r2);
1117         __ CallRuntime(Runtime::kThrowReferenceError);
1118         __ bind(&done);
1119         context()->Plug(r2);
1120         break;
1121       }
1122       context()->Plug(var);
1123       break;
1124     }
1125 
1126     case VariableLocation::LOOKUP:
1127     case VariableLocation::MODULE:
1128       UNREACHABLE();
1129   }
1130 }
1131 
1132 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1133   Expression* expression = (property == NULL) ? NULL : property->value();
1134   if (expression == NULL) {
1135     __ LoadRoot(r3, Heap::kNullValueRootIndex);
1136     PushOperand(r3);
1137   } else {
1138     VisitForStackValue(expression);
1139     if (NeedsHomeObject(expression)) {
1140       DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1141              property->kind() == ObjectLiteral::Property::SETTER);
1142       int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1143       EmitSetHomeObject(expression, offset, property->GetSlot());
1144     }
1145   }
1146 }
1147 
1148 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1149   Comment cmnt(masm_, "[ ObjectLiteral");
1150 
1151   Handle<BoilerplateDescription> constant_properties =
1152       expr->GetOrBuildConstantProperties(isolate());
1153   __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1154   __ LoadSmiLiteral(r4, SmiFromSlot(expr->literal_slot()));
1155   __ mov(r3, Operand(constant_properties));
1156   int flags = expr->ComputeFlags();
1157   __ LoadSmiLiteral(r2, Smi::FromInt(flags));
1158   if (MustCreateObjectLiteralWithRuntime(expr)) {
1159     __ Push(r5, r4, r3, r2);
1160     __ CallRuntime(Runtime::kCreateObjectLiteral);
1161   } else {
1162     Callable callable = CodeFactory::FastCloneShallowObject(
1163         isolate(), expr->properties_count());
1164     __ Call(callable.code(), RelocInfo::CODE_TARGET);
1165     RestoreContext();
1166   }
1167   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1168 
1169   // If result_saved is true the result is on top of the stack.  If
1170   // result_saved is false the result is in r2.
1171   bool result_saved = false;
1172 
1173   AccessorTable accessor_table(zone());
1174   for (int i = 0; i < expr->properties()->length(); i++) {
1175     ObjectLiteral::Property* property = expr->properties()->at(i);
1176     DCHECK(!property->is_computed_name());
1177     if (property->IsCompileTimeValue()) continue;
1178 
1179     Literal* key = property->key()->AsLiteral();
1180     Expression* value = property->value();
1181     if (!result_saved) {
1182       PushOperand(r2);  // Save result on stack
1183       result_saved = true;
1184     }
1185     switch (property->kind()) {
1186       case ObjectLiteral::Property::SPREAD:
1187       case ObjectLiteral::Property::CONSTANT:
1188         UNREACHABLE();
1189       case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1190         DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1191       // Fall through.
1192       case ObjectLiteral::Property::COMPUTED:
1193         // It is safe to use [[Put]] here because the boilerplate already
1194         // contains computed properties with an uninitialized value.
1195         if (key->IsStringLiteral()) {
1196           DCHECK(key->IsPropertyName());
1197           if (property->emit_store()) {
1198             VisitForAccumulatorValue(value);
1199             DCHECK(StoreDescriptor::ValueRegister().is(r2));
1200             __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1201             CallStoreIC(property->GetSlot(0), key->value(), true);
1202             PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
1203 
1204             if (NeedsHomeObject(value)) {
1205               EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1206             }
1207           } else {
1208             VisitForEffect(value);
1209           }
1210           break;
1211         }
1212         // Duplicate receiver on stack.
1213         __ LoadP(r2, MemOperand(sp));
1214         PushOperand(r2);
1215         VisitForStackValue(key);
1216         VisitForStackValue(value);
1217         if (property->emit_store()) {
1218           if (NeedsHomeObject(value)) {
1219             EmitSetHomeObject(value, 2, property->GetSlot());
1220           }
1221           __ LoadSmiLiteral(r2, Smi::FromInt(SLOPPY));  // PropertyAttributes
1222           PushOperand(r2);
1223           CallRuntimeWithOperands(Runtime::kSetProperty);
1224         } else {
1225           DropOperands(3);
1226         }
1227         break;
1228       case ObjectLiteral::Property::PROTOTYPE:
1229         // Duplicate receiver on stack.
1230         __ LoadP(r2, MemOperand(sp));
1231         PushOperand(r2);
1232         VisitForStackValue(value);
1233         DCHECK(property->emit_store());
1234         CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1235         PrepareForBailoutForId(expr->GetIdForPropertySet(i),
1236                                BailoutState::NO_REGISTERS);
1237         break;
1238       case ObjectLiteral::Property::GETTER:
1239         if (property->emit_store()) {
1240           AccessorTable::Iterator it = accessor_table.lookup(key);
1241           it->second->bailout_id = expr->GetIdForPropertySet(i);
1242           it->second->getter = property;
1243         }
1244         break;
1245       case ObjectLiteral::Property::SETTER:
1246         if (property->emit_store()) {
1247           AccessorTable::Iterator it = accessor_table.lookup(key);
1248           it->second->bailout_id = expr->GetIdForPropertySet(i);
1249           it->second->setter = property;
1250         }
1251         break;
1252     }
1253   }
1254 
1255   // Emit code to define accessors, using only a single call to the runtime for
1256   // each pair of corresponding getters and setters.
1257   for (AccessorTable::Iterator it = accessor_table.begin();
1258        it != accessor_table.end(); ++it) {
1259     __ LoadP(r2, MemOperand(sp));  // Duplicate receiver.
1260     PushOperand(r2);
1261     VisitForStackValue(it->first);
1262     EmitAccessor(it->second->getter);
1263     EmitAccessor(it->second->setter);
1264     __ LoadSmiLiteral(r2, Smi::FromInt(NONE));
1265     PushOperand(r2);
1266     CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
1267     PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
1268   }
1269 
1270   if (result_saved) {
1271     context()->PlugTOS();
1272   } else {
1273     context()->Plug(r2);
1274   }
1275 }
1276 
1277 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1278   Comment cmnt(masm_, "[ ArrayLiteral");
1279 
1280   Handle<ConstantElementsPair> constant_elements =
1281       expr->GetOrBuildConstantElements(isolate());
1282 
1283   __ LoadP(r5, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1284   __ LoadSmiLiteral(r4, SmiFromSlot(expr->literal_slot()));
1285   __ mov(r3, Operand(constant_elements));
1286   if (MustCreateArrayLiteralWithRuntime(expr)) {
1287     __ LoadSmiLiteral(r2, Smi::FromInt(expr->ComputeFlags()));
1288     __ Push(r5, r4, r3, r2);
1289     __ CallRuntime(Runtime::kCreateArrayLiteral);
1290   } else {
1291     Callable callable =
1292         CodeFactory::FastCloneShallowArray(isolate(), TRACK_ALLOCATION_SITE);
1293     __ Call(callable.code(), RelocInfo::CODE_TARGET);
1294     RestoreContext();
1295   }
1296   PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1297 
1298   bool result_saved = false;  // Is the result saved to the stack?
1299   ZoneList<Expression*>* subexprs = expr->values();
1300   int length = subexprs->length();
1301 
1302   // Emit code to evaluate all the non-constant subexpressions and to store
1303   // them into the newly cloned array.
1304   for (int array_index = 0; array_index < length; array_index++) {
1305     Expression* subexpr = subexprs->at(array_index);
1306     DCHECK(!subexpr->IsSpread());
1307     // If the subexpression is a literal or a simple materialized literal it
1308     // is already set in the cloned array.
1309     if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1310 
1311     if (!result_saved) {
1312       PushOperand(r2);
1313       result_saved = true;
1314     }
1315     VisitForAccumulatorValue(subexpr);
1316 
1317     __ LoadSmiLiteral(StoreDescriptor::NameRegister(),
1318                       Smi::FromInt(array_index));
1319     __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1320     CallKeyedStoreIC(expr->LiteralFeedbackSlot());
1321 
1322     PrepareForBailoutForId(expr->GetIdForElement(array_index),
1323                            BailoutState::NO_REGISTERS);
1324   }
1325 
1326   if (result_saved) {
1327     context()->PlugTOS();
1328   } else {
1329     context()->Plug(r2);
1330   }
1331 }
1332 
1333 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1334   DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1335 
1336   Comment cmnt(masm_, "[ Assignment");
1337 
1338   Property* property = expr->target()->AsProperty();
1339   LhsKind assign_type = Property::GetAssignType(property);
1340 
1341   // Evaluate LHS expression.
1342   switch (assign_type) {
1343     case VARIABLE:
1344       // Nothing to do here.
1345       break;
1346     case NAMED_PROPERTY:
1347       if (expr->is_compound()) {
1348         // We need the receiver both on the stack and in the register.
1349         VisitForStackValue(property->obj());
1350         __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1351       } else {
1352         VisitForStackValue(property->obj());
1353       }
1354       break;
1355     case KEYED_PROPERTY:
1356       if (expr->is_compound()) {
1357         VisitForStackValue(property->obj());
1358         VisitForStackValue(property->key());
1359         __ LoadP(LoadDescriptor::ReceiverRegister(),
1360                  MemOperand(sp, 1 * kPointerSize));
1361         __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1362       } else {
1363         VisitForStackValue(property->obj());
1364         VisitForStackValue(property->key());
1365       }
1366       break;
1367     case NAMED_SUPER_PROPERTY:
1368     case KEYED_SUPER_PROPERTY:
1369       UNREACHABLE();
1370       break;
1371   }
1372 
1373   // For compound assignments we need another deoptimization point after the
1374   // variable/property load.
1375   if (expr->is_compound()) {
1376     {
1377       AccumulatorValueContext context(this);
1378       switch (assign_type) {
1379         case VARIABLE:
1380           EmitVariableLoad(expr->target()->AsVariableProxy());
1381           PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
1382           break;
1383         case NAMED_PROPERTY:
1384           EmitNamedPropertyLoad(property);
1385           PrepareForBailoutForId(property->LoadId(),
1386                                  BailoutState::TOS_REGISTER);
1387           break;
1388         case KEYED_PROPERTY:
1389           EmitKeyedPropertyLoad(property);
1390           PrepareForBailoutForId(property->LoadId(),
1391                                  BailoutState::TOS_REGISTER);
1392           break;
1393         case NAMED_SUPER_PROPERTY:
1394         case KEYED_SUPER_PROPERTY:
1395           UNREACHABLE();
1396           break;
1397       }
1398     }
1399 
1400     Token::Value op = expr->binary_op();
1401     PushOperand(r2);  // Left operand goes on the stack.
1402     VisitForAccumulatorValue(expr->value());
1403 
1404     AccumulatorValueContext context(this);
1405     if (ShouldInlineSmiCase(op)) {
1406       EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(),
1407                             expr->value());
1408     } else {
1409       EmitBinaryOp(expr->binary_operation(), op);
1410     }
1411 
1412     // Deoptimization point in case the binary operation may have side effects.
1413     PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
1414   } else {
1415     VisitForAccumulatorValue(expr->value());
1416   }
1417 
1418   SetExpressionPosition(expr);
1419 
1420   // Store the value.
1421   switch (assign_type) {
1422     case VARIABLE: {
1423       VariableProxy* proxy = expr->target()->AsVariableProxy();
1424       EmitVariableAssignment(proxy->var(), expr->op(), expr->AssignmentSlot(),
1425                              proxy->hole_check_mode());
1426       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
1427       context()->Plug(r2);
1428       break;
1429     }
1430     case NAMED_PROPERTY:
1431       EmitNamedPropertyAssignment(expr);
1432       break;
1433     case KEYED_PROPERTY:
1434       EmitKeyedPropertyAssignment(expr);
1435       break;
1436     case NAMED_SUPER_PROPERTY:
1437     case KEYED_SUPER_PROPERTY:
1438       UNREACHABLE();
1439       break;
1440   }
1441 }
1442 
1443 void FullCodeGenerator::VisitYield(Yield* expr) {
1444   // Resumable functions are not supported.
1445   UNREACHABLE();
1446 }
1447 
1448 void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
1449   OperandStackDepthIncrement(2);
1450   __ Push(reg1, reg2);
1451 }
1452 
1453 void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1454                                      Register reg3) {
1455   OperandStackDepthIncrement(3);
1456   __ Push(reg1, reg2, reg3);
1457 }
1458 
1459 void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1460                                      Register reg3, Register reg4) {
1461   OperandStackDepthIncrement(4);
1462   __ Push(reg1, reg2, reg3, reg4);
1463 }
1464 
1465 void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
1466   OperandStackDepthDecrement(2);
1467   __ Pop(reg1, reg2);
1468 }
1469 
1470 void FullCodeGenerator::EmitOperandStackDepthCheck() {
1471   if (FLAG_debug_code) {
1472     int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1473                         operand_stack_depth_ * kPointerSize;
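         // With no operands pushed, fp - sp equals the fixed frame size; every
         // tracked operand adds one pointer-sized slot.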
1474     __ SubP(r2, fp, sp);
1475     __ CmpP(r2, Operand(expected_diff));
1476     __ Assert(eq, kUnexpectedStackDepth);
1477   }
1478 }
1479 
1480 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
1481   Label allocate, done_allocate;
1482 
1483   __ Allocate(JSIteratorResult::kSize, r2, r4, r5, &allocate,
1484               NO_ALLOCATION_FLAGS);
1485   __ b(&done_allocate);
1486 
1487   __ bind(&allocate);
1488   __ Push(Smi::FromInt(JSIteratorResult::kSize));
1489   __ CallRuntime(Runtime::kAllocateInNewSpace);
1490 
1491   __ bind(&done_allocate);
1492   __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r3);
1493   PopOperand(r4);
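       // Fill in the JSIteratorResult fields below: map (r3), empty properties
       // and elements (r6), the popped value (r4) and the done flag (r5).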
1494   __ LoadRoot(r5,
1495               done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1496   __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
1497   __ StoreP(r3, FieldMemOperand(r2, HeapObject::kMapOffset), r0);
1498   __ StoreP(r6, FieldMemOperand(r2, JSObject::kPropertiesOffset), r0);
1499   __ StoreP(r6, FieldMemOperand(r2, JSObject::kElementsOffset), r0);
1500   __ StoreP(r4, FieldMemOperand(r2, JSIteratorResult::kValueOffset), r0);
1501   __ StoreP(r5, FieldMemOperand(r2, JSIteratorResult::kDoneOffset), r0);
1502 }
1503 
1504 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1505                                               Token::Value op,
1506                                               Expression* left_expr,
1507                                               Expression* right_expr) {
1508   Label done, smi_case, stub_call;
1509 
1510   Register scratch1 = r4;
1511   Register scratch2 = r5;
1512 
1513   // Get the arguments.
1514   Register left = r3;
1515   Register right = r2;
1516   PopOperand(left);
1517 
1518   // Perform combined smi check on both operands.
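       // Since kSmiTag == 0, the OR of the two tagged values has the tag bit
       // set iff at least one operand is not a smi, so one test covers both.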
1519   __ LoadRR(scratch1, right);
1520   __ OrP(scratch1, left);
1521   STATIC_ASSERT(kSmiTag == 0);
1522   JumpPatchSite patch_site(masm_);
1523   patch_site.EmitJumpIfSmi(scratch1, &smi_case);
1524 
1525   __ bind(&stub_call);
1526   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1527   CallIC(code, expr->BinaryOperationFeedbackId());
1528   patch_site.EmitPatchInfo();
1529   __ b(&done);
1530 
1531   __ bind(&smi_case);
1532   // Smi case. This code works the same way as the smi-smi case in the type
1533   // recording binary operation stub.
1534   switch (op) {
1535     case Token::SAR:
1536       __ GetLeastBitsFromSmi(scratch1, right, 5);
1537       __ ShiftRightArithP(right, left, scratch1);
1538       __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize));
1539       break;
1540     case Token::SHL: {
1541       __ GetLeastBitsFromSmi(scratch2, right, 5);
1542 #if V8_TARGET_ARCH_S390X
1543       __ ShiftLeftP(right, left, scratch2);
1544 #else
1545       __ SmiUntag(scratch1, left);
1546       __ ShiftLeftP(scratch1, scratch1, scratch2);
1547       // Check that the *signed* result fits in a smi
1548       __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call);
1549       __ SmiTag(right, scratch1);
1550 #endif
1551       break;
1552     }
1553     case Token::SHR: {
1554       __ SmiUntag(scratch1, left);
1555       __ GetLeastBitsFromSmi(scratch2, right, 5);
1556       __ srl(scratch1, scratch2);
1557       // Unsigned shift is not allowed to produce a negative number.
1558       __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call);
1559       __ SmiTag(right, scratch1);
1560       break;
1561     }
1562     case Token::ADD: {
1563       __ AddP(scratch1, left, right);
1564       __ b(overflow, &stub_call);
1565       __ LoadRR(right, scratch1);
1566       break;
1567     }
1568     case Token::SUB: {
1569       __ SubP(scratch1, left, right);
1570       __ b(overflow, &stub_call);
1571       __ LoadRR(right, scratch1);
1572       break;
1573     }
1574     case Token::MUL: {
1575       Label mul_zero;
1576       if (CpuFeatures::IsSupported(MISC_INSTR_EXT2)) {
1577         __ SmiUntag(ip, right);
1578         __ MulPWithCondition(scratch2, ip, left);
1579         __ b(overflow, &stub_call);
1580         __ beq(&mul_zero, Label::kNear);
1581         __ LoadRR(right, scratch2);
1582       } else {
1583 #if V8_TARGET_ARCH_S390X
1584         // Remove tag from both operands.
1585         __ SmiUntag(ip, right);
1586         __ SmiUntag(scratch2, left);
1587         __ mr_z(scratch1, ip);
1588         // Check for overflowing the smi range - no overflow if higher 33 bits
1589         // of the result are identical.
1590         __ lr(ip, scratch2);  // 32 bit load
1591         __ sra(ip, Operand(31));
1592         __ cr_z(ip, scratch1);  // 32 bit compare
1593         __ bne(&stub_call);
1594 #else
1595         __ SmiUntag(ip, right);
1596         __ LoadRR(scratch2, left);  // load into low order of reg pair
1597         __ mr_z(scratch1, ip);      // R4:R5 = R5 * ip
1598         // Check for overflowing the smi range - no overflow if higher 33 bits
1599         // of the result are identical.
1600         __ TestIfInt32(scratch1, scratch2, ip);
1601         __ bne(&stub_call);
1602 #endif
1603         // Go slow on zero result to handle -0.
1604         __ chi(scratch2, Operand::Zero());
1605         __ beq(&mul_zero, Label::kNear);
1606 #if V8_TARGET_ARCH_S390X
1607         __ SmiTag(right, scratch2);
1608 #else
1609         __ LoadRR(right, scratch2);
1610 #endif
1611       }
1612       __ b(&done);
1613       // We need -0 if a negative number was multiplied by 0 to produce the
1614       // zero result; we know one of the operands was zero.
1615       __ bind(&mul_zero);
1616       __ AddP(scratch2, right, left);
1617       __ CmpP(scratch2, Operand::Zero());
1618       __ blt(&stub_call);
1619       __ LoadSmiLiteral(right, Smi::kZero);
1620       break;
1621     }
1622     case Token::BIT_OR:
1623       __ OrP(right, left);
1624       break;
1625     case Token::BIT_AND:
1626       __ AndP(right, left);
1627       break;
1628     case Token::BIT_XOR:
1629       __ XorP(right, left);
1630       break;
1631     default:
1632       UNREACHABLE();
1633   }
1634 
1635   __ bind(&done);
1636   context()->Plug(r2);
1637 }
1638 
1639 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
1640   PopOperand(r3);
1641   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1642   JumpPatchSite patch_site(masm_);  // unbound, signals no inlined smi code.
1643   CallIC(code, expr->BinaryOperationFeedbackId());
1644   patch_site.EmitPatchInfo();
1645   context()->Plug(r2);
1646 }
1647 
1648 void FullCodeGenerator::EmitAssignment(Expression* expr, FeedbackSlot slot) {
1649   DCHECK(expr->IsValidReferenceExpressionOrThis());
1650 
1651   Property* prop = expr->AsProperty();
1652   LhsKind assign_type = Property::GetAssignType(prop);
1653 
1654   switch (assign_type) {
1655     case VARIABLE: {
1656       VariableProxy* proxy = expr->AsVariableProxy();
1657       EffectContext context(this);
1658       EmitVariableAssignment(proxy->var(), Token::ASSIGN, slot,
1659                              proxy->hole_check_mode());
1660       break;
1661     }
1662     case NAMED_PROPERTY: {
1663       PushOperand(r2);  // Preserve value.
1664       VisitForAccumulatorValue(prop->obj());
1665       __ Move(StoreDescriptor::ReceiverRegister(), r2);
1666       PopOperand(StoreDescriptor::ValueRegister());  // Restore value.
1667       CallStoreIC(slot, prop->key()->AsLiteral()->value());
1668       break;
1669     }
1670     case KEYED_PROPERTY: {
1671       PushOperand(r2);  // Preserve value.
1672       VisitForStackValue(prop->obj());
1673       VisitForAccumulatorValue(prop->key());
1674       __ Move(StoreDescriptor::NameRegister(), r2);
1675       PopOperands(StoreDescriptor::ValueRegister(),
1676                   StoreDescriptor::ReceiverRegister());
1677       CallKeyedStoreIC(slot);
1678       break;
1679     }
1680     case NAMED_SUPER_PROPERTY:
1681     case KEYED_SUPER_PROPERTY:
1682       UNREACHABLE();
1683       break;
1684   }
1685   context()->Plug(r2);
1686 }
1687 
1688 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
1689     Variable* var, MemOperand location) {
1690   __ StoreP(result_register(), location);
1691   if (var->IsContextSlot()) {
1692     // RecordWrite may destroy all its register arguments.
1693     __ LoadRR(r5, result_register());
1694     int offset = Context::SlotOffset(var->index());
1695     __ RecordWriteContextSlot(r3, offset, r5, r4, kLRHasBeenSaved,
1696                               kDontSaveFPRegs);
1697   }
1698 }
1699 
1700 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
1701                                                FeedbackSlot slot,
1702                                                HoleCheckMode hole_check_mode) {
1703   if (var->IsUnallocated()) {
1704     // Global var, const, or let.
1705     __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
1706     CallStoreIC(slot, var->name());
1707 
1708   } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
1709     // Non-initializing assignment to let variable needs a write barrier.
1710     DCHECK(!var->IsLookupSlot());
1711     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
1712     MemOperand location = VarOperand(var, r3);
1713     // Perform an initialization check for lexically declared variables.
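         // A lexically declared variable holds the_hole until its declaration
         // has executed; assigning to it before then throws a ReferenceError.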
1714     if (hole_check_mode == HoleCheckMode::kRequired) {
1715       Label assign;
1716       __ LoadP(r5, location);
1717       __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
1718       __ bne(&assign);
1719       __ mov(r5, Operand(var->name()));
1720       __ push(r5);
1721       __ CallRuntime(Runtime::kThrowReferenceError);
1722       __ bind(&assign);
1723     }
1724     if (var->mode() != CONST) {
1725       EmitStoreToStackLocalOrContextSlot(var, location);
1726     } else if (var->throw_on_const_assignment(language_mode())) {
1727       __ CallRuntime(Runtime::kThrowConstAssignError);
1728     }
1729   } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
1730     // Initializing assignment to const {this} needs a write barrier.
1731     DCHECK(var->IsStackAllocated() || var->IsContextSlot());
1732     Label uninitialized_this;
1733     MemOperand location = VarOperand(var, r3);
1734     __ LoadP(r5, location);
1735     __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
1736     __ beq(&uninitialized_this);
1737     __ mov(r3, Operand(var->name()));
1738     __ push(r3);
1739     __ CallRuntime(Runtime::kThrowReferenceError);
1740     __ bind(&uninitialized_this);
1741     EmitStoreToStackLocalOrContextSlot(var, location);
1742   } else {
1743     DCHECK(var->mode() != CONST || op == Token::INIT);
1744     DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
1745     DCHECK(!var->IsLookupSlot());
1746     // Assignment to var or initializing assignment to let/const in harmony
1747     // mode.
1748     MemOperand location = VarOperand(var, r3);
1749     if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
1750       // Check for an uninitialized let binding.
1751       __ LoadP(r4, location);
1752       __ CompareRoot(r4, Heap::kTheHoleValueRootIndex);
1753       __ Check(eq, kLetBindingReInitialization);
1754     }
1755     EmitStoreToStackLocalOrContextSlot(var, location);
1756   }
1757 }
1758 
1759 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
1760   // Assignment to a property, using a named store IC.
1761   Property* prop = expr->target()->AsProperty();
1762   DCHECK(prop != NULL);
1763   DCHECK(prop->key()->IsLiteral());
1764 
1765   PopOperand(StoreDescriptor::ReceiverRegister());
1766   CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
1767 
1768   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
1769   context()->Plug(r2);
1770 }
1771 
1772 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
1773   // Assignment to a property, using a keyed store IC.
1774   PopOperands(StoreDescriptor::ReceiverRegister(),
1775               StoreDescriptor::NameRegister());
1776   DCHECK(StoreDescriptor::ValueRegister().is(r2));
1777 
1778   CallKeyedStoreIC(expr->AssignmentSlot());
1779 
1780   PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
1781   context()->Plug(r2);
1782 }
1783 
1784 // Code common for calls using the IC.
1785 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
1786   Expression* callee = expr->expression();
1787 
1788   // Get the target function.
1789   ConvertReceiverMode convert_mode;
1790   if (callee->IsVariableProxy()) {
1791     {
1792       StackValueContext context(this);
1793       EmitVariableLoad(callee->AsVariableProxy());
1794       PrepareForBailout(callee, BailoutState::NO_REGISTERS);
1795     }
1796     // Push undefined as receiver. This is patched in the method prologue if it
1797     // is a sloppy mode method.
1798     __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
1799     PushOperand(r1);
1800     convert_mode = ConvertReceiverMode::kNullOrUndefined;
1801   } else {
1802     // Load the function from the receiver.
1803     DCHECK(callee->IsProperty());
1804     DCHECK(!callee->AsProperty()->IsSuperAccess());
1805     __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1806     EmitNamedPropertyLoad(callee->AsProperty());
1807     PrepareForBailoutForId(callee->AsProperty()->LoadId(),
1808                            BailoutState::TOS_REGISTER);
1809     // Push the target function under the receiver.
1810     __ LoadP(r1, MemOperand(sp, 0));
1811     PushOperand(r1);
1812     __ StoreP(r2, MemOperand(sp, kPointerSize));
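         // The stack now holds a fresh copy of the receiver on top, with the
         // loaded function stored underneath it in the original receiver slot.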
1813     convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
1814   }
1815 
1816   EmitCall(expr, convert_mode);
1817 }
1818 
1819 // Code common for calls using the IC.
1820 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) {
1821   // Load the key.
1822   VisitForAccumulatorValue(key);
1823 
1824   Expression* callee = expr->expression();
1825 
1826   // Load the function from the receiver.
1827   DCHECK(callee->IsProperty());
1828   __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1829   __ Move(LoadDescriptor::NameRegister(), r2);
1830   EmitKeyedPropertyLoad(callee->AsProperty());
1831   PrepareForBailoutForId(callee->AsProperty()->LoadId(),
1832                          BailoutState::TOS_REGISTER);
1833 
1834   // Push the target function under the receiver.
1835   __ LoadP(ip, MemOperand(sp, 0));
1836   PushOperand(ip);
1837   __ StoreP(r2, MemOperand(sp, kPointerSize));
1838 
1839   EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
1840 }
1841 
1842 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
1843   // Load the arguments.
1844   ZoneList<Expression*>* args = expr->arguments();
1845   int arg_count = args->length();
1846   for (int i = 0; i < arg_count; i++) {
1847     VisitForStackValue(args->at(i));
1848   }
1849 
1850   PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
1851   SetCallPosition(expr, expr->tail_call_mode());
1852   if (expr->tail_call_mode() == TailCallMode::kAllow) {
1853     if (FLAG_trace) {
1854       __ CallRuntime(Runtime::kTraceTailCall);
1855     }
1856     // Update profiling counters before the tail call since we will
1857     // not return to this function.
1858     EmitProfilingCounterHandlingForReturnSequence(true);
1859   }
1860   Handle<Code> code =
1861       CodeFactory::CallICTrampoline(isolate(), mode, expr->tail_call_mode())
1862           .code();
1863   __ Load(r5, Operand(IntFromSlot(expr->CallFeedbackICSlot())));
1864   __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
1865   __ mov(r2, Operand(arg_count));
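       // The call IC is invoked with r2 = argument count, r3 = call target and
       // r5 = feedback vector slot index, as loaded above.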
1866   CallIC(code);
1867   OperandStackDepthDecrement(arg_count + 1);
1868 
1869   RecordJSReturnSite(expr);
1870   RestoreContext();
1871   context()->DropAndPlug(1, r2);
1872 }
1873 
1874 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
1875   Comment cmnt(masm_, "[ CallNew");
1876   // According to ECMA-262, section 11.2.2, page 44, the function
1877   // expression in new calls must be evaluated before the
1878   // arguments.
1879 
1880   // Push constructor on the stack.  If it's not a function it's used as
1881   // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
1882   // ignored.
1883   DCHECK(!expr->expression()->IsSuperPropertyReference());
1884   VisitForStackValue(expr->expression());
1885 
1886   // Push the arguments ("left-to-right") on the stack.
1887   ZoneList<Expression*>* args = expr->arguments();
1888   int arg_count = args->length();
1889   for (int i = 0; i < arg_count; i++) {
1890     VisitForStackValue(args->at(i));
1891   }
1892 
1893   // Call the construct call builtin that handles allocation and
1894   // constructor invocation.
1895   SetConstructCallPosition(expr);
1896 
1897   // Load function and argument count into r3 and r2.
1898   __ mov(r2, Operand(arg_count));
1899   __ LoadP(r3, MemOperand(sp, arg_count * kPointerSize), r0);
1900 
1901   // Record call targets in unoptimized code.
1902   __ EmitLoadFeedbackVector(r4);
1903   __ LoadSmiLiteral(r5, SmiFromSlot(expr->CallNewFeedbackSlot()));
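       // Registers set up for the construct stub call: r2 = argument count,
       // r3 = constructor, r4 = feedback vector, r5 = slot index (as a smi).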
1904 
1905   CallConstructStub stub(isolate());
1906   CallIC(stub.GetCode());
1907   OperandStackDepthDecrement(arg_count + 1);
1908   PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
1909   RestoreContext();
1910   context()->Plug(r2);
1911 }
1912 
1913 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
1914   ZoneList<Expression*>* args = expr->arguments();
1915   DCHECK(args->length() == 1);
1916 
1917   VisitForAccumulatorValue(args->at(0));
1918 
1919   Label materialize_true, materialize_false, skip_lookup;
1920   Label* if_true = NULL;
1921   Label* if_false = NULL;
1922   Label* fall_through = NULL;
1923   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
1924                          &if_false, &fall_through);
1925 
1926   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
1927   __ TestIfSmi(r2);
1928   Split(eq, if_true, if_false, fall_through);
1929 
1930   context()->Plug(if_true, if_false);
1931 }
1932 
1933 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
1934   ZoneList<Expression*>* args = expr->arguments();
1935   DCHECK(args->length() == 1);
1936 
1937   VisitForAccumulatorValue(args->at(0));
1938 
1939   Label materialize_true, materialize_false;
1940   Label* if_true = NULL;
1941   Label* if_false = NULL;
1942   Label* fall_through = NULL;
1943   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
1944                          &if_false, &fall_through);
1945 
1946   __ JumpIfSmi(r2, if_false);
1947   __ CompareObjectType(r2, r3, r3, FIRST_JS_RECEIVER_TYPE);
1948   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
1949   Split(ge, if_true, if_false, fall_through);
1950 
1951   context()->Plug(if_true, if_false);
1952 }
1953 
1954 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
1955   ZoneList<Expression*>* args = expr->arguments();
1956   DCHECK(args->length() == 1);
1957 
1958   VisitForAccumulatorValue(args->at(0));
1959 
1960   Label materialize_true, materialize_false;
1961   Label* if_true = NULL;
1962   Label* if_false = NULL;
1963   Label* fall_through = NULL;
1964   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
1965                          &if_false, &fall_through);
1966 
1967   __ JumpIfSmi(r2, if_false);
1968   __ CompareObjectType(r2, r3, r3, JS_ARRAY_TYPE);
1969   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
1970   Split(eq, if_true, if_false, fall_through);
1971 
1972   context()->Plug(if_true, if_false);
1973 }
1974 
1975 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
1976   ZoneList<Expression*>* args = expr->arguments();
1977   DCHECK(args->length() == 1);
1978 
1979   VisitForAccumulatorValue(args->at(0));
1980 
1981   Label materialize_true, materialize_false;
1982   Label* if_true = NULL;
1983   Label* if_false = NULL;
1984   Label* fall_through = NULL;
1985   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
1986                          &if_false, &fall_through);
1987 
1988   __ JumpIfSmi(r2, if_false);
1989   __ CompareObjectType(r2, r3, r3, JS_TYPED_ARRAY_TYPE);
1990   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
1991   Split(eq, if_true, if_false, fall_through);
1992 
1993   context()->Plug(if_true, if_false);
1994 }
1995 
1996 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
1997   ZoneList<Expression*>* args = expr->arguments();
1998   DCHECK(args->length() == 1);
1999 
2000   VisitForAccumulatorValue(args->at(0));
2001 
2002   Label materialize_true, materialize_false;
2003   Label* if_true = NULL;
2004   Label* if_false = NULL;
2005   Label* fall_through = NULL;
2006   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2007                          &if_false, &fall_through);
2008 
2009   __ JumpIfSmi(r2, if_false);
2010   __ CompareObjectType(r2, r3, r3, JS_PROXY_TYPE);
2011   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2012   Split(eq, if_true, if_false, fall_through);
2013 
2014   context()->Plug(if_true, if_false);
2015 }
2016 
2017 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2018   ZoneList<Expression*>* args = expr->arguments();
2019   DCHECK(args->length() == 1);
2020   Label done, null, function, non_function_constructor;
2021 
2022   VisitForAccumulatorValue(args->at(0));
2023 
2024   // If the object is not a JSReceiver, we return null.
2025   __ JumpIfSmi(r2, &null);
2026   STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2027   __ CompareObjectType(r2, r2, r3, FIRST_JS_RECEIVER_TYPE);
2028   // Map is now in r2.
2029   __ blt(&null);
2030 
2031   // Return 'Function' for JSFunction and JSBoundFunction objects.
2032   __ CmpLogicalP(r3, Operand(FIRST_FUNCTION_TYPE));
2033   STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
2034   __ bge(&function);
2035 
2036   // Check if the constructor in the map is a JS function.
2037   Register instance_type = r4;
2038   __ GetMapConstructor(r2, r2, r3, instance_type);
2039   __ CmpP(instance_type, Operand(JS_FUNCTION_TYPE));
2040   __ bne(&non_function_constructor, Label::kNear);
2041 
2042   // r2 now contains the constructor function. Grab the
2043   // instance class name from there.
2044   __ LoadP(r2, FieldMemOperand(r2, JSFunction::kSharedFunctionInfoOffset));
2045   __ LoadP(r2,
2046            FieldMemOperand(r2, SharedFunctionInfo::kInstanceClassNameOffset));
2047   __ b(&done, Label::kNear);
2048 
2049   // Functions have class 'Function'.
2050   __ bind(&function);
2051   __ LoadRoot(r2, Heap::kFunction_stringRootIndex);
2052   __ b(&done, Label::kNear);
2053 
2054   // Objects with a non-function constructor have class 'Object'.
2055   __ bind(&non_function_constructor);
2056   __ LoadRoot(r2, Heap::kObject_stringRootIndex);
2057   __ b(&done, Label::kNear);
2058 
2059   // Non-JS objects have class null.
2060   __ bind(&null);
2061   __ LoadRoot(r2, Heap::kNullValueRootIndex);
2062 
2063   // All done.
2064   __ bind(&done);
2065 
2066   context()->Plug(r2);
2067 }
2068 
2069 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
2070   ZoneList<Expression*>* args = expr->arguments();
2071   DCHECK(args->length() == 2);
2072   VisitForStackValue(args->at(0));
2073   VisitForAccumulatorValue(args->at(1));
2074 
2075   Register object = r3;
2076   Register index = r2;
2077   Register result = r5;
2078 
2079   PopOperand(object);
2080 
2081   Label need_conversion;
2082   Label index_out_of_range;
2083   Label done;
2084   StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
2085                                       &need_conversion, &index_out_of_range);
2086   generator.GenerateFast(masm_);
2087   __ b(&done);
2088 
2089   __ bind(&index_out_of_range);
2090   // When the index is out of range, the spec requires us to return
2091   // NaN.
2092   __ LoadRoot(result, Heap::kNanValueRootIndex);
2093   __ b(&done);
2094 
2095   __ bind(&need_conversion);
2096   // Load the undefined value into the result register, which will
2097   // trigger conversion.
2098   __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2099   __ b(&done);
2100 
2101   NopRuntimeCallHelper call_helper;
2102   generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
2103 
2104   __ bind(&done);
2105   context()->Plug(result);
2106 }
2107 
2108 void FullCodeGenerator::EmitCall(CallRuntime* expr) {
2109   ZoneList<Expression*>* args = expr->arguments();
2110   DCHECK_LE(2, args->length());
2111   // Push target, receiver and arguments onto the stack.
2112   for (Expression* const arg : *args) {
2113     VisitForStackValue(arg);
2114   }
2115   PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
2116   // Move target to r3.
2117   int const argc = args->length() - 2;
2118   __ LoadP(r3, MemOperand(sp, (argc + 1) * kPointerSize));
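       // The target was pushed first, so it sits below the receiver and the
       // argc arguments on the operand stack.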
2119   // Call the target.
2120   __ mov(r2, Operand(argc));
2121   __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2122   OperandStackDepthDecrement(argc + 1);
2123   RestoreContext();
2124   // Discard the function left on TOS.
2125   context()->DropAndPlug(1, r2);
2126 }
2127 
2128 void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
2129   ZoneList<Expression*>* args = expr->arguments();
2130   DCHECK_EQ(1, args->length());
2131   VisitForAccumulatorValue(args->at(0));
2132   __ AssertFunction(r2);
2133   __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
2134   __ LoadP(r2, FieldMemOperand(r2, Map::kPrototypeOffset));
2135   context()->Plug(r2);
2136 }
2137 
2138 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
2139   DCHECK(expr->arguments()->length() == 0);
2140   ExternalReference debug_is_active =
2141       ExternalReference::debug_is_active_address(isolate());
2142   __ mov(ip, Operand(debug_is_active));
2143   __ LoadlB(r2, MemOperand(ip));
2144   __ SmiTag(r2);
2145   context()->Plug(r2);
2146 }
2147 
2148 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
2149   ZoneList<Expression*>* args = expr->arguments();
2150   DCHECK_EQ(2, args->length());
2151   VisitForStackValue(args->at(0));
2152   VisitForStackValue(args->at(1));
2153 
2154   Label runtime, done;
2155 
2156   __ Allocate(JSIteratorResult::kSize, r2, r4, r5, &runtime,
2157               NO_ALLOCATION_FLAGS);
2158   __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r3);
2159   __ Pop(r4, r5);
2160   __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
2161   __ StoreP(r3, FieldMemOperand(r2, HeapObject::kMapOffset), r0);
2162   __ StoreP(r6, FieldMemOperand(r2, JSObject::kPropertiesOffset), r0);
2163   __ StoreP(r6, FieldMemOperand(r2, JSObject::kElementsOffset), r0);
2164   __ StoreP(r4, FieldMemOperand(r2, JSIteratorResult::kValueOffset), r0);
2165   __ StoreP(r5, FieldMemOperand(r2, JSIteratorResult::kDoneOffset), r0);
2166   STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
2167   __ b(&done);
2168 
2169   __ bind(&runtime);
2170   CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
2171 
2172   __ bind(&done);
2173   context()->Plug(r2);
2174 }
2175 
2176 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
2177   // Push function.
2178   __ LoadNativeContextSlot(expr->context_index(), r2);
2179   PushOperand(r2);
2180 
2181   // Push undefined as the receiver.
2182   __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2183   PushOperand(r2);
2184 }
2185 
2186 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
2187   ZoneList<Expression*>* args = expr->arguments();
2188   int arg_count = args->length();
2189 
2190   SetCallPosition(expr);
2191   __ LoadP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2192   __ mov(r2, Operand(arg_count));
2193   __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
2194           RelocInfo::CODE_TARGET);
2195   OperandStackDepthDecrement(arg_count + 1);
2196   RestoreContext();
2197 }
2198 
2199 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
2200   switch (expr->op()) {
2201     case Token::DELETE: {
2202       Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
2203       Property* property = expr->expression()->AsProperty();
2204       VariableProxy* proxy = expr->expression()->AsVariableProxy();
2205 
2206       if (property != NULL) {
2207         VisitForStackValue(property->obj());
2208         VisitForStackValue(property->key());
2209         CallRuntimeWithOperands(is_strict(language_mode())
2210                                     ? Runtime::kDeleteProperty_Strict
2211                                     : Runtime::kDeleteProperty_Sloppy);
2212         context()->Plug(r2);
2213       } else if (proxy != NULL) {
2214         Variable* var = proxy->var();
2215         // Delete of an unqualified identifier is disallowed in strict mode but
2216         // "delete this" is allowed.
2217         bool is_this = var->is_this();
2218         DCHECK(is_sloppy(language_mode()) || is_this);
2219         if (var->IsUnallocated()) {
2220           __ LoadGlobalObject(r4);
2221           __ mov(r3, Operand(var->name()));
2222           __ Push(r4, r3);
2223           __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
2224           context()->Plug(r2);
2225         } else {
2226           DCHECK(!var->IsLookupSlot());
2227           DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2228           // Result of deleting non-global, non-dynamic variables is false.
2229           // The subexpression does not have side effects.
2230           context()->Plug(is_this);
2231         }
2232       } else {
2233         // Result of deleting non-property, non-variable reference is true.
2234         // The subexpression may have side effects.
2235         VisitForEffect(expr->expression());
2236         context()->Plug(true);
2237       }
2238       break;
2239     }
2240 
2241     case Token::VOID: {
2242       Comment cmnt(masm_, "[ UnaryOperation (VOID)");
2243       VisitForEffect(expr->expression());
2244       context()->Plug(Heap::kUndefinedValueRootIndex);
2245       break;
2246     }
2247 
2248     case Token::NOT: {
2249       Comment cmnt(masm_, "[ UnaryOperation (NOT)");
2250       if (context()->IsEffect()) {
2251         // Unary NOT has no side effects so it's only necessary to visit the
2252         // subexpression.  Match the optimizing compiler by not branching.
2253         VisitForEffect(expr->expression());
2254       } else if (context()->IsTest()) {
2255         const TestContext* test = TestContext::cast(context());
2256         // The labels are swapped for the recursive call.
2257         VisitForControl(expr->expression(), test->false_label(),
2258                         test->true_label(), test->fall_through());
2259         context()->Plug(test->true_label(), test->false_label());
2260       } else {
2261         // We handle value contexts explicitly rather than simply visiting
2262         // for control and plugging the control flow into the context,
2263         // because we need to prepare a pair of extra administrative AST ids
2264         // for the optimizing compiler.
2265         DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
2266         Label materialize_true, materialize_false, done;
2267         VisitForControl(expr->expression(), &materialize_false,
2268                         &materialize_true, &materialize_true);
2269         if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
2270         __ bind(&materialize_true);
2271         PrepareForBailoutForId(expr->MaterializeTrueId(),
2272                                BailoutState::NO_REGISTERS);
2273         __ LoadRoot(r2, Heap::kTrueValueRootIndex);
2274         if (context()->IsStackValue()) __ push(r2);
2275         __ b(&done);
2276         __ bind(&materialize_false);
2277         PrepareForBailoutForId(expr->MaterializeFalseId(),
2278                                BailoutState::NO_REGISTERS);
2279         __ LoadRoot(r2, Heap::kFalseValueRootIndex);
2280         if (context()->IsStackValue()) __ push(r2);
2281         __ bind(&done);
2282       }
2283       break;
2284     }
2285 
2286     case Token::TYPEOF: {
2287       Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
2288       {
2289         AccumulatorValueContext context(this);
2290         VisitForTypeofValue(expr->expression());
2291       }
2292       __ LoadRR(r5, r2);
2293       __ Call(isolate()->builtins()->Typeof(), RelocInfo::CODE_TARGET);
2294       context()->Plug(r2);
2295       break;
2296     }
2297 
2298     default:
2299       UNREACHABLE();
2300   }
2301 }
2302 
2303 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
2304   DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
2305 
2306   Comment cmnt(masm_, "[ CountOperation");
2307 
2308   Property* prop = expr->expression()->AsProperty();
2309   LhsKind assign_type = Property::GetAssignType(prop);
2310 
2311   // Evaluate expression and get value.
2312   if (assign_type == VARIABLE) {
2313     DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
2314     AccumulatorValueContext context(this);
2315     EmitVariableLoad(expr->expression()->AsVariableProxy());
2316   } else {
2317     // Reserve space for result of postfix operation.
2318     if (expr->is_postfix() && !context()->IsEffect()) {
2319       __ LoadSmiLiteral(ip, Smi::kZero);
2320       PushOperand(ip);
2321     }
2322     switch (assign_type) {
2323       case NAMED_PROPERTY: {
2324         // Put the object both on the stack and in the register.
2325         VisitForStackValue(prop->obj());
2326         __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2327         EmitNamedPropertyLoad(prop);
2328         break;
2329       }
2330 
2331       case KEYED_PROPERTY: {
2332         VisitForStackValue(prop->obj());
2333         VisitForStackValue(prop->key());
2334         __ LoadP(LoadDescriptor::ReceiverRegister(),
2335                  MemOperand(sp, 1 * kPointerSize));
2336         __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
2337         EmitKeyedPropertyLoad(prop);
2338         break;
2339       }
2340 
2341       case NAMED_SUPER_PROPERTY:
2342       case KEYED_SUPER_PROPERTY:
2343       case VARIABLE:
2344         UNREACHABLE();
2345     }
2346   }
2347 
2348   // We need a second deoptimization point after loading the value
2349   // in case evaluating the property load may have a side effect.
2350   if (assign_type == VARIABLE) {
2351     PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
2352   } else {
2353     PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2354   }
2355 
2356   // Inline smi case if we are in a loop.
2357   Label stub_call, done;
2358   JumpPatchSite patch_site(masm_);
2359 
2360   int count_value = expr->op() == Token::INC ? 1 : -1;
2361   if (ShouldInlineSmiCase(expr->op())) {
2362     Label slow;
2363     patch_site.EmitJumpIfNotSmi(r2, &slow);
2364 
2365     // Save result for postfix expressions.
2366     if (expr->is_postfix()) {
2367       if (!context()->IsEffect()) {
2368         // Save the result on the stack. If we have a named or keyed property
2369         // we store the result under the receiver that is currently on top
2370         // of the stack.
2371         switch (assign_type) {
2372           case VARIABLE:
2373             __ push(r2);
2374             break;
2375           case NAMED_PROPERTY:
2376             __ StoreP(r2, MemOperand(sp, kPointerSize));
2377             break;
2378           case KEYED_PROPERTY:
2379             __ StoreP(r2, MemOperand(sp, 2 * kPointerSize));
2380             break;
2381           case NAMED_SUPER_PROPERTY:
2382           case KEYED_SUPER_PROPERTY:
2383             UNREACHABLE();
2384             break;
2385         }
2386       }
2387     }
2388 
2389     Register scratch1 = r3;
2390     Register scratch2 = r4;
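         // Add the count value as a smi; the conditional load commits the sum
         // to r2 only when the addition did not overflow, otherwise the
         // generic stub below is used.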
2391     __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value));
2392     __ AddP(scratch2, r2, scratch1);
2393     __ LoadOnConditionP(nooverflow, r2, scratch2);
2394     __ b(nooverflow, &done);
2395     // Call stub. Undo operation first.
2396     __ b(&stub_call);
2397     __ bind(&slow);
2398   }
2399 
2400   // Convert old value into a number.
2401   __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
2402   RestoreContext();
2403   PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);
2404 
2405   // Save result for postfix expressions.
2406   if (expr->is_postfix()) {
2407     if (!context()->IsEffect()) {
2408       // Save the result on the stack. If we have a named or keyed property
2409       // we store the result under the receiver that is currently on top
2410       // of the stack.
2411       switch (assign_type) {
2412         case VARIABLE:
2413           PushOperand(r2);
2414           break;
2415         case NAMED_PROPERTY:
2416           __ StoreP(r2, MemOperand(sp, kPointerSize));
2417           break;
2418         case KEYED_PROPERTY:
2419           __ StoreP(r2, MemOperand(sp, 2 * kPointerSize));
2420           break;
2421         case NAMED_SUPER_PROPERTY:
2422         case KEYED_SUPER_PROPERTY:
2423           UNREACHABLE();
2424           break;
2425       }
2426     }
2427   }
2428 
2429   __ bind(&stub_call);
2430   __ LoadRR(r3, r2);
2431   __ LoadSmiLiteral(r2, Smi::FromInt(count_value));
2432 
2433   SetExpressionPosition(expr);
2434 
2435   Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
2436   CallIC(code, expr->CountBinOpFeedbackId());
2437   patch_site.EmitPatchInfo();
2438   __ bind(&done);
2439 
2440   // Store the value returned in r2.
2441   switch (assign_type) {
2442     case VARIABLE: {
2443       VariableProxy* proxy = expr->expression()->AsVariableProxy();
2444       if (expr->is_postfix()) {
2445         {
2446           EffectContext context(this);
2447           EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
2448                                  proxy->hole_check_mode());
2449           PrepareForBailoutForId(expr->AssignmentId(),
2450                                  BailoutState::TOS_REGISTER);
2451           context.Plug(r2);
2452         }
2453         // For all contexts except EffectContext we have the result on
2454         // top of the stack.
2455         if (!context()->IsEffect()) {
2456           context()->PlugTOS();
2457         }
2458       } else {
2459         EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
2460                                proxy->hole_check_mode());
2461         PrepareForBailoutForId(expr->AssignmentId(),
2462                                BailoutState::TOS_REGISTER);
2463         context()->Plug(r2);
2464       }
2465       break;
2466     }
2467     case NAMED_PROPERTY: {
2468       PopOperand(StoreDescriptor::ReceiverRegister());
2469       CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
2470       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2471       if (expr->is_postfix()) {
2472         if (!context()->IsEffect()) {
2473           context()->PlugTOS();
2474         }
2475       } else {
2476         context()->Plug(r2);
2477       }
2478       break;
2479     }
2480     case KEYED_PROPERTY: {
2481       PopOperands(StoreDescriptor::ReceiverRegister(),
2482                   StoreDescriptor::NameRegister());
2483       CallKeyedStoreIC(expr->CountSlot());
2484       PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2485       if (expr->is_postfix()) {
2486         if (!context()->IsEffect()) {
2487           context()->PlugTOS();
2488         }
2489       } else {
2490         context()->Plug(r2);
2491       }
2492       break;
2493     }
2494     case NAMED_SUPER_PROPERTY:
2495     case KEYED_SUPER_PROPERTY:
2496       UNREACHABLE();
2497       break;
2498   }
2499 }
2500 
2501 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
2502                                                  Expression* sub_expr,
2503                                                  Handle<String> check) {
2504   Label materialize_true, materialize_false;
2505   Label* if_true = NULL;
2506   Label* if_false = NULL;
2507   Label* fall_through = NULL;
2508   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2509                          &if_false, &fall_through);
2510 
2511   {
2512     AccumulatorValueContext context(this);
2513     VisitForTypeofValue(sub_expr);
2514   }
2515   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2516 
2517   Factory* factory = isolate()->factory();
2518   if (String::Equals(check, factory->number_string())) {
2519     __ JumpIfSmi(r2, if_true);
2520     __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
2521     __ CompareRoot(r2, Heap::kHeapNumberMapRootIndex);
2522     Split(eq, if_true, if_false, fall_through);
2523   } else if (String::Equals(check, factory->string_string())) {
2524     __ JumpIfSmi(r2, if_false);
2525     __ CompareObjectType(r2, r2, r3, FIRST_NONSTRING_TYPE);
2526     Split(lt, if_true, if_false, fall_through);
2527   } else if (String::Equals(check, factory->symbol_string())) {
2528     __ JumpIfSmi(r2, if_false);
2529     __ CompareObjectType(r2, r2, r3, SYMBOL_TYPE);
2530     Split(eq, if_true, if_false, fall_through);
2531   } else if (String::Equals(check, factory->boolean_string())) {
2532     __ CompareRoot(r2, Heap::kTrueValueRootIndex);
2533     __ beq(if_true);
2534     __ CompareRoot(r2, Heap::kFalseValueRootIndex);
2535     Split(eq, if_true, if_false, fall_through);
2536   } else if (String::Equals(check, factory->undefined_string())) {
2537     __ CompareRoot(r2, Heap::kNullValueRootIndex);
2538     __ beq(if_false);
2539     __ JumpIfSmi(r2, if_false);
2540     // Check for undetectable objects => true.
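         // The undefined oddball's map has the undetectable bit set, so the
         // test below covers undefined itself as well as undetectable objects;
         // null (also undetectable) was excluded above since typeof null is
         // "object".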
2541     __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
2542     __ tm(FieldMemOperand(r2, Map::kBitFieldOffset),
2543           Operand(1 << Map::kIsUndetectable));
2544     Split(ne, if_true, if_false, fall_through);
2545 
2546   } else if (String::Equals(check, factory->function_string())) {
2547     __ JumpIfSmi(r2, if_false);
2548     __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
2549     __ LoadlB(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
2550     __ AndP(r3, r3,
2551             Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
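         // typeof reports "function" only for objects that are callable and
         // not undetectable, hence the exact comparison against the callable
         // bit.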
2552     __ CmpP(r3, Operand(1 << Map::kIsCallable));
2553     Split(eq, if_true, if_false, fall_through);
2554   } else if (String::Equals(check, factory->object_string())) {
2555     __ JumpIfSmi(r2, if_false);
2556     __ CompareRoot(r2, Heap::kNullValueRootIndex);
2557     __ beq(if_true);
2558     STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2559     __ CompareObjectType(r2, r2, r3, FIRST_JS_RECEIVER_TYPE);
2560     __ blt(if_false);
2561     __ tm(FieldMemOperand(r2, Map::kBitFieldOffset),
2562           Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
2563     Split(eq, if_true, if_false, fall_through);
2564   } else {
2565     if (if_false != fall_through) __ b(if_false);
2566   }
2567   context()->Plug(if_true, if_false);
2568 }
2569 
2570 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
2571   Comment cmnt(masm_, "[ CompareOperation");
2572 
2573   // First we try a fast inlined version of the compare when one of
2574   // the operands is a literal.
2575   if (TryLiteralCompare(expr)) return;
2576 
2577   // Always perform the comparison for its control flow.  Pack the result
2578   // into the expression's context after the comparison is performed.
2579   Label materialize_true, materialize_false;
2580   Label* if_true = NULL;
2581   Label* if_false = NULL;
2582   Label* fall_through = NULL;
2583   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2584                          &if_false, &fall_through);
2585 
2586   Token::Value op = expr->op();
2587   VisitForStackValue(expr->left());
2588   switch (op) {
2589     case Token::IN:
2590       VisitForStackValue(expr->right());
2591       SetExpressionPosition(expr);
2592       EmitHasProperty();
2593       PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
2594       __ CompareRoot(r2, Heap::kTrueValueRootIndex);
2595       Split(eq, if_true, if_false, fall_through);
2596       break;
2597 
2598     case Token::INSTANCEOF: {
2599       VisitForAccumulatorValue(expr->right());
2600       SetExpressionPosition(expr);
2601       PopOperand(r3);
2602       __ Call(isolate()->builtins()->InstanceOf(), RelocInfo::CODE_TARGET);
2603       RestoreContext();
2604       PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
2605       __ CompareRoot(r2, Heap::kTrueValueRootIndex);
2606       Split(eq, if_true, if_false, fall_through);
2607       break;
2608     }
2609 
2610     default: {
2611       VisitForAccumulatorValue(expr->right());
2612       SetExpressionPosition(expr);
2613       Condition cond = CompareIC::ComputeCondition(op);
2614       PopOperand(r3);
2615 
2616       bool inline_smi_code = ShouldInlineSmiCase(op);
2617       JumpPatchSite patch_site(masm_);
2618       if (inline_smi_code) {
2619         Label slow_case;
2620         __ LoadRR(r4, r3);
2621         __ OrP(r4, r2);
2622         patch_site.EmitJumpIfNotSmi(r4, &slow_case);
2623         __ CmpP(r3, r2);
2624         Split(cond, if_true, if_false, NULL);
2625         __ bind(&slow_case);
2626       }
2627 
2628       Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
2629       CallIC(ic, expr->CompareOperationFeedbackId());
2630       patch_site.EmitPatchInfo();
2631       PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2632       __ CmpP(r2, Operand::Zero());
2633       Split(cond, if_true, if_false, fall_through);
2634     }
2635   }
2636 
2637   // Convert the result of the comparison into one expected for this
2638   // expression's context.
2639   context()->Plug(if_true, if_false);
2640 }
2641 
2642 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
2643                                               Expression* sub_expr,
2644                                               NilValue nil) {
2645   Label materialize_true, materialize_false;
2646   Label* if_true = NULL;
2647   Label* if_false = NULL;
2648   Label* fall_through = NULL;
2649   context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2650                          &if_false, &fall_through);
2651 
2652   VisitForAccumulatorValue(sub_expr);
2653   PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2654   if (expr->op() == Token::EQ_STRICT) {
2655     Heap::RootListIndex nil_value = nil == kNullValue
2656                                         ? Heap::kNullValueRootIndex
2657                                         : Heap::kUndefinedValueRootIndex;
2658     __ CompareRoot(r2, nil_value);
2659     Split(eq, if_true, if_false, fall_through);
2660   } else {
2661     __ JumpIfSmi(r2, if_false);
2662     __ LoadP(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
2663     __ LoadlB(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
2664     __ AndP(r0, r3, Operand(1 << Map::kIsUndetectable));
2665     Split(ne, if_true, if_false, fall_through);
2666   }
2667   context()->Plug(if_true, if_false);
2668 }
2669 Register FullCodeGenerator::result_register() { return r2; }
2670 
2671 Register FullCodeGenerator::context_register() { return cp; }
2672 
2673 void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
2674   DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
2675   __ LoadP(value, MemOperand(fp, frame_offset));
2676 }
2677 
2678 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
2679   DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
2680   __ StoreP(value, MemOperand(fp, frame_offset));
2681 }
2682 
2683 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
2684   __ LoadP(dst, ContextMemOperand(cp, context_index), r0);
2685 }
2686 
2687 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
2688   DeclarationScope* closure_scope = scope()->GetClosureScope();
2689   if (closure_scope->is_script_scope() || closure_scope->is_module_scope()) {
2690     // Contexts nested in the native context have a canonical empty function
2691     // as their closure, not the anonymous closure containing the global
2692     // code.
2693     __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip);
2694   } else if (closure_scope->is_eval_scope()) {
2695     // Contexts created by a call to eval have the same closure as the
2696     // context calling eval, not the anonymous closure containing the eval
2697     // code.  Fetch it from the context.
2698     __ LoadP(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX));
2699   } else {
2700     DCHECK(closure_scope->is_function_scope());
2701     __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2702   }
2703   PushOperand(ip);
2704 }
2705 
2706 #undef __
2707 
2708 #if V8_TARGET_ARCH_S390X
2709 static const FourByteInstr kInterruptBranchInstruction = 0xA7A40011;
2710 static const FourByteInstr kOSRBranchInstruction = 0xA7040011;
2711 static const int16_t kBackEdgeBranchOffsetInHalfWords = 0x11;
2712 #else
2713 static const FourByteInstr kInterruptBranchInstruction = 0xA7A4000D;
2714 static const FourByteInstr kOSRBranchInstruction = 0xA704000D;
2715 static const int16_t kBackEdgeBranchOffsetInHalfWords = 0xD;
2716 #endif
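// The branch offsets are encoded in halfwords: 0x11 halfwords (34 bytes) on
// 64-bit and 0xD halfwords (26 bytes) on 31-bit, covering the code between the
// patched BRC and its ok-label.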
2717 
2718 void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc,
2719                             BackEdgeState target_state,
2720                             Code* replacement_code) {
2721   Address call_address = Assembler::target_address_from_return_address(pc);
2722   Address branch_address = call_address - 4;
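       // The instruction to patch is the 4-byte BRC immediately preceding the
       // call to the interrupt / OSR builtin; only its condition mask changes.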
2723   Isolate* isolate = unoptimized_code->GetIsolate();
2724   CodePatcher patcher(isolate, branch_address, 4);
2725 
2726   switch (target_state) {
2727     case INTERRUPT: {
2728       //  <decrement profiling counter>
2729       //         bge     <ok>            ;; patched to GE BRC
2730       //         brasl    r14, <interrupt stub address>
2731       //  <reset profiling counter>
2732       //  ok-label
2733       patcher.masm()->brc(ge, Operand(kBackEdgeBranchOffsetInHalfWords));
2734       break;
2735     }
2736     case ON_STACK_REPLACEMENT:
2737       //  <decrement profiling counter>
2738       //         brc   0x0, <ok>            ;;  patched to NOP BRC
2739       //         brasl    r14, <interrupt stub address>
2740       //  <reset profiling counter>
2741       //  ok-label ----- pc_after points here
2742       patcher.masm()->brc(CC_NOP, Operand(kBackEdgeBranchOffsetInHalfWords));
2743       break;
2744   }
2745 
2746   // Replace the stack check address in the mov sequence with the
2747   // entry address of the replacement code.
2748   Assembler::set_target_address_at(isolate, call_address, unoptimized_code,
2749                                    replacement_code->entry());
2750 
2751   unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
2752       unoptimized_code, call_address, replacement_code);
2753 }
2754 
2755 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
2756     Isolate* isolate, Code* unoptimized_code, Address pc) {
2757   Address call_address = Assembler::target_address_from_return_address(pc);
2758   Address branch_address = call_address - 4;
2759 #ifdef DEBUG
2760   Address interrupt_address =
2761       Assembler::target_address_at(call_address, unoptimized_code);
2762 #endif
2763 
2764   DCHECK(BRC == Instruction::S390OpcodeValue(branch_address));
2765   // For interrupt, we expect a branch greater than or equal
2766   // i.e. BRC 0xa, +XXXX  (0xA7A4XXXX)
2767   FourByteInstr br_instr = Instruction::InstructionBits(
2768       reinterpret_cast<const byte*>(branch_address));
2769   if (kInterruptBranchInstruction == br_instr) {
2770     DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry());
2771     return INTERRUPT;
2772   }
2773 
2774   // Expect BRC to be patched to NOP branch.
2775   // i.e. BRC 0x0, +XXXX (0xA704XXXX)
2776   USE(kOSRBranchInstruction);
2777   DCHECK(kOSRBranchInstruction == br_instr);
2778 
2779   DCHECK(interrupt_address ==
2780          isolate->builtins()->OnStackReplacement()->entry());
2781   return ON_STACK_REPLACEMENT;
2782 }
2783 }  // namespace internal
2784 }  // namespace v8
2785 #endif  // V8_TARGET_ARCH_S390
2786