1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #if V8_TARGET_ARCH_PPC
6
7 #include "src/full-codegen/full-codegen.h"
8 #include "src/ast/compile-time-value.h"
9 #include "src/ast/scopes.h"
10 #include "src/code-factory.h"
11 #include "src/code-stubs.h"
12 #include "src/codegen.h"
13 #include "src/compilation-info.h"
14 #include "src/compiler.h"
15 #include "src/debug/debug.h"
16 #include "src/ic/ic.h"
17
18 #include "src/ppc/code-stubs-ppc.h"
19 #include "src/ppc/macro-assembler-ppc.h"
20
21 namespace v8 {
22 namespace internal {
23
24 #define __ ACCESS_MASM(masm())
25
// A patch site is a location in the code that can be patched. This class has
// a number of methods to emit the patchable code and a method, EmitPatchInfo,
// to record a marker back to the patchable code. The marker is a
// cmpi rx, #yyy instruction, where x * 0x0000ffff + yyy (the raw 16-bit
// immediate value) is the delta from the pc to the first instruction of the
// patchable code.
// See PatchInlinedSmiCode in ic-ppc.cc for the code that patches it.
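// (For example, a delta of 0x2000a would be recorded as register r2 with a
// 16-bit immediate of 0xc, since 2 * 0xffff + 0xc == 0x2000a.)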
33 class JumpPatchSite BASE_EMBEDDED {
34 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
36 #ifdef DEBUG
37 info_emitted_ = false;
38 #endif
39 }
40
  ~JumpPatchSite() { DCHECK(patch_site_.is_bound() == info_emitted_); }
42
  // When initially emitting this, ensure that a jump is always generated to
  // skip the inlined smi code.
  void EmitJumpIfNotSmi(Register reg, Label* target) {
46 DCHECK(!patch_site_.is_bound() && !info_emitted_);
47 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
48 __ bind(&patch_site_);
49 __ cmp(reg, reg, cr0);
50 __ beq(target, cr0); // Always taken before patched.
51 }
52
  // When initially emitting this, ensure that a jump is never generated to
  // skip the inlined smi code.
  void EmitJumpIfSmi(Register reg, Label* target) {
56 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
57 DCHECK(!patch_site_.is_bound() && !info_emitted_);
58 __ bind(&patch_site_);
59 __ cmp(reg, reg, cr0);
60 __ bne(target, cr0); // Never taken before patched.
61 }
62
  void EmitPatchInfo() {
64 if (patch_site_.is_bound()) {
65 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
66 Register reg;
      // Encode the delta in the register code (delta / kOff16Mask) and in the
      // 16-bit immediate (delta % kOff16Mask).
68 reg.set_code(delta_to_patch_site / kOff16Mask);
69 __ cmpi(reg, Operand(delta_to_patch_site % kOff16Mask));
70 #ifdef DEBUG
71 info_emitted_ = true;
72 #endif
73 } else {
74 __ nop(); // Signals no inlined code.
75 }
76 }
77
78 private:
  MacroAssembler* masm() { return masm_; }
80 MacroAssembler* masm_;
81 Label patch_site_;
82 #ifdef DEBUG
83 bool info_emitted_;
84 #endif
85 };
86
87
88 // Generate code for a JS function. On entry to the function the receiver
89 // and arguments have been pushed on the stack left to right. The actual
90 // argument count matches the formal parameter count expected by the
91 // function.
92 //
93 // The live registers are:
94 // o r4: the JS function object being called (i.e., ourselves)
95 // o r6: the new target value
96 // o cp: our context
97 // o fp: our caller's frame pointer (aka r31)
98 // o sp: stack pointer
99 // o lr: return address
100 // o ip: our own function entry (required by the prologue)
101 //
102 // The function builds a JS frame. Please see JavaScriptFrameConstants in
103 // frames-ppc.h for its layout.
void FullCodeGenerator::Generate() {
105 CompilationInfo* info = info_;
106 profiling_counter_ = isolate()->factory()->NewCell(
107 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
108 SetFunctionPosition(literal());
109 Comment cmnt(masm_, "[ function compiled by full code generator");
110
111 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
112
113 if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
114 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
115 __ LoadP(r5, MemOperand(sp, receiver_offset), r0);
116 __ AssertNotSmi(r5);
117 __ CompareObjectType(r5, r5, no_reg, FIRST_JS_RECEIVER_TYPE);
118 __ Assert(ge, kSloppyFunctionExpectsJSReceiverReceiver);
119 }
120
121 // Open a frame scope to indicate that there is a frame on the stack. The
122 // MANUAL indicates that the scope shouldn't actually generate code to set up
123 // the frame (that is done below).
124 FrameScope frame_scope(masm_, StackFrame::MANUAL);
125 int prologue_offset = masm_->pc_offset();
126
127 if (prologue_offset) {
    // The prologue logic requires its starting address in ip and the
    // corresponding offset from the function entry.
130 prologue_offset += Instruction::kInstrSize;
131 __ addi(ip, ip, Operand(prologue_offset));
132 }
133 info->set_prologue_offset(prologue_offset);
134 __ Prologue(info->GeneratePreagedPrologue(), ip, prologue_offset);
135
136 // Increment invocation count for the function.
137 {
138 Comment cmnt(masm_, "[ Increment invocation count");
139 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
140 __ LoadP(r7, FieldMemOperand(r7, LiteralsArray::kFeedbackVectorOffset));
141 __ LoadP(r8, FieldMemOperand(r7, TypeFeedbackVector::kInvocationCountIndex *
142 kPointerSize +
143 TypeFeedbackVector::kHeaderSize));
144 __ AddSmiLiteral(r8, r8, Smi::FromInt(1), r0);
145 __ StoreP(r8,
146 FieldMemOperand(
147 r7, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
148 TypeFeedbackVector::kHeaderSize),
149 r0);
150 }
151
152 {
153 Comment cmnt(masm_, "[ Allocate locals");
154 int locals_count = info->scope()->num_stack_slots();
155 // Generators allocate locals, if any, in context slots.
156 DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
157 OperandStackDepthIncrement(locals_count);
158 if (locals_count > 0) {
159 if (locals_count >= 128) {
160 Label ok;
161 __ Add(ip, sp, -(locals_count * kPointerSize), r0);
162 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
163 __ cmpl(ip, r5);
164 __ bc_short(ge, &ok);
165 __ CallRuntime(Runtime::kThrowStackOverflow);
166 __ bind(&ok);
167 }
168 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
169 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
170 if (locals_count >= kMaxPushes) {
171 int loop_iterations = locals_count / kMaxPushes;
172 __ mov(r5, Operand(loop_iterations));
173 __ mtctr(r5);
174 Label loop_header;
175 __ bind(&loop_header);
176 // Do pushes.
177 for (int i = 0; i < kMaxPushes; i++) {
178 __ push(ip);
179 }
180 // Continue loop if not done.
181 __ bdnz(&loop_header);
182 }
183 int remaining = locals_count % kMaxPushes;
184 // Emit the remaining pushes.
185 for (int i = 0; i < remaining; i++) {
186 __ push(ip);
187 }
188 }
189 }
190
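  // Track whether r4 still holds the function being compiled; stub and
  // runtime calls below may clobber it, in which case it is reloaded from
  // the frame when needed.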
191 bool function_in_register_r4 = true;
192
193 // Possibly allocate a local context.
194 if (info->scope()->NeedsContext()) {
195 // Argument to NewContext is the function, which is still in r4.
196 Comment cmnt(masm_, "[ Allocate context");
197 bool need_write_barrier = true;
198 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
199 if (info->scope()->is_script_scope()) {
200 __ push(r4);
201 __ Push(info->scope()->scope_info());
202 __ CallRuntime(Runtime::kNewScriptContext);
203 PrepareForBailoutForId(BailoutId::ScriptContext(),
204 BailoutState::TOS_REGISTER);
      // The new target value is not used, so clobbering is safe.
206 DCHECK_NULL(info->scope()->new_target_var());
207 } else {
208 if (info->scope()->new_target_var() != nullptr) {
209 __ push(r6); // Preserve new target.
210 }
211 if (slots <= FastNewFunctionContextStub::kMaximumSlots) {
212 FastNewFunctionContextStub stub(isolate());
213 __ mov(FastNewFunctionContextDescriptor::SlotsRegister(),
214 Operand(slots));
215 __ CallStub(&stub);
216 // Result of FastNewFunctionContextStub is always in new space.
217 need_write_barrier = false;
218 } else {
219 __ push(r4);
220 __ CallRuntime(Runtime::kNewFunctionContext);
221 }
222 if (info->scope()->new_target_var() != nullptr) {
        __ pop(r6);  // Restore new target.
224 }
225 }
226 function_in_register_r4 = false;
227 // Context is returned in r3. It replaces the context passed to us.
228 // It's saved in the stack and kept live in cp.
229 __ mr(cp, r3);
230 __ StoreP(r3, MemOperand(fp, StandardFrameConstants::kContextOffset));
231 // Copy any necessary parameters into the context.
232 int num_parameters = info->scope()->num_parameters();
233 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
234 for (int i = first_parameter; i < num_parameters; i++) {
235 Variable* var =
236 (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
237 if (var->IsContextSlot()) {
238 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
239 (num_parameters - 1 - i) * kPointerSize;
240 // Load parameter from stack.
241 __ LoadP(r3, MemOperand(fp, parameter_offset), r0);
242 // Store it in the context.
243 MemOperand target = ContextMemOperand(cp, var->index());
244 __ StoreP(r3, target, r0);
245
246 // Update the write barrier.
247 if (need_write_barrier) {
248 __ RecordWriteContextSlot(cp, target.offset(), r3, r5,
249 kLRHasBeenSaved, kDontSaveFPRegs);
250 } else if (FLAG_debug_code) {
251 Label done;
252 __ JumpIfInNewSpace(cp, r3, &done);
253 __ Abort(kExpectedNewSpaceObject);
254 __ bind(&done);
255 }
256 }
257 }
258 }
259
  // The registers holding this function and the new target are both trashed
  // if we bail out here. But since that can only happen when the new target
  // is not used and we allocate a context, the value of
  // |function_in_register_r4| is correct.
263 PrepareForBailoutForId(BailoutId::FunctionContext(),
264 BailoutState::NO_REGISTERS);
265
266 // Possibly set up a local binding to the this function which is used in
267 // derived constructors with super calls.
268 Variable* this_function_var = info->scope()->this_function_var();
269 if (this_function_var != nullptr) {
270 Comment cmnt(masm_, "[ This function");
271 if (!function_in_register_r4) {
272 __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
      // The write barrier clobbers the register again, so keep it marked as
      // such.
274 }
275 SetVar(this_function_var, r4, r3, r5);
276 }
277
278 // Possibly set up a local binding to the new target value.
279 Variable* new_target_var = info->scope()->new_target_var();
280 if (new_target_var != nullptr) {
281 Comment cmnt(masm_, "[ new.target");
282 SetVar(new_target_var, r6, r3, r5);
283 }
284
285 // Possibly allocate RestParameters
286 Variable* rest_param = info->scope()->rest_parameter();
287 if (rest_param != nullptr) {
288 Comment cmnt(masm_, "[ Allocate rest parameter array");
289 if (!function_in_register_r4) {
290 __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
291 }
292 FastNewRestParameterStub stub(isolate());
293 __ CallStub(&stub);
294 function_in_register_r4 = false;
295 SetVar(rest_param, r3, r4, r5);
296 }
297
298 Variable* arguments = info->scope()->arguments();
299 if (arguments != NULL) {
300 // Function uses arguments object.
301 Comment cmnt(masm_, "[ Allocate arguments object");
302 if (!function_in_register_r4) {
303 // Load this again, if it's used by the local context below.
304 __ LoadP(r4, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
305 }
306 if (is_strict(language_mode()) || !has_simple_parameters()) {
307 FastNewStrictArgumentsStub stub(isolate());
308 __ CallStub(&stub);
309 } else if (literal()->has_duplicate_parameters()) {
310 __ Push(r4);
311 __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
312 } else {
313 FastNewSloppyArgumentsStub stub(isolate());
314 __ CallStub(&stub);
315 }
316
317 SetVar(arguments, r3, r4, r5);
318 }
319
320 if (FLAG_trace) {
321 __ CallRuntime(Runtime::kTraceEnter);
322 }
323
324 // Visit the declarations and body.
325 PrepareForBailoutForId(BailoutId::FunctionEntry(),
326 BailoutState::NO_REGISTERS);
327 {
328 Comment cmnt(masm_, "[ Declarations");
329 VisitDeclarations(scope()->declarations());
330 }
331
332 // Assert that the declarations do not use ICs. Otherwise the debugger
333 // won't be able to redirect a PC at an IC to the correct IC in newly
334 // recompiled code.
335 DCHECK_EQ(0, ic_total_count_);
336
337 {
338 Comment cmnt(masm_, "[ Stack check");
339 PrepareForBailoutForId(BailoutId::Declarations(),
340 BailoutState::NO_REGISTERS);
341 Label ok;
342 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
343 __ cmpl(sp, ip);
344 __ bc_short(ge, &ok);
345 __ Call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
346 __ bind(&ok);
347 }
348
349 {
350 Comment cmnt(masm_, "[ Body");
351 DCHECK(loop_depth() == 0);
352 VisitStatements(literal()->body());
353 DCHECK(loop_depth() == 0);
354 }
355
356 // Always emit a 'return undefined' in case control fell off the end of
357 // the body.
358 {
359 Comment cmnt(masm_, "[ return <undefined>;");
360 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
361 }
362 EmitReturnSequence();
363
364 if (HasStackOverflow()) {
365 masm_->AbortConstantPoolBuilding();
366 }
367 }
368
369
void FullCodeGenerator::ClearAccumulator() {
371 __ LoadSmiLiteral(r3, Smi::kZero);
372 }
373
374
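// Decrements the profiling counter cell by |delta| and leaves the updated
// Smi value in r6; callers test it against zero to decide whether to call
// the InterruptCheck builtin.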
void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
376 __ mov(r5, Operand(profiling_counter_));
377 __ LoadP(r6, FieldMemOperand(r5, Cell::kValueOffset));
378 __ SubSmiLiteral(r6, r6, Smi::FromInt(delta), r0);
379 __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
380 }
381
382
void FullCodeGenerator::EmitProfilingCounterReset() {
384 int reset_value = FLAG_interrupt_budget;
385 __ mov(r5, Operand(profiling_counter_));
386 __ LoadSmiLiteral(r6, Smi::FromInt(reset_value));
387 __ StoreP(r6, FieldMemOperand(r5, Cell::kValueOffset), r0);
388 }
389
390
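// Emitted at loop back edges. The profiling counter is decremented by a
// weight derived from the loop body size; when it goes negative, the
// InterruptCheck builtin is called. BackEdgeTable::PatchAt patches this
// sequence to enable on-stack replacement.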
void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
393 Comment cmnt(masm_, "[ Back edge bookkeeping");
394 Label ok;
395
396 DCHECK(back_edge_target->is_bound());
397 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target) +
398 kCodeSizeMultiplier / 2;
399 int weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
400 EmitProfilingCounterDecrement(weight);
401 {
402 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
403 Assembler::BlockConstantPoolEntrySharingScope prevent_entry_sharing(masm_);
404 // BackEdgeTable::PatchAt manipulates this sequence.
405 __ cmpi(r6, Operand::Zero());
406 __ bc_short(ge, &ok);
407 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
408
409 // Record a mapping of this PC offset to the OSR id. This is used to find
410 // the AST id from the unoptimized code in order to use it as a key into
411 // the deoptimization input data found in the optimized code.
412 RecordBackEdge(stmt->OsrEntryId());
413 }
414 EmitProfilingCounterReset();
415
416 __ bind(&ok);
417 PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
418 // Record a mapping of the OSR id to this PC. This is used if the OSR
419 // entry becomes the target of a bailout. We don't expect it to be, but
420 // we want it to work if it is.
421 PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
422 }
423
void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
    bool is_tail_call) {
426 // Pretend that the exit is a backwards jump to the entry.
427 int weight = 1;
428 if (info_->ShouldSelfOptimize()) {
429 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
430 } else {
431 int distance = masm_->pc_offset() + kCodeSizeMultiplier / 2;
432 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
433 }
434 EmitProfilingCounterDecrement(weight);
435 Label ok;
436 __ cmpi(r6, Operand::Zero());
437 __ bge(&ok);
438 // Don't need to save result register if we are going to do a tail call.
439 if (!is_tail_call) {
440 __ push(r3);
441 }
442 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
443 if (!is_tail_call) {
444 __ pop(r3);
445 }
446 EmitProfilingCounterReset();
447 __ bind(&ok);
448 }
449
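// Emits the return sequence: binds (or branches to) the shared return label,
// optionally calls Runtime::kTraceExit, updates the profiling counter, then
// tears down the frame and returns, dropping the receiver and arguments.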
void FullCodeGenerator::EmitReturnSequence() {
451 Comment cmnt(masm_, "[ Return sequence");
452 if (return_label_.is_bound()) {
453 __ b(&return_label_);
454 } else {
455 __ bind(&return_label_);
456 if (FLAG_trace) {
457 // Push the return value on the stack as the parameter.
458 // Runtime::TraceExit returns its parameter in r3
459 __ push(r3);
460 __ CallRuntime(Runtime::kTraceExit);
461 }
462 EmitProfilingCounterHandlingForReturnSequence(false);
463
464 // Make sure that the constant pool is not emitted inside of the return
465 // sequence.
466 {
467 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
468 int32_t arg_count = info_->scope()->num_parameters() + 1;
469 int32_t sp_delta = arg_count * kPointerSize;
470 SetReturnPosition(literal());
471 __ LeaveFrame(StackFrame::JAVA_SCRIPT, sp_delta);
472 __ blr();
473 }
474 }
475 }
476
void FullCodeGenerator::RestoreContext() {
478 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
479 }
480
void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
482 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
483 codegen()->GetVar(result_register(), var);
484 codegen()->PushOperand(result_register());
485 }
486
487
void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {}
489
490
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
493 __ LoadRoot(result_register(), index);
494 }
495
496
void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
499 __ LoadRoot(result_register(), index);
500 codegen()->PushOperand(result_register());
501 }
502
503
void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
505 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
506 false_label_);
507 if (index == Heap::kUndefinedValueRootIndex ||
508 index == Heap::kNullValueRootIndex ||
509 index == Heap::kFalseValueRootIndex) {
510 if (false_label_ != fall_through_) __ b(false_label_);
511 } else if (index == Heap::kTrueValueRootIndex) {
512 if (true_label_ != fall_through_) __ b(true_label_);
513 } else {
514 __ LoadRoot(result_register(), index);
515 codegen()->DoTest(this);
516 }
517 }
518
519
void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {}
521
522
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
525 __ mov(result_register(), Operand(lit));
526 }
527
528
void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
530 // Immediates cannot be pushed directly.
531 __ mov(result_register(), Operand(lit));
532 codegen()->PushOperand(result_register());
533 }
534
535
void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
537 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
538 false_label_);
539 DCHECK(lit->IsNull(isolate()) || lit->IsUndefined(isolate()) ||
540 !lit->IsUndetectable());
541 if (lit->IsUndefined(isolate()) || lit->IsNull(isolate()) ||
542 lit->IsFalse(isolate())) {
543 if (false_label_ != fall_through_) __ b(false_label_);
544 } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
545 if (true_label_ != fall_through_) __ b(true_label_);
546 } else if (lit->IsString()) {
547 if (String::cast(*lit)->length() == 0) {
548 if (false_label_ != fall_through_) __ b(false_label_);
549 } else {
550 if (true_label_ != fall_through_) __ b(true_label_);
551 }
552 } else if (lit->IsSmi()) {
553 if (Smi::cast(*lit)->value() == 0) {
554 if (false_label_ != fall_through_) __ b(false_label_);
555 } else {
556 if (true_label_ != fall_through_) __ b(true_label_);
557 }
558 } else {
559 // For simplicity we always test the accumulator register.
560 __ mov(result_register(), Operand(lit));
561 codegen()->DoTest(this);
562 }
563 }
564
565
void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
568 DCHECK(count > 0);
569 if (count > 1) codegen()->DropOperands(count - 1);
570 __ StoreP(reg, MemOperand(sp, 0));
571 }
572
573
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
576 DCHECK(materialize_true == materialize_false);
577 __ bind(materialize_true);
578 }
579
580
void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true, Label* materialize_false) const {
583 Label done;
584 __ bind(materialize_true);
585 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
586 __ b(&done);
587 __ bind(materialize_false);
588 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
589 __ bind(&done);
590 }
591
592
void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true, Label* materialize_false) const {
595 Label done;
596 __ bind(materialize_true);
597 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
598 __ b(&done);
599 __ bind(materialize_false);
600 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
601 __ bind(&done);
602 codegen()->PushOperand(ip);
603 }
604
605
void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
608 DCHECK(materialize_true == true_label_);
609 DCHECK(materialize_false == false_label_);
610 }
611
612
void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
614 Heap::RootListIndex value_root_index =
615 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
616 __ LoadRoot(result_register(), value_root_index);
617 }
618
619
void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
621 Heap::RootListIndex value_root_index =
622 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
623 __ LoadRoot(ip, value_root_index);
624 codegen()->PushOperand(ip);
625 }
626
627
void FullCodeGenerator::TestContext::Plug(bool flag) const {
629 codegen()->PrepareForBailoutBeforeSplit(condition(), true, true_label_,
630 false_label_);
631 if (flag) {
632 if (true_label_ != fall_through_) __ b(true_label_);
633 } else {
634 if (false_label_ != fall_through_) __ b(false_label_);
635 }
636 }
637
638
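// Converts the value in the accumulator to a boolean via the ToBoolean IC and
// branches: true goes to if_true, everything else to if_false.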
void FullCodeGenerator::DoTest(Expression* condition, Label* if_true,
                               Label* if_false, Label* fall_through) {
641 Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
642 CallIC(ic, condition->test_id());
643 __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
644 Split(eq, if_true, if_false, fall_through);
645 }
646
647
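// Branches on the given condition, omitting whichever jump would merely fall
// through to the code emitted next.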
void FullCodeGenerator::Split(Condition cond, Label* if_true, Label* if_false,
                              Label* fall_through, CRegister cr) {
650 if (if_false == fall_through) {
651 __ b(cond, if_true, cr);
652 } else if (if_true == fall_through) {
653 __ b(NegateCondition(cond), if_false, cr);
654 } else {
655 __ b(cond, if_true, cr);
656 __ b(if_false);
657 }
658 }
659
660
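// Returns an fp-relative MemOperand for a stack-allocated variable, either a
// parameter or a local slot.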
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
662 DCHECK(var->IsStackAllocated());
663 // Offset is negative because higher indexes are at lower addresses.
664 int offset = -var->index() * kPointerSize;
665 // Adjust by a (parameter or local) base offset.
666 if (var->IsParameter()) {
667 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
668 } else {
669 offset += JavaScriptFrameConstants::kLocal0Offset;
670 }
671 return MemOperand(fp, offset);
672 }
673
674
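// Returns a MemOperand for |var|, walking the context chain into |scratch|
// when the variable lives in a context slot.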
MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
676 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
677 if (var->IsContextSlot()) {
678 int context_chain_length = scope()->ContextChainLength(var->scope());
679 __ LoadContext(scratch, context_chain_length);
680 return ContextMemOperand(scratch, var->index());
681 } else {
682 return StackOperand(var);
683 }
684 }
685
686
void FullCodeGenerator::GetVar(Register dest, Variable* var) {
688 // Use destination as scratch.
689 MemOperand location = VarOperand(var, dest);
690 __ LoadP(dest, location, r0);
691 }
692
693
void FullCodeGenerator::SetVar(Variable* var, Register src, Register scratch0,
                               Register scratch1) {
696 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
697 DCHECK(!scratch0.is(src));
698 DCHECK(!scratch0.is(scratch1));
699 DCHECK(!scratch1.is(src));
700 MemOperand location = VarOperand(var, scratch0);
701 __ StoreP(src, location, r0);
702
703 // Emit the write barrier code if the location is in the heap.
704 if (var->IsContextSlot()) {
705 __ RecordWriteContextSlot(scratch0, location.offset(), src, scratch1,
706 kLRHasBeenSaved, kDontSaveFPRegs);
707 }
708 }
709
710
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
715 // Only prepare for bailouts before splits if we're in a test
716 // context. Otherwise, we let the Visit function deal with the
717 // preparation to avoid preparing with the same AST id twice.
718 if (!context()->IsTest()) return;
719
720 Label skip;
721 if (should_normalize) __ b(&skip);
722 PrepareForBailout(expr, BailoutState::TOS_REGISTER);
723 if (should_normalize) {
724 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
725 __ cmp(r3, ip);
726 Split(eq, if_true, if_false, NULL);
727 __ bind(&skip);
728 }
729 }
730
731
void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
733 // The variable in the declaration always resides in the current function
734 // context.
735 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
736 if (FLAG_debug_code) {
737 // Check that we're not inside a with or catch context.
738 __ LoadP(r4, FieldMemOperand(cp, HeapObject::kMapOffset));
739 __ CompareRoot(r4, Heap::kWithContextMapRootIndex);
740 __ Check(ne, kDeclarationInWithContext);
741 __ CompareRoot(r4, Heap::kCatchContextMapRootIndex);
742 __ Check(ne, kDeclarationInCatchContext);
743 }
744 }
745
746
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
749 VariableProxy* proxy = declaration->proxy();
750 Variable* variable = proxy->var();
751 switch (variable->location()) {
752 case VariableLocation::UNALLOCATED: {
753 DCHECK(!variable->binding_needs_init());
754 FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
755 DCHECK(!slot.IsInvalid());
756 globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
757 globals_->Add(isolate()->factory()->undefined_value(), zone());
758 break;
759 }
760 case VariableLocation::PARAMETER:
761 case VariableLocation::LOCAL:
762 if (variable->binding_needs_init()) {
763 Comment cmnt(masm_, "[ VariableDeclaration");
764 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
765 __ StoreP(ip, StackOperand(variable));
766 }
767 break;
768
769 case VariableLocation::CONTEXT:
770 if (variable->binding_needs_init()) {
771 Comment cmnt(masm_, "[ VariableDeclaration");
772 EmitDebugCheckDeclarationContext(variable);
773 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
774 __ StoreP(ip, ContextMemOperand(cp, variable->index()), r0);
775 // No write barrier since the_hole_value is in old space.
776 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
777 }
778 break;
779
780 case VariableLocation::LOOKUP: {
781 Comment cmnt(masm_, "[ VariableDeclaration");
782 DCHECK_EQ(VAR, variable->mode());
783 DCHECK(!variable->binding_needs_init());
784 __ mov(r5, Operand(variable->name()));
785 __ Push(r5);
786 __ CallRuntime(Runtime::kDeclareEvalVar);
787 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
788 break;
789 }
790
791 case VariableLocation::MODULE:
792 UNREACHABLE();
793 }
794 }
795
796
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
799 VariableProxy* proxy = declaration->proxy();
800 Variable* variable = proxy->var();
801 switch (variable->location()) {
802 case VariableLocation::UNALLOCATED: {
803 FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
804 DCHECK(!slot.IsInvalid());
805 globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
806 Handle<SharedFunctionInfo> function =
807 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
808 // Check for stack-overflow exception.
809 if (function.is_null()) return SetStackOverflow();
810 globals_->Add(function, zone());
811 break;
812 }
813
814 case VariableLocation::PARAMETER:
815 case VariableLocation::LOCAL: {
816 Comment cmnt(masm_, "[ FunctionDeclaration");
817 VisitForAccumulatorValue(declaration->fun());
818 __ StoreP(result_register(), StackOperand(variable));
819 break;
820 }
821
822 case VariableLocation::CONTEXT: {
823 Comment cmnt(masm_, "[ FunctionDeclaration");
824 EmitDebugCheckDeclarationContext(variable);
825 VisitForAccumulatorValue(declaration->fun());
826 __ StoreP(result_register(), ContextMemOperand(cp, variable->index()),
827 r0);
828 int offset = Context::SlotOffset(variable->index());
829 // We know that we have written a function, which is not a smi.
830 __ RecordWriteContextSlot(cp, offset, result_register(), r5,
831 kLRHasBeenSaved, kDontSaveFPRegs,
832 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
833 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
834 break;
835 }
836
837 case VariableLocation::LOOKUP: {
838 Comment cmnt(masm_, "[ FunctionDeclaration");
839 __ mov(r5, Operand(variable->name()));
840 PushOperand(r5);
841 // Push initial value for function declaration.
842 VisitForStackValue(declaration->fun());
843 CallRuntimeWithOperands(Runtime::kDeclareEvalFunction);
844 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
845 break;
846 }
847
848 case VariableLocation::MODULE:
849 UNREACHABLE();
850 }
851 }
852
853
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
855 // Call the runtime to declare the globals.
856 __ mov(r4, Operand(pairs));
857 __ LoadSmiLiteral(r3, Smi::FromInt(DeclareGlobalsFlags()));
858 __ EmitLoadTypeFeedbackVector(r5);
859 __ Push(r4, r3, r5);
860 __ CallRuntime(Runtime::kDeclareGlobals);
861 // Return value is ignored.
862 }
863
864
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
866 Comment cmnt(masm_, "[ SwitchStatement");
867 Breakable nested_statement(this, stmt);
868 SetStatementPosition(stmt);
869
870 // Keep the switch value on the stack until a case matches.
871 VisitForStackValue(stmt->tag());
872 PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
873
874 ZoneList<CaseClause*>* clauses = stmt->cases();
875 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
876
877 Label next_test; // Recycled for each test.
878 // Compile all the tests with branches to their bodies.
879 for (int i = 0; i < clauses->length(); i++) {
880 CaseClause* clause = clauses->at(i);
881 clause->body_target()->Unuse();
882
883 // The default is not a test, but remember it as final fall through.
884 if (clause->is_default()) {
885 default_clause = clause;
886 continue;
887 }
888
889 Comment cmnt(masm_, "[ Case comparison");
890 __ bind(&next_test);
891 next_test.Unuse();
892
893 // Compile the label expression.
894 VisitForAccumulatorValue(clause->label());
895
896 // Perform the comparison as if via '==='.
897 __ LoadP(r4, MemOperand(sp, 0)); // Switch value.
898 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
899 JumpPatchSite patch_site(masm_);
900 if (inline_smi_code) {
901 Label slow_case;
902 __ orx(r5, r4, r3);
903 patch_site.EmitJumpIfNotSmi(r5, &slow_case);
904
905 __ cmp(r4, r3);
906 __ bne(&next_test);
907 __ Drop(1); // Switch value is no longer needed.
908 __ b(clause->body_target());
909 __ bind(&slow_case);
910 }
911
912 // Record position before stub call for type feedback.
913 SetExpressionPosition(clause);
914 Handle<Code> ic =
915 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
916 CallIC(ic, clause->CompareId());
917 patch_site.EmitPatchInfo();
918
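    // The CompareIC leaves a Smi in r3, where zero means the operands were
    // equal. The block skipped below is only reached when resuming here from
    // a bailout, in which case r3 instead holds a materialized boolean.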
919 Label skip;
920 __ b(&skip);
921 PrepareForBailout(clause, BailoutState::TOS_REGISTER);
922 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
923 __ cmp(r3, ip);
924 __ bne(&next_test);
925 __ Drop(1);
926 __ b(clause->body_target());
927 __ bind(&skip);
928
929 __ cmpi(r3, Operand::Zero());
930 __ bne(&next_test);
931 __ Drop(1); // Switch value is no longer needed.
932 __ b(clause->body_target());
933 }
934
935 // Discard the test value and jump to the default if present, otherwise to
936 // the end of the statement.
937 __ bind(&next_test);
938 DropOperands(1); // Switch value is no longer needed.
939 if (default_clause == NULL) {
940 __ b(nested_statement.break_label());
941 } else {
942 __ b(default_clause->body_target());
943 }
944
945 // Compile all the case bodies.
946 for (int i = 0; i < clauses->length(); i++) {
947 Comment cmnt(masm_, "[ Case body");
948 CaseClause* clause = clauses->at(i);
949 __ bind(clause->body_target());
950 PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
951 VisitStatements(clause->statements());
952 }
953
954 __ bind(nested_statement.break_label());
955 PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
956 }
957
958
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
960 Comment cmnt(masm_, "[ ForInStatement");
961 SetStatementPosition(stmt, SKIP_BREAK);
962
963 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
964
965 // Get the object to enumerate over.
966 SetExpressionAsStatementPosition(stmt->enumerable());
967 VisitForAccumulatorValue(stmt->enumerable());
968 OperandStackDepthIncrement(5);
969
970 Label loop, exit;
971 Iteration loop_statement(this, stmt);
972 increment_loop_depth();
973
974 // If the object is null or undefined, skip over the loop, otherwise convert
975 // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
976 Label convert, done_convert;
977 __ JumpIfSmi(r3, &convert);
978 __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
979 __ bge(&done_convert);
980 __ CompareRoot(r3, Heap::kNullValueRootIndex);
981 __ beq(&exit);
982 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
983 __ beq(&exit);
984 __ bind(&convert);
985 __ Call(isolate()->builtins()->ToObject(), RelocInfo::CODE_TARGET);
986 RestoreContext();
987 __ bind(&done_convert);
988 PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
989 __ push(r3);
990
991 // Check cache validity in generated code. If we cannot guarantee cache
992 // validity, call the runtime system to check cache validity or get the
993 // property names in a fixed array. Note: Proxies never have an enum cache,
994 // so will always take the slow path.
995 Label call_runtime;
996 __ CheckEnumCache(&call_runtime);
997
998 // The enum cache is valid. Load the map of the object being
999 // iterated over and use the cache for the iteration.
1000 Label use_cache;
1001 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
1002 __ b(&use_cache);
1003
1004 // Get the set of properties to enumerate.
1005 __ bind(&call_runtime);
1006 __ push(r3); // Duplicate the enumerable object on the stack.
1007 __ CallRuntime(Runtime::kForInEnumerate);
1008 PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
1009
1010 // If we got a map from the runtime call, we can do a fast
1011 // modification check. Otherwise, we got a fixed array, and we have
1012 // to do a slow check.
1013 Label fixed_array;
1014 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));
1015 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
1016 __ cmp(r5, ip);
1017 __ bne(&fixed_array);
1018
1019 // We got a map in register r3. Get the enumeration cache from it.
1020 Label no_descriptors;
1021 __ bind(&use_cache);
1022
1023 __ EnumLength(r4, r3);
1024 __ CmpSmiLiteral(r4, Smi::kZero, r0);
1025 __ beq(&no_descriptors);
1026
1027 __ LoadInstanceDescriptors(r3, r5);
1028 __ LoadP(r5, FieldMemOperand(r5, DescriptorArray::kEnumCacheOffset));
1029 __ LoadP(r5,
1030 FieldMemOperand(r5, DescriptorArray::kEnumCacheBridgeCacheOffset));
1031
1032 // Set up the four remaining stack slots.
1033 __ push(r3); // Map.
1034 __ LoadSmiLiteral(r3, Smi::kZero);
1035 // Push enumeration cache, enumeration cache length (as smi) and zero.
1036 __ Push(r5, r4, r3);
1037 __ b(&loop);
1038
1039 __ bind(&no_descriptors);
1040 __ Drop(1);
1041 __ b(&exit);
1042
1043 // We got a fixed array in register r3. Iterate through that.
1044 __ bind(&fixed_array);
1045
1046 __ LoadSmiLiteral(r4, Smi::FromInt(1)); // Smi(1) indicates slow check
1047 __ Push(r4, r3); // Smi and array
1048 __ LoadP(r4, FieldMemOperand(r3, FixedArray::kLengthOffset));
1049 __ Push(r4); // Fixed array length (as smi).
1050 PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
1051 __ LoadSmiLiteral(r3, Smi::kZero);
1052 __ Push(r3); // Initial index.
1053
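  // Stack layout during the loop, from top: current index (smi), length
  // (smi), enum cache bridge or fixed array, expected map or Smi(1), and the
  // enumerable object itself.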
1054 // Generate code for doing the condition check.
1055 __ bind(&loop);
1056 SetExpressionAsStatementPosition(stmt->each());
1057
1058 // Load the current count to r3, load the length to r4.
1059 __ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
1060 __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
1061 __ cmpl(r3, r4); // Compare to the array length.
1062 __ bge(loop_statement.break_label());
1063
1064 // Get the current entry of the array into register r6.
1065 __ LoadP(r5, MemOperand(sp, 2 * kPointerSize));
1066 __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1067 __ SmiToPtrArrayOffset(r6, r3);
1068 __ LoadPX(r6, MemOperand(r6, r5));
1069
  // Get the expected map from the stack, or a smi in the permanent slow
  // case, into register r5.
1072 __ LoadP(r5, MemOperand(sp, 3 * kPointerSize));
1073
1074 // Check if the expected map still matches that of the enumerable.
1075 // If not, we may have to filter the key.
1076 Label update_each;
1077 __ LoadP(r4, MemOperand(sp, 4 * kPointerSize));
1078 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
1079 __ cmp(r7, r5);
1080 __ beq(&update_each);
1081
1082 // We need to filter the key, record slow-path here.
1083 int const vector_index = SmiFromSlot(slot)->value();
1084 __ EmitLoadTypeFeedbackVector(r3);
1085 __ mov(r5, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1086 __ StoreP(
1087 r5, FieldMemOperand(r3, FixedArray::OffsetOfElementAt(vector_index)), r0);
1088
1089 // Convert the entry to a string or (smi) 0 if it isn't a property
1090 // any more. If the property has been removed while iterating, we
1091 // just skip it.
1092 __ Push(r4, r6); // Enumerable and current entry.
1093 __ CallRuntime(Runtime::kForInFilter);
1094 PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
1095 __ mr(r6, r3);
1096 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
1097 __ cmp(r3, r0);
1098 __ beq(loop_statement.continue_label());
1099
1100 // Update the 'each' property or variable from the possibly filtered
1101 // entry in register r6.
1102 __ bind(&update_each);
1103 __ mr(result_register(), r6);
1104 // Perform the assignment as if via '='.
1105 {
1106 EffectContext context(this);
1107 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1108 PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
1109 }
1110
1111 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1112 PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
1113 // Generate code for the body of the loop.
1114 Visit(stmt->body());
1115
  // Generate code for going to the next element by incrementing the index
  // (smi) stored on top of the stack.
1118 __ bind(loop_statement.continue_label());
1119 PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
1120 __ pop(r3);
1121 __ AddSmiLiteral(r3, r3, Smi::FromInt(1), r0);
1122 __ push(r3);
1123
1124 EmitBackEdgeBookkeeping(stmt, &loop);
1125 __ b(&loop);
1126
1127 // Remove the pointers stored on the stack.
1128 __ bind(loop_statement.break_label());
1129 DropOperands(5);
1130
1131 // Exit and decrement the loop depth.
1132 PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
1133 __ bind(&exit);
1134 decrement_loop_depth();
1135 }
1136
1137
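// Sets the [[HomeObject]] of the value on top of the operand stack to the
// object located |offset| slots further down the stack.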
void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
                                          FeedbackVectorSlot slot) {
1140 DCHECK(NeedsHomeObject(initializer));
1141 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1142 __ LoadP(StoreDescriptor::ValueRegister(),
1143 MemOperand(sp, offset * kPointerSize));
1144 CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1145 }
1146
1147
void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
                                                     int offset,
                                                     FeedbackVectorSlot slot) {
1151 DCHECK(NeedsHomeObject(initializer));
1152 __ Move(StoreDescriptor::ReceiverRegister(), r3);
1153 __ LoadP(StoreDescriptor::ValueRegister(),
1154 MemOperand(sp, offset * kPointerSize));
1155 CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1156 }
1157
1158
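// Loads a global variable for a proxy that may be shadowed by eval: walks the
// context chain up to the outermost sloppy eval scope, jumping to |slow| if
// an eval-introduced extension object is found, then performs a normal
// global load.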
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofMode typeof_mode,
                                                      Label* slow) {
1162 Register current = cp;
1163 Register next = r4;
1164 Register temp = r5;
1165
1166 int to_check = scope()->ContextChainLengthUntilOutermostSloppyEval();
1167 for (Scope* s = scope(); to_check > 0; s = s->outer_scope()) {
1168 if (!s->NeedsContext()) continue;
1169 if (s->calls_sloppy_eval()) {
1170 // Check that extension is "the hole".
1171 __ LoadP(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1172 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1173 }
1174 // Load next context in chain.
1175 __ LoadP(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1176 // Walk the rest of the chain without clobbering cp.
1177 current = next;
1178 to_check--;
1179 }
1180
1181 // All extension objects were empty and it is safe to use a normal global
1182 // load machinery.
1183 EmitGlobalVariableLoad(proxy, typeof_mode);
1184 }
1185
1186
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
1189 DCHECK(var->IsContextSlot());
1190 Register context = cp;
1191 Register next = r6;
1192 Register temp = r7;
1193
1194 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1195 if (s->NeedsContext()) {
1196 if (s->calls_sloppy_eval()) {
1197 // Check that extension is "the hole".
1198 __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1199 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1200 }
1201 __ LoadP(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1202 // Walk the rest of the chain without clobbering cp.
1203 context = next;
1204 }
1205 }
1206 // Check that last extension is "the hole".
1207 __ LoadP(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1208 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1209
  // This function is used only for loads, not stores, so it's safe to
  // return a cp-based operand (the write barrier cannot be allowed to
  // destroy the cp register).
1213 return ContextMemOperand(context, var->index());
1214 }
1215
1216
void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofMode typeof_mode,
                                                  Label* slow, Label* done) {
1220 // Generate fast-case code for variables that might be shadowed by
1221 // eval-introduced variables. Eval is used a lot without
1222 // introducing variables. In those cases, we do not want to
1223 // perform a runtime call for all variables in the scope
1224 // containing the eval.
1225 Variable* var = proxy->var();
1226 if (var->mode() == DYNAMIC_GLOBAL) {
1227 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1228 __ b(done);
1229 } else if (var->mode() == DYNAMIC_LOCAL) {
1230 Variable* local = var->local_if_not_shadowed();
1231 __ LoadP(r3, ContextSlotOperandCheckExtensions(local, slow));
1232 if (local->binding_needs_init()) {
1233 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
1234 __ bne(done);
1235 __ mov(r3, Operand(var->name()));
1236 __ push(r3);
1237 __ CallRuntime(Runtime::kThrowReferenceError);
1238 } else {
1239 __ b(done);
1240 }
1241 }
1242 }
1243
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
                                         TypeofMode typeof_mode) {
1246 // Record position before possible IC call.
1247 SetExpressionPosition(proxy);
1248 PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
1249 Variable* var = proxy->var();
1250
1251 // Three cases: global variables, lookup variables, and all other types of
1252 // variables.
1253 switch (var->location()) {
1254 case VariableLocation::UNALLOCATED: {
1255 Comment cmnt(masm_, "[ Global variable");
1256 EmitGlobalVariableLoad(proxy, typeof_mode);
1257 context()->Plug(r3);
1258 break;
1259 }
1260
1261 case VariableLocation::PARAMETER:
1262 case VariableLocation::LOCAL:
1263 case VariableLocation::CONTEXT: {
1264 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1265 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1266 : "[ Stack variable");
1267 if (proxy->hole_check_mode() == HoleCheckMode::kRequired) {
1268 // Throw a reference error when using an uninitialized let/const
1269 // binding in harmony mode.
1270 Label done;
1271 GetVar(r3, var);
1272 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
1273 __ bne(&done);
1274 __ mov(r3, Operand(var->name()));
1275 __ push(r3);
1276 __ CallRuntime(Runtime::kThrowReferenceError);
1277 __ bind(&done);
1278 context()->Plug(r3);
1279 break;
1280 }
1281 context()->Plug(var);
1282 break;
1283 }
1284
1285 case VariableLocation::LOOKUP: {
1286 Comment cmnt(masm_, "[ Lookup variable");
1287 Label done, slow;
1288 // Generate code for loading from variables potentially shadowed
1289 // by eval-introduced variables.
1290 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1291 __ bind(&slow);
1292 __ Push(var->name());
1293 Runtime::FunctionId function_id =
1294 typeof_mode == NOT_INSIDE_TYPEOF
1295 ? Runtime::kLoadLookupSlot
1296 : Runtime::kLoadLookupSlotInsideTypeof;
1297 __ CallRuntime(function_id);
1298 __ bind(&done);
1299 context()->Plug(r3);
1300 break;
1301 }
1302
1303 case VariableLocation::MODULE:
1304 UNREACHABLE();
1305 }
1306 }
1307
1308
void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1310 Expression* expression = (property == NULL) ? NULL : property->value();
1311 if (expression == NULL) {
1312 __ LoadRoot(r4, Heap::kNullValueRootIndex);
1313 PushOperand(r4);
1314 } else {
1315 VisitForStackValue(expression);
1316 if (NeedsHomeObject(expression)) {
1317 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1318 property->kind() == ObjectLiteral::Property::SETTER);
1319 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1320 EmitSetHomeObject(expression, offset, property->GetSlot());
1321 }
1322 }
1323 }
1324
1325
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1327 Comment cmnt(masm_, "[ ObjectLiteral");
1328
1329 Handle<FixedArray> constant_properties = expr->constant_properties();
1330 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1331 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1332 __ mov(r4, Operand(constant_properties));
1333 int flags = expr->ComputeFlags();
1334 __ LoadSmiLiteral(r3, Smi::FromInt(flags));
1335 if (MustCreateObjectLiteralWithRuntime(expr)) {
1336 __ Push(r6, r5, r4, r3);
1337 __ CallRuntime(Runtime::kCreateObjectLiteral);
1338 } else {
1339 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1340 __ CallStub(&stub);
1341 RestoreContext();
1342 }
1343 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1344
1345 // If result_saved is true the result is on top of the stack. If
1346 // result_saved is false the result is in r3.
1347 bool result_saved = false;
1348
1349 AccessorTable accessor_table(zone());
1350 int property_index = 0;
1351 for (; property_index < expr->properties()->length(); property_index++) {
1352 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1353 if (property->is_computed_name()) break;
1354 if (property->IsCompileTimeValue()) continue;
1355
1356 Literal* key = property->key()->AsLiteral();
1357 Expression* value = property->value();
1358 if (!result_saved) {
1359 PushOperand(r3); // Save result on stack
1360 result_saved = true;
1361 }
1362 switch (property->kind()) {
1363 case ObjectLiteral::Property::CONSTANT:
1364 UNREACHABLE();
1365 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1366 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1367 // Fall through.
1368 case ObjectLiteral::Property::COMPUTED:
1369 // It is safe to use [[Put]] here because the boilerplate already
1370 // contains computed properties with an uninitialized value.
1371 if (key->IsStringLiteral()) {
1372 DCHECK(key->IsPropertyName());
1373 if (property->emit_store()) {
1374 VisitForAccumulatorValue(value);
1375 DCHECK(StoreDescriptor::ValueRegister().is(r3));
1376 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1377 CallStoreIC(property->GetSlot(0), key->value());
1378 PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
1379
1380 if (NeedsHomeObject(value)) {
1381 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1382 }
1383 } else {
1384 VisitForEffect(value);
1385 }
1386 break;
1387 }
1388 // Duplicate receiver on stack.
1389 __ LoadP(r3, MemOperand(sp));
1390 PushOperand(r3);
1391 VisitForStackValue(key);
1392 VisitForStackValue(value);
1393 if (property->emit_store()) {
1394 if (NeedsHomeObject(value)) {
1395 EmitSetHomeObject(value, 2, property->GetSlot());
1396 }
1397 __ LoadSmiLiteral(r3, Smi::FromInt(SLOPPY)); // PropertyAttributes
1398 PushOperand(r3);
1399 CallRuntimeWithOperands(Runtime::kSetProperty);
1400 } else {
1401 DropOperands(3);
1402 }
1403 break;
1404 case ObjectLiteral::Property::PROTOTYPE:
1405 // Duplicate receiver on stack.
1406 __ LoadP(r3, MemOperand(sp));
1407 PushOperand(r3);
1408 VisitForStackValue(value);
1409 DCHECK(property->emit_store());
1410 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1411 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1412 BailoutState::NO_REGISTERS);
1413 break;
1414 case ObjectLiteral::Property::GETTER:
1415 if (property->emit_store()) {
1416 AccessorTable::Iterator it = accessor_table.lookup(key);
1417 it->second->bailout_id = expr->GetIdForPropertySet(property_index);
1418 it->second->getter = property;
1419 }
1420 break;
1421 case ObjectLiteral::Property::SETTER:
1422 if (property->emit_store()) {
1423 AccessorTable::Iterator it = accessor_table.lookup(key);
1424 it->second->bailout_id = expr->GetIdForPropertySet(property_index);
1425 it->second->setter = property;
1426 }
1427 break;
1428 }
1429 }
1430
1431 // Emit code to define accessors, using only a single call to the runtime for
1432 // each pair of corresponding getters and setters.
1433 for (AccessorTable::Iterator it = accessor_table.begin();
1434 it != accessor_table.end(); ++it) {
1435 __ LoadP(r3, MemOperand(sp)); // Duplicate receiver.
1436 PushOperand(r3);
1437 VisitForStackValue(it->first);
1438 EmitAccessor(it->second->getter);
1439 EmitAccessor(it->second->setter);
1440 __ LoadSmiLiteral(r3, Smi::FromInt(NONE));
1441 PushOperand(r3);
1442 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
1443 PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
1444 }
1445
1446 // Object literals have two parts. The "static" part on the left contains no
1447 // computed property names, and so we can compute its map ahead of time; see
1448 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1449 // starts with the first computed property name, and continues with all
1450 // properties to its right. All the code from above initializes the static
1451 // component of the object literal, and arranges for the map of the result to
1452 // reflect the static order in which the keys appear. For the dynamic
1453 // properties, we compile them into a series of "SetOwnProperty" runtime
1454 // calls. This will preserve insertion order.
1455 for (; property_index < expr->properties()->length(); property_index++) {
1456 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1457
1458 Expression* value = property->value();
1459 if (!result_saved) {
1460 PushOperand(r3); // Save result on the stack
1461 result_saved = true;
1462 }
1463
1464 __ LoadP(r3, MemOperand(sp)); // Duplicate receiver.
1465 PushOperand(r3);
1466
1467 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1468 DCHECK(!property->is_computed_name());
1469 VisitForStackValue(value);
1470 DCHECK(property->emit_store());
1471 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1472 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1473 BailoutState::NO_REGISTERS);
1474 } else {
1475 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1476 VisitForStackValue(value);
1477 if (NeedsHomeObject(value)) {
1478 EmitSetHomeObject(value, 2, property->GetSlot());
1479 }
1480
1481 switch (property->kind()) {
1482 case ObjectLiteral::Property::CONSTANT:
1483 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1484 case ObjectLiteral::Property::COMPUTED:
1485 if (property->emit_store()) {
1486 PushOperand(Smi::FromInt(NONE));
1487 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1488 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
1489 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1490 BailoutState::NO_REGISTERS);
1491 } else {
1492 DropOperands(3);
1493 }
1494 break;
1495
1496 case ObjectLiteral::Property::PROTOTYPE:
1497 UNREACHABLE();
1498 break;
1499
1500 case ObjectLiteral::Property::GETTER:
1501 PushOperand(Smi::FromInt(NONE));
1502 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
1503 break;
1504
1505 case ObjectLiteral::Property::SETTER:
1506 PushOperand(Smi::FromInt(NONE));
1507 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
1508 break;
1509 }
1510 }
1511 }
1512
1513 if (result_saved) {
1514 context()->PlugTOS();
1515 } else {
1516 context()->Plug(r3);
1517 }
1518 }
1519
1520
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1522 Comment cmnt(masm_, "[ ArrayLiteral");
1523
1524 Handle<FixedArray> constant_elements = expr->constant_elements();
1525 bool has_fast_elements =
1526 IsFastObjectElementsKind(expr->constant_elements_kind());
1527 Handle<FixedArrayBase> constant_elements_values(
1528 FixedArrayBase::cast(constant_elements->get(1)));
1529
1530 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1531 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1532 // Allocation sites are used here for pretenuring and element transitions.
1533 // With pretenuring disabled and already-fast elements, skip the tracking.
1534 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1535 }
1536
1537 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1538 __ LoadSmiLiteral(r5, Smi::FromInt(expr->literal_index()));
1539 __ mov(r4, Operand(constant_elements));
1540 if (MustCreateArrayLiteralWithRuntime(expr)) {
1541 __ LoadSmiLiteral(r3, Smi::FromInt(expr->ComputeFlags()));
1542 __ Push(r6, r5, r4, r3);
1543 __ CallRuntime(Runtime::kCreateArrayLiteral);
1544 } else {
1545 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1546 __ CallStub(&stub);
1547 RestoreContext();
1548 }
1549 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1550
1551 bool result_saved = false; // Is the result saved to the stack?
1552 ZoneList<Expression*>* subexprs = expr->values();
1553 int length = subexprs->length();
1554
1555 // Emit code to evaluate all the non-constant subexpressions and to store
1556 // them into the newly cloned array.
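// For example, in [1, 2, foo(), 3] only foo() needs an explicit keyed store
// here; the constant elements already come from the cloned boilerplate.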
1557 for (int array_index = 0; array_index < length; array_index++) {
1558 Expression* subexpr = subexprs->at(array_index);
1559 DCHECK(!subexpr->IsSpread());
1560 // If the subexpression is a literal or a simple materialized literal it
1561 // is already set in the cloned array.
1562 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1563
1564 if (!result_saved) {
1565 PushOperand(r3);
1566 result_saved = true;
1567 }
1568 VisitForAccumulatorValue(subexpr);
1569
1570 __ LoadSmiLiteral(StoreDescriptor::NameRegister(),
1571 Smi::FromInt(array_index));
1572 __ LoadP(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1573 CallKeyedStoreIC(expr->LiteralFeedbackSlot());
1574
1575 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1576 BailoutState::NO_REGISTERS);
1577 }
1578
1579 if (result_saved) {
1580 context()->PlugTOS();
1581 } else {
1582 context()->Plug(r3);
1583 }
1584 }
1585
1586
1587 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1588 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1589
1590 Comment cmnt(masm_, "[ Assignment");
1591
1592 Property* property = expr->target()->AsProperty();
1593 LhsKind assign_type = Property::GetAssignType(property);
1594
1595 // Evaluate LHS expression.
1596 switch (assign_type) {
1597 case VARIABLE:
1598 // Nothing to do here.
1599 break;
1600 case NAMED_PROPERTY:
1601 if (expr->is_compound()) {
1602 // We need the receiver both on the stack and in the register.
1603 VisitForStackValue(property->obj());
1604 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1605 } else {
1606 VisitForStackValue(property->obj());
1607 }
1608 break;
1609 case NAMED_SUPER_PROPERTY:
1610 VisitForStackValue(
1611 property->obj()->AsSuperPropertyReference()->this_var());
1612 VisitForAccumulatorValue(
1613 property->obj()->AsSuperPropertyReference()->home_object());
1614 PushOperand(result_register());
1615 if (expr->is_compound()) {
1616 const Register scratch = r4;
1617 __ LoadP(scratch, MemOperand(sp, kPointerSize));
1618 PushOperands(scratch, result_register());
1619 }
1620 break;
1621 case KEYED_SUPER_PROPERTY: {
1622 VisitForStackValue(
1623 property->obj()->AsSuperPropertyReference()->this_var());
1624 VisitForStackValue(
1625 property->obj()->AsSuperPropertyReference()->home_object());
1626 VisitForAccumulatorValue(property->key());
1627 PushOperand(result_register());
1628 if (expr->is_compound()) {
1629 const Register scratch1 = r5;
1630 const Register scratch2 = r4;
1631 __ LoadP(scratch1, MemOperand(sp, 2 * kPointerSize));
1632 __ LoadP(scratch2, MemOperand(sp, 1 * kPointerSize));
1633 PushOperands(scratch1, scratch2, result_register());
1634 }
1635 break;
1636 }
1637 case KEYED_PROPERTY:
1638 if (expr->is_compound()) {
1639 VisitForStackValue(property->obj());
1640 VisitForStackValue(property->key());
1641 __ LoadP(LoadDescriptor::ReceiverRegister(),
1642 MemOperand(sp, 1 * kPointerSize));
1643 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1644 } else {
1645 VisitForStackValue(property->obj());
1646 VisitForStackValue(property->key());
1647 }
1648 break;
1649 }
1650
1651 // For compound assignments we need another deoptimization point after the
1652 // variable/property load.
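// For example, in "o.x += y" the old value of o.x is loaded (and a bailout
// point recorded) before y is evaluated and the addition is performed.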
1653 if (expr->is_compound()) {
1654 {
1655 AccumulatorValueContext context(this);
1656 switch (assign_type) {
1657 case VARIABLE:
1658 EmitVariableLoad(expr->target()->AsVariableProxy());
1659 PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
1660 break;
1661 case NAMED_PROPERTY:
1662 EmitNamedPropertyLoad(property);
1663 PrepareForBailoutForId(property->LoadId(),
1664 BailoutState::TOS_REGISTER);
1665 break;
1666 case NAMED_SUPER_PROPERTY:
1667 EmitNamedSuperPropertyLoad(property);
1668 PrepareForBailoutForId(property->LoadId(),
1669 BailoutState::TOS_REGISTER);
1670 break;
1671 case KEYED_SUPER_PROPERTY:
1672 EmitKeyedSuperPropertyLoad(property);
1673 PrepareForBailoutForId(property->LoadId(),
1674 BailoutState::TOS_REGISTER);
1675 break;
1676 case KEYED_PROPERTY:
1677 EmitKeyedPropertyLoad(property);
1678 PrepareForBailoutForId(property->LoadId(),
1679 BailoutState::TOS_REGISTER);
1680 break;
1681 }
1682 }
1683
1684 Token::Value op = expr->binary_op();
1685 PushOperand(r3); // Left operand goes on the stack.
1686 VisitForAccumulatorValue(expr->value());
1687
1688 AccumulatorValueContext context(this);
1689 if (ShouldInlineSmiCase(op)) {
1690 EmitInlineSmiBinaryOp(expr->binary_operation(), op, expr->target(),
1691 expr->value());
1692 } else {
1693 EmitBinaryOp(expr->binary_operation(), op);
1694 }
1695
1696 // Deoptimization point in case the binary operation may have side effects.
1697 PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
1698 } else {
1699 VisitForAccumulatorValue(expr->value());
1700 }
1701
1702 SetExpressionPosition(expr);
1703
1704 // Store the value.
1705 switch (assign_type) {
1706 case VARIABLE: {
1707 VariableProxy* proxy = expr->target()->AsVariableProxy();
1708 EmitVariableAssignment(proxy->var(), expr->op(), expr->AssignmentSlot(),
1709 proxy->hole_check_mode());
1710 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
1711 context()->Plug(r3);
1712 break;
1713 }
1714 case NAMED_PROPERTY:
1715 EmitNamedPropertyAssignment(expr);
1716 break;
1717 case NAMED_SUPER_PROPERTY:
1718 EmitNamedSuperPropertyStore(property);
1719 context()->Plug(r3);
1720 break;
1721 case KEYED_SUPER_PROPERTY:
1722 EmitKeyedSuperPropertyStore(property);
1723 context()->Plug(r3);
1724 break;
1725 case KEYED_PROPERTY:
1726 EmitKeyedPropertyAssignment(expr);
1727 break;
1728 }
1729 }
1730
1731
1732 void FullCodeGenerator::VisitYield(Yield* expr) {
1733 Comment cmnt(masm_, "[ Yield");
1734 SetExpressionPosition(expr);
1735
1736 // Evaluate yielded value first; the initial iterator definition depends on
1737 // this. It stays on the stack while we update the iterator.
1738 VisitForStackValue(expr->expression());
1739
1740 Label suspend, continuation, post_runtime, resume, exception;
1741
1742 __ b(&suspend);
1743 __ bind(&continuation);
1744 // When we arrive here, r3 holds the generator object.
1745 __ RecordGeneratorContinuation();
1746 __ LoadP(r4, FieldMemOperand(r3, JSGeneratorObject::kResumeModeOffset));
1747 __ LoadP(r3, FieldMemOperand(r3, JSGeneratorObject::kInputOrDebugPosOffset));
1748 STATIC_ASSERT(JSGeneratorObject::kNext < JSGeneratorObject::kReturn);
1749 STATIC_ASSERT(JSGeneratorObject::kThrow > JSGeneratorObject::kReturn);
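// Dispatch on the resume mode, relying on the ordering asserted above:
// kNext (< kReturn) resumes normally, kReturn falls through to build a
// {value, done: true} iterator result, and kThrow (> kReturn) raises the
// input value as an exception.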
1750 __ CmpSmiLiteral(r4, Smi::FromInt(JSGeneratorObject::kReturn), r0);
1751 __ blt(&resume);
1752 __ Push(result_register());
1753 __ bgt(&exception);
1754 EmitCreateIteratorResult(true);
1755 EmitUnwindAndReturn();
1756
1757 __ bind(&exception);
1758 __ CallRuntime(expr->rethrow_on_exception() ? Runtime::kReThrow
1759 : Runtime::kThrow);
1760
1761 __ bind(&suspend);
1762 OperandStackDepthIncrement(1); // Not popped on this path.
1763 VisitForAccumulatorValue(expr->generator_object());
1764 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1765 __ LoadSmiLiteral(r4, Smi::FromInt(continuation.pos()));
1766 __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
1767 r0);
1768 __ StoreP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset), r0);
1769 __ mr(r4, cp);
1770 __ RecordWriteField(r3, JSGeneratorObject::kContextOffset, r4, r5,
1771 kLRHasBeenSaved, kDontSaveFPRegs);
1772 __ addi(r4, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1773 __ cmp(sp, r4);
1774 __ beq(&post_runtime);
1775 __ push(r3); // generator object
1776 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1777 RestoreContext();
1778 __ bind(&post_runtime);
1779 PopOperand(result_register());
1780 EmitReturnSequence();
1781
1782 __ bind(&resume);
1783 context()->Plug(result_register());
1784 }
1785
1786 void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
1787 OperandStackDepthIncrement(2);
1788 __ Push(reg1, reg2);
1789 }
1790
1791 void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1792 Register reg3) {
1793 OperandStackDepthIncrement(3);
1794 __ Push(reg1, reg2, reg3);
1795 }
1796
1797 void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1798 Register reg3, Register reg4) {
1799 OperandStackDepthIncrement(4);
1800 __ Push(reg1, reg2, reg3, reg4);
1801 }
1802
1803 void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
1804 OperandStackDepthDecrement(2);
1805 __ Pop(reg1, reg2);
1806 }
1807
1808 void FullCodeGenerator::EmitOperandStackDepthCheck() {
1809 if (FLAG_debug_code) {
1810 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1811 operand_stack_depth_ * kPointerSize;
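// i.e. fp - sp should equal the fixed frame size plus one pointer per
// operand the full code generator believes is on the operand stack.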
1812 __ sub(r3, fp, sp);
1813 __ mov(ip, Operand(expected_diff));
1814 __ cmp(r3, ip);
1815 __ Assert(eq, kUnexpectedStackDepth);
1816 }
1817 }
1818
1819 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
1820 Label allocate, done_allocate;
1821
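// Try an inline new-space allocation first; if it fails, fall back to
// Runtime::kAllocateInNewSpace, then initialize the fields either way.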
1822 __ Allocate(JSIteratorResult::kSize, r3, r5, r6, &allocate,
1823 NO_ALLOCATION_FLAGS);
1824 __ b(&done_allocate);
1825
1826 __ bind(&allocate);
1827 __ Push(Smi::FromInt(JSIteratorResult::kSize));
1828 __ CallRuntime(Runtime::kAllocateInNewSpace);
1829
1830 __ bind(&done_allocate);
1831 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r4);
1832 PopOperand(r5);
1833 __ LoadRoot(r6,
1834 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1835 __ LoadRoot(r7, Heap::kEmptyFixedArrayRootIndex);
1836 __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
1837 __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
1838 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
1839 __ StoreP(r5, FieldMemOperand(r3, JSIteratorResult::kValueOffset), r0);
1840 __ StoreP(r6, FieldMemOperand(r3, JSIteratorResult::kDoneOffset), r0);
1841 }
1842
1843
1844 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1845 Token::Value op,
1846 Expression* left_expr,
1847 Expression* right_expr) {
1848 Label done, smi_case, stub_call;
1849
1850 Register scratch1 = r5;
1851 Register scratch2 = r6;
1852
1853 // Get the arguments.
1854 Register left = r4;
1855 Register right = r3;
1856 PopOperand(left);
1857
1858 // Perform combined smi check on both operands.
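// (With kSmiTag == 0, the OR of both values has its tag bit set iff at
// least one of the operands is not a smi.)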
1859 __ orx(scratch1, left, right);
1860 STATIC_ASSERT(kSmiTag == 0);
1861 JumpPatchSite patch_site(masm_);
1862 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
1863
1864 __ bind(&stub_call);
1865 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1866 CallIC(code, expr->BinaryOperationFeedbackId());
1867 patch_site.EmitPatchInfo();
1868 __ b(&done);
1869
1870 __ bind(&smi_case);
1871 // Smi case. This code works the same way as the smi-smi case in the type
1872 // recording binary operation stub.
1873 switch (op) {
1874 case Token::SAR:
1875 __ GetLeastBitsFromSmi(scratch1, right, 5);
1876 __ ShiftRightArith(right, left, scratch1);
1877 __ ClearRightImm(right, right, Operand(kSmiTagSize + kSmiShiftSize));
1878 break;
1879 case Token::SHL: {
1880 __ GetLeastBitsFromSmi(scratch2, right, 5);
1881 #if V8_TARGET_ARCH_PPC64
1882 __ ShiftLeft_(right, left, scratch2);
1883 #else
1884 __ SmiUntag(scratch1, left);
1885 __ ShiftLeft_(scratch1, scratch1, scratch2);
1886 // Check that the *signed* result fits in a smi
1887 __ JumpIfNotSmiCandidate(scratch1, scratch2, &stub_call);
1888 __ SmiTag(right, scratch1);
1889 #endif
1890 break;
1891 }
1892 case Token::SHR: {
1893 __ SmiUntag(scratch1, left);
1894 __ GetLeastBitsFromSmi(scratch2, right, 5);
1895 __ srw(scratch1, scratch1, scratch2);
1896 // Unsigned shift is not allowed to produce a negative number.
1897 __ JumpIfNotUnsignedSmiCandidate(scratch1, r0, &stub_call);
1898 __ SmiTag(right, scratch1);
1899 break;
1900 }
1901 case Token::ADD: {
1902 __ AddAndCheckForOverflow(scratch1, left, right, scratch2, r0);
1903 __ BranchOnOverflow(&stub_call);
1904 __ mr(right, scratch1);
1905 break;
1906 }
1907 case Token::SUB: {
1908 __ SubAndCheckForOverflow(scratch1, left, right, scratch2, r0);
1909 __ BranchOnOverflow(&stub_call);
1910 __ mr(right, scratch1);
1911 break;
1912 }
1913 case Token::MUL: {
1914 Label mul_zero;
1915 #if V8_TARGET_ARCH_PPC64
1916 // Remove tag from both operands.
1917 __ SmiUntag(ip, right);
1918 __ SmiUntag(r0, left);
1919 __ Mul(scratch1, r0, ip);
1920 // Check for overflowing the smi range - no overflow if higher 33 bits of
1921 // the result are identical.
1922 __ TestIfInt32(scratch1, r0);
1923 __ bne(&stub_call);
1924 #else
1925 __ SmiUntag(ip, right);
1926 __ mullw(scratch1, left, ip);
1927 __ mulhw(scratch2, left, ip);
1928 // Check for overflowing the smi range - no overflow if higher 33 bits of
1929 // the result are identical.
1930 __ TestIfInt32(scratch2, scratch1, ip);
1931 __ bne(&stub_call);
1932 #endif
1933 // Go slow on zero result to handle -0.
1934 __ cmpi(scratch1, Operand::Zero());
1935 __ beq(&mul_zero);
1936 #if V8_TARGET_ARCH_PPC64
1937 __ SmiTag(right, scratch1);
1938 #else
1939 __ mr(right, scratch1);
1940 #endif
1941 __ b(&done);
1942 // The result is zero, but we must return -0 if a negative number was
1943 // multiplied by zero. We know one of the operands was zero.
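// Since one operand is zero, left + right equals the other (tagged) operand,
// so a negative sum means the stub must be used to produce -0.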
1944 __ bind(&mul_zero);
1945 __ add(scratch2, right, left);
1946 __ cmpi(scratch2, Operand::Zero());
1947 __ blt(&stub_call);
1948 __ LoadSmiLiteral(right, Smi::kZero);
1949 break;
1950 }
1951 case Token::BIT_OR:
1952 __ orx(right, left, right);
1953 break;
1954 case Token::BIT_AND:
1955 __ and_(right, left, right);
1956 break;
1957 case Token::BIT_XOR:
1958 __ xor_(right, left, right);
1959 break;
1960 default:
1961 UNREACHABLE();
1962 }
1963
1964 __ bind(&done);
1965 context()->Plug(r3);
1966 }
1967
1968
1969 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
1970 for (int i = 0; i < lit->properties()->length(); i++) {
1971 ClassLiteral::Property* property = lit->properties()->at(i);
1972 Expression* value = property->value();
1973
1974 Register scratch = r4;
1975 if (property->is_static()) {
1976 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // constructor
1977 } else {
1978 __ LoadP(scratch, MemOperand(sp, 0)); // prototype
1979 }
1980 PushOperand(scratch);
1981 EmitPropertyKey(property, lit->GetIdForProperty(i));
1982
1983 // The static 'prototype' property is read-only, and the non-computed
1984 // property name case is already rejected by the parser. That makes this
1985 // the only place where we need to check for an own read-only property,
1986 // so we special-case it here rather than checking every property.
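// Illustrative example (not from the original comment): a computed static
// key such as
//   class C { static ["proto" + "type"]() {} }
// can only be rejected at runtime, which is what kThrowIfStaticPrototype
// does here.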
1987 if (property->is_static() && property->is_computed_name()) {
1988 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
1989 __ push(r3);
1990 }
1991
1992 VisitForStackValue(value);
1993 if (NeedsHomeObject(value)) {
1994 EmitSetHomeObject(value, 2, property->GetSlot());
1995 }
1996
1997 switch (property->kind()) {
1998 case ClassLiteral::Property::METHOD:
1999 PushOperand(Smi::FromInt(DONT_ENUM));
2000 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
2001 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
2002 break;
2003
2004 case ClassLiteral::Property::GETTER:
2005 PushOperand(Smi::FromInt(DONT_ENUM));
2006 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
2007 break;
2008
2009 case ClassLiteral::Property::SETTER:
2010 PushOperand(Smi::FromInt(DONT_ENUM));
2011 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
2012 break;
2013
2014 case ClassLiteral::Property::FIELD:
2015 default:
2016 UNREACHABLE();
2017 }
2018 }
2019 }
2020
2021
2022 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2023 PopOperand(r4);
2024 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2025 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2026 CallIC(code, expr->BinaryOperationFeedbackId());
2027 patch_site.EmitPatchInfo();
2028 context()->Plug(r3);
2029 }
2030
2031
2032 void FullCodeGenerator::EmitAssignment(Expression* expr,
2033 FeedbackVectorSlot slot) {
2034 DCHECK(expr->IsValidReferenceExpressionOrThis());
2035
2036 Property* prop = expr->AsProperty();
2037 LhsKind assign_type = Property::GetAssignType(prop);
2038
2039 switch (assign_type) {
2040 case VARIABLE: {
2041 VariableProxy* proxy = expr->AsVariableProxy();
2042 EffectContext context(this);
2043 EmitVariableAssignment(proxy->var(), Token::ASSIGN, slot,
2044 proxy->hole_check_mode());
2045 break;
2046 }
2047 case NAMED_PROPERTY: {
2048 PushOperand(r3); // Preserve value.
2049 VisitForAccumulatorValue(prop->obj());
2050 __ Move(StoreDescriptor::ReceiverRegister(), r3);
2051 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
2052 CallStoreIC(slot, prop->key()->AsLiteral()->value());
2053 break;
2054 }
2055 case NAMED_SUPER_PROPERTY: {
2056 PushOperand(r3);
2057 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2058 VisitForAccumulatorValue(
2059 prop->obj()->AsSuperPropertyReference()->home_object());
2060 // stack: value, this; r3: home_object
2061 Register scratch = r5;
2062 Register scratch2 = r6;
2063 __ mr(scratch, result_register()); // home_object
2064 __ LoadP(r3, MemOperand(sp, kPointerSize)); // value
2065 __ LoadP(scratch2, MemOperand(sp, 0)); // this
2066 __ StoreP(scratch2, MemOperand(sp, kPointerSize)); // this
2067 __ StoreP(scratch, MemOperand(sp, 0)); // home_object
2068 // stack: this, home_object; r3: value
2069 EmitNamedSuperPropertyStore(prop);
2070 break;
2071 }
2072 case KEYED_SUPER_PROPERTY: {
2073 PushOperand(r3);
2074 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2075 VisitForStackValue(
2076 prop->obj()->AsSuperPropertyReference()->home_object());
2077 VisitForAccumulatorValue(prop->key());
2078 Register scratch = r5;
2079 Register scratch2 = r6;
2080 __ LoadP(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2081 // stack: value, this, home_object; r3: key, r6: value
2082 __ LoadP(scratch, MemOperand(sp, kPointerSize)); // this
2083 __ StoreP(scratch, MemOperand(sp, 2 * kPointerSize));
2084 __ LoadP(scratch, MemOperand(sp, 0)); // home_object
2085 __ StoreP(scratch, MemOperand(sp, kPointerSize));
2086 __ StoreP(r3, MemOperand(sp, 0));
2087 __ Move(r3, scratch2);
2088 // stack: this, home_object, key; r3: value.
2089 EmitKeyedSuperPropertyStore(prop);
2090 break;
2091 }
2092 case KEYED_PROPERTY: {
2093 PushOperand(r3); // Preserve value.
2094 VisitForStackValue(prop->obj());
2095 VisitForAccumulatorValue(prop->key());
2096 __ Move(StoreDescriptor::NameRegister(), r3);
2097 PopOperands(StoreDescriptor::ValueRegister(),
2098 StoreDescriptor::ReceiverRegister());
2099 CallKeyedStoreIC(slot);
2100 break;
2101 }
2102 }
2103 context()->Plug(r3);
2104 }
2105
2106
2107 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2108 Variable* var, MemOperand location) {
2109 __ StoreP(result_register(), location, r0);
2110 if (var->IsContextSlot()) {
2111 // RecordWrite may destroy all its register arguments.
2112 __ mr(r6, result_register());
2113 int offset = Context::SlotOffset(var->index());
2114 __ RecordWriteContextSlot(r4, offset, r6, r5, kLRHasBeenSaved,
2115 kDontSaveFPRegs);
2116 }
2117 }
2118
2119 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2120 FeedbackVectorSlot slot,
2121 HoleCheckMode hole_check_mode) {
2122 if (var->IsUnallocated()) {
2123 // Global var, const, or let.
2124 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2125 CallStoreIC(slot, var->name());
2126
2127 } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
2128 DCHECK(!var->IsLookupSlot());
2129 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2130 MemOperand location = VarOperand(var, r4);
2131 // Perform an initialization check for lexically declared variables.
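// For example, "x = 1; let x;" must throw a ReferenceError because the
// assignment happens in x's temporal dead zone, which is what the hole
// check below detects.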
2132 if (hole_check_mode == HoleCheckMode::kRequired) {
2133 Label assign;
2134 __ LoadP(r6, location);
2135 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2136 __ bne(&assign);
2137 __ mov(r6, Operand(var->name()));
2138 __ push(r6);
2139 __ CallRuntime(Runtime::kThrowReferenceError);
2140 __ bind(&assign);
2141 }
2142 if (var->mode() != CONST) {
2143 EmitStoreToStackLocalOrContextSlot(var, location);
2144 } else if (var->throw_on_const_assignment(language_mode())) {
2145 __ CallRuntime(Runtime::kThrowConstAssignError);
2146 }
2147 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2148 // Initializing assignment to const {this} needs a write barrier.
2149 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2150 Label uninitialized_this;
2151 MemOperand location = VarOperand(var, r4);
2152 __ LoadP(r6, location);
2153 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2154 __ beq(&uninitialized_this);
2155 __ mov(r4, Operand(var->name()));
2156 __ push(r4);
2157 __ CallRuntime(Runtime::kThrowReferenceError);
2158 __ bind(&uninitialized_this);
2159 EmitStoreToStackLocalOrContextSlot(var, location);
2160
2161 } else {
2162 DCHECK(var->mode() != CONST || op == Token::INIT);
2163 if (var->IsLookupSlot()) {
2164 // Assignment to var.
2165 __ Push(var->name());
2166 __ Push(r3);
2167 __ CallRuntime(is_strict(language_mode())
2168 ? Runtime::kStoreLookupSlot_Strict
2169 : Runtime::kStoreLookupSlot_Sloppy);
2170 } else {
2171 // Assignment to var or initializing assignment to let/const in harmony
2172 // mode.
2173 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2174 MemOperand location = VarOperand(var, r4);
2175 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
2176 // Check for an uninitialized let binding.
2177 __ LoadP(r5, location);
2178 __ CompareRoot(r5, Heap::kTheHoleValueRootIndex);
2179 __ Check(eq, kLetBindingReInitialization);
2180 }
2181 EmitStoreToStackLocalOrContextSlot(var, location);
2182 }
2183 }
2184 }
2185
2186
2187 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2188 // Assignment to a property, using a named store IC.
2189 Property* prop = expr->target()->AsProperty();
2190 DCHECK(prop != NULL);
2191 DCHECK(prop->key()->IsLiteral());
2192
2193 PopOperand(StoreDescriptor::ReceiverRegister());
2194 CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
2195
2196 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2197 context()->Plug(r3);
2198 }
2199
2200
2201 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2202 // Assignment to named property of super.
2203 // r3 : value
2204 // stack : receiver ('this'), home_object
2205 DCHECK(prop != NULL);
2206 Literal* key = prop->key()->AsLiteral();
2207 DCHECK(key != NULL);
2208
2209 PushOperand(key->value());
2210 PushOperand(r3);
2211 CallRuntimeWithOperands((is_strict(language_mode())
2212 ? Runtime::kStoreToSuper_Strict
2213 : Runtime::kStoreToSuper_Sloppy));
2214 }
2215
2216
2217 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2218 // Assignment to named property of super.
2219 // r3 : value
2220 // stack : receiver ('this'), home_object, key
2221 DCHECK(prop != NULL);
2222
2223 PushOperand(r3);
2224 CallRuntimeWithOperands((is_strict(language_mode())
2225 ? Runtime::kStoreKeyedToSuper_Strict
2226 : Runtime::kStoreKeyedToSuper_Sloppy));
2227 }
2228
2229
2230 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2231 // Assignment to a property, using a keyed store IC.
2232 PopOperands(StoreDescriptor::ReceiverRegister(),
2233 StoreDescriptor::NameRegister());
2234 DCHECK(StoreDescriptor::ValueRegister().is(r3));
2235
2236 CallKeyedStoreIC(expr->AssignmentSlot());
2237
2238 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2239 context()->Plug(r3);
2240 }
2241
2242 // Code common for calls using the IC.
2243 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2244 Expression* callee = expr->expression();
2245
2246 // Get the target function.
2247 ConvertReceiverMode convert_mode;
2248 if (callee->IsVariableProxy()) {
2249 {
2250 StackValueContext context(this);
2251 EmitVariableLoad(callee->AsVariableProxy());
2252 PrepareForBailout(callee, BailoutState::NO_REGISTERS);
2253 }
2254 // Push undefined as receiver. This is patched in the method prologue if it
2255 // is a sloppy mode method.
2256 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2257 PushOperand(r0);
2258 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2259 } else {
2260 // Load the function from the receiver.
2261 DCHECK(callee->IsProperty());
2262 DCHECK(!callee->AsProperty()->IsSuperAccess());
2263 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2264 EmitNamedPropertyLoad(callee->AsProperty());
2265 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2266 BailoutState::TOS_REGISTER);
2267 // Push the target function under the receiver.
2268 __ LoadP(r0, MemOperand(sp, 0));
2269 PushOperand(r0);
2270 __ StoreP(r3, MemOperand(sp, kPointerSize));
2271 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2272 }
2273
2274 EmitCall(expr, convert_mode);
2275 }
2276
2277
2278 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2279 Expression* callee = expr->expression();
2280 DCHECK(callee->IsProperty());
2281 Property* prop = callee->AsProperty();
2282 DCHECK(prop->IsSuperAccess());
2283 SetExpressionPosition(prop);
2284
2285 Literal* key = prop->key()->AsLiteral();
2286 DCHECK(!key->value()->IsSmi());
2287 // Load the function from the receiver.
2288 const Register scratch = r4;
2289 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2290 VisitForAccumulatorValue(super_ref->home_object());
2291 __ mr(scratch, r3);
2292 VisitForAccumulatorValue(super_ref->this_var());
2293 PushOperands(scratch, r3, r3, scratch);
2294 PushOperand(key->value());
2295
2296 // Stack here:
2297 // - home_object
2298 // - this (receiver)
2299 // - this (receiver) <-- LoadFromSuper will pop here and below.
2300 // - home_object
2301 // - key
2302 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
2303 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2304
2305 // Replace home_object with target function.
2306 __ StoreP(r3, MemOperand(sp, kPointerSize));
2307
2308 // Stack here:
2309 // - target function
2310 // - this (receiver)
2311 EmitCall(expr);
2312 }
2313
2314
2315 // Code common for calls using the IC.
2316 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr, Expression* key) {
2317 // Load the key.
2318 VisitForAccumulatorValue(key);
2319
2320 Expression* callee = expr->expression();
2321
2322 // Load the function from the receiver.
2323 DCHECK(callee->IsProperty());
2324 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2325 __ Move(LoadDescriptor::NameRegister(), r3);
2326 EmitKeyedPropertyLoad(callee->AsProperty());
2327 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2328 BailoutState::TOS_REGISTER);
2329
2330 // Push the target function under the receiver.
2331 __ LoadP(ip, MemOperand(sp, 0));
2332 PushOperand(ip);
2333 __ StoreP(r3, MemOperand(sp, kPointerSize));
2334
2335 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2336 }
2337
2338
2339 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2340 Expression* callee = expr->expression();
2341 DCHECK(callee->IsProperty());
2342 Property* prop = callee->AsProperty();
2343 DCHECK(prop->IsSuperAccess());
2344
2345 SetExpressionPosition(prop);
2346 // Load the function from the receiver.
2347 const Register scratch = r4;
2348 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2349 VisitForAccumulatorValue(super_ref->home_object());
2350 __ mr(scratch, r3);
2351 VisitForAccumulatorValue(super_ref->this_var());
2352 PushOperands(scratch, r3, r3, scratch);
2353 VisitForStackValue(prop->key());
2354
2355 // Stack here:
2356 // - home_object
2357 // - this (receiver)
2358 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2359 // - home_object
2360 // - key
2361 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
2362 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2363
2364 // Replace home_object with target function.
2365 __ StoreP(r3, MemOperand(sp, kPointerSize));
2366
2367 // Stack here:
2368 // - target function
2369 // - this (receiver)
2370 EmitCall(expr);
2371 }
2372
2373
2374 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2375 // Load the arguments.
2376 ZoneList<Expression*>* args = expr->arguments();
2377 int arg_count = args->length();
2378 for (int i = 0; i < arg_count; i++) {
2379 VisitForStackValue(args->at(i));
2380 }
2381
2382 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
2383 SetCallPosition(expr, expr->tail_call_mode());
2384 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2385 if (FLAG_trace) {
2386 __ CallRuntime(Runtime::kTraceTailCall);
2387 }
2388 // Update profiling counters before the tail call since we will
2389 // not return to this function.
2390 EmitProfilingCounterHandlingForReturnSequence(true);
2391 }
2392 Handle<Code> code =
2393 CodeFactory::CallIC(isolate(), mode, expr->tail_call_mode()).code();
2394 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackICSlot()));
2395 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2396 __ mov(r3, Operand(arg_count));
2397 CallIC(code);
2398 OperandStackDepthDecrement(arg_count + 1);
2399
2400 RecordJSReturnSite(expr);
2401 RestoreContext();
2402 context()->DropAndPlug(1, r3);
2403 }
2404
2405
2406 void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
2407 int arg_count = expr->arguments()->length();
2408 // r7: copy of the first argument or undefined if it doesn't exist.
2409 if (arg_count > 0) {
2410 __ LoadP(r7, MemOperand(sp, arg_count * kPointerSize), r0);
2411 } else {
2412 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
2413 }
2414
2415 // r6: the receiver of the enclosing function.
2416 __ LoadP(r6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2417
2418 // r5: language mode.
2419 __ LoadSmiLiteral(r5, Smi::FromInt(language_mode()));
2420
2421 // r4: the start position of the scope the call resides in.
2422 __ LoadSmiLiteral(r4, Smi::FromInt(scope()->start_position()));
2423
2424 // r3: the source position of the eval call.
2425 __ LoadSmiLiteral(r3, Smi::FromInt(expr->position()));
2426
2427 // Do the runtime call.
2428 __ Push(r7, r6, r5, r4, r3);
2429 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2430 }
2431
2432
2433 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
2434 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2435 VariableProxy* callee = expr->expression()->AsVariableProxy();
2436 if (callee->var()->IsLookupSlot()) {
2437 Label slow, done;
2438 SetExpressionPosition(callee);
2439 // Generate code for loading from variables potentially shadowed by
2440 // eval-introduced variables.
2441 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2442
2443 __ bind(&slow);
2444 // Call the runtime to find the function to call (returned in r3) and
2445 // the object holding it (returned in r4).
2446 __ Push(callee->name());
2447 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2448 PushOperands(r3, r4); // Function, receiver.
2449 PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);
2450
2451 // If fast case code has been generated, emit code to push the function
2452 // and receiver and have the slow path jump around this code.
2453 if (done.is_linked()) {
2454 Label call;
2455 __ b(&call);
2456 __ bind(&done);
2457 // Push function.
2458 __ push(r3);
2459 // Pass undefined as the receiver, which is the WithBaseObject of a
2460 // non-object environment record. If the callee is sloppy, it will patch
2461 // it up to be the global receiver.
2462 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2463 __ push(r4);
2464 __ bind(&call);
2465 }
2466 } else {
2467 VisitForStackValue(callee);
2468 // refEnv.WithBaseObject()
2469 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
2470 PushOperand(r5); // Reserved receiver slot.
2471 }
2472 }
2473
2474
2475 void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
2476 // In a call to eval, we first call
2477 // Runtime_ResolvePossiblyDirectEval to resolve the function we need
2478 // to call. Then we call the resolved function using the given arguments.
2479 ZoneList<Expression*>* args = expr->arguments();
2480 int arg_count = args->length();
2481
2482 PushCalleeAndWithBaseObject(expr);
2483
2484 // Push the arguments.
2485 for (int i = 0; i < arg_count; i++) {
2486 VisitForStackValue(args->at(i));
2487 }
2488
2489 // Push a copy of the function (found below the arguments) and
2490 // resolve eval.
2491 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2492 __ push(r4);
2493 EmitResolvePossiblyDirectEval(expr);
2494
2495 // Touch up the stack with the resolved function.
2496 __ StoreP(r3, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2497
2498 PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);
2499
2500 // Record source position for debugger.
2501 SetCallPosition(expr);
2502 Handle<Code> code = CodeFactory::CallIC(isolate(), ConvertReceiverMode::kAny,
2503 expr->tail_call_mode())
2504 .code();
2505 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallFeedbackICSlot()));
2506 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2507 __ mov(r3, Operand(arg_count));
2508 __ Call(code, RelocInfo::CODE_TARGET);
2509 OperandStackDepthDecrement(arg_count + 1);
2510 RecordJSReturnSite(expr);
2511 RestoreContext();
2512 context()->DropAndPlug(1, r3);
2513 }
2514
2515
2516 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2517 Comment cmnt(masm_, "[ CallNew");
2518 // According to ECMA-262, section 11.2.2, page 44, the function
2519 // expression in new calls must be evaluated before the
2520 // arguments.
2521
2522 // Push constructor on the stack. If it's not a function it's used as
2523 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2524 // ignored.
2525 DCHECK(!expr->expression()->IsSuperPropertyReference());
2526 VisitForStackValue(expr->expression());
2527
2528 // Push the arguments ("left-to-right") on the stack.
2529 ZoneList<Expression*>* args = expr->arguments();
2530 int arg_count = args->length();
2531 for (int i = 0; i < arg_count; i++) {
2532 VisitForStackValue(args->at(i));
2533 }
2534
2535 // Call the construct call builtin that handles allocation and
2536 // constructor invocation.
2537 SetConstructCallPosition(expr);
2538
2539 // Load function and argument count into r4 and r3.
2540 __ mov(r3, Operand(arg_count));
2541 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize), r0);
2542
2543 // Record call targets in unoptimized code.
2544 __ EmitLoadTypeFeedbackVector(r5);
2545 __ LoadSmiLiteral(r6, SmiFromSlot(expr->CallNewFeedbackSlot()));
2546
2547 CallConstructStub stub(isolate());
2548 CallIC(stub.GetCode());
2549 OperandStackDepthDecrement(arg_count + 1);
2550 PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
2551 RestoreContext();
2552 context()->Plug(r3);
2553 }
2554
2555
2556 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2557 SuperCallReference* super_call_ref =
2558 expr->expression()->AsSuperCallReference();
2559 DCHECK_NOT_NULL(super_call_ref);
2560
2561 // Push the super constructor target on the stack (may be null,
2562 // but the Construct builtin can deal with that properly).
2563 VisitForAccumulatorValue(super_call_ref->this_function_var());
2564 __ AssertFunction(result_register());
2565 __ LoadP(result_register(),
2566 FieldMemOperand(result_register(), HeapObject::kMapOffset));
2567 __ LoadP(result_register(),
2568 FieldMemOperand(result_register(), Map::kPrototypeOffset));
2569 PushOperand(result_register());
2570
2571 // Push the arguments ("left-to-right") on the stack.
2572 ZoneList<Expression*>* args = expr->arguments();
2573 int arg_count = args->length();
2574 for (int i = 0; i < arg_count; i++) {
2575 VisitForStackValue(args->at(i));
2576 }
2577
2578 // Call the construct call builtin that handles allocation and
2579 // constructor invocation.
2580 SetConstructCallPosition(expr);
2581
2582 // Load new target into r6.
2583 VisitForAccumulatorValue(super_call_ref->new_target_var());
2584 __ mr(r6, result_register());
2585
2586 // Load function and argument count into r4 and r3.
2587 __ mov(r3, Operand(arg_count));
2588 __ LoadP(r4, MemOperand(sp, arg_count * kPointerSize));
2589
2590 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2591 OperandStackDepthDecrement(arg_count + 1);
2592
2593 RecordJSReturnSite(expr);
2594 RestoreContext();
2595 context()->Plug(r3);
2596 }
2597
2598
2599 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2600 ZoneList<Expression*>* args = expr->arguments();
2601 DCHECK(args->length() == 1);
2602
2603 VisitForAccumulatorValue(args->at(0));
2604
2605 Label materialize_true, materialize_false;
2606 Label* if_true = NULL;
2607 Label* if_false = NULL;
2608 Label* fall_through = NULL;
2609 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2610 &if_false, &fall_through);
2611
2612 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2613 __ TestIfSmi(r3, r0);
2614 Split(eq, if_true, if_false, fall_through, cr0);
2615
2616 context()->Plug(if_true, if_false);
2617 }
2618
2619
2620 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2621 ZoneList<Expression*>* args = expr->arguments();
2622 DCHECK(args->length() == 1);
2623
2624 VisitForAccumulatorValue(args->at(0));
2625
2626 Label materialize_true, materialize_false;
2627 Label* if_true = NULL;
2628 Label* if_false = NULL;
2629 Label* fall_through = NULL;
2630 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2631 &if_false, &fall_through);
2632
2633 __ JumpIfSmi(r3, if_false);
2634 __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
2635 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2636 Split(ge, if_true, if_false, fall_through);
2637
2638 context()->Plug(if_true, if_false);
2639 }
2640
2641
2642 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2643 ZoneList<Expression*>* args = expr->arguments();
2644 DCHECK(args->length() == 1);
2645
2646 VisitForAccumulatorValue(args->at(0));
2647
2648 Label materialize_true, materialize_false;
2649 Label* if_true = NULL;
2650 Label* if_false = NULL;
2651 Label* fall_through = NULL;
2652 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2653 &if_false, &fall_through);
2654
2655 __ JumpIfSmi(r3, if_false);
2656 __ CompareObjectType(r3, r4, r4, JS_ARRAY_TYPE);
2657 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2658 Split(eq, if_true, if_false, fall_through);
2659
2660 context()->Plug(if_true, if_false);
2661 }
2662
2663
2664 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2665 ZoneList<Expression*>* args = expr->arguments();
2666 DCHECK(args->length() == 1);
2667
2668 VisitForAccumulatorValue(args->at(0));
2669
2670 Label materialize_true, materialize_false;
2671 Label* if_true = NULL;
2672 Label* if_false = NULL;
2673 Label* fall_through = NULL;
2674 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2675 &if_false, &fall_through);
2676
2677 __ JumpIfSmi(r3, if_false);
2678 __ CompareObjectType(r3, r4, r4, JS_TYPED_ARRAY_TYPE);
2679 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2680 Split(eq, if_true, if_false, fall_through);
2681
2682 context()->Plug(if_true, if_false);
2683 }
2684
2685
2686 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2687 ZoneList<Expression*>* args = expr->arguments();
2688 DCHECK(args->length() == 1);
2689
2690 VisitForAccumulatorValue(args->at(0));
2691
2692 Label materialize_true, materialize_false;
2693 Label* if_true = NULL;
2694 Label* if_false = NULL;
2695 Label* fall_through = NULL;
2696 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2697 &if_false, &fall_through);
2698
2699 __ JumpIfSmi(r3, if_false);
2700 __ CompareObjectType(r3, r4, r4, JS_REGEXP_TYPE);
2701 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2702 Split(eq, if_true, if_false, fall_through);
2703
2704 context()->Plug(if_true, if_false);
2705 }
2706
2707
2708 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2709 ZoneList<Expression*>* args = expr->arguments();
2710 DCHECK(args->length() == 1);
2711
2712 VisitForAccumulatorValue(args->at(0));
2713
2714 Label materialize_true, materialize_false;
2715 Label* if_true = NULL;
2716 Label* if_false = NULL;
2717 Label* fall_through = NULL;
2718 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2719 &if_false, &fall_through);
2720
2721 __ JumpIfSmi(r3, if_false);
2722 __ CompareObjectType(r3, r4, r4, JS_PROXY_TYPE);
2723 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2724 Split(eq, if_true, if_false, fall_through);
2725
2726 context()->Plug(if_true, if_false);
2727 }
2728
2729
2730 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2731 ZoneList<Expression*>* args = expr->arguments();
2732 DCHECK(args->length() == 1);
2733 Label done, null, function, non_function_constructor;
2734
2735 VisitForAccumulatorValue(args->at(0));
2736
2737 // If the object is not a JSReceiver, we return null.
2738 __ JumpIfSmi(r3, &null);
2739 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2740 __ CompareObjectType(r3, r3, r4, FIRST_JS_RECEIVER_TYPE);
2741 // Map is now in r3.
2742 __ blt(&null);
2743
2744 // Return 'Function' for JSFunction and JSBoundFunction objects.
2745 __ cmpli(r4, Operand(FIRST_FUNCTION_TYPE));
2746 STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
2747 __ bge(&function);
2748
2749 // Check if the constructor in the map is a JS function.
2750 Register instance_type = r5;
2751 __ GetMapConstructor(r3, r3, r4, instance_type);
2752 __ cmpi(instance_type, Operand(JS_FUNCTION_TYPE));
2753 __ bne(&non_function_constructor);
2754
2755 // r3 now contains the constructor function. Grab the
2756 // instance class name from there.
2757 __ LoadP(r3, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
2758 __ LoadP(r3,
2759 FieldMemOperand(r3, SharedFunctionInfo::kInstanceClassNameOffset));
2760 __ b(&done);
2761
2762 // Functions have class 'Function'.
2763 __ bind(&function);
2764 __ LoadRoot(r3, Heap::kFunction_stringRootIndex);
2765 __ b(&done);
2766
2767 // Objects with a non-function constructor have class 'Object'.
2768 __ bind(&non_function_constructor);
2769 __ LoadRoot(r3, Heap::kObject_stringRootIndex);
2770 __ b(&done);
2771
2772 // Non-JS objects have class null.
2773 __ bind(&null);
2774 __ LoadRoot(r3, Heap::kNullValueRootIndex);
2775
2776 // All done.
2777 __ bind(&done);
2778
2779 context()->Plug(r3);
2780 }
2781
2782
2783 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
2784 ZoneList<Expression*>* args = expr->arguments();
2785 DCHECK(args->length() == 2);
2786 VisitForStackValue(args->at(0));
2787 VisitForAccumulatorValue(args->at(1));
2788
2789 Register object = r4;
2790 Register index = r3;
2791 Register result = r6;
2792
2793 PopOperand(object);
2794
2795 Label need_conversion;
2796 Label index_out_of_range;
2797 Label done;
2798 StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
2799 &need_conversion, &index_out_of_range);
2800 generator.GenerateFast(masm_);
2801 __ b(&done);
2802
2803 __ bind(&index_out_of_range);
2804 // When the index is out of range, the spec requires us to return
2805 // NaN.
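// (e.g. "abc".charCodeAt(5), which uses this intrinsic, evaluates to NaN.)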
2806 __ LoadRoot(result, Heap::kNanValueRootIndex);
2807 __ b(&done);
2808
2809 __ bind(&need_conversion);
2810 // Load the undefined value into the result register, which will
2811 // trigger conversion.
2812 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2813 __ b(&done);
2814
2815 NopRuntimeCallHelper call_helper;
2816 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
2817
2818 __ bind(&done);
2819 context()->Plug(result);
2820 }
2821
2822
2823 void FullCodeGenerator::EmitCall(CallRuntime* expr) {
2824 ZoneList<Expression*>* args = expr->arguments();
2825 DCHECK_LE(2, args->length());
2826 // Push target, receiver and arguments onto the stack.
2827 for (Expression* const arg : *args) {
2828 VisitForStackValue(arg);
2829 }
2830 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
2831 // Move target to r4.
2832 int const argc = args->length() - 2;
2833 __ LoadP(r4, MemOperand(sp, (argc + 1) * kPointerSize));
2834 // Call the target.
2835 __ mov(r3, Operand(argc));
2836 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2837 OperandStackDepthDecrement(argc + 1);
2838 RestoreContext();
2839 // Discard the function left on TOS.
2840 context()->DropAndPlug(1, r3);
2841 }
2842
2843 void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
2844 ZoneList<Expression*>* args = expr->arguments();
2845 DCHECK_EQ(1, args->length());
2846 VisitForAccumulatorValue(args->at(0));
2847 __ AssertFunction(r3);
2848 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
2849 __ LoadP(r3, FieldMemOperand(r3, Map::kPrototypeOffset));
2850 context()->Plug(r3);
2851 }
2852
2853 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
2854 DCHECK(expr->arguments()->length() == 0);
2855 ExternalReference debug_is_active =
2856 ExternalReference::debug_is_active_address(isolate());
2857 __ mov(ip, Operand(debug_is_active));
2858 __ lbz(r3, MemOperand(ip));
2859 __ SmiTag(r3);
2860 context()->Plug(r3);
2861 }
2862
2863
2864 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
2865 ZoneList<Expression*>* args = expr->arguments();
2866 DCHECK_EQ(2, args->length());
2867 VisitForStackValue(args->at(0));
2868 VisitForStackValue(args->at(1));
2869
2870 Label runtime, done;
2871
2872 __ Allocate(JSIteratorResult::kSize, r3, r5, r6, &runtime,
2873 NO_ALLOCATION_FLAGS);
2874 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, r4);
2875 __ Pop(r5, r6);
2876 __ LoadRoot(r7, Heap::kEmptyFixedArrayRootIndex);
2877 __ StoreP(r4, FieldMemOperand(r3, HeapObject::kMapOffset), r0);
2878 __ StoreP(r7, FieldMemOperand(r3, JSObject::kPropertiesOffset), r0);
2879 __ StoreP(r7, FieldMemOperand(r3, JSObject::kElementsOffset), r0);
2880 __ StoreP(r5, FieldMemOperand(r3, JSIteratorResult::kValueOffset), r0);
2881 __ StoreP(r6, FieldMemOperand(r3, JSIteratorResult::kDoneOffset), r0);
2882 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
2883 __ b(&done);
2884
2885 __ bind(&runtime);
2886 CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
2887
2888 __ bind(&done);
2889 context()->Plug(r3);
2890 }
2891
2892
2893 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
2894 // Push function.
2895 __ LoadNativeContextSlot(expr->context_index(), r3);
2896 PushOperand(r3);
2897
2898 // Push undefined as the receiver.
2899 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
2900 PushOperand(r3);
2901 }
2902
2903
2904 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
2905 ZoneList<Expression*>* args = expr->arguments();
2906 int arg_count = args->length();
2907
2908 SetCallPosition(expr);
2909 __ LoadP(r4, MemOperand(sp, (arg_count + 1) * kPointerSize), r0);
2910 __ mov(r3, Operand(arg_count));
2911 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
2912 RelocInfo::CODE_TARGET);
2913 OperandStackDepthDecrement(arg_count + 1);
2914 RestoreContext();
2915 }
2916
2917
2918 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
2919 switch (expr->op()) {
2920 case Token::DELETE: {
2921 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
2922 Property* property = expr->expression()->AsProperty();
2923 VariableProxy* proxy = expr->expression()->AsVariableProxy();
2924
2925 if (property != NULL) {
2926 VisitForStackValue(property->obj());
2927 VisitForStackValue(property->key());
2928 CallRuntimeWithOperands(is_strict(language_mode())
2929 ? Runtime::kDeleteProperty_Strict
2930 : Runtime::kDeleteProperty_Sloppy);
2931 context()->Plug(r3);
2932 } else if (proxy != NULL) {
2933 Variable* var = proxy->var();
2934 // Delete of an unqualified identifier is disallowed in strict mode but
2935 // "delete this" is allowed.
2936 bool is_this = var->is_this();
2937 DCHECK(is_sloppy(language_mode()) || is_this);
2938 if (var->IsUnallocated()) {
2939 __ LoadGlobalObject(r5);
2940 __ mov(r4, Operand(var->name()));
2941 __ Push(r5, r4);
2942 __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
2943 context()->Plug(r3);
2944 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
2945 // Result of deleting non-global, non-dynamic variables is false.
2946 // The subexpression does not have side effects.
2947 context()->Plug(is_this);
2948 } else {
2949 // Non-global variable. Call the runtime to try to delete from the
2950 // context where the variable was introduced.
2951 __ Push(var->name());
2952 __ CallRuntime(Runtime::kDeleteLookupSlot);
2953 context()->Plug(r3);
2954 }
2955 } else {
2956 // Result of deleting non-property, non-variable reference is true.
2957 // The subexpression may have side effects.
2958 VisitForEffect(expr->expression());
2959 context()->Plug(true);
2960 }
2961 break;
2962 }
2963
2964 case Token::VOID: {
2965 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
2966 VisitForEffect(expr->expression());
2967 context()->Plug(Heap::kUndefinedValueRootIndex);
2968 break;
2969 }
2970
2971 case Token::NOT: {
2972 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
2973 if (context()->IsEffect()) {
2974 // Unary NOT has no side effects so it's only necessary to visit the
2975 // subexpression. Match the optimizing compiler by not branching.
2976 VisitForEffect(expr->expression());
2977 } else if (context()->IsTest()) {
2978 const TestContext* test = TestContext::cast(context());
2979 // The labels are swapped for the recursive call.
2980 VisitForControl(expr->expression(), test->false_label(),
2981 test->true_label(), test->fall_through());
2982 context()->Plug(test->true_label(), test->false_label());
2983 } else {
2984 // We handle value contexts explicitly rather than simply visiting
2985 // for control and plugging the control flow into the context,
2986 // because we need to prepare a pair of extra administrative AST ids
2987 // for the optimizing compiler.
2988 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
2989 Label materialize_true, materialize_false, done;
2990 VisitForControl(expr->expression(), &materialize_false,
2991 &materialize_true, &materialize_true);
2992 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
2993 __ bind(&materialize_true);
2994 PrepareForBailoutForId(expr->MaterializeTrueId(),
2995 BailoutState::NO_REGISTERS);
2996 __ LoadRoot(r3, Heap::kTrueValueRootIndex);
2997 if (context()->IsStackValue()) __ push(r3);
2998 __ b(&done);
2999 __ bind(&materialize_false);
3000 PrepareForBailoutForId(expr->MaterializeFalseId(),
3001 BailoutState::NO_REGISTERS);
3002 __ LoadRoot(r3, Heap::kFalseValueRootIndex);
3003 if (context()->IsStackValue()) __ push(r3);
3004 __ bind(&done);
3005 }
3006 break;
3007 }
3008
3009 case Token::TYPEOF: {
3010 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3011 {
3012 AccumulatorValueContext context(this);
3013 VisitForTypeofValue(expr->expression());
3014 }
3015 __ mr(r6, r3);
3016 __ Call(isolate()->builtins()->Typeof(), RelocInfo::CODE_TARGET);
3017 context()->Plug(r3);
3018 break;
3019 }
3020
3021 default:
3022 UNREACHABLE();
3023 }
3024 }
3025
3026
3027 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3028 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
3029
3030 Comment cmnt(masm_, "[ CountOperation");
3031
3032 Property* prop = expr->expression()->AsProperty();
3033 LhsKind assign_type = Property::GetAssignType(prop);
3034
3035 // Evaluate expression and get value.
3036 if (assign_type == VARIABLE) {
3037 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
3038 AccumulatorValueContext context(this);
3039 EmitVariableLoad(expr->expression()->AsVariableProxy());
3040 } else {
3041 // Reserve space for result of postfix operation.
3042 if (expr->is_postfix() && !context()->IsEffect()) {
3043 __ LoadSmiLiteral(ip, Smi::kZero);
3044 PushOperand(ip);
3045 }
3046 switch (assign_type) {
3047 case NAMED_PROPERTY: {
3048 // Put the object both on the stack and in the register.
3049 VisitForStackValue(prop->obj());
3050 __ LoadP(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3051 EmitNamedPropertyLoad(prop);
3052 break;
3053 }
3054
3055 case NAMED_SUPER_PROPERTY: {
3056 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3057 VisitForAccumulatorValue(
3058 prop->obj()->AsSuperPropertyReference()->home_object());
3059 const Register scratch = r4;
3060 __ LoadP(scratch, MemOperand(sp, 0)); // this
3061 PushOperands(result_register(), scratch, result_register());
3062 EmitNamedSuperPropertyLoad(prop);
3063 break;
3064 }
3065
3066 case KEYED_SUPER_PROPERTY: {
3067 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3068 VisitForStackValue(
3069 prop->obj()->AsSuperPropertyReference()->home_object());
3070 VisitForAccumulatorValue(prop->key());
3071 const Register scratch1 = r4;
3072 const Register scratch2 = r5;
3073 __ LoadP(scratch1, MemOperand(sp, 1 * kPointerSize)); // this
3074 __ LoadP(scratch2, MemOperand(sp, 0 * kPointerSize)); // home object
3075 PushOperands(result_register(), scratch1, scratch2, result_register());
3076 EmitKeyedSuperPropertyLoad(prop);
3077 break;
3078 }
3079
3080 case KEYED_PROPERTY: {
3081 VisitForStackValue(prop->obj());
3082 VisitForStackValue(prop->key());
3083 __ LoadP(LoadDescriptor::ReceiverRegister(),
3084 MemOperand(sp, 1 * kPointerSize));
3085 __ LoadP(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
3086 EmitKeyedPropertyLoad(prop);
3087 break;
3088 }
3089
3090 case VARIABLE:
3091 UNREACHABLE();
3092 }
3093 }
3094
3095 // We need a second deoptimization point after loading the value
3096   // in case evaluating the property load may have a side effect.
3097 if (assign_type == VARIABLE) {
3098 PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
3099 } else {
3100 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
3101 }
3102
3103 // Inline smi case if we are in a loop.
3104 Label stub_call, done;
3105 JumpPatchSite patch_site(masm_);
3106
3107 int count_value = expr->op() == Token::INC ? 1 : -1;
3108 if (ShouldInlineSmiCase(expr->op())) {
3109 Label slow;
3110 patch_site.EmitJumpIfNotSmi(r3, &slow);
3111
3112 // Save result for postfix expressions.
3113 if (expr->is_postfix()) {
3114 if (!context()->IsEffect()) {
3115 // Save the result on the stack. If we have a named or keyed property
3116 // we store the result under the receiver that is currently on top
3117 // of the stack.
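          // The store offset skips the receiver (and, for keyed and super
          // accesses, the key or home object) still sitting above the
          // reserved result slot.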
3118 switch (assign_type) {
3119 case VARIABLE:
3120 __ push(r3);
3121 break;
3122 case NAMED_PROPERTY:
3123 __ StoreP(r3, MemOperand(sp, kPointerSize));
3124 break;
3125 case NAMED_SUPER_PROPERTY:
3126 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
3127 break;
3128 case KEYED_PROPERTY:
3129 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
3130 break;
3131 case KEYED_SUPER_PROPERTY:
3132 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
3133 break;
3134 }
3135 }
3136 }
3137
3138 Register scratch1 = r4;
3139 Register scratch2 = r5;
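    // Add the smi-tagged count to the old value; on overflow, undo the
    // addition below and fall back to the stub call.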
3140 __ LoadSmiLiteral(scratch1, Smi::FromInt(count_value));
3141 __ AddAndCheckForOverflow(r3, r3, scratch1, scratch2, r0);
3142 __ BranchOnNoOverflow(&done);
3143 // Call stub. Undo operation first.
3144 __ sub(r3, r3, scratch1);
3145 __ b(&stub_call);
3146 __ bind(&slow);
3147 }
3148
3149 // Convert old value into a number.
3150 __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
3151 RestoreContext();
3152 PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);
3153
3154 // Save result for postfix expressions.
3155 if (expr->is_postfix()) {
3156 if (!context()->IsEffect()) {
3157 // Save the result on the stack. If we have a named or keyed property
3158 // we store the result under the receiver that is currently on top
3159 // of the stack.
3160 switch (assign_type) {
3161 case VARIABLE:
3162 PushOperand(r3);
3163 break;
3164 case NAMED_PROPERTY:
3165 __ StoreP(r3, MemOperand(sp, kPointerSize));
3166 break;
3167 case NAMED_SUPER_PROPERTY:
3168 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
3169 break;
3170 case KEYED_PROPERTY:
3171 __ StoreP(r3, MemOperand(sp, 2 * kPointerSize));
3172 break;
3173 case KEYED_SUPER_PROPERTY:
3174 __ StoreP(r3, MemOperand(sp, 3 * kPointerSize));
3175 break;
3176 }
3177 }
3178 }
3179
3180 __ bind(&stub_call);
3181 __ mr(r4, r3);
3182 __ LoadSmiLiteral(r3, Smi::FromInt(count_value));
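  // The generic path always emits an ADD; for Token::DEC the count value
  // loaded above is -1, so the addition performs the decrement.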
3183
3184 SetExpressionPosition(expr);
3185
3186 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
3187 CallIC(code, expr->CountBinOpFeedbackId());
3188 patch_site.EmitPatchInfo();
3189 __ bind(&done);
3190
3191 // Store the value returned in r3.
3192 switch (assign_type) {
3193 case VARIABLE: {
3194 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3195 if (expr->is_postfix()) {
3196 {
3197 EffectContext context(this);
3198 EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
3199 proxy->hole_check_mode());
3200 PrepareForBailoutForId(expr->AssignmentId(),
3201 BailoutState::TOS_REGISTER);
3202 context.Plug(r3);
3203 }
3204       // For all contexts except the effect context, we have the result
3205       // on top of the stack.
3206 if (!context()->IsEffect()) {
3207 context()->PlugTOS();
3208 }
3209 } else {
3210 EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
3211 proxy->hole_check_mode());
3212 PrepareForBailoutForId(expr->AssignmentId(),
3213 BailoutState::TOS_REGISTER);
3214 context()->Plug(r3);
3215 }
3216 break;
3217 }
3218 case NAMED_PROPERTY: {
3219 PopOperand(StoreDescriptor::ReceiverRegister());
3220 CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
3221 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3222 if (expr->is_postfix()) {
3223 if (!context()->IsEffect()) {
3224 context()->PlugTOS();
3225 }
3226 } else {
3227 context()->Plug(r3);
3228 }
3229 break;
3230 }
3231 case NAMED_SUPER_PROPERTY: {
3232 EmitNamedSuperPropertyStore(prop);
3233 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3234 if (expr->is_postfix()) {
3235 if (!context()->IsEffect()) {
3236 context()->PlugTOS();
3237 }
3238 } else {
3239 context()->Plug(r3);
3240 }
3241 break;
3242 }
3243 case KEYED_SUPER_PROPERTY: {
3244 EmitKeyedSuperPropertyStore(prop);
3245 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3246 if (expr->is_postfix()) {
3247 if (!context()->IsEffect()) {
3248 context()->PlugTOS();
3249 }
3250 } else {
3251 context()->Plug(r3);
3252 }
3253 break;
3254 }
3255 case KEYED_PROPERTY: {
3256 PopOperands(StoreDescriptor::ReceiverRegister(),
3257 StoreDescriptor::NameRegister());
3258 CallKeyedStoreIC(expr->CountSlot());
3259 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3260 if (expr->is_postfix()) {
3261 if (!context()->IsEffect()) {
3262 context()->PlugTOS();
3263 }
3264 } else {
3265 context()->Plug(r3);
3266 }
3267 break;
3268 }
3269 }
3270 }
3271
3272
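// Emits an inlined check for comparisons of the form
// `typeof sub_expr == "number"` (or another type name literal), testing the
// value's type directly instead of materializing the typeof string.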
3273 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3274 Expression* sub_expr,
3275 Handle<String> check) {
3276 Label materialize_true, materialize_false;
3277 Label* if_true = NULL;
3278 Label* if_false = NULL;
3279 Label* fall_through = NULL;
3280 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3281 &if_false, &fall_through);
3282
3283 {
3284 AccumulatorValueContext context(this);
3285 VisitForTypeofValue(sub_expr);
3286 }
3287 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3288
3289 Factory* factory = isolate()->factory();
3290 if (String::Equals(check, factory->number_string())) {
3291 __ JumpIfSmi(r3, if_true);
3292 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3293 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
3294 __ cmp(r3, ip);
3295 Split(eq, if_true, if_false, fall_through);
3296 } else if (String::Equals(check, factory->string_string())) {
3297 __ JumpIfSmi(r3, if_false);
3298 __ CompareObjectType(r3, r3, r4, FIRST_NONSTRING_TYPE);
3299 Split(lt, if_true, if_false, fall_through);
3300 } else if (String::Equals(check, factory->symbol_string())) {
3301 __ JumpIfSmi(r3, if_false);
3302 __ CompareObjectType(r3, r3, r4, SYMBOL_TYPE);
3303 Split(eq, if_true, if_false, fall_through);
3304 } else if (String::Equals(check, factory->boolean_string())) {
3305 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
3306 __ beq(if_true);
3307 __ CompareRoot(r3, Heap::kFalseValueRootIndex);
3308 Split(eq, if_true, if_false, fall_through);
3309 } else if (String::Equals(check, factory->undefined_string())) {
3310 __ CompareRoot(r3, Heap::kNullValueRootIndex);
3311 __ beq(if_false);
3312 __ JumpIfSmi(r3, if_false);
3313 // Check for undetectable objects => true.
3314 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3315 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
3316 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3317 Split(ne, if_true, if_false, fall_through, cr0);
3318
3319 } else if (String::Equals(check, factory->function_string())) {
3320 __ JumpIfSmi(r3, if_false);
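    // Check for callable and not undetectable objects => true.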
3321 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3322 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
3323 __ andi(r4, r4,
3324 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3325 __ cmpi(r4, Operand(1 << Map::kIsCallable));
3326 Split(eq, if_true, if_false, fall_through);
3327 } else if (String::Equals(check, factory->object_string())) {
3328 __ JumpIfSmi(r3, if_false);
3329 __ CompareRoot(r3, Heap::kNullValueRootIndex);
3330 __ beq(if_true);
3331 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3332 __ CompareObjectType(r3, r3, r4, FIRST_JS_RECEIVER_TYPE);
3333 __ blt(if_false);
3334 // Check for callable or undetectable objects => false.
3335 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
3336 __ andi(r0, r4,
3337 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3338 Split(eq, if_true, if_false, fall_through, cr0);
3339 // clang-format off
3340 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
3341 } else if (String::Equals(check, factory->type##_string())) { \
3342 __ JumpIfSmi(r3, if_false); \
3343 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset)); \
3344 __ CompareRoot(r3, Heap::k##Type##MapRootIndex); \
3345 Split(eq, if_true, if_false, fall_through);
3346 SIMD128_TYPES(SIMD128_TYPE)
3347 #undef SIMD128_TYPE
3348 // clang-format on
3349 } else {
3350 if (if_false != fall_through) __ b(if_false);
3351 }
3352 context()->Plug(if_true, if_false);
3353 }
3354
3355
3356 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
3357 Comment cmnt(masm_, "[ CompareOperation");
3358
3359 // First we try a fast inlined version of the compare when one of
3360 // the operands is a literal.
3361 if (TryLiteralCompare(expr)) return;
3362
3363 // Always perform the comparison for its control flow. Pack the result
3364 // into the expression's context after the comparison is performed.
3365 Label materialize_true, materialize_false;
3366 Label* if_true = NULL;
3367 Label* if_false = NULL;
3368 Label* fall_through = NULL;
3369 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3370 &if_false, &fall_through);
3371
3372 Token::Value op = expr->op();
3373 VisitForStackValue(expr->left());
3374 switch (op) {
3375 case Token::IN:
3376 VisitForStackValue(expr->right());
3377 SetExpressionPosition(expr);
3378 EmitHasProperty();
3379 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3380 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
3381 Split(eq, if_true, if_false, fall_through);
3382 break;
3383
3384 case Token::INSTANCEOF: {
3385 VisitForAccumulatorValue(expr->right());
3386 SetExpressionPosition(expr);
3387 PopOperand(r4);
3388 __ Call(isolate()->builtins()->InstanceOf(), RelocInfo::CODE_TARGET);
3389 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3390 __ CompareRoot(r3, Heap::kTrueValueRootIndex);
3391 Split(eq, if_true, if_false, fall_through);
3392 break;
3393 }
3394
3395 default: {
3396 VisitForAccumulatorValue(expr->right());
3397 SetExpressionPosition(expr);
3398 Condition cond = CompareIC::ComputeCondition(op);
3399 PopOperand(r4);
3400
3401 bool inline_smi_code = ShouldInlineSmiCase(op);
3402 JumpPatchSite patch_site(masm_);
3403 if (inline_smi_code) {
3404 Label slow_case;
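        // If both operands are smis, their bitwise OR is a smi as well, so a
        // single tag check covers both values.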
3405 __ orx(r5, r3, r4);
3406 patch_site.EmitJumpIfNotSmi(r5, &slow_case);
3407 __ cmp(r4, r3);
3408 Split(cond, if_true, if_false, NULL);
3409 __ bind(&slow_case);
3410 }
3411
3412 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
3413 CallIC(ic, expr->CompareOperationFeedbackId());
3414 patch_site.EmitPatchInfo();
3415 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3416 __ cmpi(r3, Operand::Zero());
3417 Split(cond, if_true, if_false, fall_through);
3418 }
3419 }
3420
3421 // Convert the result of the comparison into one expected for this
3422 // expression's context.
3423 context()->Plug(if_true, if_false);
3424 }
3425
3426
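// Emits an inlined comparison of sub_expr against null or undefined,
// e.g. `x === null` or `x == undefined`.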
3427 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
3428 Expression* sub_expr,
3429 NilValue nil) {
3430 Label materialize_true, materialize_false;
3431 Label* if_true = NULL;
3432 Label* if_false = NULL;
3433 Label* fall_through = NULL;
3434 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
3435 &if_false, &fall_through);
3436
3437 VisitForAccumulatorValue(sub_expr);
3438 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3439 if (expr->op() == Token::EQ_STRICT) {
3440 Heap::RootListIndex nil_value = nil == kNullValue
3441 ? Heap::kNullValueRootIndex
3442 : Heap::kUndefinedValueRootIndex;
3443 __ LoadRoot(r4, nil_value);
3444 __ cmp(r3, r4);
3445 Split(eq, if_true, if_false, fall_through);
3446 } else {
3447 __ JumpIfSmi(r3, if_false);
3448 __ LoadP(r3, FieldMemOperand(r3, HeapObject::kMapOffset));
3449 __ lbz(r4, FieldMemOperand(r3, Map::kBitFieldOffset));
3450 __ andi(r0, r4, Operand(1 << Map::kIsUndetectable));
3451 Split(ne, if_true, if_false, fall_through, cr0);
3452 }
3453 context()->Plug(if_true, if_false);
3454 }
3455
3456
3457 Register FullCodeGenerator::result_register() { return r3; }
3458
3459
3460 Register FullCodeGenerator::context_register() { return cp; }
3461
3462 void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
3463 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
3464 __ LoadP(value, MemOperand(fp, frame_offset), r0);
3465 }
3466
3467 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
3468 DCHECK_EQ(static_cast<int>(POINTER_SIZE_ALIGN(frame_offset)), frame_offset);
3469 __ StoreP(value, MemOperand(fp, frame_offset), r0);
3470 }
3471
3472
3473 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
3474 __ LoadP(dst, ContextMemOperand(cp, context_index), r0);
3475 }
3476
3477
3478 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
3479 DeclarationScope* closure_scope = scope()->GetClosureScope();
3480 if (closure_scope->is_script_scope() ||
3481 closure_scope->is_module_scope()) {
3482 // Contexts nested in the native context have a canonical empty function
3483 // as their closure, not the anonymous closure containing the global
3484 // code.
3485 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, ip);
3486 } else if (closure_scope->is_eval_scope()) {
3487 // Contexts created by a call to eval have the same closure as the
3488 // context calling eval, not the anonymous closure containing the eval
3489 // code. Fetch it from the context.
3490 __ LoadP(ip, ContextMemOperand(cp, Context::CLOSURE_INDEX));
3491 } else {
3492 DCHECK(closure_scope->is_function_scope());
3493 __ LoadP(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3494 }
3495 PushOperand(ip);
3496 }
3497
3498
3499 // ----------------------------------------------------------------------------
3500 // Non-local control flow support.
3501
3502 void FullCodeGenerator::EnterFinallyBlock() {
3503 DCHECK(!result_register().is(r4));
3504 // Store pending message while executing finally block.
3505 ExternalReference pending_message_obj =
3506 ExternalReference::address_of_pending_message_obj(isolate());
3507 __ mov(ip, Operand(pending_message_obj));
3508 __ LoadP(r4, MemOperand(ip));
3509 PushOperand(r4);
3510
3511 ClearPendingMessage();
3512 }
3513
3514
3515 void FullCodeGenerator::ExitFinallyBlock() {
3516 DCHECK(!result_register().is(r4));
3517 // Restore pending message from stack.
3518 PopOperand(r4);
3519 ExternalReference pending_message_obj =
3520 ExternalReference::address_of_pending_message_obj(isolate());
3521 __ mov(ip, Operand(pending_message_obj));
3522 __ StoreP(r4, MemOperand(ip));
3523 }
3524
3525
3526 void FullCodeGenerator::ClearPendingMessage() {
3527 DCHECK(!result_register().is(r4));
3528 ExternalReference pending_message_obj =
3529 ExternalReference::address_of_pending_message_obj(isolate());
3530 __ LoadRoot(r4, Heap::kTheHoleValueRootIndex);
3531 __ mov(ip, Operand(pending_message_obj));
3532 __ StoreP(r4, MemOperand(ip));
3533 }
3534
3535
3536 void FullCodeGenerator::DeferredCommands::EmitCommands() {
3537 DCHECK(!result_register().is(r4));
3538 // Restore the accumulator (r3) and token (r4).
3539 __ Pop(r4, result_register());
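  // Each entry records a control transfer (return, throw, break or continue)
  // that was deferred so the finally block could run first; the token in r4
  // selects which one to resume.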
3540 for (DeferredCommand cmd : commands_) {
3541 Label skip;
3542 __ CmpSmiLiteral(r4, Smi::FromInt(cmd.token), r0);
3543 __ bne(&skip);
3544 switch (cmd.command) {
3545 case kReturn:
3546 codegen_->EmitUnwindAndReturn();
3547 break;
3548 case kThrow:
3549 __ Push(result_register());
3550 __ CallRuntime(Runtime::kReThrow);
3551 break;
3552 case kContinue:
3553 codegen_->EmitContinue(cmd.target);
3554 break;
3555 case kBreak:
3556 codegen_->EmitBreak(cmd.target);
3557 break;
3558 }
3559 __ bind(&skip);
3560 }
3561 }
3562
3563 #undef __
3564
3565
3566 void BackEdgeTable::PatchAt(Code* unoptimized_code, Address pc,
3567 BackEdgeState target_state,
3568 Code* replacement_code) {
3569 Address mov_address = Assembler::target_address_from_return_address(pc);
3570 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
3571 Isolate* isolate = unoptimized_code->GetIsolate();
3572 CodePatcher patcher(isolate, cmp_address, 1);
3573
3574 switch (target_state) {
3575 case INTERRUPT: {
3576 // <decrement profiling counter>
3577 // cmpi r6, 0
3578 // bge <ok> ;; not changed
3579 // mov r12, <interrupt stub address>
3580 // mtlr r12
3581 // blrl
3582 // <reset profiling counter>
3583 // ok-label
3584 patcher.masm()->cmpi(r6, Operand::Zero());
3585 break;
3586 }
3587 case ON_STACK_REPLACEMENT:
3588 // <decrement profiling counter>
3589 // crset
3590 // bge <ok> ;; not changed
3591 // mov r12, <on-stack replacement address>
3592 // mtlr r12
3593 // blrl
3594 // <reset profiling counter>
3595 // ok-label ----- pc_after points here
3596
3597 // Set the LT bit such that bge is a NOP
3598 patcher.masm()->crset(Assembler::encode_crbit(cr7, CR_LT));
3599 break;
3600 }
3601
3602 // Replace the stack check address in the mov sequence with the
3603 // entry address of the replacement code.
3604 Assembler::set_target_address_at(isolate, mov_address, unoptimized_code,
3605 replacement_code->entry());
3606
3607 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
3608 unoptimized_code, mov_address, replacement_code);
3609 }
3610
3611
3612 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
3613 Isolate* isolate, Code* unoptimized_code, Address pc) {
3614 Address mov_address = Assembler::target_address_from_return_address(pc);
3615 Address cmp_address = mov_address - 2 * Assembler::kInstrSize;
3616 #ifdef DEBUG
3617 Address interrupt_address =
3618 Assembler::target_address_at(mov_address, unoptimized_code);
3619 #endif
3620
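  // The state is inferred from the instruction at cmp_address: the original
  // cmpi means the interrupt check is still active, while a crset means the
  // back edge was patched for on-stack replacement (see PatchAt above).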
3621 if (Assembler::IsCmpImmediate(Assembler::instr_at(cmp_address))) {
3622 DCHECK(interrupt_address == isolate->builtins()->InterruptCheck()->entry());
3623 return INTERRUPT;
3624 }
3625
3626 DCHECK(Assembler::IsCrSet(Assembler::instr_at(cmp_address)));
3627
3628 DCHECK(interrupt_address ==
3629 isolate->builtins()->OnStackReplacement()->entry());
3630 return ON_STACK_REPLACEMENT;
3631 }
3632 } // namespace internal
3633 } // namespace v8
3634 #endif // V8_TARGET_ARCH_PPC
3635