1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #if V8_TARGET_ARCH_MIPS
6
7 // Note on Mips implementation:
8 //
9 // The result_register() for mips is the 'v0' register, which is defined
10 // by the ABI to contain function return values. However, the first
11 // parameter to a function is defined to be 'a0'. So there are many
12 // places where we have to move a previous result in v0 to a0 for the
13 // next call: mov(a0, v0). This is not needed on the other architectures.
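//
// For example (an illustrative pattern taken from the code below, not a fixed
// rule), a typical call sequence first evaluates an expression into v0 and
// then moves it into the first argument register:
//   VisitForAccumulatorValue(expr);    // result ends up in v0
//   __ mov(a0, result_register());     // move it into a0 for the call
//   CallIC(...);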
14
15 #include "src/full-codegen/full-codegen.h"
16 #include "src/ast/compile-time-value.h"
17 #include "src/ast/scopes.h"
18 #include "src/code-factory.h"
19 #include "src/code-stubs.h"
20 #include "src/codegen.h"
21 #include "src/compilation-info.h"
22 #include "src/compiler.h"
23 #include "src/debug/debug.h"
24 #include "src/ic/ic.h"
25
26 #include "src/mips/code-stubs-mips.h"
27 #include "src/mips/macro-assembler-mips.h"
28
29 namespace v8 {
30 namespace internal {
31
32 #define __ ACCESS_MASM(masm())
33
34 // A patch site is a location in the code which can be patched. This
35 // class has a number of methods to emit the code which is patchable and the
36 // method EmitPatchInfo to record a marker back to the patchable code. This
37 // marker is an andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
38 // (raw 16 bit immediate value is used) is the delta from the pc to the first
39 // instruction of the patchable code.
40 // The marker instruction is effectively a NOP (dest is zero_reg) and will
41 // never be emitted by normal code.
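//
// For example, if EmitPatchInfo() runs 12 instructions after the patch site
// was bound, the emitted marker is andi(zero_reg, zero_reg, 12), because
// 12 / kImm16Mask == 0 (register code 0 is zero_reg) and 12 % kImm16Mask == 12.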
42 class JumpPatchSite BASE_EMBEDDED {
43 public:
44 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
45 #ifdef DEBUG
46 info_emitted_ = false;
47 #endif
48 }
49
50 ~JumpPatchSite() {
51 DCHECK(patch_site_.is_bound() == info_emitted_);
52 }
53
54 // When initially emitting this, ensure that a jump is always generated to skip
55 // the inlined smi code.
56 void EmitJumpIfNotSmi(Register reg, Label* target) {
57 DCHECK(!patch_site_.is_bound() && !info_emitted_);
58 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
59 __ bind(&patch_site_);
60 __ andi(at, reg, 0);
61 // Always taken before patched.
62 __ BranchShort(target, eq, at, Operand(zero_reg));
63 }
64
65 // When initially emitting this, ensure that a jump is never generated to skip
66 // the inlined smi code.
67 void EmitJumpIfSmi(Register reg, Label* target) {
68 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
69 DCHECK(!patch_site_.is_bound() && !info_emitted_);
70 __ bind(&patch_site_);
71 __ andi(at, reg, 0);
72 // Never taken before patched.
73 __ BranchShort(target, ne, at, Operand(zero_reg));
74 }
75
76 void EmitPatchInfo() {
77 if (patch_site_.is_bound()) {
78 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
79 Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
80 __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
81 #ifdef DEBUG
82 info_emitted_ = true;
83 #endif
84 } else {
85 __ nop(); // Signals no inlined code.
86 }
87 }
88
89 private:
90 MacroAssembler* masm() { return masm_; }
91 MacroAssembler* masm_;
92 Label patch_site_;
93 #ifdef DEBUG
94 bool info_emitted_;
95 #endif
96 };
97
98
99 // Generate code for a JS function. On entry to the function the receiver
100 // and arguments have been pushed on the stack left to right. The actual
101 // argument count matches the formal parameter count expected by the
102 // function.
103 //
104 // The live registers are:
105 // o a1: the JS function object being called (i.e. ourselves)
106 // o a3: the new target value
107 // o cp: our context
108 // o fp: our caller's frame pointer
109 // o sp: stack pointer
110 // o ra: return address
111 //
112 // The function builds a JS frame. Please see JavaScriptFrameConstants in
113 // frames-mips.h for its layout.
114 void FullCodeGenerator::Generate() {
115 CompilationInfo* info = info_;
116 profiling_counter_ = isolate()->factory()->NewCell(
117 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
118 SetFunctionPosition(literal());
119 Comment cmnt(masm_, "[ function compiled by full code generator");
120
121 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
122
123 if (FLAG_debug_code && info->ExpectsJSReceiverAsReceiver()) {
124 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
125 __ lw(a2, MemOperand(sp, receiver_offset));
126 __ AssertNotSmi(a2);
127 __ GetObjectType(a2, a2, a2);
128 __ Check(ge, kSloppyFunctionExpectsJSReceiverReceiver, a2,
129 Operand(FIRST_JS_RECEIVER_TYPE));
130 }
131
132 // Open a frame scope to indicate that there is a frame on the stack. The
133 // MANUAL indicates that the scope shouldn't actually generate code to set up
134 // the frame (that is done below).
135 FrameScope frame_scope(masm_, StackFrame::MANUAL);
136
137 info->set_prologue_offset(masm_->pc_offset());
138 __ Prologue(info->GeneratePreagedPrologue());
139
140 // Increment invocation count for the function.
141 {
142 Comment cmnt(masm_, "[ Increment invocation count");
143 __ lw(a0, FieldMemOperand(a1, JSFunction::kLiteralsOffset));
144 __ lw(a0, FieldMemOperand(a0, LiteralsArray::kFeedbackVectorOffset));
145 __ lw(t0, FieldMemOperand(
146 a0, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
147 TypeFeedbackVector::kHeaderSize));
148 __ Addu(t0, t0, Operand(Smi::FromInt(1)));
149 __ sw(t0, FieldMemOperand(
150 a0, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
151 TypeFeedbackVector::kHeaderSize));
152 }
153
154 { Comment cmnt(masm_, "[ Allocate locals");
155 int locals_count = info->scope()->num_stack_slots();
156 // Generators allocate locals, if any, in context slots.
157 DCHECK(!IsGeneratorFunction(info->literal()->kind()) || locals_count == 0);
158 OperandStackDepthIncrement(locals_count);
159 if (locals_count > 0) {
160 if (locals_count >= 128) {
161 Label ok;
162 __ Subu(t5, sp, Operand(locals_count * kPointerSize));
163 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
164 __ Branch(&ok, hs, t5, Operand(a2));
165 __ CallRuntime(Runtime::kThrowStackOverflow);
166 __ bind(&ok);
167 }
168 __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
169 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
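// For example, with 70 locals and kMaxPushes == 32 the batched loop below
// runs twice (initializing 64 slots) and the remaining 6 slots are filled by
// the trailing unrolled pushes.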
170 if (locals_count >= kMaxPushes) {
171 int loop_iterations = locals_count / kMaxPushes;
172 __ li(a2, Operand(loop_iterations));
173 Label loop_header;
174 __ bind(&loop_header);
175 // Do pushes.
176 __ Subu(sp, sp, Operand(kMaxPushes * kPointerSize));
177 for (int i = 0; i < kMaxPushes; i++) {
178 __ sw(t5, MemOperand(sp, i * kPointerSize));
179 }
180 // Continue loop if not done.
181 __ Subu(a2, a2, Operand(1));
182 __ Branch(&loop_header, ne, a2, Operand(zero_reg));
183 }
184 int remaining = locals_count % kMaxPushes;
185 // Emit the remaining pushes.
186 __ Subu(sp, sp, Operand(remaining * kPointerSize));
187 for (int i = 0; i < remaining; i++) {
188 __ sw(t5, MemOperand(sp, i * kPointerSize));
189 }
190 }
191 }
192
193 bool function_in_register_a1 = true;
194
195 // Possibly allocate a local context.
196 if (info->scope()->NeedsContext()) {
197 Comment cmnt(masm_, "[ Allocate context");
198 // Argument to NewContext is the function, which is still in a1.
199 bool need_write_barrier = true;
200 int slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
201 if (info->scope()->is_script_scope()) {
202 __ push(a1);
203 __ Push(info->scope()->scope_info());
204 __ CallRuntime(Runtime::kNewScriptContext);
205 PrepareForBailoutForId(BailoutId::ScriptContext(),
206 BailoutState::TOS_REGISTER);
207 // The new target value is not used; clobbering is safe.
208 DCHECK_NULL(info->scope()->new_target_var());
209 } else {
210 if (info->scope()->new_target_var() != nullptr) {
211 __ push(a3); // Preserve new target.
212 }
213 if (slots <= FastNewFunctionContextStub::kMaximumSlots) {
214 FastNewFunctionContextStub stub(isolate());
215 __ li(FastNewFunctionContextDescriptor::SlotsRegister(),
216 Operand(slots));
217 __ CallStub(&stub);
218 // Result of FastNewFunctionContextStub is always in new space.
219 need_write_barrier = false;
220 } else {
221 __ push(a1);
222 __ CallRuntime(Runtime::kNewFunctionContext);
223 }
224 if (info->scope()->new_target_var() != nullptr) {
225 __ pop(a3); // Restore new target.
226 }
227 }
228 function_in_register_a1 = false;
229 // Context is returned in v0. It replaces the context passed to us.
230 // It's saved in the stack and kept live in cp.
231 __ mov(cp, v0);
232 __ sw(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
233 // Copy any necessary parameters into the context.
234 int num_parameters = info->scope()->num_parameters();
235 int first_parameter = info->scope()->has_this_declaration() ? -1 : 0;
236 for (int i = first_parameter; i < num_parameters; i++) {
237 Variable* var =
238 (i == -1) ? info->scope()->receiver() : info->scope()->parameter(i);
239 if (var->IsContextSlot()) {
240 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
241 (num_parameters - 1 - i) * kPointerSize;
242 // Load parameter from stack.
243 __ lw(a0, MemOperand(fp, parameter_offset));
244 // Store it in the context.
245 MemOperand target = ContextMemOperand(cp, var->index());
246 __ sw(a0, target);
247
248 // Update the write barrier.
249 if (need_write_barrier) {
250 __ RecordWriteContextSlot(cp, target.offset(), a0, a2,
251 kRAHasBeenSaved, kDontSaveFPRegs);
252 } else if (FLAG_debug_code) {
253 Label done;
254 __ JumpIfInNewSpace(cp, a0, &done);
255 __ Abort(kExpectedNewSpaceObject);
256 __ bind(&done);
257 }
258 }
259 }
260 }
261
262 // The registers holding this function and the new target are both trashed in
263 // case we bail out here. But since that can happen only when the new target is
264 // not used and we allocate a context, the value of |function_in_register_a1| is correct.
265 PrepareForBailoutForId(BailoutId::FunctionContext(),
266 BailoutState::NO_REGISTERS);
267
268 // Possibly set up a local binding to the this function which is used in
269 // derived constructors with super calls.
270 Variable* this_function_var = info->scope()->this_function_var();
271 if (this_function_var != nullptr) {
272 Comment cmnt(masm_, "[ This function");
273 if (!function_in_register_a1) {
274 __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
275 // The write barrier clobbers the register again, so keep it marked as such.
276 }
277 SetVar(this_function_var, a1, a0, a2);
278 }
279
280 // Possibly set up a local binding to the new target value.
281 Variable* new_target_var = info->scope()->new_target_var();
282 if (new_target_var != nullptr) {
283 Comment cmnt(masm_, "[ new.target");
284 SetVar(new_target_var, a3, a0, a2);
285 }
286
287 // Possibly allocate RestParameters
288 Variable* rest_param = info->scope()->rest_parameter();
289 if (rest_param != nullptr) {
290 Comment cmnt(masm_, "[ Allocate rest parameter array");
291 if (!function_in_register_a1) {
292 __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
293 }
294 FastNewRestParameterStub stub(isolate());
295 __ CallStub(&stub);
296 function_in_register_a1 = false;
297 SetVar(rest_param, v0, a1, a2);
298 }
299
300 Variable* arguments = info->scope()->arguments();
301 if (arguments != NULL) {
302 // Function uses arguments object.
303 Comment cmnt(masm_, "[ Allocate arguments object");
304 if (!function_in_register_a1) {
305 // Load this again, if it's used by the local context below.
306 __ lw(a1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
307 }
308 if (is_strict(language_mode()) || !has_simple_parameters()) {
309 FastNewStrictArgumentsStub stub(isolate());
310 __ CallStub(&stub);
311 } else if (literal()->has_duplicate_parameters()) {
312 __ Push(a1);
313 __ CallRuntime(Runtime::kNewSloppyArguments_Generic);
314 } else {
315 FastNewSloppyArgumentsStub stub(isolate());
316 __ CallStub(&stub);
317 }
318
319 SetVar(arguments, v0, a1, a2);
320 }
321
322 if (FLAG_trace) {
323 __ CallRuntime(Runtime::kTraceEnter);
324 }
325
326 // Visit the declarations and body unless there is an illegal
327 // redeclaration.
328 PrepareForBailoutForId(BailoutId::FunctionEntry(),
329 BailoutState::NO_REGISTERS);
330 {
331 Comment cmnt(masm_, "[ Declarations");
332 VisitDeclarations(scope()->declarations());
333 }
334
335 // Assert that the declarations do not use ICs. Otherwise the debugger
336 // won't be able to redirect a PC at an IC to the correct IC in newly
337 // recompiled code.
338 DCHECK_EQ(0, ic_total_count_);
339
340 {
341 Comment cmnt(masm_, "[ Stack check");
342 PrepareForBailoutForId(BailoutId::Declarations(),
343 BailoutState::NO_REGISTERS);
344 Label ok;
345 __ LoadRoot(at, Heap::kStackLimitRootIndex);
346 __ Branch(&ok, hs, sp, Operand(at));
347 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
348 PredictableCodeSizeScope predictable(
349 masm_, masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
350 __ Call(stack_check, RelocInfo::CODE_TARGET);
351 __ bind(&ok);
352 }
353
354 {
355 Comment cmnt(masm_, "[ Body");
356 DCHECK(loop_depth() == 0);
357 VisitStatements(literal()->body());
358 DCHECK(loop_depth() == 0);
359 }
360
361 // Always emit a 'return undefined' in case control fell off the end of
362 // the body.
363 { Comment cmnt(masm_, "[ return <undefined>;");
364 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
365 }
366 EmitReturnSequence();
367 }
368
369
370 void FullCodeGenerator::ClearAccumulator() {
371 DCHECK(Smi::kZero == 0);
372 __ mov(v0, zero_reg);
373 }
374
375
376 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
377 __ li(a2, Operand(profiling_counter_));
378 __ lw(a3, FieldMemOperand(a2, Cell::kValueOffset));
379 __ Subu(a3, a3, Operand(Smi::FromInt(delta)));
380 __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
381 }
382
383
384 void FullCodeGenerator::EmitProfilingCounterReset() {
385 int reset_value = FLAG_interrupt_budget;
386 if (info_->is_debug()) {
387 // Detect debug break requests as soon as possible.
388 reset_value = FLAG_interrupt_budget >> 4;
389 }
390 __ li(a2, Operand(profiling_counter_));
391 __ li(a3, Operand(Smi::FromInt(reset_value)));
392 __ sw(a3, FieldMemOperand(a2, Cell::kValueOffset));
393 }
394
395
396 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
397 Label* back_edge_target) {
398 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
399 // to make sure it is constant. Branch may emit a skip-or-jump sequence
400 // instead of the normal Branch. It seems that the "skip" part of that
401 // sequence is about as long as this Branch would be so it is safe to ignore
402 // that.
403 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
404 Comment cmnt(masm_, "[ Back edge bookkeeping");
405 Label ok;
406 DCHECK(back_edge_target->is_bound());
407 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
408 int weight = Min(kMaxBackEdgeWeight,
409 Max(1, distance / kCodeSizeMultiplier));
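// As an illustration, a back edge that sits 1500 bytes of code after its
// target with a kCodeSizeMultiplier of 100 (an assumed value here; the real
// constant is per-architecture) yields a weight of 15, so larger loop bodies
// consume the interrupt budget faster; the result is clamped to
// [1, kMaxBackEdgeWeight].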
410 EmitProfilingCounterDecrement(weight);
411 __ slt(at, a3, zero_reg);
412 __ beq(at, zero_reg, &ok);
413 // Call will emit a li t9 first, so it is safe to use the delay slot.
414 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
415 // Record a mapping of this PC offset to the OSR id. This is used to find
416 // the AST id from the unoptimized code in order to use it as a key into
417 // the deoptimization input data found in the optimized code.
418 RecordBackEdge(stmt->OsrEntryId());
419 EmitProfilingCounterReset();
420
421 __ bind(&ok);
422 PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
423 // Record a mapping of the OSR id to this PC. This is used if the OSR
424 // entry becomes the target of a bailout. We don't expect it to be, but
425 // we want it to work if it is.
426 PrepareForBailoutForId(stmt->OsrEntryId(), BailoutState::NO_REGISTERS);
427 }
428
429 void FullCodeGenerator::EmitProfilingCounterHandlingForReturnSequence(
430 bool is_tail_call) {
431 // Pretend that the exit is a backwards jump to the entry.
432 int weight = 1;
433 if (info_->ShouldSelfOptimize()) {
434 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
435 } else {
436 int distance = masm_->pc_offset();
437 weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier));
438 }
439 EmitProfilingCounterDecrement(weight);
440 Label ok;
441 __ Branch(&ok, ge, a3, Operand(zero_reg));
442 // Don't need to save result register if we are going to do a tail call.
443 if (!is_tail_call) {
444 __ push(v0);
445 }
446 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
447 if (!is_tail_call) {
448 __ pop(v0);
449 }
450 EmitProfilingCounterReset();
451 __ bind(&ok);
452 }
453
454 void FullCodeGenerator::EmitReturnSequence() {
455 Comment cmnt(masm_, "[ Return sequence");
456 if (return_label_.is_bound()) {
457 __ Branch(&return_label_);
458 } else {
459 __ bind(&return_label_);
460 if (FLAG_trace) {
461 // Push the return value on the stack as the parameter.
462 // Runtime::TraceExit returns its parameter in v0.
463 __ push(v0);
464 __ CallRuntime(Runtime::kTraceExit);
465 }
466 EmitProfilingCounterHandlingForReturnSequence(false);
467
468 // Make sure that the constant pool is not emitted inside of the return
469 // sequence.
470 { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
471 int32_t arg_count = info_->scope()->num_parameters() + 1;
472 int32_t sp_delta = arg_count * kPointerSize;
473 SetReturnPosition(literal());
474 __ mov(sp, fp);
475 __ MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
476 __ Addu(sp, sp, Operand(sp_delta));
477 __ Jump(ra);
478 }
479 }
480 }
481
482 void FullCodeGenerator::RestoreContext() {
483 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
484 }
485
486 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
487 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
488 codegen()->GetVar(result_register(), var);
489 codegen()->PushOperand(result_register());
490 }
491
492
493 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
494 }
495
496
497 void FullCodeGenerator::AccumulatorValueContext::Plug(
498 Heap::RootListIndex index) const {
499 __ LoadRoot(result_register(), index);
500 }
501
502
503 void FullCodeGenerator::StackValueContext::Plug(
504 Heap::RootListIndex index) const {
505 __ LoadRoot(result_register(), index);
506 codegen()->PushOperand(result_register());
507 }
508
509
510 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
511 codegen()->PrepareForBailoutBeforeSplit(condition(),
512 true,
513 true_label_,
514 false_label_);
515 if (index == Heap::kUndefinedValueRootIndex ||
516 index == Heap::kNullValueRootIndex ||
517 index == Heap::kFalseValueRootIndex) {
518 if (false_label_ != fall_through_) __ Branch(false_label_);
519 } else if (index == Heap::kTrueValueRootIndex) {
520 if (true_label_ != fall_through_) __ Branch(true_label_);
521 } else {
522 __ LoadRoot(result_register(), index);
523 codegen()->DoTest(this);
524 }
525 }
526
527
528 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
529 }
530
531
532 void FullCodeGenerator::AccumulatorValueContext::Plug(
533 Handle<Object> lit) const {
534 __ li(result_register(), Operand(lit));
535 }
536
537
538 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
539 // Immediates cannot be pushed directly.
540 __ li(result_register(), Operand(lit));
541 codegen()->PushOperand(result_register());
542 }
543
544
545 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
546 codegen()->PrepareForBailoutBeforeSplit(condition(),
547 true,
548 true_label_,
549 false_label_);
550 DCHECK(lit->IsNull(isolate()) || lit->IsUndefined(isolate()) ||
551 !lit->IsUndetectable());
552 if (lit->IsUndefined(isolate()) || lit->IsNull(isolate()) ||
553 lit->IsFalse(isolate())) {
554 if (false_label_ != fall_through_) __ Branch(false_label_);
555 } else if (lit->IsTrue(isolate()) || lit->IsJSObject()) {
556 if (true_label_ != fall_through_) __ Branch(true_label_);
557 } else if (lit->IsString()) {
558 if (String::cast(*lit)->length() == 0) {
559 if (false_label_ != fall_through_) __ Branch(false_label_);
560 } else {
561 if (true_label_ != fall_through_) __ Branch(true_label_);
562 }
563 } else if (lit->IsSmi()) {
564 if (Smi::cast(*lit)->value() == 0) {
565 if (false_label_ != fall_through_) __ Branch(false_label_);
566 } else {
567 if (true_label_ != fall_through_) __ Branch(true_label_);
568 }
569 } else {
570 // For simplicity we always test the accumulator register.
571 __ li(result_register(), Operand(lit));
572 codegen()->DoTest(this);
573 }
574 }
575
576
577 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
578 Register reg) const {
579 DCHECK(count > 0);
580 if (count > 1) codegen()->DropOperands(count - 1);
581 __ sw(reg, MemOperand(sp, 0));
582 }
583
584
585 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
586 Label* materialize_false) const {
587 DCHECK(materialize_true == materialize_false);
588 __ bind(materialize_true);
589 }
590
591
592 void FullCodeGenerator::AccumulatorValueContext::Plug(
593 Label* materialize_true,
594 Label* materialize_false) const {
595 Label done;
596 __ bind(materialize_true);
597 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
598 __ Branch(&done);
599 __ bind(materialize_false);
600 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
601 __ bind(&done);
602 }
603
604
605 void FullCodeGenerator::StackValueContext::Plug(
606 Label* materialize_true,
607 Label* materialize_false) const {
608 codegen()->OperandStackDepthIncrement(1);
609 Label done;
610 __ bind(materialize_true);
611 __ LoadRoot(at, Heap::kTrueValueRootIndex);
612 // Push the value as the following branch can clobber at in long branch mode.
613 __ push(at);
614 __ Branch(&done);
615 __ bind(materialize_false);
616 __ LoadRoot(at, Heap::kFalseValueRootIndex);
617 __ push(at);
618 __ bind(&done);
619 }
620
621
622 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
623 Label* materialize_false) const {
624 DCHECK(materialize_true == true_label_);
625 DCHECK(materialize_false == false_label_);
626 }
627
628
629 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
630 Heap::RootListIndex value_root_index =
631 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
632 __ LoadRoot(result_register(), value_root_index);
633 }
634
635
636 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
637 Heap::RootListIndex value_root_index =
638 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
639 __ LoadRoot(at, value_root_index);
640 codegen()->PushOperand(at);
641 }
642
643
644 void FullCodeGenerator::TestContext::Plug(bool flag) const {
645 codegen()->PrepareForBailoutBeforeSplit(condition(),
646 true,
647 true_label_,
648 false_label_);
649 if (flag) {
650 if (true_label_ != fall_through_) __ Branch(true_label_);
651 } else {
652 if (false_label_ != fall_through_) __ Branch(false_label_);
653 }
654 }
655
656
657 void FullCodeGenerator::DoTest(Expression* condition,
658 Label* if_true,
659 Label* if_false,
660 Label* fall_through) {
661 __ mov(a0, result_register());
662 Handle<Code> ic = ToBooleanICStub::GetUninitialized(isolate());
663 CallIC(ic, condition->test_id());
664 __ LoadRoot(at, Heap::kTrueValueRootIndex);
665 Split(eq, result_register(), Operand(at), if_true, if_false, fall_through);
666 }
667
668
669 void FullCodeGenerator::Split(Condition cc,
670 Register lhs,
671 const Operand& rhs,
672 Label* if_true,
673 Label* if_false,
674 Label* fall_through) {
675 if (if_false == fall_through) {
676 __ Branch(if_true, cc, lhs, rhs);
677 } else if (if_true == fall_through) {
678 __ Branch(if_false, NegateCondition(cc), lhs, rhs);
679 } else {
680 __ Branch(if_true, cc, lhs, rhs);
681 __ Branch(if_false);
682 }
683 }
684
685
686 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
687 DCHECK(var->IsStackAllocated());
688 // Offset is negative because higher indexes are at lower addresses.
689 int offset = -var->index() * kPointerSize;
690 // Adjust by a (parameter or local) base offset.
691 if (var->IsParameter()) {
692 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
693 } else {
694 offset += JavaScriptFrameConstants::kLocal0Offset;
695 }
696 return MemOperand(fp, offset);
697 }
698
699
700 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
701 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
702 if (var->IsContextSlot()) {
703 int context_chain_length = scope()->ContextChainLength(var->scope());
704 __ LoadContext(scratch, context_chain_length);
705 return ContextMemOperand(scratch, var->index());
706 } else {
707 return StackOperand(var);
708 }
709 }
710
711
712 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
713 // Use destination as scratch.
714 MemOperand location = VarOperand(var, dest);
715 __ lw(dest, location);
716 }
717
718
719 void FullCodeGenerator::SetVar(Variable* var,
720 Register src,
721 Register scratch0,
722 Register scratch1) {
723 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
724 DCHECK(!scratch0.is(src));
725 DCHECK(!scratch0.is(scratch1));
726 DCHECK(!scratch1.is(src));
727 MemOperand location = VarOperand(var, scratch0);
728 __ sw(src, location);
729 // Emit the write barrier code if the location is in the heap.
730 if (var->IsContextSlot()) {
731 __ RecordWriteContextSlot(scratch0,
732 location.offset(),
733 src,
734 scratch1,
735 kRAHasBeenSaved,
736 kDontSaveFPRegs);
737 }
738 }
739
740
741 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
742 bool should_normalize,
743 Label* if_true,
744 Label* if_false) {
745 // Only prepare for bailouts before splits if we're in a test
746 // context. Otherwise, we let the Visit function deal with the
747 // preparation to avoid preparing with the same AST id twice.
748 if (!context()->IsTest()) return;
749
750 Label skip;
751 if (should_normalize) __ Branch(&skip);
752 PrepareForBailout(expr, BailoutState::TOS_REGISTER);
753 if (should_normalize) {
754 __ LoadRoot(t0, Heap::kTrueValueRootIndex);
755 Split(eq, v0, Operand(t0), if_true, if_false, NULL);
756 __ bind(&skip);
757 }
758 }
759
760
761 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
762 // The variable in the declaration always resides in the current function
763 // context.
764 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
765 if (FLAG_debug_code) {
766 // Check that we're not inside a with or catch context.
767 __ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
768 __ LoadRoot(t0, Heap::kWithContextMapRootIndex);
769 __ Check(ne, kDeclarationInWithContext,
770 a1, Operand(t0));
771 __ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
772 __ Check(ne, kDeclarationInCatchContext,
773 a1, Operand(t0));
774 }
775 }
776
777
778 void FullCodeGenerator::VisitVariableDeclaration(
779 VariableDeclaration* declaration) {
780 VariableProxy* proxy = declaration->proxy();
781 Variable* variable = proxy->var();
782 switch (variable->location()) {
783 case VariableLocation::UNALLOCATED: {
784 DCHECK(!variable->binding_needs_init());
785 FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
786 DCHECK(!slot.IsInvalid());
787 globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
788 globals_->Add(isolate()->factory()->undefined_value(), zone());
789 break;
790 }
791 case VariableLocation::PARAMETER:
792 case VariableLocation::LOCAL:
793 if (variable->binding_needs_init()) {
794 Comment cmnt(masm_, "[ VariableDeclaration");
795 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
796 __ sw(t0, StackOperand(variable));
797 }
798 break;
799
800 case VariableLocation::CONTEXT:
801 if (variable->binding_needs_init()) {
802 Comment cmnt(masm_, "[ VariableDeclaration");
803 EmitDebugCheckDeclarationContext(variable);
804 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
805 __ sw(at, ContextMemOperand(cp, variable->index()));
806 // No write barrier since the_hole_value is in old space.
807 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
808 }
809 break;
810
811 case VariableLocation::LOOKUP: {
812 Comment cmnt(masm_, "[ VariableDeclaration");
813 DCHECK_EQ(VAR, variable->mode());
814 DCHECK(!variable->binding_needs_init());
815 __ li(a2, Operand(variable->name()));
816 __ Push(a2);
817 __ CallRuntime(Runtime::kDeclareEvalVar);
818 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
819 break;
820 }
821
822 case VariableLocation::MODULE:
823 UNREACHABLE();
824 }
825 }
826
827
828 void FullCodeGenerator::VisitFunctionDeclaration(
829 FunctionDeclaration* declaration) {
830 VariableProxy* proxy = declaration->proxy();
831 Variable* variable = proxy->var();
832 switch (variable->location()) {
833 case VariableLocation::UNALLOCATED: {
834 FeedbackVectorSlot slot = proxy->VariableFeedbackSlot();
835 DCHECK(!slot.IsInvalid());
836 globals_->Add(handle(Smi::FromInt(slot.ToInt()), isolate()), zone());
837 Handle<SharedFunctionInfo> function =
838 Compiler::GetSharedFunctionInfo(declaration->fun(), script(), info_);
839 // Check for stack-overflow exception.
840 if (function.is_null()) return SetStackOverflow();
841 globals_->Add(function, zone());
842 break;
843 }
844
845 case VariableLocation::PARAMETER:
846 case VariableLocation::LOCAL: {
847 Comment cmnt(masm_, "[ FunctionDeclaration");
848 VisitForAccumulatorValue(declaration->fun());
849 __ sw(result_register(), StackOperand(variable));
850 break;
851 }
852
853 case VariableLocation::CONTEXT: {
854 Comment cmnt(masm_, "[ FunctionDeclaration");
855 EmitDebugCheckDeclarationContext(variable);
856 VisitForAccumulatorValue(declaration->fun());
857 __ sw(result_register(), ContextMemOperand(cp, variable->index()));
858 int offset = Context::SlotOffset(variable->index());
859 // We know that we have written a function, which is not a smi.
860 __ RecordWriteContextSlot(cp,
861 offset,
862 result_register(),
863 a2,
864 kRAHasBeenSaved,
865 kDontSaveFPRegs,
866 EMIT_REMEMBERED_SET,
867 OMIT_SMI_CHECK);
868 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
869 break;
870 }
871
872 case VariableLocation::LOOKUP: {
873 Comment cmnt(masm_, "[ FunctionDeclaration");
874 __ li(a2, Operand(variable->name()));
875 PushOperand(a2);
876 // Push initial value for function declaration.
877 VisitForStackValue(declaration->fun());
878 CallRuntimeWithOperands(Runtime::kDeclareEvalFunction);
879 PrepareForBailoutForId(proxy->id(), BailoutState::NO_REGISTERS);
880 break;
881 }
882
883 case VariableLocation::MODULE:
884 UNREACHABLE();
885 }
886 }
887
888
889 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
890 // Call the runtime to declare the globals.
891 __ li(a1, Operand(pairs));
892 __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
893 __ EmitLoadTypeFeedbackVector(a2);
894 __ Push(a1, a0, a2);
895 __ CallRuntime(Runtime::kDeclareGlobals);
896 // Return value is ignored.
897 }
898
899
900 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
901 Comment cmnt(masm_, "[ SwitchStatement");
902 Breakable nested_statement(this, stmt);
903 SetStatementPosition(stmt);
904
905 // Keep the switch value on the stack until a case matches.
906 VisitForStackValue(stmt->tag());
907 PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);
908
909 ZoneList<CaseClause*>* clauses = stmt->cases();
910 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
911
912 Label next_test; // Recycled for each test.
913 // Compile all the tests with branches to their bodies.
914 for (int i = 0; i < clauses->length(); i++) {
915 CaseClause* clause = clauses->at(i);
916 clause->body_target()->Unuse();
917
918 // The default is not a test, but remember it as final fall through.
919 if (clause->is_default()) {
920 default_clause = clause;
921 continue;
922 }
923
924 Comment cmnt(masm_, "[ Case comparison");
925 __ bind(&next_test);
926 next_test.Unuse();
927
928 // Compile the label expression.
929 VisitForAccumulatorValue(clause->label());
930 __ mov(a0, result_register()); // CompareStub requires args in a0, a1.
931
932 // Perform the comparison as if via '==='.
933 __ lw(a1, MemOperand(sp, 0)); // Switch value.
934 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
935 JumpPatchSite patch_site(masm_);
936 if (inline_smi_code) {
937 Label slow_case;
938 __ or_(a2, a1, a0);
939 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
940
941 __ Branch(&next_test, ne, a1, Operand(a0));
942 __ Drop(1); // Switch value is no longer needed.
943 __ Branch(clause->body_target());
944
945 __ bind(&slow_case);
946 }
947
948 // Record position before stub call for type feedback.
949 SetExpressionPosition(clause);
950 Handle<Code> ic =
951 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
952 CallIC(ic, clause->CompareId());
953 patch_site.EmitPatchInfo();
954
955 Label skip;
956 __ Branch(&skip);
957 PrepareForBailout(clause, BailoutState::TOS_REGISTER);
958 __ LoadRoot(at, Heap::kTrueValueRootIndex);
959 __ Branch(&next_test, ne, v0, Operand(at));
960 __ Drop(1);
961 __ Branch(clause->body_target());
962 __ bind(&skip);
963
964 __ Branch(&next_test, ne, v0, Operand(zero_reg));
965 __ Drop(1); // Switch value is no longer needed.
966 __ Branch(clause->body_target());
967 }
968
969 // Discard the test value and jump to the default if present, otherwise to
970 // the end of the statement.
971 __ bind(&next_test);
972 DropOperands(1); // Switch value is no longer needed.
973 if (default_clause == NULL) {
974 __ Branch(nested_statement.break_label());
975 } else {
976 __ Branch(default_clause->body_target());
977 }
978
979 // Compile all the case bodies.
980 for (int i = 0; i < clauses->length(); i++) {
981 Comment cmnt(masm_, "[ Case body");
982 CaseClause* clause = clauses->at(i);
983 __ bind(clause->body_target());
984 PrepareForBailoutForId(clause->EntryId(), BailoutState::NO_REGISTERS);
985 VisitStatements(clause->statements());
986 }
987
988 __ bind(nested_statement.break_label());
989 PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
990 }
991
992
993 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
994 Comment cmnt(masm_, "[ ForInStatement");
995 SetStatementPosition(stmt, SKIP_BREAK);
996
997 FeedbackVectorSlot slot = stmt->ForInFeedbackSlot();
998
999 // Get the object to enumerate over.
1000 SetExpressionAsStatementPosition(stmt->enumerable());
1001 VisitForAccumulatorValue(stmt->enumerable());
1002 __ mov(a0, result_register());
1003 OperandStackDepthIncrement(5);
1004
1005 Label loop, exit;
1006 Iteration loop_statement(this, stmt);
1007 increment_loop_depth();
1008
1009 // If the object is null or undefined, skip over the loop, otherwise convert
1010 // it to a JS receiver. See ECMA-262 version 5, section 12.6.4.
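// For example, "for (var key in undefined) {}" and "for (var key in null) {}"
// perform no iterations at all, while a primitive such as a string is first
// converted to a wrapper object before its properties are enumerated.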
1011 Label convert, done_convert;
1012 __ JumpIfSmi(a0, &convert);
1013 __ GetObjectType(a0, a1, a1);
1014 __ Branch(USE_DELAY_SLOT, &done_convert, ge, a1,
1015 Operand(FIRST_JS_RECEIVER_TYPE));
1016 __ LoadRoot(at, Heap::kNullValueRootIndex); // In delay slot.
1017 __ Branch(USE_DELAY_SLOT, &exit, eq, a0, Operand(at));
1018 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); // In delay slot.
1019 __ Branch(&exit, eq, a0, Operand(at));
1020 __ bind(&convert);
1021 __ Call(isolate()->builtins()->ToObject(), RelocInfo::CODE_TARGET);
1022 RestoreContext();
1023 __ mov(a0, v0);
1024 __ bind(&done_convert);
1025 PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
1026 __ push(a0);
1027
1028 // Check cache validity in generated code. If we cannot guarantee cache
1029 // validity, call the runtime system to check cache validity or get the
1030 // property names in a fixed array. Note: Proxies never have an enum cache,
1031 // so will always take the slow path.
1032 Label call_runtime;
1033 __ CheckEnumCache(&call_runtime);
1034
1035 // The enum cache is valid. Load the map of the object being
1036 // iterated over and use the cache for the iteration.
1037 Label use_cache;
1038 __ lw(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
1039 __ Branch(&use_cache);
1040
1041 // Get the set of properties to enumerate.
1042 __ bind(&call_runtime);
1043 __ push(a0); // Duplicate the enumerable object on the stack.
1044 __ CallRuntime(Runtime::kForInEnumerate);
1045 PrepareForBailoutForId(stmt->EnumId(), BailoutState::TOS_REGISTER);
1046
1047 // If we got a map from the runtime call, we can do a fast
1048 // modification check. Otherwise, we got a fixed array, and we have
1049 // to do a slow check.
1050 Label fixed_array;
1051 __ lw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
1052 __ LoadRoot(at, Heap::kMetaMapRootIndex);
1053 __ Branch(&fixed_array, ne, a2, Operand(at));
1054
1055 // We got a map in register v0. Get the enumeration cache from it.
1056 Label no_descriptors;
1057 __ bind(&use_cache);
1058
1059 __ EnumLength(a1, v0);
1060 __ Branch(&no_descriptors, eq, a1, Operand(Smi::kZero));
1061
1062 __ LoadInstanceDescriptors(v0, a2);
1063 __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
1064 __ lw(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1065
1066 // Set up the four remaining stack slots.
1067 __ li(a0, Operand(Smi::kZero));
1068 // Push map, enumeration cache, enumeration cache length (as smi) and zero.
1069 __ Push(v0, a2, a1, a0);
1070 __ jmp(&loop);
1071
1072 __ bind(&no_descriptors);
1073 __ Drop(1);
1074 __ jmp(&exit);
1075
1076 // We got a fixed array in register v0. Iterate through that.
1077 __ bind(&fixed_array);
1078
1079 __ li(a1, Operand(Smi::FromInt(1))); // Smi(1) indicates slow check
1080 __ Push(a1, v0); // Smi and array
1081 __ lw(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
1082 __ Push(a1); // Fixed array length (as smi).
1083 PrepareForBailoutForId(stmt->PrepareId(), BailoutState::NO_REGISTERS);
1084 __ li(a0, Operand(Smi::kZero));
1085 __ Push(a0); // Initial index.
1086
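// The loop below relies on the five operand stack slots set up above:
//   sp[0 * kPointerSize] : current index (smi)
//   sp[1 * kPointerSize] : length of the enum cache or fixed array (smi)
//   sp[2 * kPointerSize] : enum cache or fixed array holding the keys
//   sp[3 * kPointerSize] : expected map, or Smi(1) in the permanent slow case
//   sp[4 * kPointerSize] : the enumerable object itself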
1087 // Generate code for doing the condition check.
1088 __ bind(&loop);
1089 SetExpressionAsStatementPosition(stmt->each());
1090
1091 // Load the current count to a0, load the length to a1.
1092 __ lw(a0, MemOperand(sp, 0 * kPointerSize));
1093 __ lw(a1, MemOperand(sp, 1 * kPointerSize));
1094 __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
1095
1096 // Get the current entry of the array into result_register.
1097 __ lw(a2, MemOperand(sp, 2 * kPointerSize));
1098 __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1099 __ Lsa(t0, a2, a0, kPointerSizeLog2 - kSmiTagSize);
1100 __ lw(result_register(), MemOperand(t0)); // Current entry.
1101
1102 // Load the expected map into register a2: either the real map from the
1103 // stack, or the Smi(1) sentinel pushed in the permanent slow case.
1104 __ lw(a2, MemOperand(sp, 3 * kPointerSize));
1105
1106 // Check if the expected map still matches that of the enumerable.
1107 // If not, we may have to filter the key.
1108 Label update_each;
1109 __ lw(a1, MemOperand(sp, 4 * kPointerSize));
1110 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
1111 __ Branch(&update_each, eq, t0, Operand(a2));
1112
1113 // We need to filter the key, record slow-path here.
1114 int const vector_index = SmiFromSlot(slot)->value();
1115 __ EmitLoadTypeFeedbackVector(a3);
1116 __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1117 __ sw(a2, FieldMemOperand(a3, FixedArray::OffsetOfElementAt(vector_index)));
1118
1119 __ mov(a0, result_register());
1120 // a0 contains the key. The receiver in a1 is the second argument to the
1121 // ForInFilter. ForInFilter returns the name-converted key if the receiver
1122 // still has the key, and undefined otherwise.
1123 __ Call(isolate()->builtins()->ForInFilter(), RelocInfo::CODE_TARGET);
1124 RestoreContext();
1125 PrepareForBailoutForId(stmt->FilterId(), BailoutState::TOS_REGISTER);
1126 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1127 __ Branch(loop_statement.continue_label(), eq, result_register(),
1128 Operand(at));
1129
1130 // Update the 'each' property or variable from the possibly filtered
1131 // entry in the result_register.
1132 __ bind(&update_each);
1133 // Perform the assignment as if via '='.
1134 { EffectContext context(this);
1135 EmitAssignment(stmt->each(), stmt->EachFeedbackSlot());
1136 PrepareForBailoutForId(stmt->AssignmentId(), BailoutState::NO_REGISTERS);
1137 }
1138
1139 // Both Crankshaft and Turbofan expect BodyId to be right before stmt->body().
1140 PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
1141 // Generate code for the body of the loop.
1142 Visit(stmt->body());
1143
1144 // Generate code for going to the next element by incrementing
1145 // the index (smi) stored on top of the stack.
1146 __ bind(loop_statement.continue_label());
1147 PrepareForBailoutForId(stmt->IncrementId(), BailoutState::NO_REGISTERS);
1148 __ pop(a0);
1149 __ Addu(a0, a0, Operand(Smi::FromInt(1)));
1150 __ push(a0);
1151
1152 EmitBackEdgeBookkeeping(stmt, &loop);
1153 __ Branch(&loop);
1154
1155 // Remove the pointers stored on the stack.
1156 __ bind(loop_statement.break_label());
1157 DropOperands(5);
1158
1159 // Exit and decrement the loop depth.
1160 PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
1161 __ bind(&exit);
1162 decrement_loop_depth();
1163 }
1164
1165
1166 void FullCodeGenerator::EmitSetHomeObject(Expression* initializer, int offset,
1167 FeedbackVectorSlot slot) {
1168 DCHECK(NeedsHomeObject(initializer));
1169 __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1170 __ lw(StoreDescriptor::ValueRegister(),
1171 MemOperand(sp, offset * kPointerSize));
1172 CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1173 }
1174
1175
1176 void FullCodeGenerator::EmitSetHomeObjectAccumulator(Expression* initializer,
1177 int offset,
1178 FeedbackVectorSlot slot) {
1179 DCHECK(NeedsHomeObject(initializer));
1180 __ Move(StoreDescriptor::ReceiverRegister(), v0);
1181 __ lw(StoreDescriptor::ValueRegister(),
1182 MemOperand(sp, offset * kPointerSize));
1183 CallStoreIC(slot, isolate()->factory()->home_object_symbol());
1184 }
1185
1186
1187 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1188 TypeofMode typeof_mode,
1189 Label* slow) {
1190 Register current = cp;
1191 Register next = a1;
1192 Register temp = a2;
1193
1194 int to_check = scope()->ContextChainLengthUntilOutermostSloppyEval();
1195 for (Scope* s = scope(); to_check > 0; s = s->outer_scope()) {
1196 if (!s->NeedsContext()) continue;
1197 if (s->calls_sloppy_eval()) {
1198 // Check that extension is "the hole".
1199 __ lw(temp, ContextMemOperand(current, Context::EXTENSION_INDEX));
1200 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1201 }
1202 // Load next context in chain.
1203 __ lw(next, ContextMemOperand(current, Context::PREVIOUS_INDEX));
1204 // Walk the rest of the chain without clobbering cp.
1205 current = next;
1206 to_check--;
1207 }
1208
1209 // All extension objects were empty and it is safe to use a normal global
1210 // load machinery.
1211 EmitGlobalVariableLoad(proxy, typeof_mode);
1212 }
1213
1214
1215 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1216 Label* slow) {
1217 DCHECK(var->IsContextSlot());
1218 Register context = cp;
1219 Register next = a3;
1220 Register temp = t0;
1221
1222 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1223 if (s->NeedsContext()) {
1224 if (s->calls_sloppy_eval()) {
1225 // Check that extension is "the hole".
1226 __ lw(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1227 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1228 }
1229 __ lw(next, ContextMemOperand(context, Context::PREVIOUS_INDEX));
1230 // Walk the rest of the chain without clobbering cp.
1231 context = next;
1232 }
1233 }
1234 // Check that last extension is "the hole".
1235 __ lw(temp, ContextMemOperand(context, Context::EXTENSION_INDEX));
1236 __ JumpIfNotRoot(temp, Heap::kTheHoleValueRootIndex, slow);
1237
1238 // This function is used only for loads, not stores, so it's safe to
1239 // return a cp-based operand (the write barrier cannot be allowed to
1240 // destroy the cp register).
1241 return ContextMemOperand(context, var->index());
1242 }
1243
1244
1245 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1246 TypeofMode typeof_mode,
1247 Label* slow, Label* done) {
1248 // Generate fast-case code for variables that might be shadowed by
1249 // eval-introduced variables. Eval is used a lot without
1250 // introducing variables. In those cases, we do not want to
1251 // perform a runtime call for all variables in the scope
1252 // containing the eval.
1253 Variable* var = proxy->var();
1254 if (var->mode() == DYNAMIC_GLOBAL) {
1255 EmitLoadGlobalCheckExtensions(proxy, typeof_mode, slow);
1256 __ Branch(done);
1257 } else if (var->mode() == DYNAMIC_LOCAL) {
1258 Variable* local = var->local_if_not_shadowed();
1259 __ lw(v0, ContextSlotOperandCheckExtensions(local, slow));
1260 if (local->binding_needs_init()) {
1261 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1262 __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
1263 __ Branch(done, ne, at, Operand(zero_reg));
1264 __ li(a0, Operand(var->name()));
1265 __ push(a0);
1266 __ CallRuntime(Runtime::kThrowReferenceError);
1267 } else {
1268 __ Branch(done);
1269 }
1270 }
1271 }
1272
1273 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy,
1274 TypeofMode typeof_mode) {
1275 // Record position before possible IC call.
1276 SetExpressionPosition(proxy);
1277 PrepareForBailoutForId(proxy->BeforeId(), BailoutState::NO_REGISTERS);
1278 Variable* var = proxy->var();
1279
1280 // Three cases: global variables, lookup variables, and all other types of
1281 // variables.
1282 switch (var->location()) {
1283 case VariableLocation::UNALLOCATED: {
1284 Comment cmnt(masm_, "[ Global variable");
1285 EmitGlobalVariableLoad(proxy, typeof_mode);
1286 context()->Plug(v0);
1287 break;
1288 }
1289
1290 case VariableLocation::PARAMETER:
1291 case VariableLocation::LOCAL:
1292 case VariableLocation::CONTEXT: {
1293 DCHECK_EQ(NOT_INSIDE_TYPEOF, typeof_mode);
1294 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1295 : "[ Stack variable");
1296 if (proxy->hole_check_mode() == HoleCheckMode::kRequired) {
1297 // Throw a reference error when using an uninitialized let/const
1298 // binding in harmony mode.
1299 Label done;
1300 GetVar(v0, var);
1301 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1302 __ subu(at, v0, at); // Sub as compare: at == 0 on eq.
1303 __ Branch(&done, ne, at, Operand(zero_reg));
1304 __ li(a0, Operand(var->name()));
1305 __ push(a0);
1306 __ CallRuntime(Runtime::kThrowReferenceError);
1307 __ bind(&done);
1308 context()->Plug(v0);
1309 break;
1310 }
1311 context()->Plug(var);
1312 break;
1313 }
1314
1315 case VariableLocation::LOOKUP: {
1316 Comment cmnt(masm_, "[ Lookup variable");
1317 Label done, slow;
1318 // Generate code for loading from variables potentially shadowed
1319 // by eval-introduced variables.
1320 EmitDynamicLookupFastCase(proxy, typeof_mode, &slow, &done);
1321 __ bind(&slow);
1322 __ Push(var->name());
1323 Runtime::FunctionId function_id =
1324 typeof_mode == NOT_INSIDE_TYPEOF
1325 ? Runtime::kLoadLookupSlot
1326 : Runtime::kLoadLookupSlotInsideTypeof;
1327 __ CallRuntime(function_id);
1328 __ bind(&done);
1329 context()->Plug(v0);
1330 break;
1331 }
1332
1333 case VariableLocation::MODULE:
1334 UNREACHABLE();
1335 }
1336 }
1337
1338
1339 void FullCodeGenerator::EmitAccessor(ObjectLiteralProperty* property) {
1340 Expression* expression = (property == NULL) ? NULL : property->value();
1341 if (expression == NULL) {
1342 __ LoadRoot(a1, Heap::kNullValueRootIndex);
1343 PushOperand(a1);
1344 } else {
1345 VisitForStackValue(expression);
1346 if (NeedsHomeObject(expression)) {
1347 DCHECK(property->kind() == ObjectLiteral::Property::GETTER ||
1348 property->kind() == ObjectLiteral::Property::SETTER);
1349 int offset = property->kind() == ObjectLiteral::Property::GETTER ? 2 : 3;
1350 EmitSetHomeObject(expression, offset, property->GetSlot());
1351 }
1352 }
1353 }
1354
1355
1356 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1357 Comment cmnt(masm_, "[ ObjectLiteral");
1358
1359 Handle<FixedArray> constant_properties = expr->constant_properties();
1360 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1361 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1362 __ li(a1, Operand(constant_properties));
1363 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1364 if (MustCreateObjectLiteralWithRuntime(expr)) {
1365 __ Push(a3, a2, a1, a0);
1366 __ CallRuntime(Runtime::kCreateObjectLiteral);
1367 } else {
1368 FastCloneShallowObjectStub stub(isolate(), expr->properties_count());
1369 __ CallStub(&stub);
1370 RestoreContext();
1371 }
1372 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1373
1374 // If result_saved is true the result is on top of the stack. If
1375 // result_saved is false the result is in v0.
1376 bool result_saved = false;
1377
1378 AccessorTable accessor_table(zone());
1379 int property_index = 0;
1380 for (; property_index < expr->properties()->length(); property_index++) {
1381 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1382 if (property->is_computed_name()) break;
1383 if (property->IsCompileTimeValue()) continue;
1384
1385 Literal* key = property->key()->AsLiteral();
1386 Expression* value = property->value();
1387 if (!result_saved) {
1388 PushOperand(v0); // Save result on stack.
1389 result_saved = true;
1390 }
1391 switch (property->kind()) {
1392 case ObjectLiteral::Property::CONSTANT:
1393 UNREACHABLE();
1394 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1395 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1396 // Fall through.
1397 case ObjectLiteral::Property::COMPUTED:
1398 // It is safe to use [[Put]] here because the boilerplate already
1399 // contains computed properties with an uninitialized value.
1400 if (key->IsStringLiteral()) {
1401 DCHECK(key->IsPropertyName());
1402 if (property->emit_store()) {
1403 VisitForAccumulatorValue(value);
1404 __ mov(StoreDescriptor::ValueRegister(), result_register());
1405 DCHECK(StoreDescriptor::ValueRegister().is(a0));
1406 __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1407 CallStoreIC(property->GetSlot(0), key->value());
1408 PrepareForBailoutForId(key->id(), BailoutState::NO_REGISTERS);
1409
1410 if (NeedsHomeObject(value)) {
1411 EmitSetHomeObjectAccumulator(value, 0, property->GetSlot(1));
1412 }
1413 } else {
1414 VisitForEffect(value);
1415 }
1416 break;
1417 }
1418 // Duplicate receiver on stack.
1419 __ lw(a0, MemOperand(sp));
1420 PushOperand(a0);
1421 VisitForStackValue(key);
1422 VisitForStackValue(value);
1423 if (property->emit_store()) {
1424 if (NeedsHomeObject(value)) {
1425 EmitSetHomeObject(value, 2, property->GetSlot());
1426 }
1427 __ li(a0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes.
1428 PushOperand(a0);
1429 CallRuntimeWithOperands(Runtime::kSetProperty);
1430 } else {
1431 DropOperands(3);
1432 }
1433 break;
1434 case ObjectLiteral::Property::PROTOTYPE:
1435 // Duplicate receiver on stack.
1436 __ lw(a0, MemOperand(sp));
1437 PushOperand(a0);
1438 VisitForStackValue(value);
1439 DCHECK(property->emit_store());
1440 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1441 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1442 BailoutState::NO_REGISTERS);
1443 break;
1444 case ObjectLiteral::Property::GETTER:
1445 if (property->emit_store()) {
1446 AccessorTable::Iterator it = accessor_table.lookup(key);
1447 it->second->bailout_id = expr->GetIdForPropertySet(property_index);
1448 it->second->getter = property;
1449 }
1450 break;
1451 case ObjectLiteral::Property::SETTER:
1452 if (property->emit_store()) {
1453 AccessorTable::Iterator it = accessor_table.lookup(key);
1454 it->second->bailout_id = expr->GetIdForPropertySet(property_index);
1455 it->second->setter = property;
1456 }
1457 break;
1458 }
1459 }
1460
1461 // Emit code to define accessors, using only a single call to the runtime for
1462 // each pair of corresponding getters and setters.
1463 for (AccessorTable::Iterator it = accessor_table.begin();
1464 it != accessor_table.end();
1465 ++it) {
1466 __ lw(a0, MemOperand(sp)); // Duplicate receiver.
1467 PushOperand(a0);
1468 VisitForStackValue(it->first);
1469 EmitAccessor(it->second->getter);
1470 EmitAccessor(it->second->setter);
1471 __ li(a0, Operand(Smi::FromInt(NONE)));
1472 PushOperand(a0);
1473 CallRuntimeWithOperands(Runtime::kDefineAccessorPropertyUnchecked);
1474 PrepareForBailoutForId(it->second->bailout_id, BailoutState::NO_REGISTERS);
1475 }
1476
1477 // Object literals have two parts. The "static" part on the left contains no
1478 // computed property names, and so we can compute its map ahead of time; see
1479 // runtime.cc::CreateObjectLiteralBoilerplate. The second "dynamic" part
1480 // starts with the first computed property name, and continues with all
1481 // properties to its right. All the code from above initializes the static
1482 // component of the object literal, and arranges for the map of the result to
1483 // reflect the static order in which the keys appear. For the dynamic
1484 // properties, we compile them into a series of "SetOwnProperty" runtime
1485 // calls. This will preserve insertion order.
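// For example, in the literal { a: 1, b: 2, [x]: 3, c: 4 } the properties
// 'a' and 'b' form the static part handled above, while [x] and 'c' fall in
// the dynamic part and are defined by the runtime calls emitted below.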
1486 for (; property_index < expr->properties()->length(); property_index++) {
1487 ObjectLiteral::Property* property = expr->properties()->at(property_index);
1488
1489 Expression* value = property->value();
1490 if (!result_saved) {
1491 PushOperand(v0); // Save result on the stack
1492 result_saved = true;
1493 }
1494
1495 __ lw(a0, MemOperand(sp)); // Duplicate receiver.
1496 PushOperand(a0);
1497
1498 if (property->kind() == ObjectLiteral::Property::PROTOTYPE) {
1499 DCHECK(!property->is_computed_name());
1500 VisitForStackValue(value);
1501 DCHECK(property->emit_store());
1502 CallRuntimeWithOperands(Runtime::kInternalSetPrototype);
1503 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1504 BailoutState::NO_REGISTERS);
1505 } else {
1506 EmitPropertyKey(property, expr->GetIdForPropertyName(property_index));
1507 VisitForStackValue(value);
1508 if (NeedsHomeObject(value)) {
1509 EmitSetHomeObject(value, 2, property->GetSlot());
1510 }
1511
1512 switch (property->kind()) {
1513 case ObjectLiteral::Property::CONSTANT:
1514 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1515 case ObjectLiteral::Property::COMPUTED:
1516 if (property->emit_store()) {
1517 PushOperand(Smi::FromInt(NONE));
1518 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1519 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
1520 PrepareForBailoutForId(expr->GetIdForPropertySet(property_index),
1521 BailoutState::NO_REGISTERS);
1522 } else {
1523 DropOperands(3);
1524 }
1525 break;
1526
1527 case ObjectLiteral::Property::PROTOTYPE:
1528 UNREACHABLE();
1529 break;
1530
1531 case ObjectLiteral::Property::GETTER:
1532 PushOperand(Smi::FromInt(NONE));
1533 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
1534 break;
1535
1536 case ObjectLiteral::Property::SETTER:
1537 PushOperand(Smi::FromInt(NONE));
1538 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
1539 break;
1540 }
1541 }
1542 }
1543
1544 if (result_saved) {
1545 context()->PlugTOS();
1546 } else {
1547 context()->Plug(v0);
1548 }
1549 }
1550
1551
VisitArrayLiteral(ArrayLiteral * expr)1552 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1553 Comment cmnt(masm_, "[ ArrayLiteral");
1554
1555 Handle<FixedArray> constant_elements = expr->constant_elements();
1556 bool has_fast_elements =
1557 IsFastObjectElementsKind(expr->constant_elements_kind());
1558
1559 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1560 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1561 // If the only customer of allocation sites is transitioning, then
1562 // we can turn it off if we don't have anywhere else to transition to.
1563 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1564 }
1565
1566 __ mov(a0, result_register());
1567 __ lw(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1568 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1569 __ li(a1, Operand(constant_elements));
1570 if (MustCreateArrayLiteralWithRuntime(expr)) {
1571 __ li(a0, Operand(Smi::FromInt(expr->ComputeFlags())));
1572 __ Push(a3, a2, a1, a0);
1573 __ CallRuntime(Runtime::kCreateArrayLiteral);
1574 } else {
1575 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1576 __ CallStub(&stub);
1577 RestoreContext();
1578 }
1579 PrepareForBailoutForId(expr->CreateLiteralId(), BailoutState::TOS_REGISTER);
1580
1581 bool result_saved = false; // Is the result saved to the stack?
1582 ZoneList<Expression*>* subexprs = expr->values();
1583 int length = subexprs->length();
1584
1585 // Emit code to evaluate all the non-constant subexpressions and to store
1586 // them into the newly cloned array.
1587 for (int array_index = 0; array_index < length; array_index++) {
1588 Expression* subexpr = subexprs->at(array_index);
1589 DCHECK(!subexpr->IsSpread());
1590
1591 // If the subexpression is a literal or a simple materialized literal it
1592 // is already set in the cloned array.
1593 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1594
1595 if (!result_saved) {
1596 PushOperand(v0); // array literal
1597 result_saved = true;
1598 }
1599
1600 VisitForAccumulatorValue(subexpr);
1601
1602 __ li(StoreDescriptor::NameRegister(), Operand(Smi::FromInt(array_index)));
1603 __ lw(StoreDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1604 __ mov(StoreDescriptor::ValueRegister(), result_register());
1605 CallKeyedStoreIC(expr->LiteralFeedbackSlot());
1606
1607 PrepareForBailoutForId(expr->GetIdForElement(array_index),
1608 BailoutState::NO_REGISTERS);
1609 }
1610
1611 if (result_saved) {
1612 context()->PlugTOS();
1613 } else {
1614 context()->Plug(v0);
1615 }
1616 }
1617
1618
VisitAssignment(Assignment * expr)1619 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1620 DCHECK(expr->target()->IsValidReferenceExpressionOrThis());
1621
1622 Comment cmnt(masm_, "[ Assignment");
1623
1624 Property* property = expr->target()->AsProperty();
1625 LhsKind assign_type = Property::GetAssignType(property);
1626
1627 // Evaluate LHS expression.
1628 switch (assign_type) {
1629 case VARIABLE:
1630 // Nothing to do here.
1631 break;
1632 case NAMED_PROPERTY:
1633 if (expr->is_compound()) {
1634 // We need the receiver both on the stack and in the register.
1635 VisitForStackValue(property->obj());
1636 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1637 } else {
1638 VisitForStackValue(property->obj());
1639 }
1640 break;
1641 case NAMED_SUPER_PROPERTY:
1642 VisitForStackValue(
1643 property->obj()->AsSuperPropertyReference()->this_var());
1644 VisitForAccumulatorValue(
1645 property->obj()->AsSuperPropertyReference()->home_object());
1646 PushOperand(result_register());
1647 if (expr->is_compound()) {
1648 const Register scratch = a1;
1649 __ lw(scratch, MemOperand(sp, kPointerSize));
1650 PushOperands(scratch, result_register());
1651 }
1652 break;
1653 case KEYED_SUPER_PROPERTY: {
1654 VisitForStackValue(
1655 property->obj()->AsSuperPropertyReference()->this_var());
1656 VisitForStackValue(
1657 property->obj()->AsSuperPropertyReference()->home_object());
1658 VisitForAccumulatorValue(property->key());
1659 PushOperand(result_register());
1660 if (expr->is_compound()) {
1661 const Register scratch1 = t0;
1662 const Register scratch2 = a1;
1663 __ lw(scratch1, MemOperand(sp, 2 * kPointerSize));
1664 __ lw(scratch2, MemOperand(sp, 1 * kPointerSize));
1665 PushOperands(scratch1, scratch2, result_register());
1666 }
1667 break;
1668 }
1669 case KEYED_PROPERTY:
1670 // We need the key and receiver on both the stack and in v0 and a1.
1671 if (expr->is_compound()) {
1672 VisitForStackValue(property->obj());
1673 VisitForStackValue(property->key());
1674 __ lw(LoadDescriptor::ReceiverRegister(),
1675 MemOperand(sp, 1 * kPointerSize));
1676 __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1677 } else {
1678 VisitForStackValue(property->obj());
1679 VisitForStackValue(property->key());
1680 }
1681 break;
1682 }
1683
1684 // For compound assignments we need another deoptimization point after the
1685 // variable/property load.
1686 if (expr->is_compound()) {
1687 { AccumulatorValueContext context(this);
1688 switch (assign_type) {
1689 case VARIABLE:
1690 EmitVariableLoad(expr->target()->AsVariableProxy());
1691 PrepareForBailout(expr->target(), BailoutState::TOS_REGISTER);
1692 break;
1693 case NAMED_PROPERTY:
1694 EmitNamedPropertyLoad(property);
1695 PrepareForBailoutForId(property->LoadId(),
1696 BailoutState::TOS_REGISTER);
1697 break;
1698 case NAMED_SUPER_PROPERTY:
1699 EmitNamedSuperPropertyLoad(property);
1700 PrepareForBailoutForId(property->LoadId(),
1701 BailoutState::TOS_REGISTER);
1702 break;
1703 case KEYED_SUPER_PROPERTY:
1704 EmitKeyedSuperPropertyLoad(property);
1705 PrepareForBailoutForId(property->LoadId(),
1706 BailoutState::TOS_REGISTER);
1707 break;
1708 case KEYED_PROPERTY:
1709 EmitKeyedPropertyLoad(property);
1710 PrepareForBailoutForId(property->LoadId(),
1711 BailoutState::TOS_REGISTER);
1712 break;
1713 }
1714 }
1715
1716 Token::Value op = expr->binary_op();
1717 PushOperand(v0); // Left operand goes on the stack.
1718 VisitForAccumulatorValue(expr->value());
1719
1720 AccumulatorValueContext context(this);
1721 if (ShouldInlineSmiCase(op)) {
1722 EmitInlineSmiBinaryOp(expr->binary_operation(),
1723 op,
1724 expr->target(),
1725 expr->value());
1726 } else {
1727 EmitBinaryOp(expr->binary_operation(), op);
1728 }
1729
1730 // Deoptimization point in case the binary operation may have side effects.
1731 PrepareForBailout(expr->binary_operation(), BailoutState::TOS_REGISTER);
1732 } else {
1733 VisitForAccumulatorValue(expr->value());
1734 }
1735
1736 SetExpressionPosition(expr);
1737
1738 // Store the value.
1739 switch (assign_type) {
1740 case VARIABLE: {
1741 VariableProxy* proxy = expr->target()->AsVariableProxy();
1742 EmitVariableAssignment(proxy->var(), expr->op(), expr->AssignmentSlot(),
1743 proxy->hole_check_mode());
1744 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
1745 context()->Plug(v0);
1746 break;
1747 }
1748 case NAMED_PROPERTY:
1749 EmitNamedPropertyAssignment(expr);
1750 break;
1751 case NAMED_SUPER_PROPERTY:
1752 EmitNamedSuperPropertyStore(property);
1753 context()->Plug(v0);
1754 break;
1755 case KEYED_SUPER_PROPERTY:
1756 EmitKeyedSuperPropertyStore(property);
1757 context()->Plug(v0);
1758 break;
1759 case KEYED_PROPERTY:
1760 EmitKeyedPropertyAssignment(expr);
1761 break;
1762 }
1763 }
1764
1765
VisitYield(Yield * expr)1766 void FullCodeGenerator::VisitYield(Yield* expr) {
1767 Comment cmnt(masm_, "[ Yield");
1768 SetExpressionPosition(expr);
1769
1770 // Evaluate yielded value first; the initial iterator definition depends on
1771 // this. It stays on the stack while we update the iterator.
1772 VisitForStackValue(expr->expression());
1773
1774 Label suspend, continuation, post_runtime, resume, exception;
1775
1776 __ jmp(&suspend);
1777 __ bind(&continuation);
1778 // When we arrive here, v0 holds the generator object.
1779 __ RecordGeneratorContinuation();
1780 __ lw(a1, FieldMemOperand(v0, JSGeneratorObject::kResumeModeOffset));
1781 __ lw(v0, FieldMemOperand(v0, JSGeneratorObject::kInputOrDebugPosOffset));
1782 __ Branch(&resume, eq, a1, Operand(Smi::FromInt(JSGeneratorObject::kNext)));
1783 __ Push(result_register());
1784 __ Branch(&exception, eq, a1,
1785 Operand(Smi::FromInt(JSGeneratorObject::kThrow)));
1786 EmitCreateIteratorResult(true);
1787 EmitUnwindAndReturn();
1788
1789 __ bind(&exception);
1790 __ CallRuntime(expr->rethrow_on_exception() ? Runtime::kReThrow
1791 : Runtime::kThrow);
1792
1793 __ bind(&suspend);
1794 OperandStackDepthIncrement(1); // Not popped on this path.
1795 VisitForAccumulatorValue(expr->generator_object());
1796 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1797 __ li(a1, Operand(Smi::FromInt(continuation.pos())));
1798 __ sw(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
1799 __ sw(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
1800 __ mov(a1, cp);
1801 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
1802 kRAHasBeenSaved, kDontSaveFPRegs);
1803 __ Addu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
1804 __ Branch(&post_runtime, eq, sp, Operand(a1));
1805 __ push(v0); // generator object
1806 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1807 RestoreContext();
1808 __ bind(&post_runtime);
1809 PopOperand(result_register());
1810 EmitReturnSequence();
1811
1812 __ bind(&resume);
1813 context()->Plug(result_register());
1814 }
1815
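// Operand stack helpers: push/pop several registers at once while keeping the
// virtual operand_stack_depth_ counter in sync with the machine stack.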
PushOperands(Register reg1,Register reg2)1816 void FullCodeGenerator::PushOperands(Register reg1, Register reg2) {
1817 OperandStackDepthIncrement(2);
1818 __ Push(reg1, reg2);
1819 }
1820
PushOperands(Register reg1,Register reg2,Register reg3)1821 void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1822 Register reg3) {
1823 OperandStackDepthIncrement(3);
1824 __ Push(reg1, reg2, reg3);
1825 }
1826
PushOperands(Register reg1,Register reg2,Register reg3,Register reg4)1827 void FullCodeGenerator::PushOperands(Register reg1, Register reg2,
1828 Register reg3, Register reg4) {
1829 OperandStackDepthIncrement(4);
1830 __ Push(reg1, reg2, reg3, reg4);
1831 }
1832
PopOperands(Register reg1,Register reg2)1833 void FullCodeGenerator::PopOperands(Register reg1, Register reg2) {
1834 OperandStackDepthDecrement(2);
1835 __ Pop(reg1, reg2);
1836 }
1837
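// In debug builds, verify that the tracked operand stack depth matches the
// actual distance between fp and sp.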
EmitOperandStackDepthCheck()1838 void FullCodeGenerator::EmitOperandStackDepthCheck() {
1839 if (FLAG_debug_code) {
1840 int expected_diff = StandardFrameConstants::kFixedFrameSizeFromFp +
1841 operand_stack_depth_ * kPointerSize;
1842 __ Subu(v0, fp, sp);
1843 __ Assert(eq, kUnexpectedStackDepth, v0, Operand(expected_diff));
1844 }
1845 }
1846
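// Allocates and initializes a JSIteratorResult {value, done} object. The value
// is popped from the operand stack, 'done' comes from the compile-time flag,
// and a runtime call is used if new-space allocation fails.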
EmitCreateIteratorResult(bool done)1847 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
1848 Label allocate, done_allocate;
1849
1850 __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &allocate,
1851 NO_ALLOCATION_FLAGS);
1852 __ jmp(&done_allocate);
1853
1854 __ bind(&allocate);
1855 __ Push(Smi::FromInt(JSIteratorResult::kSize));
1856 __ CallRuntime(Runtime::kAllocateInNewSpace);
1857
1858 __ bind(&done_allocate);
1859 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
1860 PopOperand(a2);
1861 __ LoadRoot(a3,
1862 done ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex);
1863 __ LoadRoot(t0, Heap::kEmptyFixedArrayRootIndex);
1864 __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
1865 __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
1866 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
1867 __ sw(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
1868 __ sw(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
1869 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
1870 }
1871
1872
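// Fast path for binary operations when both operands are smis. A patchable
// jump (JumpPatchSite) selects between the inlined smi code below and the
// BinaryOpIC stub call.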
EmitInlineSmiBinaryOp(BinaryOperation * expr,Token::Value op,Expression * left_expr,Expression * right_expr)1873 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1874 Token::Value op,
1875 Expression* left_expr,
1876 Expression* right_expr) {
1877 Label done, smi_case, stub_call;
1878
1879 Register scratch1 = a2;
1880 Register scratch2 = a3;
1881
1882 // Get the arguments.
1883 Register left = a1;
1884 Register right = a0;
1885 PopOperand(left);
1886 __ mov(a0, result_register());
1887
1888 // Perform combined smi check on both operands.
1889 __ Or(scratch1, left, Operand(right));
1890 STATIC_ASSERT(kSmiTag == 0);
1891 JumpPatchSite patch_site(masm_);
1892 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
1893
1894 __ bind(&stub_call);
1895 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
1896 CallIC(code, expr->BinaryOperationFeedbackId());
1897 patch_site.EmitPatchInfo();
1898 __ jmp(&done);
1899
1900 __ bind(&smi_case);
1901   // Smi case. This code works the same way as the smi-smi case in the
1902   // type-recording binary operation stub.
1903 switch (op) {
1904 case Token::SAR:
1905 __ GetLeastBitsFromSmi(scratch1, right, 5);
1906 __ srav(right, left, scratch1);
1907 __ And(v0, right, Operand(~kSmiTagMask));
1908 break;
1909 case Token::SHL: {
1910 __ SmiUntag(scratch1, left);
1911 __ GetLeastBitsFromSmi(scratch2, right, 5);
1912 __ sllv(scratch1, scratch1, scratch2);
1913 __ Addu(scratch2, scratch1, Operand(0x40000000));
1914 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
1915 __ SmiTag(v0, scratch1);
1916 break;
1917 }
1918 case Token::SHR: {
1919 __ SmiUntag(scratch1, left);
1920 __ GetLeastBitsFromSmi(scratch2, right, 5);
1921 __ srlv(scratch1, scratch1, scratch2);
1922 __ And(scratch2, scratch1, 0xc0000000);
1923 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
1924 __ SmiTag(v0, scratch1);
1925 break;
1926 }
1927 case Token::ADD:
1928 __ AddBranchOvf(v0, left, Operand(right), &stub_call);
1929 break;
1930 case Token::SUB:
1931 __ SubBranchOvf(v0, left, Operand(right), &stub_call);
1932 break;
1933 case Token::MUL: {
1934 __ SmiUntag(scratch1, right);
1935 __ Mul(scratch2, v0, left, scratch1);
1936 __ sra(scratch1, v0, 31);
1937 __ Branch(&stub_call, ne, scratch1, Operand(scratch2));
1938 __ Branch(&done, ne, v0, Operand(zero_reg));
1939 __ Addu(scratch2, right, left);
1940 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
1941 DCHECK(Smi::kZero == 0);
1942 __ mov(v0, zero_reg);
1943 break;
1944 }
1945 case Token::BIT_OR:
1946 __ Or(v0, left, Operand(right));
1947 break;
1948 case Token::BIT_AND:
1949 __ And(v0, left, Operand(right));
1950 break;
1951 case Token::BIT_XOR:
1952 __ Xor(v0, left, Operand(right));
1953 break;
1954 default:
1955 UNREACHABLE();
1956 }
1957
1958 __ bind(&done);
1959 context()->Plug(v0);
1960 }
1961
1962
EmitClassDefineProperties(ClassLiteral * lit)1963 void FullCodeGenerator::EmitClassDefineProperties(ClassLiteral* lit) {
1964 for (int i = 0; i < lit->properties()->length(); i++) {
1965 ClassLiteral::Property* property = lit->properties()->at(i);
1966 Expression* value = property->value();
1967
1968 Register scratch = a1;
1969 if (property->is_static()) {
1970 __ lw(scratch, MemOperand(sp, kPointerSize)); // constructor
1971 } else {
1972 __ lw(scratch, MemOperand(sp, 0)); // prototype
1973 }
1974 PushOperand(scratch);
1975 EmitPropertyKey(property, lit->GetIdForProperty(i));
1976
1977     // The static "prototype" property is read-only. The non-computed property
1978     // name case is handled in the parser. Since this is the only place where we
1979     // need to check for an own read-only property, we special-case it here
1980     // rather than performing the check for every property.
1981 if (property->is_static() && property->is_computed_name()) {
1982 __ CallRuntime(Runtime::kThrowIfStaticPrototype);
1983 __ push(v0);
1984 }
1985
1986 VisitForStackValue(value);
1987 if (NeedsHomeObject(value)) {
1988 EmitSetHomeObject(value, 2, property->GetSlot());
1989 }
1990
1991 switch (property->kind()) {
1992 case ClassLiteral::Property::METHOD:
1993 PushOperand(Smi::FromInt(DONT_ENUM));
1994 PushOperand(Smi::FromInt(property->NeedsSetFunctionName()));
1995 CallRuntimeWithOperands(Runtime::kDefineDataPropertyInLiteral);
1996 break;
1997
1998 case ClassLiteral::Property::GETTER:
1999 PushOperand(Smi::FromInt(DONT_ENUM));
2000 CallRuntimeWithOperands(Runtime::kDefineGetterPropertyUnchecked);
2001 break;
2002
2003 case ClassLiteral::Property::SETTER:
2004 PushOperand(Smi::FromInt(DONT_ENUM));
2005 CallRuntimeWithOperands(Runtime::kDefineSetterPropertyUnchecked);
2006 break;
2007
2008 case ClassLiteral::Property::FIELD:
2009 default:
2010 UNREACHABLE();
2011 }
2012 }
2013 }
2014
2015
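// Generic (non-inlined) binary operation: the right operand is in the
// accumulator, the left operand is popped from the operand stack, and the
// BinaryOpIC stub performs the operation.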
EmitBinaryOp(BinaryOperation * expr,Token::Value op)2016 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr, Token::Value op) {
2017 __ mov(a0, result_register());
2018 PopOperand(a1);
2019 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op).code();
2020 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2021 CallIC(code, expr->BinaryOperationFeedbackId());
2022 patch_site.EmitPatchInfo();
2023 context()->Plug(v0);
2024 }
2025
2026
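// Stores the value currently in the result register into the reference
// expression 'expr' (a variable, a named/keyed property, or a super property).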
EmitAssignment(Expression * expr,FeedbackVectorSlot slot)2027 void FullCodeGenerator::EmitAssignment(Expression* expr,
2028 FeedbackVectorSlot slot) {
2029 DCHECK(expr->IsValidReferenceExpressionOrThis());
2030
2031 Property* prop = expr->AsProperty();
2032 LhsKind assign_type = Property::GetAssignType(prop);
2033
2034 switch (assign_type) {
2035 case VARIABLE: {
2036 VariableProxy* proxy = expr->AsVariableProxy();
2037 EffectContext context(this);
2038 EmitVariableAssignment(proxy->var(), Token::ASSIGN, slot,
2039 proxy->hole_check_mode());
2040 break;
2041 }
2042 case NAMED_PROPERTY: {
2043 PushOperand(result_register()); // Preserve value.
2044 VisitForAccumulatorValue(prop->obj());
2045 __ mov(StoreDescriptor::ReceiverRegister(), result_register());
2046 PopOperand(StoreDescriptor::ValueRegister()); // Restore value.
2047 CallStoreIC(slot, prop->key()->AsLiteral()->value());
2048 break;
2049 }
2050 case NAMED_SUPER_PROPERTY: {
2051 PushOperand(v0);
2052 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2053 VisitForAccumulatorValue(
2054 prop->obj()->AsSuperPropertyReference()->home_object());
2055 // stack: value, this; v0: home_object
2056 Register scratch = a2;
2057 Register scratch2 = a3;
2058 __ mov(scratch, result_register()); // home_object
2059 __ lw(v0, MemOperand(sp, kPointerSize)); // value
2060 __ lw(scratch2, MemOperand(sp, 0)); // this
2061 __ sw(scratch2, MemOperand(sp, kPointerSize)); // this
2062 __ sw(scratch, MemOperand(sp, 0)); // home_object
2063 // stack: this, home_object; v0: value
2064 EmitNamedSuperPropertyStore(prop);
2065 break;
2066 }
2067 case KEYED_SUPER_PROPERTY: {
2068 PushOperand(v0);
2069 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
2070 VisitForStackValue(
2071 prop->obj()->AsSuperPropertyReference()->home_object());
2072 VisitForAccumulatorValue(prop->key());
2073 Register scratch = a2;
2074 Register scratch2 = a3;
2075 __ lw(scratch2, MemOperand(sp, 2 * kPointerSize)); // value
2076 // stack: value, this, home_object; v0: key, a3: value
2077 __ lw(scratch, MemOperand(sp, kPointerSize)); // this
2078 __ sw(scratch, MemOperand(sp, 2 * kPointerSize));
2079 __ lw(scratch, MemOperand(sp, 0)); // home_object
2080 __ sw(scratch, MemOperand(sp, kPointerSize));
2081 __ sw(v0, MemOperand(sp, 0));
2082 __ Move(v0, scratch2);
2083 // stack: this, home_object, key; v0: value.
2084 EmitKeyedSuperPropertyStore(prop);
2085 break;
2086 }
2087 case KEYED_PROPERTY: {
2088 PushOperand(result_register()); // Preserve value.
2089 VisitForStackValue(prop->obj());
2090 VisitForAccumulatorValue(prop->key());
2091 __ mov(StoreDescriptor::NameRegister(), result_register());
2092 PopOperands(StoreDescriptor::ValueRegister(),
2093 StoreDescriptor::ReceiverRegister());
2094 CallKeyedStoreIC(slot);
2095 break;
2096 }
2097 }
2098 context()->Plug(v0);
2099 }
2100
2101
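// Stores the result register into a stack or context slot, emitting the write
// barrier that context slots require.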
EmitStoreToStackLocalOrContextSlot(Variable * var,MemOperand location)2102 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2103 Variable* var, MemOperand location) {
2104 __ sw(result_register(), location);
2105 if (var->IsContextSlot()) {
2106 // RecordWrite may destroy all its register arguments.
2107 __ Move(a3, result_register());
2108 int offset = Context::SlotOffset(var->index());
2109 __ RecordWriteContextSlot(
2110 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2111 }
2112 }
2113
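// Emits an assignment to 'var', dispatching on where the variable lives
// (global, lexically declared slot, 'this', lookup slot, or stack/context
// slot) and performing hole checks where required.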
EmitVariableAssignment(Variable * var,Token::Value op,FeedbackVectorSlot slot,HoleCheckMode hole_check_mode)2114 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op,
2115 FeedbackVectorSlot slot,
2116 HoleCheckMode hole_check_mode) {
2117 if (var->IsUnallocated()) {
2118 // Global var, const, or let.
2119 __ mov(StoreDescriptor::ValueRegister(), result_register());
2120 __ LoadGlobalObject(StoreDescriptor::ReceiverRegister());
2121 CallStoreIC(slot, var->name());
2122
2123 } else if (IsLexicalVariableMode(var->mode()) && op != Token::INIT) {
2124 DCHECK(!var->IsLookupSlot());
2125 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2126 MemOperand location = VarOperand(var, a1);
2127 // Perform an initialization check for lexically declared variables.
2128 if (hole_check_mode == HoleCheckMode::kRequired) {
2129 Label assign;
2130 __ lw(a3, location);
2131 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2132 __ Branch(&assign, ne, a3, Operand(t0));
2133 __ li(a3, Operand(var->name()));
2134 __ push(a3);
2135 __ CallRuntime(Runtime::kThrowReferenceError);
2136 __ bind(&assign);
2137 }
2138 if (var->mode() != CONST) {
2139 EmitStoreToStackLocalOrContextSlot(var, location);
2140 } else if (var->throw_on_const_assignment(language_mode())) {
2141 __ CallRuntime(Runtime::kThrowConstAssignError);
2142 }
2143 } else if (var->is_this() && var->mode() == CONST && op == Token::INIT) {
2144 // Initializing assignment to const {this} needs a write barrier.
2145 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2146 Label uninitialized_this;
2147 MemOperand location = VarOperand(var, a1);
2148 __ lw(a3, location);
2149 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2150 __ Branch(&uninitialized_this, eq, a3, Operand(at));
2151 __ li(a0, Operand(var->name()));
2152 __ Push(a0);
2153 __ CallRuntime(Runtime::kThrowReferenceError);
2154 __ bind(&uninitialized_this);
2155 EmitStoreToStackLocalOrContextSlot(var, location);
2156
2157 } else {
2158 DCHECK(var->mode() != CONST || op == Token::INIT);
2159 if (var->IsLookupSlot()) {
2160 // Assignment to var.
2161 __ Push(var->name());
2162 __ Push(v0);
2163 __ CallRuntime(is_strict(language_mode())
2164 ? Runtime::kStoreLookupSlot_Strict
2165 : Runtime::kStoreLookupSlot_Sloppy);
2166 } else {
2167 // Assignment to var or initializing assignment to let/const in harmony
2168 // mode.
2169 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2170 MemOperand location = VarOperand(var, a1);
2171 if (FLAG_debug_code && var->mode() == LET && op == Token::INIT) {
2172 // Check for an uninitialized let binding.
2173 __ lw(a2, location);
2174 __ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
2175 __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
2176 }
2177 EmitStoreToStackLocalOrContextSlot(var, location);
2178 }
2179 }
2180 }
2181
2182
EmitNamedPropertyAssignment(Assignment * expr)2183 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2184 // Assignment to a property, using a named store IC.
2185 Property* prop = expr->target()->AsProperty();
2186 DCHECK(prop != NULL);
2187 DCHECK(prop->key()->IsLiteral());
2188
2189 __ mov(StoreDescriptor::ValueRegister(), result_register());
2190 PopOperand(StoreDescriptor::ReceiverRegister());
2191 CallStoreIC(expr->AssignmentSlot(), prop->key()->AsLiteral()->value());
2192
2193 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2194 context()->Plug(v0);
2195 }
2196
2197
EmitNamedSuperPropertyStore(Property * prop)2198 void FullCodeGenerator::EmitNamedSuperPropertyStore(Property* prop) {
2199 // Assignment to named property of super.
2200 // v0 : value
2201 // stack : receiver ('this'), home_object
2202 DCHECK(prop != NULL);
2203 Literal* key = prop->key()->AsLiteral();
2204 DCHECK(key != NULL);
2205
2206 PushOperand(key->value());
2207 PushOperand(v0);
2208 CallRuntimeWithOperands(is_strict(language_mode())
2209 ? Runtime::kStoreToSuper_Strict
2210 : Runtime::kStoreToSuper_Sloppy);
2211 }
2212
2213
EmitKeyedSuperPropertyStore(Property * prop)2214 void FullCodeGenerator::EmitKeyedSuperPropertyStore(Property* prop) {
2215   // Assignment to keyed property of super.
2216 // v0 : value
2217 // stack : receiver ('this'), home_object, key
2218 DCHECK(prop != NULL);
2219
2220 PushOperand(v0);
2221 CallRuntimeWithOperands(is_strict(language_mode())
2222 ? Runtime::kStoreKeyedToSuper_Strict
2223 : Runtime::kStoreKeyedToSuper_Sloppy);
2224 }
2225
2226
EmitKeyedPropertyAssignment(Assignment * expr)2227 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2228 // Assignment to a property, using a keyed store IC.
2229 // Call keyed store IC.
2230 // The arguments are:
2231 // - a0 is the value,
2232 // - a1 is the key,
2233 // - a2 is the receiver.
2234 __ mov(StoreDescriptor::ValueRegister(), result_register());
2235 PopOperands(StoreDescriptor::ReceiverRegister(),
2236 StoreDescriptor::NameRegister());
2237 DCHECK(StoreDescriptor::ValueRegister().is(a0));
2238
2239 CallKeyedStoreIC(expr->AssignmentSlot());
2240
2241 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
2242 context()->Plug(v0);
2243 }
2244
2245 // Code common for calls using the IC.
EmitCallWithLoadIC(Call * expr)2246 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2247 Expression* callee = expr->expression();
2248
2249 // Get the target function.
2250 ConvertReceiverMode convert_mode;
2251 if (callee->IsVariableProxy()) {
2252 { StackValueContext context(this);
2253 EmitVariableLoad(callee->AsVariableProxy());
2254 PrepareForBailout(callee, BailoutState::NO_REGISTERS);
2255 }
2256 // Push undefined as receiver. This is patched in the method prologue if it
2257 // is a sloppy mode method.
2258 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
2259 PushOperand(at);
2260 convert_mode = ConvertReceiverMode::kNullOrUndefined;
2261 } else {
2262 // Load the function from the receiver.
2263 DCHECK(callee->IsProperty());
2264 DCHECK(!callee->AsProperty()->IsSuperAccess());
2265 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2266 EmitNamedPropertyLoad(callee->AsProperty());
2267 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2268 BailoutState::TOS_REGISTER);
2269 // Push the target function under the receiver.
2270 __ lw(at, MemOperand(sp, 0));
2271 PushOperand(at);
2272 __ sw(v0, MemOperand(sp, kPointerSize));
2273 convert_mode = ConvertReceiverMode::kNotNullOrUndefined;
2274 }
2275
2276 EmitCall(expr, convert_mode);
2277 }
2278
2279
EmitSuperCallWithLoadIC(Call * expr)2280 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2281 SetExpressionPosition(expr);
2282 Expression* callee = expr->expression();
2283 DCHECK(callee->IsProperty());
2284 Property* prop = callee->AsProperty();
2285 DCHECK(prop->IsSuperAccess());
2286
2287 Literal* key = prop->key()->AsLiteral();
2288 DCHECK(!key->value()->IsSmi());
2289 // Load the function from the receiver.
2290 const Register scratch = a1;
2291 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2292 VisitForAccumulatorValue(super_ref->home_object());
2293 __ mov(scratch, v0);
2294 VisitForAccumulatorValue(super_ref->this_var());
2295 PushOperands(scratch, v0, v0, scratch);
2296 PushOperand(key->value());
2297
2298 // Stack here:
2299 // - home_object
2300 // - this (receiver)
2301 // - this (receiver) <-- LoadFromSuper will pop here and below.
2302 // - home_object
2303 // - key
2304 CallRuntimeWithOperands(Runtime::kLoadFromSuper);
2305 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2306
2307 // Replace home_object with target function.
2308 __ sw(v0, MemOperand(sp, kPointerSize));
2309
2310 // Stack here:
2311 // - target function
2312 // - this (receiver)
2313 EmitCall(expr);
2314 }
2315
2316
2317 // Code common for calls using the IC.
EmitKeyedCallWithLoadIC(Call * expr,Expression * key)2318 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2319 Expression* key) {
2320 // Load the key.
2321 VisitForAccumulatorValue(key);
2322
2323 Expression* callee = expr->expression();
2324
2325 // Load the function from the receiver.
2326 DCHECK(callee->IsProperty());
2327 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2328 __ Move(LoadDescriptor::NameRegister(), v0);
2329 EmitKeyedPropertyLoad(callee->AsProperty());
2330 PrepareForBailoutForId(callee->AsProperty()->LoadId(),
2331 BailoutState::TOS_REGISTER);
2332
2333 // Push the target function under the receiver.
2334 __ lw(at, MemOperand(sp, 0));
2335 PushOperand(at);
2336 __ sw(v0, MemOperand(sp, kPointerSize));
2337
2338 EmitCall(expr, ConvertReceiverMode::kNotNullOrUndefined);
2339 }
2340
2341
EmitKeyedSuperCallWithLoadIC(Call * expr)2342 void FullCodeGenerator::EmitKeyedSuperCallWithLoadIC(Call* expr) {
2343 Expression* callee = expr->expression();
2344 DCHECK(callee->IsProperty());
2345 Property* prop = callee->AsProperty();
2346 DCHECK(prop->IsSuperAccess());
2347
2348 SetExpressionPosition(prop);
2349 // Load the function from the receiver.
2350 const Register scratch = a1;
2351 SuperPropertyReference* super_ref = prop->obj()->AsSuperPropertyReference();
2352 VisitForAccumulatorValue(super_ref->home_object());
2353 __ Move(scratch, v0);
2354 VisitForAccumulatorValue(super_ref->this_var());
2355 PushOperands(scratch, v0, v0, scratch);
2356 VisitForStackValue(prop->key());
2357
2358 // Stack here:
2359 // - home_object
2360 // - this (receiver)
2361 // - this (receiver) <-- LoadKeyedFromSuper will pop here and below.
2362 // - home_object
2363 // - key
2364 CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
2365 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
2366
2367 // Replace home_object with target function.
2368 __ sw(v0, MemOperand(sp, kPointerSize));
2369
2370 // Stack here:
2371 // - target function
2372 // - this (receiver)
2373 EmitCall(expr);
2374 }
2375
2376
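// Common tail for the call variants above: pushes the arguments, records the
// call position (handling tail calls), then invokes the CallIC with the
// feedback slot in a3, the target in a1, and the argument count in a0.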
EmitCall(Call * expr,ConvertReceiverMode mode)2377 void FullCodeGenerator::EmitCall(Call* expr, ConvertReceiverMode mode) {
2378 // Load the arguments.
2379 ZoneList<Expression*>* args = expr->arguments();
2380 int arg_count = args->length();
2381 for (int i = 0; i < arg_count; i++) {
2382 VisitForStackValue(args->at(i));
2383 }
2384
2385 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
2386 // Record source position of the IC call.
2387 SetCallPosition(expr, expr->tail_call_mode());
2388 if (expr->tail_call_mode() == TailCallMode::kAllow) {
2389 if (FLAG_trace) {
2390 __ CallRuntime(Runtime::kTraceTailCall);
2391 }
2392 // Update profiling counters before the tail call since we will
2393 // not return to this function.
2394 EmitProfilingCounterHandlingForReturnSequence(true);
2395 }
2396 Handle<Code> code =
2397 CodeFactory::CallIC(isolate(), mode, expr->tail_call_mode()).code();
2398 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
2399 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2400 __ li(a0, Operand(arg_count));
2401 CallIC(code);
2402 OperandStackDepthDecrement(arg_count + 1);
2403
2404 RecordJSReturnSite(expr);
2405 RestoreContext();
2406 context()->DropAndPlug(1, v0);
2407 }
2408
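// Pushes the arguments for Runtime::kResolvePossiblyDirectEval: the first call
// argument (or undefined), the enclosing function, the language mode, the
// start position of the enclosing scope, and the source position of the call.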
EmitResolvePossiblyDirectEval(Call * expr)2409 void FullCodeGenerator::EmitResolvePossiblyDirectEval(Call* expr) {
2410 int arg_count = expr->arguments()->length();
2411 // t4: copy of the first argument or undefined if it doesn't exist.
2412 if (arg_count > 0) {
2413 __ lw(t4, MemOperand(sp, arg_count * kPointerSize));
2414 } else {
2415 __ LoadRoot(t4, Heap::kUndefinedValueRootIndex);
2416 }
2417
2418   // t3: the enclosing function (loaded from the frame's function slot).
2419 __ lw(t3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2420
2421 // t2: the language mode.
2422 __ li(t2, Operand(Smi::FromInt(language_mode())));
2423
2424   // t1: the start position of the scope the call resides in.
2425 __ li(t1, Operand(Smi::FromInt(scope()->start_position())));
2426
2427 // t0: the source position of the eval call.
2428 __ li(t0, Operand(Smi::FromInt(expr->position())));
2429
2430 // Do the runtime call.
2431 __ Push(t4, t3, t2, t1, t0);
2432 __ CallRuntime(Runtime::kResolvePossiblyDirectEval);
2433 }
2434
2435
2436 // See http://www.ecma-international.org/ecma-262/6.0/#sec-function-calls.
PushCalleeAndWithBaseObject(Call * expr)2437 void FullCodeGenerator::PushCalleeAndWithBaseObject(Call* expr) {
2438 VariableProxy* callee = expr->expression()->AsVariableProxy();
2439 if (callee->var()->IsLookupSlot()) {
2440 Label slow, done;
2441
2442 SetExpressionPosition(callee);
2443 // Generate code for loading from variables potentially shadowed by
2444 // eval-introduced variables.
2445 EmitDynamicLookupFastCase(callee, NOT_INSIDE_TYPEOF, &slow, &done);
2446
2447 __ bind(&slow);
2448 // Call the runtime to find the function to call (returned in v0)
2449 // and the object holding it (returned in v1).
2450 __ Push(callee->name());
2451 __ CallRuntime(Runtime::kLoadLookupSlotForCall);
2452 PushOperands(v0, v1); // Function, receiver.
2453 PrepareForBailoutForId(expr->LookupId(), BailoutState::NO_REGISTERS);
2454
2455 // If fast case code has been generated, emit code to push the
2456 // function and receiver and have the slow path jump around this
2457 // code.
2458 if (done.is_linked()) {
2459 Label call;
2460 __ Branch(&call);
2461 __ bind(&done);
2462 // Push function.
2463 __ push(v0);
2464 // The receiver is implicitly the global receiver. Indicate this
2465       // by passing undefined to the call function stub.
2466 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2467 __ push(a1);
2468 __ bind(&call);
2469 }
2470 } else {
2471 VisitForStackValue(callee);
2472 // refEnv.WithBaseObject()
2473 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2474 PushOperand(a2); // Reserved receiver slot.
2475 }
2476 }
2477
2478
EmitPossiblyEvalCall(Call * expr)2479 void FullCodeGenerator::EmitPossiblyEvalCall(Call* expr) {
2480 // In a call to eval, we first call Runtime_ResolvePossiblyDirectEval
2481 // to resolve the function we need to call. Then we call the resolved
2482 // function using the given arguments.
2483 ZoneList<Expression*>* args = expr->arguments();
2484 int arg_count = args->length();
2485 PushCalleeAndWithBaseObject(expr);
2486
2487 // Push the arguments.
2488 for (int i = 0; i < arg_count; i++) {
2489 VisitForStackValue(args->at(i));
2490 }
2491
2492 // Push a copy of the function (found below the arguments) and
2493 // resolve eval.
2494 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2495 __ push(a1);
2496 EmitResolvePossiblyDirectEval(expr);
2497
2498 // Touch up the stack with the resolved function.
2499 __ sw(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2500
2501 PrepareForBailoutForId(expr->EvalId(), BailoutState::NO_REGISTERS);
2502 // Record source position for debugger.
2503 SetCallPosition(expr);
2504 Handle<Code> code = CodeFactory::CallIC(isolate(), ConvertReceiverMode::kAny,
2505 expr->tail_call_mode())
2506 .code();
2507 __ li(a3, Operand(SmiFromSlot(expr->CallFeedbackICSlot())));
2508 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2509 __ li(a0, Operand(arg_count));
2510 __ Call(code, RelocInfo::CODE_TARGET);
2511 OperandStackDepthDecrement(arg_count + 1);
2512 RecordJSReturnSite(expr);
2513 RestoreContext();
2514 context()->DropAndPlug(1, v0);
2515 }
2516
2517
VisitCallNew(CallNew * expr)2518 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2519 Comment cmnt(masm_, "[ CallNew");
2520 // According to ECMA-262, section 11.2.2, page 44, the function
2521 // expression in new calls must be evaluated before the
2522 // arguments.
2523
2524 // Push constructor on the stack. If it's not a function it's used as
2525 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2526   // ignored.
2527 DCHECK(!expr->expression()->IsSuperPropertyReference());
2528 VisitForStackValue(expr->expression());
2529
2530 // Push the arguments ("left-to-right") on the stack.
2531 ZoneList<Expression*>* args = expr->arguments();
2532 int arg_count = args->length();
2533 for (int i = 0; i < arg_count; i++) {
2534 VisitForStackValue(args->at(i));
2535 }
2536
2537 // Call the construct call builtin that handles allocation and
2538 // constructor invocation.
2539 SetConstructCallPosition(expr);
2540
2541 // Load function and argument count into a1 and a0.
2542 __ li(a0, Operand(arg_count));
2543 __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2544
2545 // Record call targets in unoptimized code.
2546 __ EmitLoadTypeFeedbackVector(a2);
2547 __ li(a3, Operand(SmiFromSlot(expr->CallNewFeedbackSlot())));
2548
2549 CallConstructStub stub(isolate());
2550 CallIC(stub.GetCode());
2551 OperandStackDepthDecrement(arg_count + 1);
2552 PrepareForBailoutForId(expr->ReturnId(), BailoutState::TOS_REGISTER);
2553 RestoreContext();
2554 context()->Plug(v0);
2555 }
2556
2557
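// Emits a super(...) constructor call: the parent constructor is read from the
// prototype of this_function's map, new.target is loaded into a3, and the
// Construct builtin handles allocation and invocation.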
EmitSuperConstructorCall(Call * expr)2558 void FullCodeGenerator::EmitSuperConstructorCall(Call* expr) {
2559 SuperCallReference* super_call_ref =
2560 expr->expression()->AsSuperCallReference();
2561 DCHECK_NOT_NULL(super_call_ref);
2562
2563 // Push the super constructor target on the stack (may be null,
2564 // but the Construct builtin can deal with that properly).
2565 VisitForAccumulatorValue(super_call_ref->this_function_var());
2566 __ AssertFunction(result_register());
2567 __ lw(result_register(),
2568 FieldMemOperand(result_register(), HeapObject::kMapOffset));
2569 __ lw(result_register(),
2570 FieldMemOperand(result_register(), Map::kPrototypeOffset));
2571 PushOperand(result_register());
2572
2573 // Push the arguments ("left-to-right") on the stack.
2574 ZoneList<Expression*>* args = expr->arguments();
2575 int arg_count = args->length();
2576 for (int i = 0; i < arg_count; i++) {
2577 VisitForStackValue(args->at(i));
2578 }
2579
2580 // Call the construct call builtin that handles allocation and
2581 // constructor invocation.
2582 SetConstructCallPosition(expr);
2583
2584 // Load new target into a3.
2585 VisitForAccumulatorValue(super_call_ref->new_target_var());
2586 __ mov(a3, result_register());
2587
2588 // Load function and argument count into a1 and a0.
2589 __ li(a0, Operand(arg_count));
2590 __ lw(a1, MemOperand(sp, arg_count * kPointerSize));
2591
2592 __ Call(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2593 OperandStackDepthDecrement(arg_count + 1);
2594
2595 RecordJSReturnSite(expr);
2596 RestoreContext();
2597 context()->Plug(v0);
2598 }
2599
2600
EmitIsSmi(CallRuntime * expr)2601 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2602 ZoneList<Expression*>* args = expr->arguments();
2603 DCHECK(args->length() == 1);
2604
2605 VisitForAccumulatorValue(args->at(0));
2606
2607 Label materialize_true, materialize_false;
2608 Label* if_true = NULL;
2609 Label* if_false = NULL;
2610 Label* fall_through = NULL;
2611 context()->PrepareTest(&materialize_true, &materialize_false,
2612 &if_true, &if_false, &fall_through);
2613
2614 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2615 __ SmiTst(v0, t0);
2616 Split(eq, t0, Operand(zero_reg), if_true, if_false, fall_through);
2617
2618 context()->Plug(if_true, if_false);
2619 }
2620
2621
EmitIsJSReceiver(CallRuntime * expr)2622 void FullCodeGenerator::EmitIsJSReceiver(CallRuntime* expr) {
2623 ZoneList<Expression*>* args = expr->arguments();
2624 DCHECK(args->length() == 1);
2625
2626 VisitForAccumulatorValue(args->at(0));
2627
2628 Label materialize_true, materialize_false;
2629 Label* if_true = NULL;
2630 Label* if_false = NULL;
2631 Label* fall_through = NULL;
2632 context()->PrepareTest(&materialize_true, &materialize_false,
2633 &if_true, &if_false, &fall_through);
2634
2635 __ JumpIfSmi(v0, if_false);
2636 __ GetObjectType(v0, a1, a1);
2637 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2638 Split(ge, a1, Operand(FIRST_JS_RECEIVER_TYPE),
2639 if_true, if_false, fall_through);
2640
2641 context()->Plug(if_true, if_false);
2642 }
2643
2644
EmitIsArray(CallRuntime * expr)2645 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2646 ZoneList<Expression*>* args = expr->arguments();
2647 DCHECK(args->length() == 1);
2648
2649 VisitForAccumulatorValue(args->at(0));
2650
2651 Label materialize_true, materialize_false;
2652 Label* if_true = NULL;
2653 Label* if_false = NULL;
2654 Label* fall_through = NULL;
2655 context()->PrepareTest(&materialize_true, &materialize_false,
2656 &if_true, &if_false, &fall_through);
2657
2658 __ JumpIfSmi(v0, if_false);
2659 __ GetObjectType(v0, a1, a1);
2660 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2661 Split(eq, a1, Operand(JS_ARRAY_TYPE),
2662 if_true, if_false, fall_through);
2663
2664 context()->Plug(if_true, if_false);
2665 }
2666
2667
EmitIsTypedArray(CallRuntime * expr)2668 void FullCodeGenerator::EmitIsTypedArray(CallRuntime* expr) {
2669 ZoneList<Expression*>* args = expr->arguments();
2670 DCHECK(args->length() == 1);
2671
2672 VisitForAccumulatorValue(args->at(0));
2673
2674 Label materialize_true, materialize_false;
2675 Label* if_true = NULL;
2676 Label* if_false = NULL;
2677 Label* fall_through = NULL;
2678 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2679 &if_false, &fall_through);
2680
2681 __ JumpIfSmi(v0, if_false);
2682 __ GetObjectType(v0, a1, a1);
2683 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2684 Split(eq, a1, Operand(JS_TYPED_ARRAY_TYPE), if_true, if_false, fall_through);
2685
2686 context()->Plug(if_true, if_false);
2687 }
2688
2689
EmitIsRegExp(CallRuntime * expr)2690 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2691 ZoneList<Expression*>* args = expr->arguments();
2692 DCHECK(args->length() == 1);
2693
2694 VisitForAccumulatorValue(args->at(0));
2695
2696 Label materialize_true, materialize_false;
2697 Label* if_true = NULL;
2698 Label* if_false = NULL;
2699 Label* fall_through = NULL;
2700 context()->PrepareTest(&materialize_true, &materialize_false,
2701 &if_true, &if_false, &fall_through);
2702
2703 __ JumpIfSmi(v0, if_false);
2704 __ GetObjectType(v0, a1, a1);
2705 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2706 Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
2707
2708 context()->Plug(if_true, if_false);
2709 }
2710
2711
EmitIsJSProxy(CallRuntime * expr)2712 void FullCodeGenerator::EmitIsJSProxy(CallRuntime* expr) {
2713 ZoneList<Expression*>* args = expr->arguments();
2714 DCHECK(args->length() == 1);
2715
2716 VisitForAccumulatorValue(args->at(0));
2717
2718 Label materialize_true, materialize_false;
2719 Label* if_true = NULL;
2720 Label* if_false = NULL;
2721 Label* fall_through = NULL;
2722 context()->PrepareTest(&materialize_true, &materialize_false, &if_true,
2723 &if_false, &fall_through);
2724
2725 __ JumpIfSmi(v0, if_false);
2726 __ GetObjectType(v0, a1, a1);
2727 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2728 Split(eq, a1, Operand(JS_PROXY_TYPE), if_true, if_false, fall_through);
2729
2730 context()->Plug(if_true, if_false);
2731 }
2732
2733
EmitClassOf(CallRuntime * expr)2734 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2735 ZoneList<Expression*>* args = expr->arguments();
2736 DCHECK(args->length() == 1);
2737 Label done, null, function, non_function_constructor;
2738
2739 VisitForAccumulatorValue(args->at(0));
2740
2741 // If the object is not a JSReceiver, we return null.
2742 __ JumpIfSmi(v0, &null);
2743 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2744 __ GetObjectType(v0, v0, a1); // Map is now in v0.
2745 __ Branch(&null, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
2746
2747 // Return 'Function' for JSFunction and JSBoundFunction objects.
2748 STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
2749 __ Branch(&function, hs, a1, Operand(FIRST_FUNCTION_TYPE));
2750
2751 // Check if the constructor in the map is a JS function.
2752 Register instance_type = a2;
2753 __ GetMapConstructor(v0, v0, a1, instance_type);
2754 __ Branch(&non_function_constructor, ne, instance_type,
2755 Operand(JS_FUNCTION_TYPE));
2756
2757 // v0 now contains the constructor function. Grab the
2758 // instance class name from there.
2759 __ lw(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
2760 __ lw(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
2761 __ Branch(&done);
2762
2763 // Functions have class 'Function'.
2764 __ bind(&function);
2765 __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
2766 __ jmp(&done);
2767
2768 // Objects with a non-function constructor have class 'Object'.
2769 __ bind(&non_function_constructor);
2770 __ LoadRoot(v0, Heap::kObject_stringRootIndex);
2771 __ jmp(&done);
2772
2773 // Non-JS objects have class null.
2774 __ bind(&null);
2775 __ LoadRoot(v0, Heap::kNullValueRootIndex);
2776
2777 // All done.
2778 __ bind(&done);
2779
2780 context()->Plug(v0);
2781 }
2782
2783
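// %_StringCharCodeAt(string, index): emits the StringCharCodeAtGenerator fast
// path, returning NaN when the index is out of range and deferring other
// non-trivial cases to the generator's slow path.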
EmitStringCharCodeAt(CallRuntime * expr)2784 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
2785 ZoneList<Expression*>* args = expr->arguments();
2786 DCHECK(args->length() == 2);
2787
2788 VisitForStackValue(args->at(0));
2789 VisitForAccumulatorValue(args->at(1));
2790 __ mov(a0, result_register());
2791
2792 Register object = a1;
2793 Register index = a0;
2794 Register result = v0;
2795
2796 PopOperand(object);
2797
2798 Label need_conversion;
2799 Label index_out_of_range;
2800 Label done;
2801 StringCharCodeAtGenerator generator(object, index, result, &need_conversion,
2802 &need_conversion, &index_out_of_range);
2803 generator.GenerateFast(masm_);
2804 __ jmp(&done);
2805
2806 __ bind(&index_out_of_range);
2807 // When the index is out of range, the spec requires us to return
2808 // NaN.
2809 __ LoadRoot(result, Heap::kNanValueRootIndex);
2810 __ jmp(&done);
2811
2812 __ bind(&need_conversion);
2813 // Load the undefined value into the result register, which will
2814 // trigger conversion.
2815 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2816 __ jmp(&done);
2817
2818 NopRuntimeCallHelper call_helper;
2819 generator.GenerateSlow(masm_, NOT_PART_OF_IC_HANDLER, call_helper);
2820
2821 __ bind(&done);
2822 context()->Plug(result);
2823 }
2824
2825
EmitCall(CallRuntime * expr)2826 void FullCodeGenerator::EmitCall(CallRuntime* expr) {
2827 ZoneList<Expression*>* args = expr->arguments();
2828 DCHECK_LE(2, args->length());
2829 // Push target, receiver and arguments onto the stack.
2830 for (Expression* const arg : *args) {
2831 VisitForStackValue(arg);
2832 }
2833 PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
2834 // Move target to a1.
2835 int const argc = args->length() - 2;
2836 __ lw(a1, MemOperand(sp, (argc + 1) * kPointerSize));
2837 // Call the target.
2838 __ li(a0, Operand(argc));
2839 __ Call(isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2840 OperandStackDepthDecrement(argc + 1);
2841 RestoreContext();
2842 // Discard the function left on TOS.
2843 context()->DropAndPlug(1, v0);
2844 }
2845
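// %_GetSuperConstructor(fn): returns fn's [[Prototype]] (read from its map),
// i.e. the parent constructor.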
EmitGetSuperConstructor(CallRuntime * expr)2846 void FullCodeGenerator::EmitGetSuperConstructor(CallRuntime* expr) {
2847 ZoneList<Expression*>* args = expr->arguments();
2848 DCHECK_EQ(1, args->length());
2849 VisitForAccumulatorValue(args->at(0));
2850 __ AssertFunction(v0);
2851 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
2852 __ lw(v0, FieldMemOperand(v0, Map::kPrototypeOffset));
2853 context()->Plug(v0);
2854 }
2855
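// %_DebugIsActive(): loads the isolate's debug_is_active flag and returns it
// as a smi.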
EmitDebugIsActive(CallRuntime * expr)2856 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
2857 DCHECK(expr->arguments()->length() == 0);
2858 ExternalReference debug_is_active =
2859 ExternalReference::debug_is_active_address(isolate());
2860 __ li(at, Operand(debug_is_active));
2861 __ lb(v0, MemOperand(at));
2862 __ SmiTag(v0);
2863 context()->Plug(v0);
2864 }
2865
2866
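// %_CreateIterResultObject(value, done): fast-allocates and initializes the
// iterator result object, falling back to the runtime when new-space
// allocation fails.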
EmitCreateIterResultObject(CallRuntime * expr)2867 void FullCodeGenerator::EmitCreateIterResultObject(CallRuntime* expr) {
2868 ZoneList<Expression*>* args = expr->arguments();
2869 DCHECK_EQ(2, args->length());
2870 VisitForStackValue(args->at(0));
2871 VisitForStackValue(args->at(1));
2872
2873 Label runtime, done;
2874
2875 __ Allocate(JSIteratorResult::kSize, v0, a2, a3, &runtime,
2876 NO_ALLOCATION_FLAGS);
2877 __ LoadNativeContextSlot(Context::ITERATOR_RESULT_MAP_INDEX, a1);
2878 __ Pop(a2, a3);
2879 __ LoadRoot(t0, Heap::kEmptyFixedArrayRootIndex);
2880 __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2881 __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2882 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
2883 __ sw(a2, FieldMemOperand(v0, JSIteratorResult::kValueOffset));
2884 __ sw(a3, FieldMemOperand(v0, JSIteratorResult::kDoneOffset));
2885 STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
2886 __ jmp(&done);
2887
2888 __ bind(&runtime);
2889 CallRuntimeWithOperands(Runtime::kCreateIterResultObject);
2890
2891 __ bind(&done);
2892 context()->Plug(v0);
2893 }
2894
2895
EmitLoadJSRuntimeFunction(CallRuntime * expr)2896 void FullCodeGenerator::EmitLoadJSRuntimeFunction(CallRuntime* expr) {
2897 // Push function.
2898 __ LoadNativeContextSlot(expr->context_index(), v0);
2899 PushOperand(v0);
2900
2901 // Push undefined as the receiver.
2902 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
2903 PushOperand(v0);
2904 }
2905
2906
EmitCallJSRuntimeFunction(CallRuntime * expr)2907 void FullCodeGenerator::EmitCallJSRuntimeFunction(CallRuntime* expr) {
2908 ZoneList<Expression*>* args = expr->arguments();
2909 int arg_count = args->length();
2910
2911 SetCallPosition(expr);
2912 __ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2913 __ li(a0, Operand(arg_count));
2914 __ Call(isolate()->builtins()->Call(ConvertReceiverMode::kNullOrUndefined),
2915 RelocInfo::CODE_TARGET);
2916 OperandStackDepthDecrement(arg_count + 1);
2917 RestoreContext();
2918 }
2919
2920
VisitUnaryOperation(UnaryOperation * expr)2921 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
2922 switch (expr->op()) {
2923 case Token::DELETE: {
2924 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
2925 Property* property = expr->expression()->AsProperty();
2926 VariableProxy* proxy = expr->expression()->AsVariableProxy();
2927
2928 if (property != NULL) {
2929 VisitForStackValue(property->obj());
2930 VisitForStackValue(property->key());
2931 CallRuntimeWithOperands(is_strict(language_mode())
2932 ? Runtime::kDeleteProperty_Strict
2933 : Runtime::kDeleteProperty_Sloppy);
2934 context()->Plug(v0);
2935 } else if (proxy != NULL) {
2936 Variable* var = proxy->var();
2937 // Delete of an unqualified identifier is disallowed in strict mode but
2938 // "delete this" is allowed.
2939 bool is_this = var->is_this();
2940 DCHECK(is_sloppy(language_mode()) || is_this);
2941 if (var->IsUnallocated()) {
2942 __ LoadGlobalObject(a2);
2943 __ li(a1, Operand(var->name()));
2944 __ Push(a2, a1);
2945 __ CallRuntime(Runtime::kDeleteProperty_Sloppy);
2946 context()->Plug(v0);
2947 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
2948 // Result of deleting non-global, non-dynamic variables is false.
2949 // The subexpression does not have side effects.
2950 context()->Plug(is_this);
2951 } else {
2952 // Non-global variable. Call the runtime to try to delete from the
2953 // context where the variable was introduced.
2954 __ Push(var->name());
2955 __ CallRuntime(Runtime::kDeleteLookupSlot);
2956 context()->Plug(v0);
2957 }
2958 } else {
2959 // Result of deleting non-property, non-variable reference is true.
2960 // The subexpression may have side effects.
2961 VisitForEffect(expr->expression());
2962 context()->Plug(true);
2963 }
2964 break;
2965 }
2966
2967 case Token::VOID: {
2968 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
2969 VisitForEffect(expr->expression());
2970 context()->Plug(Heap::kUndefinedValueRootIndex);
2971 break;
2972 }
2973
2974 case Token::NOT: {
2975 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
2976 if (context()->IsEffect()) {
2977 // Unary NOT has no side effects so it's only necessary to visit the
2978 // subexpression. Match the optimizing compiler by not branching.
2979 VisitForEffect(expr->expression());
2980 } else if (context()->IsTest()) {
2981 const TestContext* test = TestContext::cast(context());
2982 // The labels are swapped for the recursive call.
2983 VisitForControl(expr->expression(),
2984 test->false_label(),
2985 test->true_label(),
2986 test->fall_through());
2987 context()->Plug(test->true_label(), test->false_label());
2988 } else {
2989 // We handle value contexts explicitly rather than simply visiting
2990 // for control and plugging the control flow into the context,
2991 // because we need to prepare a pair of extra administrative AST ids
2992 // for the optimizing compiler.
2993 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
2994 Label materialize_true, materialize_false, done;
2995 VisitForControl(expr->expression(),
2996 &materialize_false,
2997 &materialize_true,
2998 &materialize_true);
2999 if (!context()->IsAccumulatorValue()) OperandStackDepthIncrement(1);
3000 __ bind(&materialize_true);
3001 PrepareForBailoutForId(expr->MaterializeTrueId(),
3002 BailoutState::NO_REGISTERS);
3003 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
3004 if (context()->IsStackValue()) __ push(v0);
3005 __ jmp(&done);
3006 __ bind(&materialize_false);
3007 PrepareForBailoutForId(expr->MaterializeFalseId(),
3008 BailoutState::NO_REGISTERS);
3009 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
3010 if (context()->IsStackValue()) __ push(v0);
3011 __ bind(&done);
3012 }
3013 break;
3014 }
3015
3016 case Token::TYPEOF: {
3017 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3018 {
3019 AccumulatorValueContext context(this);
3020 VisitForTypeofValue(expr->expression());
3021 }
3022 __ mov(a3, v0);
3023 __ Call(isolate()->builtins()->Typeof(), RelocInfo::CODE_TARGET);
3024 context()->Plug(v0);
3025 break;
3026 }
3027
3028 default:
3029 UNREACHABLE();
3030 }
3031 }
3032
3033
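// Handles prefix and postfix ++/--. The current value of the target is loaded,
// an inlined smi increment/decrement is attempted through a patchable site,
// and the generic path converts the value with ToNumber before the full
// operation.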
VisitCountOperation(CountOperation * expr)3034 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3035 DCHECK(expr->expression()->IsValidReferenceExpressionOrThis());
3036
3037 Comment cmnt(masm_, "[ CountOperation");
3038
3039 Property* prop = expr->expression()->AsProperty();
3040 LhsKind assign_type = Property::GetAssignType(prop);
3041
3042 // Evaluate expression and get value.
3043 if (assign_type == VARIABLE) {
3044 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
3045 AccumulatorValueContext context(this);
3046 EmitVariableLoad(expr->expression()->AsVariableProxy());
3047 } else {
3048 // Reserve space for result of postfix operation.
3049 if (expr->is_postfix() && !context()->IsEffect()) {
3050 __ li(at, Operand(Smi::kZero));
3051 PushOperand(at);
3052 }
3053 switch (assign_type) {
3054 case NAMED_PROPERTY: {
3055 // Put the object both on the stack and in the register.
3056 VisitForStackValue(prop->obj());
3057 __ lw(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
3058 EmitNamedPropertyLoad(prop);
3059 break;
3060 }
3061
3062 case NAMED_SUPER_PROPERTY: {
3063 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3064 VisitForAccumulatorValue(
3065 prop->obj()->AsSuperPropertyReference()->home_object());
3066 const Register scratch = a1;
3067 __ lw(scratch, MemOperand(sp, 0)); // this
3068 PushOperands(result_register(), scratch, result_register());
3069 EmitNamedSuperPropertyLoad(prop);
3070 break;
3071 }
3072
3073 case KEYED_SUPER_PROPERTY: {
3074 VisitForStackValue(prop->obj()->AsSuperPropertyReference()->this_var());
3075 VisitForStackValue(
3076 prop->obj()->AsSuperPropertyReference()->home_object());
3077 VisitForAccumulatorValue(prop->key());
3078 const Register scratch1 = a1;
3079 const Register scratch2 = t0;
3080 __ lw(scratch1, MemOperand(sp, 1 * kPointerSize)); // this
3081 __ lw(scratch2, MemOperand(sp, 0 * kPointerSize)); // home object
3082 PushOperands(result_register(), scratch1, scratch2, result_register());
3083 EmitKeyedSuperPropertyLoad(prop);
3084 break;
3085 }
3086
3087 case KEYED_PROPERTY: {
3088 VisitForStackValue(prop->obj());
3089 VisitForStackValue(prop->key());
3090 __ lw(LoadDescriptor::ReceiverRegister(),
3091 MemOperand(sp, 1 * kPointerSize));
3092 __ lw(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
3093 EmitKeyedPropertyLoad(prop);
3094 break;
3095 }
3096
3097 case VARIABLE:
3098 UNREACHABLE();
3099 }
3100 }
3101
3102 // We need a second deoptimization point after loading the value
3103   // in case evaluating the property load may have a side effect.
3104 if (assign_type == VARIABLE) {
3105 PrepareForBailout(expr->expression(), BailoutState::TOS_REGISTER);
3106 } else {
3107 PrepareForBailoutForId(prop->LoadId(), BailoutState::TOS_REGISTER);
3108 }
3109
3110 // Inline smi case if we are in a loop.
3111 Label stub_call, done;
3112 JumpPatchSite patch_site(masm_);
3113
3114 int count_value = expr->op() == Token::INC ? 1 : -1;
3115 __ mov(a0, v0);
3116 if (ShouldInlineSmiCase(expr->op())) {
3117 Label slow;
3118 patch_site.EmitJumpIfNotSmi(v0, &slow);
3119
3120 // Save result for postfix expressions.
3121 if (expr->is_postfix()) {
3122 if (!context()->IsEffect()) {
3123 // Save the result on the stack. If we have a named or keyed property
3124 // we store the result under the receiver that is currently on top
3125 // of the stack.
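          // The offsets below skip the receiver/key/home object operands that
          // must stay on the operand stack for the later store and hit the
          // slot reserved for the postfix result.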
3126 switch (assign_type) {
3127 case VARIABLE:
3128 __ push(v0);
3129 break;
3130 case NAMED_PROPERTY:
3131 __ sw(v0, MemOperand(sp, kPointerSize));
3132 break;
3133 case NAMED_SUPER_PROPERTY:
3134 __ sw(v0, MemOperand(sp, 2 * kPointerSize));
3135 break;
3136 case KEYED_PROPERTY:
3137 __ sw(v0, MemOperand(sp, 2 * kPointerSize));
3138 break;
3139 case KEYED_SUPER_PROPERTY:
3140 __ sw(v0, MemOperand(sp, 3 * kPointerSize));
3141 break;
3142 }
3143 }
3144 }
3145
3146 Register scratch1 = a1;
3147 __ li(scratch1, Operand(Smi::FromInt(count_value)));
3148 __ AddBranchNoOvf(v0, v0, Operand(scratch1), &done);
3149 // Call stub. Undo operation first.
3150 __ Move(v0, a0);
3151 __ jmp(&stub_call);
3152 __ bind(&slow);
3153 }
3154
3155 // Convert old value into a number.
3156 __ Call(isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
3157 RestoreContext();
3158 PrepareForBailoutForId(expr->ToNumberId(), BailoutState::TOS_REGISTER);
3159
3160 // Save result for postfix expressions.
3161 if (expr->is_postfix()) {
3162 if (!context()->IsEffect()) {
3163 // Save the result on the stack. If we have a named or keyed property
3164 // we store the result under the receiver that is currently on top
3165 // of the stack.
3166 switch (assign_type) {
3167 case VARIABLE:
3168 PushOperand(v0);
3169 break;
3170 case NAMED_PROPERTY:
3171 __ sw(v0, MemOperand(sp, kPointerSize));
3172 break;
3173 case NAMED_SUPER_PROPERTY:
3174 __ sw(v0, MemOperand(sp, 2 * kPointerSize));
3175 break;
3176 case KEYED_PROPERTY:
3177 __ sw(v0, MemOperand(sp, 2 * kPointerSize));
3178 break;
3179 case KEYED_SUPER_PROPERTY:
3180 __ sw(v0, MemOperand(sp, 3 * kPointerSize));
3181 break;
3182 }
3183 }
3184 }
3185
3186 __ bind(&stub_call);
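  // a1 receives the (number-converted) old value and a0 the smi-tagged count
  // (+1 or -1); the ADD BinaryOpIC below then computes the new value with
  // full JS numeric semantics.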
3187 __ mov(a1, v0);
3188 __ li(a0, Operand(Smi::FromInt(count_value)));
3189
3190 SetExpressionPosition(expr);
3191
3192 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), Token::ADD).code();
3193 CallIC(code, expr->CountBinOpFeedbackId());
3194 patch_site.EmitPatchInfo();
3195 __ bind(&done);
3196
3197 // Store the value returned in v0.
3198 switch (assign_type) {
3199 case VARIABLE: {
3200 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3201 if (expr->is_postfix()) {
3202 { EffectContext context(this);
3203 EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
3204 proxy->hole_check_mode());
3205 PrepareForBailoutForId(expr->AssignmentId(),
3206 BailoutState::TOS_REGISTER);
3207 context.Plug(v0);
3208 }
3209         // For all contexts except EffectContext we have the result on
3210         // top of the stack.
3211 if (!context()->IsEffect()) {
3212 context()->PlugTOS();
3213 }
3214 } else {
3215 EmitVariableAssignment(proxy->var(), Token::ASSIGN, expr->CountSlot(),
3216 proxy->hole_check_mode());
3217 PrepareForBailoutForId(expr->AssignmentId(),
3218 BailoutState::TOS_REGISTER);
3219 context()->Plug(v0);
3220 }
3221 break;
3222 }
3223 case NAMED_PROPERTY: {
3224 __ mov(StoreDescriptor::ValueRegister(), result_register());
3225 PopOperand(StoreDescriptor::ReceiverRegister());
3226 CallStoreIC(expr->CountSlot(), prop->key()->AsLiteral()->value());
3227 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3228 if (expr->is_postfix()) {
3229 if (!context()->IsEffect()) {
3230 context()->PlugTOS();
3231 }
3232 } else {
3233 context()->Plug(v0);
3234 }
3235 break;
3236 }
3237 case NAMED_SUPER_PROPERTY: {
3238 EmitNamedSuperPropertyStore(prop);
3239 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3240 if (expr->is_postfix()) {
3241 if (!context()->IsEffect()) {
3242 context()->PlugTOS();
3243 }
3244 } else {
3245 context()->Plug(v0);
3246 }
3247 break;
3248 }
3249 case KEYED_SUPER_PROPERTY: {
3250 EmitKeyedSuperPropertyStore(prop);
3251 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3252 if (expr->is_postfix()) {
3253 if (!context()->IsEffect()) {
3254 context()->PlugTOS();
3255 }
3256 } else {
3257 context()->Plug(v0);
3258 }
3259 break;
3260 }
3261 case KEYED_PROPERTY: {
3262 __ mov(StoreDescriptor::ValueRegister(), result_register());
3263 PopOperands(StoreDescriptor::ReceiverRegister(),
3264 StoreDescriptor::NameRegister());
3265 CallKeyedStoreIC(expr->CountSlot());
3266 PrepareForBailoutForId(expr->AssignmentId(), BailoutState::TOS_REGISTER);
3267 if (expr->is_postfix()) {
3268 if (!context()->IsEffect()) {
3269 context()->PlugTOS();
3270 }
3271 } else {
3272 context()->Plug(v0);
3273 }
3274 break;
3275 }
3276 }
3277 }
3278
3279
3280 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
3281 Expression* sub_expr,
3282 Handle<String> check) {
3283 Label materialize_true, materialize_false;
3284 Label* if_true = NULL;
3285 Label* if_false = NULL;
3286 Label* fall_through = NULL;
3287 context()->PrepareTest(&materialize_true, &materialize_false,
3288 &if_true, &if_false, &fall_through);
3289
3290 { AccumulatorValueContext context(this);
3291 VisitForTypeofValue(sub_expr);
3292 }
3293 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3294
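  // Each branch below tests the map or instance type that corresponds to one
  // possible typeof result string, e.g. "number" matches smis and heap
  // numbers, "string" matches every instance type below FIRST_NONSTRING_TYPE.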
3295 Factory* factory = isolate()->factory();
3296 if (String::Equals(check, factory->number_string())) {
3297 __ JumpIfSmi(v0, if_true);
3298 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
3299 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
3300 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
3301 } else if (String::Equals(check, factory->string_string())) {
3302 __ JumpIfSmi(v0, if_false);
3303 __ GetObjectType(v0, v0, a1);
3304 Split(lt, a1, Operand(FIRST_NONSTRING_TYPE), if_true, if_false,
3305 fall_through);
3306 } else if (String::Equals(check, factory->symbol_string())) {
3307 __ JumpIfSmi(v0, if_false);
3308 __ GetObjectType(v0, v0, a1);
3309 Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
3310 } else if (String::Equals(check, factory->boolean_string())) {
3311 __ LoadRoot(at, Heap::kTrueValueRootIndex);
3312 __ Branch(if_true, eq, v0, Operand(at));
3313 __ LoadRoot(at, Heap::kFalseValueRootIndex);
3314 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
3315 } else if (String::Equals(check, factory->undefined_string())) {
3316 __ LoadRoot(at, Heap::kNullValueRootIndex);
3317 __ Branch(if_false, eq, v0, Operand(at));
3318 __ JumpIfSmi(v0, if_false);
3319 // Check for undetectable objects => true.
3320 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
3321 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
3322 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
3323 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
3324 } else if (String::Equals(check, factory->function_string())) {
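    // typeof reports "function" only for objects that are callable and not
    // undetectable; callable undetectable objects fall under the "undefined"
    // branch above instead.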
3325 __ JumpIfSmi(v0, if_false);
3326 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
3327 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
3328 __ And(a1, a1,
3329 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3330 Split(eq, a1, Operand(1 << Map::kIsCallable), if_true, if_false,
3331 fall_through);
3332 } else if (String::Equals(check, factory->object_string())) {
3333 __ JumpIfSmi(v0, if_false);
3334 __ LoadRoot(at, Heap::kNullValueRootIndex);
3335 __ Branch(if_true, eq, v0, Operand(at));
3336 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
3337 __ GetObjectType(v0, v0, a1);
3338 __ Branch(if_false, lt, a1, Operand(FIRST_JS_RECEIVER_TYPE));
3339 // Check for callable or undetectable objects => false.
3340 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
3341 __ And(a1, a1,
3342 Operand((1 << Map::kIsCallable) | (1 << Map::kIsUndetectable)));
3343 Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
3344 // clang-format off
3345 #define SIMD128_TYPE(TYPE, Type, type, lane_count, lane_type) \
3346 } else if (String::Equals(check, factory->type##_string())) { \
3347 __ JumpIfSmi(v0, if_false); \
3348 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); \
3349 __ LoadRoot(at, Heap::k##Type##MapRootIndex); \
3350 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
3351 SIMD128_TYPES(SIMD128_TYPE)
3352 #undef SIMD128_TYPE
3353 // clang-format on
3354 } else {
3355 if (if_false != fall_through) __ jmp(if_false);
3356 }
3357 context()->Plug(if_true, if_false);
3358 }
3359
3360
3361 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
3362 Comment cmnt(masm_, "[ CompareOperation");
3363
3364 // First we try a fast inlined version of the compare when one of
3365 // the operands is a literal.
3366 if (TryLiteralCompare(expr)) return;
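  // TryLiteralCompare covers typeof comparisons against string literals and
  // comparisons against null/undefined, dispatching to
  // EmitLiteralCompareTypeof and EmitLiteralCompareNil below.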
3367
3368 // Always perform the comparison for its control flow. Pack the result
3369 // into the expression's context after the comparison is performed.
3370 Label materialize_true, materialize_false;
3371 Label* if_true = NULL;
3372 Label* if_false = NULL;
3373 Label* fall_through = NULL;
3374 context()->PrepareTest(&materialize_true, &materialize_false,
3375 &if_true, &if_false, &fall_through);
3376
3377 Token::Value op = expr->op();
3378 VisitForStackValue(expr->left());
3379 switch (op) {
3380 case Token::IN:
3381 VisitForStackValue(expr->right());
3382 SetExpressionPosition(expr);
3383 EmitHasProperty();
3384 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3385 __ LoadRoot(t0, Heap::kTrueValueRootIndex);
3386 Split(eq, v0, Operand(t0), if_true, if_false, fall_through);
3387 break;
3388
3389 case Token::INSTANCEOF: {
3390 VisitForAccumulatorValue(expr->right());
3391 SetExpressionPosition(expr);
3392 __ mov(a0, result_register());
3393 PopOperand(a1);
3394 __ Call(isolate()->builtins()->InstanceOf(), RelocInfo::CODE_TARGET);
3395 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
3396 __ LoadRoot(at, Heap::kTrueValueRootIndex);
3397 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
3398 break;
3399 }
3400
3401 default: {
3402 VisitForAccumulatorValue(expr->right());
3403 SetExpressionPosition(expr);
3404 Condition cc = CompareIC::ComputeCondition(op);
3405 __ mov(a0, result_register());
3406 PopOperand(a1);
3407
3408 bool inline_smi_code = ShouldInlineSmiCase(op);
3409 JumpPatchSite patch_site(masm_);
3410 if (inline_smi_code) {
3411 Label slow_case;
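        // Or-ing the operands lets one test cover both: since the smi tag is
        // zero, the result has its tag bit set iff at least one operand is a
        // heap object.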
3412 __ Or(a2, a0, Operand(a1));
3413 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
3414 Split(cc, a1, Operand(a0), if_true, if_false, NULL);
3415 __ bind(&slow_case);
3416 }
3417
3418 Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
3419 CallIC(ic, expr->CompareOperationFeedbackId());
3420 patch_site.EmitPatchInfo();
3421 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3422 Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
3423 }
3424 }
3425
3426 // Convert the result of the comparison into one expected for this
3427 // expression's context.
3428 context()->Plug(if_true, if_false);
3429 }
3430
3431
3432 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
3433 Expression* sub_expr,
3434 NilValue nil) {
3435 Label materialize_true, materialize_false;
3436 Label* if_true = NULL;
3437 Label* if_false = NULL;
3438 Label* fall_through = NULL;
3439 context()->PrepareTest(&materialize_true, &materialize_false,
3440 &if_true, &if_false, &fall_through);
3441
3442 VisitForAccumulatorValue(sub_expr);
3443 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
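  // Strict equality requires the exact null or undefined value; sloppy
  // equality against null/undefined also accepts undetectable objects, so it
  // only tests the map's undetectable bit.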
3444 if (expr->op() == Token::EQ_STRICT) {
3445 Heap::RootListIndex nil_value = nil == kNullValue ?
3446 Heap::kNullValueRootIndex :
3447 Heap::kUndefinedValueRootIndex;
3448 __ LoadRoot(a1, nil_value);
3449 Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
3450 } else {
3451 __ JumpIfSmi(v0, if_false);
3452 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
3453 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
3454 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
3455 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
3456 }
3457 context()->Plug(if_true, if_false);
3458 }
3459
3460
3461 Register FullCodeGenerator::result_register() {
3462 return v0;
3463 }
3464
3465
3466 Register FullCodeGenerator::context_register() {
3467 return cp;
3468 }
3469
3470 void FullCodeGenerator::LoadFromFrameField(int frame_offset, Register value) {
3471 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
3472 __ lw(value, MemOperand(fp, frame_offset));
3473 }
3474
3475 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
3476 DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
3477 __ sw(value, MemOperand(fp, frame_offset));
3478 }
3479
3480
3481 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
3482 __ lw(dst, ContextMemOperand(cp, context_index));
3483 }
3484
3485
3486 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
3487 DeclarationScope* closure_scope = scope()->GetClosureScope();
3488 if (closure_scope->is_script_scope() ||
3489 closure_scope->is_module_scope()) {
3490 // Contexts nested in the native context have a canonical empty function
3491 // as their closure, not the anonymous closure containing the global
3492 // code.
3493 __ LoadNativeContextSlot(Context::CLOSURE_INDEX, at);
3494 } else if (closure_scope->is_eval_scope()) {
3495 // Contexts created by a call to eval have the same closure as the
3496 // context calling eval, not the anonymous closure containing the eval
3497 // code. Fetch it from the context.
3498 __ lw(at, ContextMemOperand(cp, Context::CLOSURE_INDEX));
3499 } else {
3500 DCHECK(closure_scope->is_function_scope());
3501 __ lw(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
3502 }
3503 PushOperand(at);
3504 }
3505
3506
3507 // ----------------------------------------------------------------------------
3508 // Non-local control flow support.
3509
3510 void FullCodeGenerator::EnterFinallyBlock() {
3511 DCHECK(!result_register().is(a1));
3512 // Store pending message while executing finally block.
3513 ExternalReference pending_message_obj =
3514 ExternalReference::address_of_pending_message_obj(isolate());
3515 __ li(at, Operand(pending_message_obj));
3516 __ lw(a1, MemOperand(at));
3517 PushOperand(a1);
3518
3519 ClearPendingMessage();
3520 }
3521
3522
3523 void FullCodeGenerator::ExitFinallyBlock() {
3524 DCHECK(!result_register().is(a1));
3525 // Restore pending message from stack.
3526 PopOperand(a1);
3527 ExternalReference pending_message_obj =
3528 ExternalReference::address_of_pending_message_obj(isolate());
3529 __ li(at, Operand(pending_message_obj));
3530 __ sw(a1, MemOperand(at));
3531 }
3532
3533
3534 void FullCodeGenerator::ClearPendingMessage() {
3535 DCHECK(!result_register().is(a1));
3536 ExternalReference pending_message_obj =
3537 ExternalReference::address_of_pending_message_obj(isolate());
3538 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
3539 __ li(at, Operand(pending_message_obj));
3540 __ sw(a1, MemOperand(at));
3541 }
3542
3543
3544 void FullCodeGenerator::DeferredCommands::EmitCommands() {
3545 DCHECK(!result_register().is(a1));
3546 __ Pop(result_register()); // Restore the accumulator.
3547 __ Pop(a1); // Get the token.
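  // Each command deferred across a try-finally left a unique smi token on the
  // stack; compare against it to resume the matching break, continue, return
  // or rethrow once the finally block has run.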
3548 for (DeferredCommand cmd : commands_) {
3549 Label skip;
3550 __ li(at, Operand(Smi::FromInt(cmd.token)));
3551 __ Branch(&skip, ne, a1, Operand(at));
3552 switch (cmd.command) {
3553 case kReturn:
3554 codegen_->EmitUnwindAndReturn();
3555 break;
3556 case kThrow:
3557 __ Push(result_register());
3558 __ CallRuntime(Runtime::kReThrow);
3559 break;
3560 case kContinue:
3561 codegen_->EmitContinue(cmd.target);
3562 break;
3563 case kBreak:
3564 codegen_->EmitBreak(cmd.target);
3565 break;
3566 }
3567 __ bind(&skip);
3568 }
3569 }
3570
3571 #undef __
3572
3573
3574 void BackEdgeTable::PatchAt(Code* unoptimized_code,
3575 Address pc,
3576 BackEdgeState target_state,
3577 Code* replacement_code) {
3578 static const int kInstrSize = Assembler::kInstrSize;
3579 Address pc_immediate_load_address =
3580 Assembler::target_address_from_return_address(pc);
3581 Address branch_address = pc_immediate_load_address - 2 * kInstrSize;
3582 Isolate* isolate = unoptimized_code->GetIsolate();
3583 CodePatcher patcher(isolate, branch_address, 1);
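  // Only the condition-setting instruction (slt vs. addiu) is rewritten here;
  // the beq, the lui/ori pair, the jalr and its delay slot stay in place, and
  // the call target in the lui/ori pair is updated separately below.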
3584
3585 switch (target_state) {
3586 case INTERRUPT:
3587 // slt at, a3, zero_reg (in case of count based interrupts)
3588 // beq at, zero_reg, ok
3589 // lui t9, <interrupt stub address> upper
3590 // ori t9, <interrupt stub address> lower
3591 // jalr t9
3592 // nop
3593 // ok-label ----- pc_after points here
3594 patcher.masm()->slt(at, a3, zero_reg);
3595 break;
3596 case ON_STACK_REPLACEMENT:
3597 // addiu at, zero_reg, 1
3598 // beq at, zero_reg, ok ;; Not changed
3599 // lui t9, <on-stack replacement address> upper
3600 // ori t9, <on-stack replacement address> lower
3601 // jalr t9 ;; Not changed
3602 // nop ;; Not changed
3603 // ok-label ----- pc_after points here
3604 patcher.masm()->addiu(at, zero_reg, 1);
3605 break;
3606 }
3607 // Replace the stack check address in the load-immediate (lui/ori pair)
3608 // with the entry address of the replacement code.
3609 Assembler::set_target_address_at(isolate, pc_immediate_load_address,
3610 replacement_code->entry());
3611
3612 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
3613 unoptimized_code, pc_immediate_load_address, replacement_code);
3614 }
3615
3616
3617 BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
3618 Isolate* isolate,
3619 Code* unoptimized_code,
3620 Address pc) {
3621 static const int kInstrSize = Assembler::kInstrSize;
3622 Address pc_immediate_load_address =
3623 Assembler::target_address_from_return_address(pc);
3624 Address branch_address = pc_immediate_load_address - 2 * kInstrSize;
3625
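  // The state is recovered from the patched condition instruction: an addiu
  // means the sequence was switched to on-stack replacement, the original slt
  // means interrupt checks are still active. The DCHECKs verify that the
  // lui/ori call target matches the expected builtin.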
3626 DCHECK(Assembler::IsBeq(Assembler::instr_at(branch_address + kInstrSize)));
3627 if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
3628 DCHECK(reinterpret_cast<uint32_t>(
3629 Assembler::target_address_at(pc_immediate_load_address)) ==
3630 reinterpret_cast<uint32_t>(
3631 isolate->builtins()->InterruptCheck()->entry()));
3632 return INTERRUPT;
3633 }
3634
3635 DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));
3636
3637 DCHECK(reinterpret_cast<uint32_t>(
3638 Assembler::target_address_at(pc_immediate_load_address)) ==
3639 reinterpret_cast<uint32_t>(
3640 isolate->builtins()->OnStackReplacement()->entry()));
3641 return ON_STACK_REPLACEMENT;
3642 }
3643
3644
3645 } // namespace internal
3646 } // namespace v8
3647
3648 #endif // V8_TARGET_ARCH_MIPS
3649