1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/v8.h"
6
7 #if V8_TARGET_ARCH_MIPS64
8
9 // Note on Mips implementation:
10 //
11 // The result_register() for mips is the 'v0' register, which is defined
12 // by the ABI to contain function return values. However, the first
13 // parameter to a function is defined to be 'a0'. So there are many
14 // places where we have to move a previous result in v0 to a0 for the
15 // next call: mov(a0, v0). This is not needed on the other architectures.
16
17 #include "src/code-factory.h"
18 #include "src/code-stubs.h"
19 #include "src/codegen.h"
20 #include "src/compiler.h"
21 #include "src/debug.h"
22 #include "src/full-codegen.h"
23 #include "src/ic/ic.h"
24 #include "src/isolate-inl.h"
25 #include "src/parser.h"
26 #include "src/scopes.h"
27
28 #include "src/mips64/code-stubs-mips64.h"
29 #include "src/mips64/macro-assembler-mips64.h"
30
31 namespace v8 {
32 namespace internal {
33
34 #define __ ACCESS_MASM(masm_)
35
36
37 // A patch site is a location in the code which it is possible to patch. This
38 // class has a number of methods to emit the code which is patchable and the
39 // method EmitPatchInfo to record a marker back to the patchable code. This
40 // marker is a andi zero_reg, rx, #yyyy instruction, and rx * 0x0000ffff + yyyy
41 // (raw 16 bit immediate value is used) is the delta from the pc to the first
42 // instruction of the patchable code.
43 // The marker instruction is effectively a NOP (dest is zero_reg) and will
44 // never be emitted by normal code.
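// For example (illustrative numbers only): a delta of 10 instructions is
// recorded by EmitPatchInfo() below as
// andi(zero_reg, Register::from_code(10 / 0xffff), 10 % 0xffff), i.e. rx has
// register code 0 and yyyy is 10, so the patcher recovers the delta as
// 0 * 0xffff + 10 = 10.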
45 class JumpPatchSite BASE_EMBEDDED {
46 public:
47 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
48 #ifdef DEBUG
49 info_emitted_ = false;
50 #endif
51 }
52
53 ~JumpPatchSite() {
54 DCHECK(patch_site_.is_bound() == info_emitted_);
55 }
56
57 // When initially emitting this code, ensure that a jump is always generated to skip
58 // the inlined smi code.
59 void EmitJumpIfNotSmi(Register reg, Label* target) {
60 DCHECK(!patch_site_.is_bound() && !info_emitted_);
61 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
62 __ bind(&patch_site_);
63 __ andi(at, reg, 0);
64 // Always taken before patched.
65 __ BranchShort(target, eq, at, Operand(zero_reg));
66 }
67
68 // When initially emitting this code, ensure that a jump is never generated to skip
69 // the inlined smi code.
70 void EmitJumpIfSmi(Register reg, Label* target) {
71 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
72 DCHECK(!patch_site_.is_bound() && !info_emitted_);
73 __ bind(&patch_site_);
74 __ andi(at, reg, 0);
75 // Never taken before patched.
76 __ BranchShort(target, ne, at, Operand(zero_reg));
77 }
78
79 void EmitPatchInfo() {
80 if (patch_site_.is_bound()) {
81 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
82 Register reg = Register::from_code(delta_to_patch_site / kImm16Mask);
83 __ andi(zero_reg, reg, delta_to_patch_site % kImm16Mask);
84 #ifdef DEBUG
85 info_emitted_ = true;
86 #endif
87 } else {
88 __ nop(); // Signals no inlined code.
89 }
90 }
91
92 private:
93 MacroAssembler* masm_;
94 Label patch_site_;
95 #ifdef DEBUG
96 bool info_emitted_;
97 #endif
98 };
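// Typical use of JumpPatchSite (see the switch-case comparison in
// VisitSwitchStatement below): construct the patch site, wrap the inlined smi
// comparison with EmitJumpIfNotSmi(), call the CompareIC, and record the
// marker with EmitPatchInfo() immediately after the IC call.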
99
100
101 // Generate code for a JS function. On entry to the function the receiver
102 // and arguments have been pushed on the stack left to right. The actual
103 // argument count matches the formal parameter count expected by the
104 // function.
105 //
106 // The live registers are:
107 // o a1: the JS function object being called (i.e. ourselves)
108 // o cp: our context
109 // o fp: our caller's frame pointer
110 // o sp: stack pointer
111 // o ra: return address
112 //
113 // The function builds a JS frame. Please see JavaScriptFrameConstants in
114 // frames-mips.h for its layout.
115 void FullCodeGenerator::Generate() {
116 CompilationInfo* info = info_;
117 handler_table_ =
118 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
119
120 profiling_counter_ = isolate()->factory()->NewCell(
121 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
122 SetFunctionPosition(function());
123 Comment cmnt(masm_, "[ function compiled by full code generator");
124
125 ProfileEntryHookStub::MaybeCallEntryHook(masm_);
126
127 #ifdef DEBUG
128 if (strlen(FLAG_stop_at) > 0 &&
129 info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
130 __ stop("stop-at");
131 }
132 #endif
133
134 // Sloppy mode functions and builtins need to replace the receiver with the
135 // global proxy when called as functions (without an explicit receiver
136 // object).
137 if (info->strict_mode() == SLOPPY && !info->is_native()) {
138 Label ok;
139 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
140 __ ld(at, MemOperand(sp, receiver_offset));
141 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
142 __ Branch(&ok, ne, a2, Operand(at));
143
144 __ ld(a2, GlobalObjectOperand());
145 __ ld(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));
146
147 __ sd(a2, MemOperand(sp, receiver_offset));
148 __ bind(&ok);
149 }
150 // Open a frame scope to indicate that there is a frame on the stack. The
151 // MANUAL indicates that the scope shouldn't actually generate code to set up
152 // the frame (that is done below).
153 FrameScope frame_scope(masm_, StackFrame::MANUAL);
154 info->set_prologue_offset(masm_->pc_offset());
155 __ Prologue(info->IsCodePreAgingActive());
156 info->AddNoFrameRange(0, masm_->pc_offset());
157
158 { Comment cmnt(masm_, "[ Allocate locals");
159 int locals_count = info->scope()->num_stack_slots();
160 // Generators allocate locals, if any, in context slots.
161 DCHECK(!info->function()->is_generator() || locals_count == 0);
162 if (locals_count > 0) {
163 if (locals_count >= 128) {
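// For larger frames, check against the real stack limit before allocating
// the locals and call the stack-overflow builtin if the frame would not fit.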
164 Label ok;
165 __ Dsubu(t1, sp, Operand(locals_count * kPointerSize));
166 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
167 __ Branch(&ok, hs, t1, Operand(a2));
168 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
169 __ bind(&ok);
170 }
171 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
172 int kMaxPushes = FLAG_optimize_for_size ? 4 : 32;
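// Pushes of the undefined value are emitted in batches of kMaxPushes: a loop
// handles whole batches and the remainder is fully unrolled below, which
// bounds the amount of generated code.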
173 if (locals_count >= kMaxPushes) {
174 int loop_iterations = locals_count / kMaxPushes;
175 __ li(a2, Operand(loop_iterations));
176 Label loop_header;
177 __ bind(&loop_header);
178 // Do pushes.
179 __ Dsubu(sp, sp, Operand(kMaxPushes * kPointerSize));
180 for (int i = 0; i < kMaxPushes; i++) {
181 __ sd(t1, MemOperand(sp, i * kPointerSize));
182 }
183 // Continue loop if not done.
184 __ Dsubu(a2, a2, Operand(1));
185 __ Branch(&loop_header, ne, a2, Operand(zero_reg));
186 }
187 int remaining = locals_count % kMaxPushes;
188 // Emit the remaining pushes.
189 __ Dsubu(sp, sp, Operand(remaining * kPointerSize));
190 for (int i = 0; i < remaining; i++) {
191 __ sd(t1, MemOperand(sp, i * kPointerSize));
192 }
193 }
194 }
195
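// Track whether the closure is still live in a1 (see the register comments
// above); the stub and runtime calls below may clobber it, in which case it
// is reloaded from the frame when needed.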
196 bool function_in_register = true;
197
198 // Possibly allocate a local context.
199 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
200 if (heap_slots > 0) {
201 Comment cmnt(masm_, "[ Allocate context");
202 // Argument to NewContext is the function, which is still in a1.
203 bool need_write_barrier = true;
204 if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
205 __ push(a1);
206 __ Push(info->scope()->GetScopeInfo());
207 __ CallRuntime(Runtime::kNewGlobalContext, 2);
208 } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
209 FastNewContextStub stub(isolate(), heap_slots);
210 __ CallStub(&stub);
211 // Result of FastNewContextStub is always in new space.
212 need_write_barrier = false;
213 } else {
214 __ push(a1);
215 __ CallRuntime(Runtime::kNewFunctionContext, 1);
216 }
217 function_in_register = false;
218 // Context is returned in v0. It replaces the context passed to us.
219 // It's saved in the stack and kept live in cp.
220 __ mov(cp, v0);
221 __ sd(v0, MemOperand(fp, StandardFrameConstants::kContextOffset));
222 // Copy any necessary parameters into the context.
223 int num_parameters = info->scope()->num_parameters();
224 for (int i = 0; i < num_parameters; i++) {
225 Variable* var = scope()->parameter(i);
226 if (var->IsContextSlot()) {
227 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
228 (num_parameters - 1 - i) * kPointerSize;
229 // Load parameter from stack.
230 __ ld(a0, MemOperand(fp, parameter_offset));
231 // Store it in the context.
232 MemOperand target = ContextOperand(cp, var->index());
233 __ sd(a0, target);
234
235 // Update the write barrier.
236 if (need_write_barrier) {
237 __ RecordWriteContextSlot(
238 cp, target.offset(), a0, a3, kRAHasBeenSaved, kDontSaveFPRegs);
239 } else if (FLAG_debug_code) {
240 Label done;
241 __ JumpIfInNewSpace(cp, a0, &done);
242 __ Abort(kExpectedNewSpaceObject);
243 __ bind(&done);
244 }
245 }
246 }
247 }
248 Variable* arguments = scope()->arguments();
249 if (arguments != NULL) {
250 // Function uses arguments object.
251 Comment cmnt(masm_, "[ Allocate arguments object");
252 if (!function_in_register) {
253 // Load this again, if it's used by the local context below.
254 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
255 } else {
256 __ mov(a3, a1);
257 }
258 // Receiver is just before the parameters on the caller's stack.
259 int num_parameters = info->scope()->num_parameters();
260 int offset = num_parameters * kPointerSize;
261 __ Daddu(a2, fp,
262 Operand(StandardFrameConstants::kCallerSPOffset + offset));
263 __ li(a1, Operand(Smi::FromInt(num_parameters)));
264 __ Push(a3, a2, a1);
265
266 // Arguments to ArgumentsAccessStub:
267 // function, receiver address, parameter count.
268 // The stub will rewrite the receiver and parameter count if the previous
269 // stack frame was an arguments adapter frame.
270 ArgumentsAccessStub::Type type;
271 if (strict_mode() == STRICT) {
272 type = ArgumentsAccessStub::NEW_STRICT;
273 } else if (function()->has_duplicate_parameters()) {
274 type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
275 } else {
276 type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
277 }
278 ArgumentsAccessStub stub(isolate(), type);
279 __ CallStub(&stub);
280
281 SetVar(arguments, v0, a1, a2);
282 }
283
284 if (FLAG_trace) {
285 __ CallRuntime(Runtime::kTraceEnter, 0);
286 }
287 // Visit the declarations and body unless there is an illegal
288 // redeclaration.
289 if (scope()->HasIllegalRedeclaration()) {
290 Comment cmnt(masm_, "[ Declarations");
291 scope()->VisitIllegalRedeclaration(this);
292
293 } else {
294 PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
295 { Comment cmnt(masm_, "[ Declarations");
296 // For named function expressions, declare the function name as a
297 // constant.
298 if (scope()->is_function_scope() && scope()->function() != NULL) {
299 VariableDeclaration* function = scope()->function();
300 DCHECK(function->proxy()->var()->mode() == CONST ||
301 function->proxy()->var()->mode() == CONST_LEGACY);
302 DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
303 VisitVariableDeclaration(function);
304 }
305 VisitDeclarations(scope()->declarations());
306 }
307 { Comment cmnt(masm_, "[ Stack check");
308 PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
309 Label ok;
310 __ LoadRoot(at, Heap::kStackLimitRootIndex);
311 __ Branch(&ok, hs, sp, Operand(at));
312 Handle<Code> stack_check = isolate()->builtins()->StackCheck();
313 PredictableCodeSizeScope predictable(masm_,
314 masm_->CallSize(stack_check, RelocInfo::CODE_TARGET));
315 __ Call(stack_check, RelocInfo::CODE_TARGET);
316 __ bind(&ok);
317 }
318
319 { Comment cmnt(masm_, "[ Body");
320 DCHECK(loop_depth() == 0);
321
322 VisitStatements(function()->body());
323
324 DCHECK(loop_depth() == 0);
325 }
326 }
327
328 // Always emit a 'return undefined' in case control fell off the end of
329 // the body.
330 { Comment cmnt(masm_, "[ return <undefined>;");
331 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
332 }
333 EmitReturnSequence();
334 }
335
336
337 void FullCodeGenerator::ClearAccumulator() {
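// A smi zero has the same bit pattern as the machine zero (asserted below),
// so clearing the accumulator is a plain move of zero_reg into v0.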
338 DCHECK(Smi::FromInt(0) == 0);
339 __ mov(v0, zero_reg);
340 }
341
342
343 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
344 __ li(a2, Operand(profiling_counter_));
345 __ ld(a3, FieldMemOperand(a2, Cell::kValueOffset));
346 __ Dsubu(a3, a3, Operand(Smi::FromInt(delta)));
347 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
348 }
349
350
351 void FullCodeGenerator::EmitProfilingCounterReset() {
352 int reset_value = FLAG_interrupt_budget;
353 if (info_->is_debug()) {
354 // Detect debug break requests as soon as possible.
355 reset_value = FLAG_interrupt_budget >> 4;
356 }
357 __ li(a2, Operand(profiling_counter_));
358 __ li(a3, Operand(Smi::FromInt(reset_value)));
359 __ sd(a3, FieldMemOperand(a2, Cell::kValueOffset));
360 }
361
362
363 void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
364 Label* back_edge_target) {
365 // The generated code is used in Deoptimizer::PatchStackCheckCodeAt so we need
366 // to make sure it is constant. Branch may emit a skip-or-jump sequence
367 // instead of the normal Branch. It seems that the "skip" part of that
368 // sequence is about as long as this Branch would be so it is safe to ignore
369 // that.
370 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
371 Comment cmnt(masm_, "[ Back edge bookkeeping");
372 Label ok;
373 DCHECK(back_edge_target->is_bound());
374 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
375 int weight = Min(kMaxBackEdgeWeight,
376 Max(1, distance / kCodeSizeMultiplier));
377 EmitProfilingCounterDecrement(weight);
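// The decremented counter value is left in a3 by the helper above; skip the
// interrupt check while it is still non-negative.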
378 __ slt(at, a3, zero_reg);
379 __ beq(at, zero_reg, &ok);
380 // Call will emit a li t9 first, so it is safe to use the delay slot.
381 __ Call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);
382 // Record a mapping of this PC offset to the OSR id. This is used to find
383 // the AST id from the unoptimized code in order to use it as a key into
384 // the deoptimization input data found in the optimized code.
385 RecordBackEdge(stmt->OsrEntryId());
386 EmitProfilingCounterReset();
387
388 __ bind(&ok);
389 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
390 // Record a mapping of the OSR id to this PC. This is used if the OSR
391 // entry becomes the target of a bailout. We don't expect it to be, but
392 // we want it to work if it is.
393 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
394 }
395
396
397 void FullCodeGenerator::EmitReturnSequence() {
398 Comment cmnt(masm_, "[ Return sequence");
399 if (return_label_.is_bound()) {
400 __ Branch(&return_label_);
401 } else {
402 __ bind(&return_label_);
403 if (FLAG_trace) {
404 // Push the return value on the stack as the parameter.
405 // Runtime::TraceExit returns its parameter in v0.
406 __ push(v0);
407 __ CallRuntime(Runtime::kTraceExit, 1);
408 }
409 // Pretend that the exit is a backwards jump to the entry.
410 int weight = 1;
411 if (info_->ShouldSelfOptimize()) {
412 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
413 } else {
414 int distance = masm_->pc_offset();
415 weight = Min(kMaxBackEdgeWeight,
416 Max(1, distance / kCodeSizeMultiplier));
417 }
418 EmitProfilingCounterDecrement(weight);
419 Label ok;
420 __ Branch(&ok, ge, a3, Operand(zero_reg));
421 __ push(v0);
422 __ Call(isolate()->builtins()->InterruptCheck(),
423 RelocInfo::CODE_TARGET);
424 __ pop(v0);
425 EmitProfilingCounterReset();
426 __ bind(&ok);
427
428 #ifdef DEBUG
429 // Add a label for checking the size of the code used for returning.
430 Label check_exit_codesize;
431 masm_->bind(&check_exit_codesize);
432 #endif
433 // Make sure that the constant pool is not emitted inside of the return
434 // sequence.
435 { Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
436 // Here we use masm_-> instead of the __ macro to avoid the code coverage
437 // tool from instrumenting as we rely on the code size here.
438 int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
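// sp_delta covers the receiver plus the formal parameters, all of which are
// removed from the caller's stack as part of the return.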
439 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
440 __ RecordJSReturn();
441 masm_->mov(sp, fp);
442 int no_frame_start = masm_->pc_offset();
443 masm_->MultiPop(static_cast<RegList>(fp.bit() | ra.bit()));
444 masm_->Daddu(sp, sp, Operand(sp_delta));
445 masm_->Jump(ra);
446 info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
447 }
448
449 #ifdef DEBUG
450 // Check that the size of the code used for returning is large enough
451 // for the debugger's requirements.
452 DCHECK(Assembler::kJSReturnSequenceInstructions <=
453 masm_->InstructionsGeneratedSince(&check_exit_codesize));
454 #endif
455 }
456 }
457
458
459 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
460 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
461 }
462
463
464 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
465 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
466 codegen()->GetVar(result_register(), var);
467 }
468
469
470 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
471 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
472 codegen()->GetVar(result_register(), var);
473 __ push(result_register());
474 }
475
476
477 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
478 // For simplicity we always test the accumulator register.
479 codegen()->GetVar(result_register(), var);
480 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
481 codegen()->DoTest(this);
482 }
483
484
485 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
486 }
487
488
489 void FullCodeGenerator::AccumulatorValueContext::Plug(
490 Heap::RootListIndex index) const {
491 __ LoadRoot(result_register(), index);
492 }
493
494
495 void FullCodeGenerator::StackValueContext::Plug(
496 Heap::RootListIndex index) const {
497 __ LoadRoot(result_register(), index);
498 __ push(result_register());
499 }
500
501
502 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
503 codegen()->PrepareForBailoutBeforeSplit(condition(),
504 true,
505 true_label_,
506 false_label_);
507 if (index == Heap::kUndefinedValueRootIndex ||
508 index == Heap::kNullValueRootIndex ||
509 index == Heap::kFalseValueRootIndex) {
510 if (false_label_ != fall_through_) __ Branch(false_label_);
511 } else if (index == Heap::kTrueValueRootIndex) {
512 if (true_label_ != fall_through_) __ Branch(true_label_);
513 } else {
514 __ LoadRoot(result_register(), index);
515 codegen()->DoTest(this);
516 }
517 }
518
519
520 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
521 }
522
523
524 void FullCodeGenerator::AccumulatorValueContext::Plug(
525 Handle<Object> lit) const {
526 __ li(result_register(), Operand(lit));
527 }
528
529
530 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
531 // Immediates cannot be pushed directly.
532 __ li(result_register(), Operand(lit));
533 __ push(result_register());
534 }
535
536
537 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
538 codegen()->PrepareForBailoutBeforeSplit(condition(),
539 true,
540 true_label_,
541 false_label_);
542 DCHECK(!lit->IsUndetectableObject()); // There are no undetectable literals.
543 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
544 if (false_label_ != fall_through_) __ Branch(false_label_);
545 } else if (lit->IsTrue() || lit->IsJSObject()) {
546 if (true_label_ != fall_through_) __ Branch(true_label_);
547 } else if (lit->IsString()) {
548 if (String::cast(*lit)->length() == 0) {
549 if (false_label_ != fall_through_) __ Branch(false_label_);
550 } else {
551 if (true_label_ != fall_through_) __ Branch(true_label_);
552 }
553 } else if (lit->IsSmi()) {
554 if (Smi::cast(*lit)->value() == 0) {
555 if (false_label_ != fall_through_) __ Branch(false_label_);
556 } else {
557 if (true_label_ != fall_through_) __ Branch(true_label_);
558 }
559 } else {
560 // For simplicity we always test the accumulator register.
561 __ li(result_register(), Operand(lit));
562 codegen()->DoTest(this);
563 }
564 }
565
566
567 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
568 Register reg) const {
569 DCHECK(count > 0);
570 __ Drop(count);
571 }
572
573
574 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
575 int count,
576 Register reg) const {
577 DCHECK(count > 0);
578 __ Drop(count);
579 __ Move(result_register(), reg);
580 }
581
582
583 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
584 Register reg) const {
585 DCHECK(count > 0);
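// Replace the top count stack values with a single value: drop all but one
// slot and overwrite the remaining top-of-stack slot with reg.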
586 if (count > 1) __ Drop(count - 1);
587 __ sd(reg, MemOperand(sp, 0));
588 }
589
590
591 void FullCodeGenerator::TestContext::DropAndPlug(int count,
592 Register reg) const {
593 DCHECK(count > 0);
594 // For simplicity we always test the accumulator register.
595 __ Drop(count);
596 __ Move(result_register(), reg);
597 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
598 codegen()->DoTest(this);
599 }
600
601
602 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
603 Label* materialize_false) const {
604 DCHECK(materialize_true == materialize_false);
605 __ bind(materialize_true);
606 }
607
608
609 void FullCodeGenerator::AccumulatorValueContext::Plug(
610 Label* materialize_true,
611 Label* materialize_false) const {
612 Label done;
613 __ bind(materialize_true);
614 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
615 __ Branch(&done);
616 __ bind(materialize_false);
617 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
618 __ bind(&done);
619 }
620
621
622 void FullCodeGenerator::StackValueContext::Plug(
623 Label* materialize_true,
624 Label* materialize_false) const {
625 Label done;
626 __ bind(materialize_true);
627 __ LoadRoot(at, Heap::kTrueValueRootIndex);
628 // Push the value as the following branch can clobber at in long branch mode.
629 __ push(at);
630 __ Branch(&done);
631 __ bind(materialize_false);
632 __ LoadRoot(at, Heap::kFalseValueRootIndex);
633 __ push(at);
634 __ bind(&done);
635 }
636
637
638 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
639 Label* materialize_false) const {
640 DCHECK(materialize_true == true_label_);
641 DCHECK(materialize_false == false_label_);
642 }
643
644
645 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
646 }
647
648
649 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
650 Heap::RootListIndex value_root_index =
651 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
652 __ LoadRoot(result_register(), value_root_index);
653 }
654
655
656 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
657 Heap::RootListIndex value_root_index =
658 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
659 __ LoadRoot(at, value_root_index);
660 __ push(at);
661 }
662
663
664 void FullCodeGenerator::TestContext::Plug(bool flag) const {
665 codegen()->PrepareForBailoutBeforeSplit(condition(),
666 true,
667 true_label_,
668 false_label_);
669 if (flag) {
670 if (true_label_ != fall_through_) __ Branch(true_label_);
671 } else {
672 if (false_label_ != fall_through_) __ Branch(false_label_);
673 }
674 }
675
676
677 void FullCodeGenerator::DoTest(Expression* condition,
678 Label* if_true,
679 Label* if_false,
680 Label* fall_through) {
681 __ mov(a0, result_register());
682 Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
683 CallIC(ic, condition->test_id());
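// The ToBoolean IC leaves its result in v0; any non-zero result selects the
// true branch in the split below.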
684 __ mov(at, zero_reg);
685 Split(ne, v0, Operand(at), if_true, if_false, fall_through);
686 }
687
688
689 void FullCodeGenerator::Split(Condition cc,
690 Register lhs,
691 const Operand& rhs,
692 Label* if_true,
693 Label* if_false,
694 Label* fall_through) {
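// Emit at most one branch where possible: if one of the targets is the
// fall-through position, branch only to the other target; otherwise branch
// to if_true and then unconditionally to if_false.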
695 if (if_false == fall_through) {
696 __ Branch(if_true, cc, lhs, rhs);
697 } else if (if_true == fall_through) {
698 __ Branch(if_false, NegateCondition(cc), lhs, rhs);
699 } else {
700 __ Branch(if_true, cc, lhs, rhs);
701 __ Branch(if_false);
702 }
703 }
704
705
706 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
707 DCHECK(var->IsStackAllocated());
708 // Offset is negative because higher indexes are at lower addresses.
709 int offset = -var->index() * kPointerSize;
710 // Adjust by a (parameter or local) base offset.
711 if (var->IsParameter()) {
712 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
713 } else {
714 offset += JavaScriptFrameConstants::kLocal0Offset;
715 }
716 return MemOperand(fp, offset);
717 }
718
719
720 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
721 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
722 if (var->IsContextSlot()) {
723 int context_chain_length = scope()->ContextChainLength(var->scope());
724 __ LoadContext(scratch, context_chain_length);
725 return ContextOperand(scratch, var->index());
726 } else {
727 return StackOperand(var);
728 }
729 }
730
731
732 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
733 // Use destination as scratch.
734 MemOperand location = VarOperand(var, dest);
735 __ ld(dest, location);
736 }
737
738
739 void FullCodeGenerator::SetVar(Variable* var,
740 Register src,
741 Register scratch0,
742 Register scratch1) {
743 DCHECK(var->IsContextSlot() || var->IsStackAllocated());
744 DCHECK(!scratch0.is(src));
745 DCHECK(!scratch0.is(scratch1));
746 DCHECK(!scratch1.is(src));
747 MemOperand location = VarOperand(var, scratch0);
748 __ sd(src, location);
749 // Emit the write barrier code if the location is in the heap.
750 if (var->IsContextSlot()) {
751 __ RecordWriteContextSlot(scratch0,
752 location.offset(),
753 src,
754 scratch1,
755 kRAHasBeenSaved,
756 kDontSaveFPRegs);
757 }
758 }
759
760
761 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
762 bool should_normalize,
763 Label* if_true,
764 Label* if_false) {
765 // Only prepare for bailouts before splits if we're in a test
766 // context. Otherwise, we let the Visit function deal with the
767 // preparation to avoid preparing with the same AST id twice.
768 if (!context()->IsTest() || !info_->IsOptimizable()) return;
769
770 Label skip;
771 if (should_normalize) __ Branch(&skip);
772 PrepareForBailout(expr, TOS_REG);
773 if (should_normalize) {
774 __ LoadRoot(a4, Heap::kTrueValueRootIndex);
775 Split(eq, a0, Operand(a4), if_true, if_false, NULL);
776 __ bind(&skip);
777 }
778 }
779
780
781 void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
782 // The variable in the declaration always resides in the current function
783 // context.
784 DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
785 if (generate_debug_code_) {
786 // Check that we're not inside a with or catch context.
787 __ ld(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
788 __ LoadRoot(a4, Heap::kWithContextMapRootIndex);
789 __ Check(ne, kDeclarationInWithContext,
790 a1, Operand(a4));
791 __ LoadRoot(a4, Heap::kCatchContextMapRootIndex);
792 __ Check(ne, kDeclarationInCatchContext,
793 a1, Operand(a4));
794 }
795 }
796
797
798 void FullCodeGenerator::VisitVariableDeclaration(
799 VariableDeclaration* declaration) {
800 // If it was not possible to allocate the variable at compile time, we
801 // need to "declare" it at runtime to make sure it actually exists in the
802 // local context.
803 VariableProxy* proxy = declaration->proxy();
804 VariableMode mode = declaration->mode();
805 Variable* variable = proxy->var();
806 bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
807 switch (variable->location()) {
808 case Variable::UNALLOCATED:
809 globals_->Add(variable->name(), zone());
810 globals_->Add(variable->binding_needs_init()
811 ? isolate()->factory()->the_hole_value()
812 : isolate()->factory()->undefined_value(),
813 zone());
814 break;
815
816 case Variable::PARAMETER:
817 case Variable::LOCAL:
818 if (hole_init) {
819 Comment cmnt(masm_, "[ VariableDeclaration");
820 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
821 __ sd(a4, StackOperand(variable));
822 }
823 break;
824
825 case Variable::CONTEXT:
826 if (hole_init) {
827 Comment cmnt(masm_, "[ VariableDeclaration");
828 EmitDebugCheckDeclarationContext(variable);
829 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
830 __ sd(at, ContextOperand(cp, variable->index()));
831 // No write barrier since the_hole_value is in old space.
832 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
833 }
834 break;
835
836 case Variable::LOOKUP: {
837 Comment cmnt(masm_, "[ VariableDeclaration");
838 __ li(a2, Operand(variable->name()));
839 // Declaration nodes are always introduced in one of four modes.
840 DCHECK(IsDeclaredVariableMode(mode));
841 PropertyAttributes attr =
842 IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
843 __ li(a1, Operand(Smi::FromInt(attr)));
844 // Push initial value, if any.
845 // Note: For variables we must not push an initial value (such as
846 // 'undefined') because we may have a (legal) redeclaration and we
847 // must not destroy the current value.
848 if (hole_init) {
849 __ LoadRoot(a0, Heap::kTheHoleValueRootIndex);
850 __ Push(cp, a2, a1, a0);
851 } else {
852 DCHECK(Smi::FromInt(0) == 0);
853 __ mov(a0, zero_reg); // Smi::FromInt(0) indicates no initial value.
854 __ Push(cp, a2, a1, a0);
855 }
856 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
857 break;
858 }
859 }
860 }
861
862
863 void FullCodeGenerator::VisitFunctionDeclaration(
864 FunctionDeclaration* declaration) {
865 VariableProxy* proxy = declaration->proxy();
866 Variable* variable = proxy->var();
867 switch (variable->location()) {
868 case Variable::UNALLOCATED: {
869 globals_->Add(variable->name(), zone());
870 Handle<SharedFunctionInfo> function =
871 Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
872 // Check for stack-overflow exception.
873 if (function.is_null()) return SetStackOverflow();
874 globals_->Add(function, zone());
875 break;
876 }
877
878 case Variable::PARAMETER:
879 case Variable::LOCAL: {
880 Comment cmnt(masm_, "[ FunctionDeclaration");
881 VisitForAccumulatorValue(declaration->fun());
882 __ sd(result_register(), StackOperand(variable));
883 break;
884 }
885
886 case Variable::CONTEXT: {
887 Comment cmnt(masm_, "[ FunctionDeclaration");
888 EmitDebugCheckDeclarationContext(variable);
889 VisitForAccumulatorValue(declaration->fun());
890 __ sd(result_register(), ContextOperand(cp, variable->index()));
891 int offset = Context::SlotOffset(variable->index());
892 // We know that we have written a function, which is not a smi.
893 __ RecordWriteContextSlot(cp,
894 offset,
895 result_register(),
896 a2,
897 kRAHasBeenSaved,
898 kDontSaveFPRegs,
899 EMIT_REMEMBERED_SET,
900 OMIT_SMI_CHECK);
901 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
902 break;
903 }
904
905 case Variable::LOOKUP: {
906 Comment cmnt(masm_, "[ FunctionDeclaration");
907 __ li(a2, Operand(variable->name()));
908 __ li(a1, Operand(Smi::FromInt(NONE)));
909 __ Push(cp, a2, a1);
910 // Push initial value for function declaration.
911 VisitForStackValue(declaration->fun());
912 __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
913 break;
914 }
915 }
916 }
917
918
919 void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
920 Variable* variable = declaration->proxy()->var();
921 DCHECK(variable->location() == Variable::CONTEXT);
922 DCHECK(variable->interface()->IsFrozen());
923 Comment cmnt(masm_, "[ ModuleDeclaration");
924 EmitDebugCheckDeclarationContext(variable);
925
926 // Load instance object.
927 __ LoadContext(a1, scope_->ContextChainLength(scope_->GlobalScope()));
928 __ ld(a1, ContextOperand(a1, variable->interface()->Index()));
929 __ ld(a1, ContextOperand(a1, Context::EXTENSION_INDEX));
930
931 // Assign it.
932 __ sd(a1, ContextOperand(cp, variable->index()));
933 // We know that we have written a module, which is not a smi.
934 __ RecordWriteContextSlot(cp,
935 Context::SlotOffset(variable->index()),
936 a1,
937 a3,
938 kRAHasBeenSaved,
939 kDontSaveFPRegs,
940 EMIT_REMEMBERED_SET,
941 OMIT_SMI_CHECK);
942 PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);
943
944 // Traverse into body.
945 Visit(declaration->module());
946 }
947
948
949 void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
950 VariableProxy* proxy = declaration->proxy();
951 Variable* variable = proxy->var();
952 switch (variable->location()) {
953 case Variable::UNALLOCATED:
954 // TODO(rossberg)
955 break;
956
957 case Variable::CONTEXT: {
958 Comment cmnt(masm_, "[ ImportDeclaration");
959 EmitDebugCheckDeclarationContext(variable);
960 // TODO(rossberg)
961 break;
962 }
963
964 case Variable::PARAMETER:
965 case Variable::LOCAL:
966 case Variable::LOOKUP:
967 UNREACHABLE();
968 }
969 }
970
971
972 void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
973 // TODO(rossberg)
974 }
975
976
977 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
978 // Call the runtime to declare the globals.
979 // The context is the first argument.
980 __ li(a1, Operand(pairs));
981 __ li(a0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
982 __ Push(cp, a1, a0);
983 __ CallRuntime(Runtime::kDeclareGlobals, 3);
984 // Return value is ignored.
985 }
986
987
988 void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
989 // Call the runtime to declare the modules.
990 __ Push(descriptions);
991 __ CallRuntime(Runtime::kDeclareModules, 1);
992 // Return value is ignored.
993 }
994
995
996 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
997 Comment cmnt(masm_, "[ SwitchStatement");
998 Breakable nested_statement(this, stmt);
999 SetStatementPosition(stmt);
1000
1001 // Keep the switch value on the stack until a case matches.
1002 VisitForStackValue(stmt->tag());
1003 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
1004
1005 ZoneList<CaseClause*>* clauses = stmt->cases();
1006 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
1007
1008 Label next_test; // Recycled for each test.
1009 // Compile all the tests with branches to their bodies.
1010 for (int i = 0; i < clauses->length(); i++) {
1011 CaseClause* clause = clauses->at(i);
1012 clause->body_target()->Unuse();
1013
1014 // The default is not a test, but remember it as final fall through.
1015 if (clause->is_default()) {
1016 default_clause = clause;
1017 continue;
1018 }
1019
1020 Comment cmnt(masm_, "[ Case comparison");
1021 __ bind(&next_test);
1022 next_test.Unuse();
1023
1024 // Compile the label expression.
1025 VisitForAccumulatorValue(clause->label());
1026 __ mov(a0, result_register()); // CompareStub requires args in a0, a1.
1027
1028 // Perform the comparison as if via '==='.
1029 __ ld(a1, MemOperand(sp, 0)); // Switch value.
1030 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
1031 JumpPatchSite patch_site(masm_);
1032 if (inline_smi_code) {
1033 Label slow_case;
1034 __ or_(a2, a1, a0);
1035 patch_site.EmitJumpIfNotSmi(a2, &slow_case);
1036
1037 __ Branch(&next_test, ne, a1, Operand(a0));
1038 __ Drop(1); // Switch value is no longer needed.
1039 __ Branch(clause->body_target());
1040
1041 __ bind(&slow_case);
1042 }
1043
1044 // Record position before stub call for type feedback.
1045 SetSourcePosition(clause->position());
1046 Handle<Code> ic =
1047 CodeFactory::CompareIC(isolate(), Token::EQ_STRICT).code();
1048 CallIC(ic, clause->CompareId());
1049 patch_site.EmitPatchInfo();
1050
1051 Label skip;
1052 __ Branch(&skip);
1053 PrepareForBailout(clause, TOS_REG);
1054 __ LoadRoot(at, Heap::kTrueValueRootIndex);
1055 __ Branch(&next_test, ne, v0, Operand(at));
1056 __ Drop(1);
1057 __ Branch(clause->body_target());
1058 __ bind(&skip);
1059
1060 __ Branch(&next_test, ne, v0, Operand(zero_reg));
1061 __ Drop(1); // Switch value is no longer needed.
1062 __ Branch(clause->body_target());
1063 }
1064
1065 // Discard the test value and jump to the default if present, otherwise to
1066 // the end of the statement.
1067 __ bind(&next_test);
1068 __ Drop(1); // Switch value is no longer needed.
1069 if (default_clause == NULL) {
1070 __ Branch(nested_statement.break_label());
1071 } else {
1072 __ Branch(default_clause->body_target());
1073 }
1074
1075 // Compile all the case bodies.
1076 for (int i = 0; i < clauses->length(); i++) {
1077 Comment cmnt(masm_, "[ Case body");
1078 CaseClause* clause = clauses->at(i);
1079 __ bind(clause->body_target());
1080 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
1081 VisitStatements(clause->statements());
1082 }
1083
1084 __ bind(nested_statement.break_label());
1085 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1086 }
1087
1088
1089 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
1090 Comment cmnt(masm_, "[ ForInStatement");
1091 int slot = stmt->ForInFeedbackSlot();
1092 SetStatementPosition(stmt);
1093
1094 Label loop, exit;
1095 ForIn loop_statement(this, stmt);
1096 increment_loop_depth();
1097
1098 // Get the object to enumerate over. If the object is null or undefined, skip
1099 // over the loop. See ECMA-262 version 5, section 12.6.4.
1100 VisitForAccumulatorValue(stmt->enumerable());
1101 __ mov(a0, result_register()); // Result as param to InvokeBuiltin below.
1102 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1103 __ Branch(&exit, eq, a0, Operand(at));
1104 Register null_value = a5;
1105 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
1106 __ Branch(&exit, eq, a0, Operand(null_value));
1107 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1108 __ mov(a0, v0);
1109 // Convert the object to a JS object.
1110 Label convert, done_convert;
1111 __ JumpIfSmi(a0, &convert);
1112 __ GetObjectType(a0, a1, a1);
1113 __ Branch(&done_convert, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1114 __ bind(&convert);
1115 __ push(a0);
1116 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1117 __ mov(a0, v0);
1118 __ bind(&done_convert);
1119 __ push(a0);
1120
1121 // Check for proxies.
1122 Label call_runtime;
1123 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1124 __ GetObjectType(a0, a1, a1);
1125 __ Branch(&call_runtime, le, a1, Operand(LAST_JS_PROXY_TYPE));
1126
1127 // Check cache validity in generated code. This is a fast case for
1128 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1129 // guarantee cache validity, call the runtime system to check cache
1130 // validity or get the property names in a fixed array.
1131 __ CheckEnumCache(null_value, &call_runtime);
1132
1133 // The enum cache is valid. Load the map of the object being
1134 // iterated over and use the cache for the iteration.
1135 Label use_cache;
1136 __ ld(v0, FieldMemOperand(a0, HeapObject::kMapOffset));
1137 __ Branch(&use_cache);
1138
1139 // Get the set of properties to enumerate.
1140 __ bind(&call_runtime);
1141 __ push(a0); // Duplicate the enumerable object on the stack.
1142 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1143
1144 // If we got a map from the runtime call, we can do a fast
1145 // modification check. Otherwise, we got a fixed array, and we have
1146 // to do a slow check.
1147 Label fixed_array;
1148 __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
1149 __ LoadRoot(at, Heap::kMetaMapRootIndex);
1150 __ Branch(&fixed_array, ne, a2, Operand(at));
1151
1152 // We got a map in register v0. Get the enumeration cache from it.
1153 Label no_descriptors;
1154 __ bind(&use_cache);
1155
1156 __ EnumLength(a1, v0);
1157 __ Branch(&no_descriptors, eq, a1, Operand(Smi::FromInt(0)));
1158
1159 __ LoadInstanceDescriptors(v0, a2);
1160 __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheOffset));
1161 __ ld(a2, FieldMemOperand(a2, DescriptorArray::kEnumCacheBridgeCacheOffset));
1162
1163 // Set up the four remaining stack slots.
1164 __ li(a0, Operand(Smi::FromInt(0)));
1165 // Push map, enumeration cache, enumeration cache length (as smi) and zero.
1166 __ Push(v0, a2, a1, a0);
1167 __ jmp(&loop);
1168
1169 __ bind(&no_descriptors);
1170 __ Drop(1);
1171 __ jmp(&exit);
1172
1173 // We got a fixed array in register v0. Iterate through that.
1174 Label non_proxy;
1175 __ bind(&fixed_array);
1176
1177 __ li(a1, FeedbackVector());
1178 __ li(a2, Operand(TypeFeedbackVector::MegamorphicSentinel(isolate())));
1179 __ sd(a2, FieldMemOperand(a1, FixedArray::OffsetOfElementAt(slot)));
1180
1181 __ li(a1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1182 __ ld(a2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1183 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1184 __ GetObjectType(a2, a3, a3);
1185 __ Branch(&non_proxy, gt, a3, Operand(LAST_JS_PROXY_TYPE));
1186 __ li(a1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1187 __ bind(&non_proxy);
1188 __ Push(a1, v0); // Smi and array
1189 __ ld(a1, FieldMemOperand(v0, FixedArray::kLengthOffset));
1190 __ li(a0, Operand(Smi::FromInt(0)));
1191 __ Push(a1, a0); // Fixed array length (as smi) and initial index.
1192
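// At this point the for-in state occupies five stack slots (from the top):
// the current index, the array length, the FixedArray of keys, the expected
// map (or a smi tag in the slow/proxy case), and the enumerable object.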
1193 // Generate code for doing the condition check.
1194 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1195 __ bind(&loop);
1196 // Load the current count to a0, load the length to a1.
1197 __ ld(a0, MemOperand(sp, 0 * kPointerSize));
1198 __ ld(a1, MemOperand(sp, 1 * kPointerSize));
1199 __ Branch(loop_statement.break_label(), hs, a0, Operand(a1));
1200
1201 // Get the current entry of the array into register a3.
1202 __ ld(a2, MemOperand(sp, 2 * kPointerSize));
1203 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1204 __ SmiScale(a4, a0, kPointerSizeLog2);
1205 __ daddu(a4, a2, a4); // Array base + scaled (smi) index.
1206 __ ld(a3, MemOperand(a4)); // Current entry.
1207
1208 // Get the expected map from the stack or a smi in the
1209 // permanent slow case into register a2.
1210 __ ld(a2, MemOperand(sp, 3 * kPointerSize));
1211
1212 // Check if the expected map still matches that of the enumerable.
1213 // If not, we may have to filter the key.
1214 Label update_each;
1215 __ ld(a1, MemOperand(sp, 4 * kPointerSize));
1216 __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
1217 __ Branch(&update_each, eq, a4, Operand(a2));
1218
1219 // For proxies, no filtering is done.
1220 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1221 DCHECK_EQ(Smi::FromInt(0), 0);
1222 __ Branch(&update_each, eq, a2, Operand(zero_reg));
1223
1224 // Convert the entry to a string or (smi) 0 if it isn't a property
1225 // any more. If the property has been removed while iterating, we
1226 // just skip it.
1227 __ Push(a1, a3); // Enumerable and current entry.
1228 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1229 __ mov(a3, result_register());
1230 __ Branch(loop_statement.continue_label(), eq, a3, Operand(zero_reg));
1231
1232 // Update the 'each' property or variable from the possibly filtered
1233 // entry in register a3.
1234 __ bind(&update_each);
1235 __ mov(result_register(), a3);
1236 // Perform the assignment as if via '='.
1237 { EffectContext context(this);
1238 EmitAssignment(stmt->each());
1239 }
1240
1241 // Generate code for the body of the loop.
1242 Visit(stmt->body());
1243
1244 // Generate code for the going to the next element by incrementing
1245 // the index (smi) stored on top of the stack.
1246 __ bind(loop_statement.continue_label());
1247 __ pop(a0);
1248 __ Daddu(a0, a0, Operand(Smi::FromInt(1)));
1249 __ push(a0);
1250
1251 EmitBackEdgeBookkeeping(stmt, &loop);
1252 __ Branch(&loop);
1253
1254 // Remove the pointers stored on the stack.
1255 __ bind(loop_statement.break_label());
1256 __ Drop(5);
1257
1258 // Exit and decrement the loop depth.
1259 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1260 __ bind(&exit);
1261 decrement_loop_depth();
1262 }
1263
1264
1265 void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
1266 Comment cmnt(masm_, "[ ForOfStatement");
1267 SetStatementPosition(stmt);
1268
1269 Iteration loop_statement(this, stmt);
1270 increment_loop_depth();
1271
1272 // var iterator = iterable[Symbol.iterator]();
1273 VisitForEffect(stmt->assign_iterator());
1274
1275 // Loop entry.
1276 __ bind(loop_statement.continue_label());
1277
1278 // result = iterator.next()
1279 VisitForEffect(stmt->next_result());
1280
1281 // if (result.done) break;
1282 Label result_not_done;
1283 VisitForControl(stmt->result_done(),
1284 loop_statement.break_label(),
1285 &result_not_done,
1286 &result_not_done);
1287 __ bind(&result_not_done);
1288
1289 // each = result.value
1290 VisitForEffect(stmt->assign_each());
1291
1292 // Generate code for the body of the loop.
1293 Visit(stmt->body());
1294
1295 // Check stack before looping.
1296 PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1297 EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
1298 __ jmp(loop_statement.continue_label());
1299
1300 // Exit and decrement the loop depth.
1301 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1302 __ bind(loop_statement.break_label());
1303 decrement_loop_depth();
1304 }
1305
1306
1307 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1308 bool pretenure) {
1309 // Use the fast case closure allocation code that allocates in new
1310 // space for nested functions that don't need literals cloning. If
1311 // we're running with the --always-opt or the --prepare-always-opt
1312 // flag, we need to use the runtime function so that the new function
1313 // we are creating here gets a chance to have its code optimized and
1314 // doesn't just get a copy of the existing unoptimized code.
1315 if (!FLAG_always_opt &&
1316 !FLAG_prepare_always_opt &&
1317 !pretenure &&
1318 scope()->is_function_scope() &&
1319 info->num_literals() == 0) {
1320 FastNewClosureStub stub(isolate(), info->strict_mode(), info->kind());
1321 __ li(a2, Operand(info));
1322 __ CallStub(&stub);
1323 } else {
1324 __ li(a0, Operand(info));
1325 __ LoadRoot(a1, pretenure ? Heap::kTrueValueRootIndex
1326 : Heap::kFalseValueRootIndex);
1327 __ Push(cp, a0, a1);
1328 __ CallRuntime(Runtime::kNewClosure, 3);
1329 }
1330 context()->Plug(v0);
1331 }
1332
1333
1334 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1335 Comment cmnt(masm_, "[ VariableProxy");
1336 EmitVariableLoad(expr);
1337 }
1338
1339
1340 void FullCodeGenerator::EmitLoadHomeObject(SuperReference* expr) {
1341 Comment cnmt(masm_, "[ SuperReference ");
1342
1343 __ ld(LoadDescriptor::ReceiverRegister(),
1344 MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1345
1346 Handle<Symbol> home_object_symbol(isolate()->heap()->home_object_symbol());
1347 __ li(LoadDescriptor::NameRegister(), home_object_symbol);
1348
1349 CallLoadIC(NOT_CONTEXTUAL, expr->HomeObjectFeedbackId());
1350
1351 Label done;
1352 __ Branch(&done, ne, v0, Operand(isolate()->factory()->undefined_value()));
1353 __ CallRuntime(Runtime::kThrowNonMethodError, 0);
1354 __ bind(&done);
1355 }
1356
1357
1358 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
1359 TypeofState typeof_state,
1360 Label* slow) {
1361 Register current = cp;
1362 Register next = a1;
1363 Register temp = a2;
1364
1365 Scope* s = scope();
1366 while (s != NULL) {
1367 if (s->num_heap_slots() > 0) {
1368 if (s->calls_sloppy_eval()) {
1369 // Check that extension is NULL.
1370 __ ld(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1371 __ Branch(slow, ne, temp, Operand(zero_reg));
1372 }
1373 // Load next context in chain.
1374 __ ld(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1375 // Walk the rest of the chain without clobbering cp.
1376 current = next;
1377 }
1378 // If no outer scope calls eval, we do not need to check more
1379 // context extensions.
1380 if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
1381 s = s->outer_scope();
1382 }
1383
1384 if (s->is_eval_scope()) {
1385 Label loop, fast;
1386 if (!current.is(next)) {
1387 __ Move(next, current);
1388 }
1389 __ bind(&loop);
1390 // Terminate at native context.
1391 __ ld(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1392 __ LoadRoot(a4, Heap::kNativeContextMapRootIndex);
1393 __ Branch(&fast, eq, temp, Operand(a4));
1394 // Check that extension is NULL.
1395 __ ld(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1396 __ Branch(slow, ne, temp, Operand(zero_reg));
1397 // Load next context in chain.
1398 __ ld(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1399 __ Branch(&loop);
1400 __ bind(&fast);
1401 }
1402
1403 __ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1404 __ li(LoadDescriptor::NameRegister(), Operand(proxy->var()->name()));
1405 if (FLAG_vector_ics) {
1406 __ li(VectorLoadICDescriptor::SlotRegister(),
1407 Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
1408 }
1409
1410 ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
1411 ? NOT_CONTEXTUAL
1412 : CONTEXTUAL;
1413 CallLoadIC(mode);
1414 }
1415
1416
1417 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1418 Label* slow) {
1419 DCHECK(var->IsContextSlot());
1420 Register context = cp;
1421 Register next = a3;
1422 Register temp = a4;
1423
1424 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1425 if (s->num_heap_slots() > 0) {
1426 if (s->calls_sloppy_eval()) {
1427 // Check that extension is NULL.
1428 __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1429 __ Branch(slow, ne, temp, Operand(zero_reg));
1430 }
1431 __ ld(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1432 // Walk the rest of the chain without clobbering cp.
1433 context = next;
1434 }
1435 }
1436 // Check that last extension is NULL.
1437 __ ld(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1438 __ Branch(slow, ne, temp, Operand(zero_reg));
1439
1440 // This function is used only for loads, not stores, so it's safe to
1441 // return a cp-based operand (the write barrier cannot be allowed to
1442 // destroy the cp register).
1443 return ContextOperand(context, var->index());
1444 }
1445
1446
1447 void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
1448 TypeofState typeof_state,
1449 Label* slow,
1450 Label* done) {
1451 // Generate fast-case code for variables that might be shadowed by
1452 // eval-introduced variables. Eval is used a lot without
1453 // introducing variables. In those cases, we do not want to
1454 // perform a runtime call for all variables in the scope
1455 // containing the eval.
1456 Variable* var = proxy->var();
1457 if (var->mode() == DYNAMIC_GLOBAL) {
1458 EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
1459 __ Branch(done);
1460 } else if (var->mode() == DYNAMIC_LOCAL) {
1461 Variable* local = var->local_if_not_shadowed();
1462 __ ld(v0, ContextSlotOperandCheckExtensions(local, slow));
1463 if (local->mode() == LET || local->mode() == CONST ||
1464 local->mode() == CONST_LEGACY) {
1465 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1466 __ dsubu(at, v0, at); // Sub as compare: at == 0 on eq.
1467 if (local->mode() == CONST_LEGACY) {
1468 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1469 __ Movz(v0, a0, at); // Conditional move: return Undefined if TheHole.
1470 } else { // LET || CONST
1471 __ Branch(done, ne, at, Operand(zero_reg));
1472 __ li(a0, Operand(var->name()));
1473 __ push(a0);
1474 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1475 }
1476 }
1477 __ Branch(done);
1478 }
1479 }
1480
1481
1482 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1483 // Record position before possible IC call.
1484 SetSourcePosition(proxy->position());
1485 Variable* var = proxy->var();
1486
1487 // Three cases: global variables, lookup variables, and all other types of
1488 // variables.
1489 switch (var->location()) {
1490 case Variable::UNALLOCATED: {
1491 Comment cmnt(masm_, "[ Global variable");
1492 // Use inline caching. Variable name is passed in a2 and the global
1493 // object (receiver) in a0.
1494 __ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
1495 __ li(LoadDescriptor::NameRegister(), Operand(var->name()));
1496 if (FLAG_vector_ics) {
1497 __ li(VectorLoadICDescriptor::SlotRegister(),
1498 Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
1499 }
1500 CallLoadIC(CONTEXTUAL);
1501 context()->Plug(v0);
1502 break;
1503 }
1504
1505 case Variable::PARAMETER:
1506 case Variable::LOCAL:
1507 case Variable::CONTEXT: {
1508 Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
1509 : "[ Stack variable");
1510 if (var->binding_needs_init()) {
1511 // var->scope() may be NULL when the proxy is located in eval code and
1512 // refers to a potential outside binding. Currently those bindings are
1513 // always looked up dynamically, i.e. in that case
1514 // var->location() == LOOKUP always holds.
1516 DCHECK(var->scope() != NULL);
1517
1518 // Check if the binding really needs an initialization check. The check
1519 // can be skipped in the following situation: we have a LET or CONST
1520 // binding in harmony mode, both the Variable and the VariableProxy have
1521 // the same declaration scope (i.e. they are both in global code, in the
1522 // same function or in the same eval code) and the VariableProxy is in
1523 // the source physically located after the initializer of the variable.
1524 //
1525 // We cannot skip any initialization checks for CONST in non-harmony
1526 // mode because const variables may be declared but never initialized:
1527 // if (false) { const x; }; var y = x;
1528 //
1529 // The condition on the declaration scopes is a conservative check for
1530 // nested functions that access a binding and are called before the
1531 // binding is initialized:
1532 // function() { f(); let x = 1; function f() { x = 2; } }
1533 //
1534 bool skip_init_check;
1535 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1536 skip_init_check = false;
1537 } else {
1538 // Check that we always have valid source position.
1539 DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
1540 DCHECK(proxy->position() != RelocInfo::kNoPosition);
1541 skip_init_check = var->mode() != CONST_LEGACY &&
1542 var->initializer_position() < proxy->position();
1543 }
1544
1545 if (!skip_init_check) {
1546 // Let and const need a read barrier.
1547 GetVar(v0, var);
1548 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
1549 __ dsubu(at, v0, at); // Sub as compare: at == 0 on eq.
1550 if (var->mode() == LET || var->mode() == CONST) {
1551 // Throw a reference error when using an uninitialized let/const
1552 // binding in harmony mode.
1553 Label done;
1554 __ Branch(&done, ne, at, Operand(zero_reg));
1555 __ li(a0, Operand(var->name()));
1556 __ push(a0);
1557 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1558 __ bind(&done);
1559 } else {
1560 // Uninitialized const bindings outside of harmony mode are unholed.
1561 DCHECK(var->mode() == CONST_LEGACY);
1562 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
1563 __ Movz(v0, a0, at); // Conditional move: Undefined if TheHole.
1564 }
1565 context()->Plug(v0);
1566 break;
1567 }
1568 }
1569 context()->Plug(var);
1570 break;
1571 }
1572
1573 case Variable::LOOKUP: {
1574 Comment cmnt(masm_, "[ Lookup variable");
1575 Label done, slow;
1576 // Generate code for loading from variables potentially shadowed
1577 // by eval-introduced variables.
1578 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
1579 __ bind(&slow);
1580 __ li(a1, Operand(var->name()));
1581 __ Push(cp, a1); // Context and name.
1582 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
1583 __ bind(&done);
1584 context()->Plug(v0);
1585 }
1586 }
1587 }
1588
1589
1590 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1591 Comment cmnt(masm_, "[ RegExpLiteral");
1592 Label materialized;
1593 // Registers will be used as follows:
1594 // a5 = materialized value (RegExp literal)
1595 // a4 = JS function, literals array
1596 // a3 = literal index
1597 // a2 = RegExp pattern
1598 // a1 = RegExp flags
1599 // a0 = RegExp literal clone
1600 __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1601 __ ld(a4, FieldMemOperand(a0, JSFunction::kLiteralsOffset));
1602 int literal_offset =
1603 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1604 __ ld(a5, FieldMemOperand(a4, literal_offset));
1605 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
1606 __ Branch(&materialized, ne, a5, Operand(at));
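// If the literal slot still holds undefined, no boilerplate regexp has been
// created for this literal yet, so one is materialized via the runtime call
// below.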
1607
1608 // Create regexp literal using runtime function.
1609 // Result will be in v0.
1610 __ li(a3, Operand(Smi::FromInt(expr->literal_index())));
1611 __ li(a2, Operand(expr->pattern()));
1612 __ li(a1, Operand(expr->flags()));
1613 __ Push(a4, a3, a2, a1);
1614 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1615 __ mov(a5, v0);
1616
1617 __ bind(&materialized);
1618 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1619 Label allocated, runtime_allocate;
1620 __ Allocate(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
1621 __ jmp(&allocated);
1622
1623 __ bind(&runtime_allocate);
1624 __ li(a0, Operand(Smi::FromInt(size)));
1625 __ Push(a5, a0);
1626 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1627 __ pop(a5);
1628
1629 __ bind(&allocated);
1630
1631 // After this, registers are used as follows:
1632 // v0: Newly allocated regexp.
1633 // a5: Materialized regexp.
1634 // a2: temp.
1635 __ CopyFields(v0, a5, a2.bit(), size / kPointerSize);
1636 context()->Plug(v0);
1637 }
1638
1639
1640 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1641 if (expression == NULL) {
1642 __ LoadRoot(a1, Heap::kNullValueRootIndex);
1643 __ push(a1);
1644 } else {
1645 VisitForStackValue(expression);
1646 }
1647 }
1648
1649
1650 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1651 Comment cmnt(masm_, "[ ObjectLiteral");
1652
1653 expr->BuildConstantProperties(isolate());
1654 Handle<FixedArray> constant_properties = expr->constant_properties();
1655 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1656 __ ld(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1657 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1658 __ li(a1, Operand(constant_properties));
1659 int flags = expr->fast_elements()
1660 ? ObjectLiteral::kFastElements
1661 : ObjectLiteral::kNoFlags;
1662 flags |= expr->has_function()
1663 ? ObjectLiteral::kHasFunction
1664 : ObjectLiteral::kNoFlags;
1665 __ li(a0, Operand(Smi::FromInt(flags)));
1666 int properties_count = constant_properties->length() / 2;
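// The FastCloneShallowObjectStub fast path below only handles shallow literals
// with fast elements, no double fields, and at most kMaximumClonedProperties
// properties; everything else is created through the runtime.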
1667 if (expr->may_store_doubles() || expr->depth() > 1 ||
1668 masm()->serializer_enabled() || flags != ObjectLiteral::kFastElements ||
1669 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1670 __ Push(a3, a2, a1, a0);
1671 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1672 } else {
1673 FastCloneShallowObjectStub stub(isolate(), properties_count);
1674 __ CallStub(&stub);
1675 }
1676
1677 // If result_saved is true the result is on top of the stack. If
1678 // result_saved is false the result is in v0.
1679 bool result_saved = false;
1680
1681 // Mark all computed expressions that are bound to a key that
1682 // is shadowed by a later occurrence of the same key. For the
1683 // marked expressions, no store code is emitted.
1684 expr->CalculateEmitStore(zone());
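// For example, in { x: f(), x: g() } only the second 'x' emits a store; f()
// is still evaluated below, but only for its side effects.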
1685
1686 AccessorTable accessor_table(zone());
1687 for (int i = 0; i < expr->properties()->length(); i++) {
1688 ObjectLiteral::Property* property = expr->properties()->at(i);
1689 if (property->IsCompileTimeValue()) continue;
1690
1691 Literal* key = property->key();
1692 Expression* value = property->value();
1693 if (!result_saved) {
1694 __ push(v0); // Save result on stack.
1695 result_saved = true;
1696 }
1697 switch (property->kind()) {
1698 case ObjectLiteral::Property::CONSTANT:
1699 UNREACHABLE();
1700 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1701 DCHECK(!CompileTimeValue::IsCompileTimeValue(property->value()));
1702 // Fall through.
1703 case ObjectLiteral::Property::COMPUTED:
1704 if (key->value()->IsInternalizedString()) {
1705 if (property->emit_store()) {
1706 VisitForAccumulatorValue(value);
1707 __ mov(StoreDescriptor::ValueRegister(), result_register());
1708 DCHECK(StoreDescriptor::ValueRegister().is(a0));
1709 __ li(StoreDescriptor::NameRegister(), Operand(key->value()));
1710 __ ld(StoreDescriptor::ReceiverRegister(), MemOperand(sp));
1711 CallStoreIC(key->LiteralFeedbackId());
1712 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1713 } else {
1714 VisitForEffect(value);
1715 }
1716 break;
1717 }
1718 // Duplicate receiver on stack.
1719 __ ld(a0, MemOperand(sp));
1720 __ push(a0);
1721 VisitForStackValue(key);
1722 VisitForStackValue(value);
1723 if (property->emit_store()) {
1724 __ li(a0, Operand(Smi::FromInt(SLOPPY))); // PropertyAttributes.
1725 __ push(a0);
1726 __ CallRuntime(Runtime::kSetProperty, 4);
1727 } else {
1728 __ Drop(3);
1729 }
1730 break;
1731 case ObjectLiteral::Property::PROTOTYPE:
1732 // Duplicate receiver on stack.
1733 __ ld(a0, MemOperand(sp));
1734 __ push(a0);
1735 VisitForStackValue(value);
1736 if (property->emit_store()) {
1737 __ CallRuntime(Runtime::kSetPrototype, 2);
1738 } else {
1739 __ Drop(2);
1740 }
1741 break;
1742 case ObjectLiteral::Property::GETTER:
1743 accessor_table.lookup(key)->second->getter = value;
1744 break;
1745 case ObjectLiteral::Property::SETTER:
1746 accessor_table.lookup(key)->second->setter = value;
1747 break;
1748 }
1749 }
1750
1751 // Emit code to define accessors, using only a single call to the runtime for
1752 // each pair of corresponding getters and setters.
1753 for (AccessorTable::Iterator it = accessor_table.begin();
1754 it != accessor_table.end();
1755 ++it) {
1756 __ ld(a0, MemOperand(sp)); // Duplicate receiver.
1757 __ push(a0);
1758 VisitForStackValue(it->first);
1759 EmitAccessor(it->second->getter);
1760 EmitAccessor(it->second->setter);
1761 __ li(a0, Operand(Smi::FromInt(NONE)));
1762 __ push(a0);
1763 __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
1764 }
1765
1766 if (expr->has_function()) {
1767 DCHECK(result_saved);
1768 __ ld(a0, MemOperand(sp));
1769 __ push(a0);
1770 __ CallRuntime(Runtime::kToFastProperties, 1);
1771 }
1772
1773 if (result_saved) {
1774 context()->PlugTOS();
1775 } else {
1776 context()->Plug(v0);
1777 }
1778 }
1779
1780
1781 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1782 Comment cmnt(masm_, "[ ArrayLiteral");
1783
1784 expr->BuildConstantElements(isolate());
1785 int flags = expr->depth() == 1
1786 ? ArrayLiteral::kShallowElements
1787 : ArrayLiteral::kNoFlags;
1788
1789 ZoneList<Expression*>* subexprs = expr->values();
1790 int length = subexprs->length();
1791
1792 Handle<FixedArray> constant_elements = expr->constant_elements();
1793 DCHECK_EQ(2, constant_elements->length());
1794 ElementsKind constant_elements_kind =
1795 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1796 bool has_fast_elements =
1797 IsFastObjectElementsKind(constant_elements_kind);
1798 Handle<FixedArrayBase> constant_elements_values(
1799 FixedArrayBase::cast(constant_elements->get(1)));
1800
1801 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1802 if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
1803 // If the only customer of allocation sites is transitioning, then
1804 // we can turn it off if we don't have anywhere else to transition to.
1805 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1806 }
1807
1808 __ mov(a0, result_register());
1809 __ ld(a3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1810 __ ld(a3, FieldMemOperand(a3, JSFunction::kLiteralsOffset));
1811 __ li(a2, Operand(Smi::FromInt(expr->literal_index())));
1812 __ li(a1, Operand(constant_elements));
1813 if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
1814 __ li(a0, Operand(Smi::FromInt(flags)));
1815 __ Push(a3, a2, a1, a0);
1816 __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
1817 } else {
1818 FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
1819 __ CallStub(&stub);
1820 }
1821
1822 bool result_saved = false; // Is the result saved to the stack?
1823
1824 // Emit code to evaluate all the non-constant subexpressions and to store
1825 // them into the newly cloned array.
1826 for (int i = 0; i < length; i++) {
1827 Expression* subexpr = subexprs->at(i);
1828 // If the subexpression is a literal or a simple materialized literal it
1829 // is already set in the cloned array.
1830 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1831
1832 if (!result_saved) {
1833 __ push(v0); // array literal
1834 __ Push(Smi::FromInt(expr->literal_index()));
1835 result_saved = true;
1836 }
1837
1838 VisitForAccumulatorValue(subexpr);
1839
1840 if (IsFastObjectElementsKind(constant_elements_kind)) {
1841 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1842 __ ld(a6, MemOperand(sp, kPointerSize)); // Copy of array literal.
1843 __ ld(a1, FieldMemOperand(a6, JSObject::kElementsOffset));
1844 __ sd(result_register(), FieldMemOperand(a1, offset));
1845 // Update the write barrier for the array store.
1846 __ RecordWriteField(a1, offset, result_register(), a2,
1847 kRAHasBeenSaved, kDontSaveFPRegs,
1848 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1849 } else {
1850 __ li(a3, Operand(Smi::FromInt(i)));
1851 __ mov(a0, result_register());
1852 StoreArrayLiteralElementStub stub(isolate());
1853 __ CallStub(&stub);
1854 }
1855
1856 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1857 }
1858 if (result_saved) {
1859 __ Pop(); // literal index
1860 context()->PlugTOS();
1861 } else {
1862 context()->Plug(v0);
1863 }
1864 }
1865
1866
1867 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1868 DCHECK(expr->target()->IsValidReferenceExpression());
1869
1870 Comment cmnt(masm_, "[ Assignment");
1871
1872 // Left-hand side can only be a property, a global or a (parameter or local)
1873 // slot.
1874 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1875 LhsKind assign_type = VARIABLE;
1876 Property* property = expr->target()->AsProperty();
1877 if (property != NULL) {
1878 assign_type = (property->key()->IsPropertyName())
1879 ? NAMED_PROPERTY
1880 : KEYED_PROPERTY;
1881 }
1882
1883 // Evaluate LHS expression.
1884 switch (assign_type) {
1885 case VARIABLE:
1886 // Nothing to do here.
1887 break;
1888 case NAMED_PROPERTY:
1889 if (expr->is_compound()) {
1890 // We need the receiver both on the stack and in the register.
1891 VisitForStackValue(property->obj());
1892 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
1893 } else {
1894 VisitForStackValue(property->obj());
1895 }
1896 break;
1897 case KEYED_PROPERTY:
1898 // We need the key and receiver on both the stack and in the load IC's
1898 // receiver and name registers.
1899 if (expr->is_compound()) {
1900 VisitForStackValue(property->obj());
1901 VisitForStackValue(property->key());
1902 __ ld(LoadDescriptor::ReceiverRegister(),
1903 MemOperand(sp, 1 * kPointerSize));
1904 __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
1905 } else {
1906 VisitForStackValue(property->obj());
1907 VisitForStackValue(property->key());
1908 }
1909 break;
1910 }
1911
1912 // For compound assignments we need another deoptimization point after the
1913 // variable/property load.
1914 if (expr->is_compound()) {
1915 { AccumulatorValueContext context(this);
1916 switch (assign_type) {
1917 case VARIABLE:
1918 EmitVariableLoad(expr->target()->AsVariableProxy());
1919 PrepareForBailout(expr->target(), TOS_REG);
1920 break;
1921 case NAMED_PROPERTY:
1922 EmitNamedPropertyLoad(property);
1923 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1924 break;
1925 case KEYED_PROPERTY:
1926 EmitKeyedPropertyLoad(property);
1927 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1928 break;
1929 }
1930 }
1931
1932 Token::Value op = expr->binary_op();
1933 __ push(v0); // Left operand goes on the stack.
1934 VisitForAccumulatorValue(expr->value());
1935
1936 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1937 ? OVERWRITE_RIGHT
1938 : NO_OVERWRITE;
1939 SetSourcePosition(expr->position() + 1);
1940 AccumulatorValueContext context(this);
1941 if (ShouldInlineSmiCase(op)) {
1942 EmitInlineSmiBinaryOp(expr->binary_operation(),
1943 op,
1944 mode,
1945 expr->target(),
1946 expr->value());
1947 } else {
1948 EmitBinaryOp(expr->binary_operation(), op, mode);
1949 }
1950
1951 // Deoptimization point in case the binary operation may have side effects.
1952 PrepareForBailout(expr->binary_operation(), TOS_REG);
1953 } else {
1954 VisitForAccumulatorValue(expr->value());
1955 }
1956
1957 // Record source position before possible IC call.
1958 SetSourcePosition(expr->position());
1959
1960 // Store the value.
1961 switch (assign_type) {
1962 case VARIABLE:
1963 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1964 expr->op());
1965 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1966 context()->Plug(v0);
1967 break;
1968 case NAMED_PROPERTY:
1969 EmitNamedPropertyAssignment(expr);
1970 break;
1971 case KEYED_PROPERTY:
1972 EmitKeyedPropertyAssignment(expr);
1973 break;
1974 }
1975 }
1976
1977
1978 void FullCodeGenerator::VisitYield(Yield* expr) {
1979 Comment cmnt(masm_, "[ Yield");
1980 // Evaluate yielded value first; the initial iterator definition depends on
1981 // this. It stays on the stack while we update the iterator.
1982 VisitForStackValue(expr->expression());
1983
1984 switch (expr->yield_kind()) {
1985 case Yield::kSuspend:
1986 // Pop value from top-of-stack slot; box result into result register.
1987 EmitCreateIteratorResult(false);
1988 __ push(result_register());
1989 // Fall through.
1990 case Yield::kInitial: {
1991 Label suspend, continuation, post_runtime, resume;
1992
1993 __ jmp(&suspend);
1994
1995 __ bind(&continuation);
1996 __ jmp(&resume);
1997
1998 __ bind(&suspend);
1999 VisitForAccumulatorValue(expr->generator_object());
2000 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
2001 __ li(a1, Operand(Smi::FromInt(continuation.pos())));
2002 __ sd(a1, FieldMemOperand(v0, JSGeneratorObject::kContinuationOffset));
2003 __ sd(cp, FieldMemOperand(v0, JSGeneratorObject::kContextOffset));
2004 __ mov(a1, cp);
2005 __ RecordWriteField(v0, JSGeneratorObject::kContextOffset, a1, a2,
2006 kRAHasBeenSaved, kDontSaveFPRegs);
2007 __ Daddu(a1, fp, Operand(StandardFrameConstants::kExpressionsOffset));
2008 __ Branch(&post_runtime, eq, sp, Operand(a1));
2009 __ push(v0); // generator object
2010 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2011 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2012 __ bind(&post_runtime);
2013 __ pop(result_register());
2014 EmitReturnSequence();
2015
2016 __ bind(&resume);
2017 context()->Plug(result_register());
2018 break;
2019 }
2020
2021 case Yield::kFinal: {
2022 VisitForAccumulatorValue(expr->generator_object());
2023 __ li(a1, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2024 __ sd(a1, FieldMemOperand(result_register(),
2025 JSGeneratorObject::kContinuationOffset));
2026 // Pop value from top-of-stack slot, box result into result register.
2027 EmitCreateIteratorResult(true);
2028 EmitUnwindBeforeReturn();
2029 EmitReturnSequence();
2030 break;
2031 }
2032
2033 case Yield::kDelegating: {
2034 VisitForStackValue(expr->generator_object());
2035
2036 // Initial stack layout is as follows:
2037 // [sp + 1 * kPointerSize] iter
2038 // [sp + 0 * kPointerSize] g
2039
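// In effect, the delegating yield runs a loop roughly equivalent to:
//   received = undefined; f = 'next';
//   for (;;) {
//     result = iter[f](received);                    // see l_call below
//     if (result.done) break;                        // yield* evaluates to result.value
//     try { received = yield result; f = 'next'; }   // l_try re-yields result unboxed
//     catch (e) { received = e; f = 'throw'; }       // l_catch forwards throws to iter
//   }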
2040 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2041 Label l_next, l_call;
2042 Register load_receiver = LoadDescriptor::ReceiverRegister();
2043 Register load_name = LoadDescriptor::NameRegister();
2044 // Initial send value is undefined.
2045 __ LoadRoot(a0, Heap::kUndefinedValueRootIndex);
2046 __ Branch(&l_next);
2047
2048 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
2049 __ bind(&l_catch);
2050 __ mov(a0, v0);
2051 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2052 __ LoadRoot(a2, Heap::kthrow_stringRootIndex); // "throw"
2053 __ ld(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2054 __ Push(a2, a3, a0); // "throw", iter, except
2055 __ jmp(&l_call);
2056
2057 // try { received = %yield result }
2058 // Shuffle the received result above a try handler and yield it without
2059 // re-boxing.
2060 __ bind(&l_try);
2061 __ pop(a0); // result
2062 __ PushTryHandler(StackHandler::CATCH, expr->index());
2063 const int handler_size = StackHandlerConstants::kSize;
2064 __ push(a0); // result
2065 __ jmp(&l_suspend);
2066 __ bind(&l_continuation);
2067 __ mov(a0, v0);
2068 __ jmp(&l_resume);
2069 __ bind(&l_suspend);
2070 const int generator_object_depth = kPointerSize + handler_size;
2071 __ ld(a0, MemOperand(sp, generator_object_depth));
2072 __ push(a0); // g
2073 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2074 __ li(a1, Operand(Smi::FromInt(l_continuation.pos())));
2075 __ sd(a1, FieldMemOperand(a0, JSGeneratorObject::kContinuationOffset));
2076 __ sd(cp, FieldMemOperand(a0, JSGeneratorObject::kContextOffset));
2077 __ mov(a1, cp);
2078 __ RecordWriteField(a0, JSGeneratorObject::kContextOffset, a1, a2,
2079 kRAHasBeenSaved, kDontSaveFPRegs);
2080 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2081 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2082 __ pop(v0); // result
2083 EmitReturnSequence();
2084 __ mov(a0, v0);
2085 __ bind(&l_resume); // received in a0
2086 __ PopTryHandler();
2087
2088 // receiver = iter; f = 'next'; arg = received;
2089 __ bind(&l_next);
2090 __ LoadRoot(load_name, Heap::knext_stringRootIndex); // "next"
2091 __ ld(a3, MemOperand(sp, 1 * kPointerSize)); // iter
2092 __ Push(load_name, a3, a0); // "next", iter, received
2093
2094 // result = receiver[f](arg);
2095 __ bind(&l_call);
2096 __ ld(load_receiver, MemOperand(sp, kPointerSize));
2097 __ ld(load_name, MemOperand(sp, 2 * kPointerSize));
2098 if (FLAG_vector_ics) {
2099 __ li(VectorLoadICDescriptor::SlotRegister(),
2100 Operand(Smi::FromInt(expr->KeyedLoadFeedbackSlot())));
2101 }
2102 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2103 CallIC(ic, TypeFeedbackId::None());
2104 __ mov(a0, v0);
2105 __ mov(a1, a0);
2106 __ sd(a1, MemOperand(sp, 2 * kPointerSize));
2107 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2108 __ CallStub(&stub);
2109
2110 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2111 __ Drop(1); // The function is still on the stack; drop it.
2112
2113 // if (!result.done) goto l_try;
2114 __ Move(load_receiver, v0);
2115
2116 __ push(load_receiver); // save result
2117 __ LoadRoot(load_name, Heap::kdone_stringRootIndex); // "done"
2118 if (FLAG_vector_ics) {
2119 __ li(VectorLoadICDescriptor::SlotRegister(),
2120 Operand(Smi::FromInt(expr->DoneFeedbackSlot())));
2121 }
2122 CallLoadIC(NOT_CONTEXTUAL); // v0=result.done
2123 __ mov(a0, v0);
2124 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2125 CallIC(bool_ic);
2126 __ Branch(&l_try, eq, v0, Operand(zero_reg));
2127
2128 // result.value
2129 __ pop(load_receiver); // result
2130 __ LoadRoot(load_name, Heap::kvalue_stringRootIndex); // "value"
2131 if (FLAG_vector_ics) {
2132 __ li(VectorLoadICDescriptor::SlotRegister(),
2133 Operand(Smi::FromInt(expr->ValueFeedbackSlot())));
2134 }
2135 CallLoadIC(NOT_CONTEXTUAL); // v0=result.value
2136 context()->DropAndPlug(2, v0); // drop iter and g
2137 break;
2138 }
2139 }
2140 }
2141
2142
2143 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2144 Expression *value,
2145 JSGeneratorObject::ResumeMode resume_mode) {
2146 // The value stays in a0, and is ultimately read by the resumed generator, as
2147 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2148 // is read to throw the value when the resumed generator is already closed.
2149 // a1 will hold the generator object until the activation has been resumed.
2150 VisitForStackValue(generator);
2151 VisitForAccumulatorValue(value);
2152 __ pop(a1);
2153
2154 // Check generator state.
2155 Label wrong_state, closed_state, done;
2156 __ ld(a3, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2157 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2158 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2159 __ Branch(&closed_state, eq, a3, Operand(zero_reg));
2160 __ Branch(&wrong_state, lt, a3, Operand(zero_reg));
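// The continuation field encodes the generator's state: a negative value
// means the generator is currently executing, zero means it is closed, and a
// positive value is the code offset at which to resume.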
2161
2162 // Load suspended function and context.
2163 __ ld(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
2164 __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
2165
2166 // Load receiver and store as the first argument.
2167 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
2168 __ push(a2);
2169
2170 // Push holes for the rest of the arguments to the generator function.
2171 __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
2172 // The argument count is stored as int32_t on 64-bit platforms.
2173 // TODO(plind): Smi on 32-bit platforms.
2174 __ lw(a3,
2175 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
2176 __ LoadRoot(a2, Heap::kTheHoleValueRootIndex);
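// a3 holds the formal parameter count; the loop below pushes one hole per
// formal parameter so the resumed frame gets the argument slots it expects.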
2177 Label push_argument_holes, push_frame;
2178 __ bind(&push_argument_holes);
2179 __ Dsubu(a3, a3, Operand(1));
2180 __ Branch(&push_frame, lt, a3, Operand(zero_reg));
2181 __ push(a2);
2182 __ jmp(&push_argument_holes);
2183
2184 // Enter a new JavaScript frame, and initialize its slots as they were when
2185 // the generator was suspended.
2186 Label resume_frame;
2187 __ bind(&push_frame);
2188 __ Call(&resume_frame);
2189 __ jmp(&done);
2190 __ bind(&resume_frame);
2191 // ra = return address.
2192 // fp = caller's frame pointer.
2193 // cp = callee's context,
2194 // a4 = callee's JS function.
2195 __ Push(ra, fp, cp, a4);
2196 // Adjust FP to point to saved FP.
2197 __ Daddu(fp, sp, 2 * kPointerSize);
2198
2199 // Load the operand stack size.
2200 __ ld(a3, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
2201 __ ld(a3, FieldMemOperand(a3, FixedArray::kLengthOffset));
2202 __ SmiUntag(a3);
2203
2204 // If we are sending a value and there is no operand stack, we can jump back
2205 // in directly.
2206 if (resume_mode == JSGeneratorObject::NEXT) {
2207 Label slow_resume;
2208 __ Branch(&slow_resume, ne, a3, Operand(zero_reg));
2209 __ ld(a3, FieldMemOperand(a4, JSFunction::kCodeEntryOffset));
2210 __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2211 __ SmiUntag(a2);
2212 __ Daddu(a3, a3, Operand(a2));
2213 __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2214 __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
2215 __ Jump(a3);
2216 __ bind(&slow_resume);
2217 }
2218
2219 // Otherwise, we push holes for the operand stack and call the runtime to fix
2220 // up the stack and the handlers.
2221 Label push_operand_holes, call_resume;
2222 __ bind(&push_operand_holes);
2223 __ Dsubu(a3, a3, Operand(1));
2224 __ Branch(&call_resume, lt, a3, Operand(zero_reg));
2225 __ push(a2);
2226 __ Branch(&push_operand_holes);
2227 __ bind(&call_resume);
2228 DCHECK(!result_register().is(a1));
2229 __ Push(a1, result_register());
2230 __ Push(Smi::FromInt(resume_mode));
2231 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2232 // Not reached: the runtime call returns elsewhere.
2233 __ stop("not-reached");
2234
2235 // Reach here when generator is closed.
2236 __ bind(&closed_state);
2237 if (resume_mode == JSGeneratorObject::NEXT) {
2238 // Return completed iterator result when generator is closed.
2239 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2240 __ push(a2);
2241 // Pop value from top-of-stack slot; box result into result register.
2242 EmitCreateIteratorResult(true);
2243 } else {
2244 // Throw the provided value.
2245 __ push(a0);
2246 __ CallRuntime(Runtime::kThrow, 1);
2247 }
2248 __ jmp(&done);
2249
2250 // Throw error if we attempt to operate on a running generator.
2251 __ bind(&wrong_state);
2252 __ push(a1);
2253 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2254
2255 __ bind(&done);
2256 context()->Plug(result_register());
2257 }
2258
2259
2260 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2261 Label gc_required;
2262 Label allocated;
2263
2264 const int instance_size = 5 * kPointerSize;
2265 DCHECK_EQ(isolate()->native_context()->iterator_result_map()->instance_size(),
2266 instance_size);
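// The iterator result is an ordinary JSObject: map, properties and elements
// pointers plus the two in-object fields 'value' and 'done', i.e. five
// pointer-sized slots.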
2267
2268 __ Allocate(instance_size, v0, a2, a3, &gc_required, TAG_OBJECT);
2269 __ jmp(&allocated);
2270
2271 __ bind(&gc_required);
2272 __ Push(Smi::FromInt(instance_size));
2273 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2274 __ ld(context_register(),
2275 MemOperand(fp, StandardFrameConstants::kContextOffset));
2276
2277 __ bind(&allocated);
2278 __ ld(a1, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
2279 __ ld(a1, FieldMemOperand(a1, GlobalObject::kNativeContextOffset));
2280 __ ld(a1, ContextOperand(a1, Context::ITERATOR_RESULT_MAP_INDEX));
2281 __ pop(a2);
2282 __ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
2283 __ li(a4, Operand(isolate()->factory()->empty_fixed_array()));
2284 __ sd(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
2285 __ sd(a4, FieldMemOperand(v0, JSObject::kPropertiesOffset));
2286 __ sd(a4, FieldMemOperand(v0, JSObject::kElementsOffset));
2287 __ sd(a2,
2288 FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
2289 __ sd(a3,
2290 FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));
2291
2292 // Only the value field needs a write barrier, as the other values are in the
2293 // root set.
2294 __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset,
2295 a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
2296 }
2297
2298
2299 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2300 SetSourcePosition(prop->position());
2301 Literal* key = prop->key()->AsLiteral();
2302 DCHECK(!prop->IsSuperAccess());
2303
2304 __ li(LoadDescriptor::NameRegister(), Operand(key->value()));
2305 if (FLAG_vector_ics) {
2306 __ li(VectorLoadICDescriptor::SlotRegister(),
2307 Operand(Smi::FromInt(prop->PropertyFeedbackSlot())));
2308 CallLoadIC(NOT_CONTEXTUAL);
2309 } else {
2310 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2311 }
2312 }
2313
2314
2315 void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
2316 SetSourcePosition(prop->position());
2317 Literal* key = prop->key()->AsLiteral();
2318 DCHECK(!key->value()->IsSmi());
2319 DCHECK(prop->IsSuperAccess());
2320
2321 SuperReference* super_ref = prop->obj()->AsSuperReference();
2322 EmitLoadHomeObject(super_ref);
2323 __ Push(v0);
2324 VisitForStackValue(super_ref->this_var());
2325 __ Push(key->value());
2326 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2327 }
2328
2329
2330 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2331 SetSourcePosition(prop->position());
2332 // Call keyed load IC. It has register arguments receiver and key.
2333 Handle<Code> ic = CodeFactory::KeyedLoadIC(isolate()).code();
2334 if (FLAG_vector_ics) {
2335 __ li(VectorLoadICDescriptor::SlotRegister(),
2336 Operand(Smi::FromInt(prop->PropertyFeedbackSlot())));
2337 CallIC(ic);
2338 } else {
2339 CallIC(ic, prop->PropertyFeedbackId());
2340 }
2341 }
2342
2343
2344 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2345 Token::Value op,
2346 OverwriteMode mode,
2347 Expression* left_expr,
2348 Expression* right_expr) {
2349 Label done, smi_case, stub_call;
2350
2351 Register scratch1 = a2;
2352 Register scratch2 = a3;
2353
2354 // Get the arguments.
2355 Register left = a1;
2356 Register right = a0;
2357 __ pop(left);
2358 __ mov(a0, result_register());
2359
2360 // Perform combined smi check on both operands.
2361 __ Or(scratch1, left, Operand(right));
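// Smis have a zero tag, so the OR of two smis is also tagged zero; a single
// tag check on scratch1 therefore covers both operands.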
2362 STATIC_ASSERT(kSmiTag == 0);
2363 JumpPatchSite patch_site(masm_);
2364 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
2365
2366 __ bind(&stub_call);
2367 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2368 CallIC(code, expr->BinaryOperationFeedbackId());
2369 patch_site.EmitPatchInfo();
2370 __ jmp(&done);
2371
2372 __ bind(&smi_case);
2373 // Smi case. This code works the same way as the smi-smi case in the type
2374 // recording binary operation stub.
2375 switch (op) {
2376 case Token::SAR:
2377 __ GetLeastBitsFromSmi(scratch1, right, 5);
2378 __ dsrav(right, left, scratch1);
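// The arithmetic shift can move payload bits into the low word; the mask
// below clears them so the result is a correctly tagged smi.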
2379 __ And(v0, right, Operand(0xffffffff00000000L));
2380 break;
2381 case Token::SHL: {
2382 __ SmiUntag(scratch1, left);
2383 __ GetLeastBitsFromSmi(scratch2, right, 5);
2384 __ dsllv(scratch1, scratch1, scratch2);
2385 __ SmiTag(v0, scratch1);
2386 break;
2387 }
2388 case Token::SHR: {
2389 __ SmiUntag(scratch1, left);
2390 __ GetLeastBitsFromSmi(scratch2, right, 5);
2391 __ dsrlv(scratch1, scratch1, scratch2);
2392 __ And(scratch2, scratch1, 0x80000000);
2393 __ Branch(&stub_call, ne, scratch2, Operand(zero_reg));
2394 __ SmiTag(v0, scratch1);
2395 break;
2396 }
2397 case Token::ADD:
2398 __ AdduAndCheckForOverflow(v0, left, right, scratch1);
2399 __ BranchOnOverflow(&stub_call, scratch1);
2400 break;
2401 case Token::SUB:
2402 __ SubuAndCheckForOverflow(v0, left, right, scratch1);
2403 __ BranchOnOverflow(&stub_call, scratch1);
2404 break;
2405 case Token::MUL: {
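// Smi payloads live in the upper 32 bits, so left * right equals
// payload_l * payload_r * 2^64, and Dmulh (the high 64 bits of the 128-bit
// product) yields the untagged product directly. The result must sign-extend
// from 32 bits to fit the smi range, and a zero product with a negative
// operand (-0) also falls back to the stub.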
2406 __ Dmulh(v0, left, right);
2407 __ dsra32(scratch2, v0, 0);
2408 __ sra(scratch1, v0, 31);
2409 __ Branch(USE_DELAY_SLOT, &stub_call, ne, scratch2, Operand(scratch1));
2410 __ SmiTag(v0);
2411 __ Branch(USE_DELAY_SLOT, &done, ne, v0, Operand(zero_reg));
2412 __ Daddu(scratch2, right, left);
2413 __ Branch(&stub_call, lt, scratch2, Operand(zero_reg));
2414 DCHECK(Smi::FromInt(0) == 0);
2415 __ mov(v0, zero_reg);
2416 break;
2417 }
2418 case Token::BIT_OR:
2419 __ Or(v0, left, Operand(right));
2420 break;
2421 case Token::BIT_AND:
2422 __ And(v0, left, Operand(right));
2423 break;
2424 case Token::BIT_XOR:
2425 __ Xor(v0, left, Operand(right));
2426 break;
2427 default:
2428 UNREACHABLE();
2429 }
2430
2431 __ bind(&done);
2432 context()->Plug(v0);
2433 }
2434
2435
2436 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2437 Token::Value op,
2438 OverwriteMode mode) {
2439 __ mov(a0, result_register());
2440 __ pop(a1);
2441 Handle<Code> code = CodeFactory::BinaryOpIC(isolate(), op, mode).code();
2442 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2443 CallIC(code, expr->BinaryOperationFeedbackId());
2444 patch_site.EmitPatchInfo();
2445 context()->Plug(v0);
2446 }
2447
2448
2449 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2450 DCHECK(expr->IsValidReferenceExpression());
2451
2452 // Left-hand side can only be a property, a global or a (parameter or local)
2453 // slot.
2454 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2455 LhsKind assign_type = VARIABLE;
2456 Property* prop = expr->AsProperty();
2457 if (prop != NULL) {
2458 assign_type = (prop->key()->IsPropertyName())
2459 ? NAMED_PROPERTY
2460 : KEYED_PROPERTY;
2461 }
2462
2463 switch (assign_type) {
2464 case VARIABLE: {
2465 Variable* var = expr->AsVariableProxy()->var();
2466 EffectContext context(this);
2467 EmitVariableAssignment(var, Token::ASSIGN);
2468 break;
2469 }
2470 case NAMED_PROPERTY: {
2471 __ push(result_register()); // Preserve value.
2472 VisitForAccumulatorValue(prop->obj());
2473 __ mov(StoreDescriptor::ReceiverRegister(), result_register());
2474 __ pop(StoreDescriptor::ValueRegister()); // Restore value.
2475 __ li(StoreDescriptor::NameRegister(),
2476 Operand(prop->key()->AsLiteral()->value()));
2477 CallStoreIC();
2478 break;
2479 }
2480 case KEYED_PROPERTY: {
2481 __ push(result_register()); // Preserve value.
2482 VisitForStackValue(prop->obj());
2483 VisitForAccumulatorValue(prop->key());
2484 __ Move(StoreDescriptor::NameRegister(), result_register());
2485 __ Pop(StoreDescriptor::ValueRegister(),
2486 StoreDescriptor::ReceiverRegister());
2487 Handle<Code> ic =
2488 CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2489 CallIC(ic);
2490 break;
2491 }
2492 }
2493 context()->Plug(v0);
2494 }
2495
2496
2497 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2498 Variable* var, MemOperand location) {
2499 __ sd(result_register(), location);
2500 if (var->IsContextSlot()) {
2501 // RecordWrite may destroy all its register arguments.
2502 __ Move(a3, result_register());
2503 int offset = Context::SlotOffset(var->index());
2504 __ RecordWriteContextSlot(
2505 a1, offset, a3, a2, kRAHasBeenSaved, kDontSaveFPRegs);
2506 }
2507 }
2508
2509
2510 void FullCodeGenerator::EmitVariableAssignment(Variable* var, Token::Value op) {
2511 if (var->IsUnallocated()) {
2512 // Global var, const, or let.
2513 __ mov(StoreDescriptor::ValueRegister(), result_register());
2514 __ li(StoreDescriptor::NameRegister(), Operand(var->name()));
2515 __ ld(StoreDescriptor::ReceiverRegister(), GlobalObjectOperand());
2516 CallStoreIC();
2517 } else if (op == Token::INIT_CONST_LEGACY) {
2518 // Const initializers need a write barrier.
2519 DCHECK(!var->IsParameter()); // No const parameters.
2520 if (var->IsLookupSlot()) {
2521 __ li(a0, Operand(var->name()));
2522 __ Push(v0, cp, a0); // Context and name.
2523 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2524 } else {
2525 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
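// A legacy const slot is initialized at most once: the store below is skipped
// if the slot no longer holds the hole, i.e. it has already been initialized.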
2526 Label skip;
2527 MemOperand location = VarOperand(var, a1);
2528 __ ld(a2, location);
2529 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
2530 __ Branch(&skip, ne, a2, Operand(at));
2531 EmitStoreToStackLocalOrContextSlot(var, location);
2532 __ bind(&skip);
2533 }
2534
2535 } else if (var->mode() == LET && op != Token::INIT_LET) {
2536 // Non-initializing assignment to let variable needs a write barrier.
2537 DCHECK(!var->IsLookupSlot());
2538 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2539 Label assign;
2540 MemOperand location = VarOperand(var, a1);
2541 __ ld(a3, location);
2542 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2543 __ Branch(&assign, ne, a3, Operand(a4));
2544 __ li(a3, Operand(var->name()));
2545 __ push(a3);
2546 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2547 // Perform the assignment.
2548 __ bind(&assign);
2549 EmitStoreToStackLocalOrContextSlot(var, location);
2550
2551 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2552 if (var->IsLookupSlot()) {
2553 // Assignment to var.
2554 __ li(a4, Operand(var->name()));
2555 __ li(a3, Operand(Smi::FromInt(strict_mode())));
2556 // sp[0] : mode.
2557 // sp[8] : name.
2558 // sp[16] : context.
2559 // sp[24] : value.
2560 __ Push(v0, cp, a4, a3);
2561 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2562 } else {
2563 // Assignment to var or initializing assignment to let/const in harmony
2564 // mode.
2565 DCHECK((var->IsStackAllocated() || var->IsContextSlot()));
2566 MemOperand location = VarOperand(var, a1);
2567 if (generate_debug_code_ && op == Token::INIT_LET) {
2568 // Check for an uninitialized let binding.
2569 __ ld(a2, location);
2570 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex);
2571 __ Check(eq, kLetBindingReInitialization, a2, Operand(a4));
2572 }
2573 EmitStoreToStackLocalOrContextSlot(var, location);
2574 }
2575 }
2576 // Non-initializing assignments to consts are ignored.
2577 }
2578
2579
2580 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2581 // Assignment to a property, using a named store IC.
2582 Property* prop = expr->target()->AsProperty();
2583 DCHECK(prop != NULL);
2584 DCHECK(prop->key()->IsLiteral());
2585
2586 // Record source code position before IC call.
2587 SetSourcePosition(expr->position());
2588 __ mov(StoreDescriptor::ValueRegister(), result_register());
2589 __ li(StoreDescriptor::NameRegister(),
2590 Operand(prop->key()->AsLiteral()->value()));
2591 __ pop(StoreDescriptor::ReceiverRegister());
2592 CallStoreIC(expr->AssignmentFeedbackId());
2593
2594 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2595 context()->Plug(v0);
2596 }
2597
2598
2599 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2600 // Assignment to a property, using a keyed store IC.
2601
2602 // Record source code position before IC call.
2603 SetSourcePosition(expr->position());
2604 // Call keyed store IC.
2605 // The arguments are:
2606 // - a0 is the value,
2607 // - a1 is the key,
2608 // - a2 is the receiver.
2609 __ mov(StoreDescriptor::ValueRegister(), result_register());
2610 __ Pop(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister());
2611 DCHECK(StoreDescriptor::ValueRegister().is(a0));
2612
2613 Handle<Code> ic = CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
2614 CallIC(ic, expr->AssignmentFeedbackId());
2615
2616 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2617 context()->Plug(v0);
2618 }
2619
2620
2621 void FullCodeGenerator::VisitProperty(Property* expr) {
2622 Comment cmnt(masm_, "[ Property");
2623 Expression* key = expr->key();
2624
2625 if (key->IsPropertyName()) {
2626 if (!expr->IsSuperAccess()) {
2627 VisitForAccumulatorValue(expr->obj());
2628 __ Move(LoadDescriptor::ReceiverRegister(), v0);
2629 EmitNamedPropertyLoad(expr);
2630 } else {
2631 EmitNamedSuperPropertyLoad(expr);
2632 }
2633 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2634 context()->Plug(v0);
2635 } else {
2636 VisitForStackValue(expr->obj());
2637 VisitForAccumulatorValue(expr->key());
2638 __ Move(LoadDescriptor::NameRegister(), v0);
2639 __ pop(LoadDescriptor::ReceiverRegister());
2640 EmitKeyedPropertyLoad(expr);
2641 context()->Plug(v0);
2642 }
2643 }
2644
2645
2646 void FullCodeGenerator::CallIC(Handle<Code> code,
2647 TypeFeedbackId id) {
2648 ic_total_count_++;
2649 __ Call(code, RelocInfo::CODE_TARGET, id);
2650 }
2651
2652
2653 // Code common for calls using the IC.
2654 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2655 Expression* callee = expr->expression();
2656
2657 CallICState::CallType call_type =
2658 callee->IsVariableProxy() ? CallICState::FUNCTION : CallICState::METHOD;
2659
2660 // Get the target function.
2661 if (call_type == CallICState::FUNCTION) {
2662 { StackValueContext context(this);
2663 EmitVariableLoad(callee->AsVariableProxy());
2664 PrepareForBailout(callee, NO_REGISTERS);
2665 }
2666 // Push undefined as receiver. This is patched in the method prologue if it
2667 // is a sloppy mode method.
2668 __ Push(isolate()->factory()->undefined_value());
2669 } else {
2670 // Load the function from the receiver.
2671 DCHECK(callee->IsProperty());
2672 DCHECK(!callee->AsProperty()->IsSuperAccess());
2673 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2674 EmitNamedPropertyLoad(callee->AsProperty());
2675 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2676 // Push the target function under the receiver.
2677 __ ld(at, MemOperand(sp, 0));
2678 __ push(at);
2679 __ sd(v0, MemOperand(sp, kPointerSize));
2680 }
2681
2682 EmitCall(expr, call_type);
2683 }
2684
2685
2686 void FullCodeGenerator::EmitSuperCallWithLoadIC(Call* expr) {
2687 Expression* callee = expr->expression();
2688 DCHECK(callee->IsProperty());
2689 Property* prop = callee->AsProperty();
2690 DCHECK(prop->IsSuperAccess());
2691
2692 SetSourcePosition(prop->position());
2693 Literal* key = prop->key()->AsLiteral();
2694 DCHECK(!key->value()->IsSmi());
2695 // Load the function from the receiver.
2696 const Register scratch = a1;
2697 SuperReference* super_ref = prop->obj()->AsSuperReference();
2698 EmitLoadHomeObject(super_ref);
2699 __ Push(v0);
2700 VisitForAccumulatorValue(super_ref->this_var());
2701 __ Push(v0);
2702 __ ld(scratch, MemOperand(sp, kPointerSize));
2703 __ Push(scratch, v0);
2704 __ Push(key->value());
2705
2706 // Stack here:
2707 // - home_object
2708 // - this (receiver)
2709 // - home_object <-- LoadFromSuper will pop here and below.
2710 // - this (receiver)
2711 // - key
2712 __ CallRuntime(Runtime::kLoadFromSuper, 3);
2713
2714 // Replace home_object with target function.
2715 __ sd(v0, MemOperand(sp, kPointerSize));
2716
2717 // Stack here:
2718 // - target function
2719 // - this (receiver)
2720 EmitCall(expr, CallICState::METHOD);
2721 }
2722
2723
2724 // Code common for calls using the IC.
2725 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2726 Expression* key) {
2727 // Load the key.
2728 VisitForAccumulatorValue(key);
2729
2730 Expression* callee = expr->expression();
2731
2732 // Load the function from the receiver.
2733 DCHECK(callee->IsProperty());
2734 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
2735 __ Move(LoadDescriptor::NameRegister(), v0);
2736 EmitKeyedPropertyLoad(callee->AsProperty());
2737 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2738
2739 // Push the target function under the receiver.
2740 __ ld(at, MemOperand(sp, 0));
2741 __ push(at);
2742 __ sd(v0, MemOperand(sp, kPointerSize));
2743
2744 EmitCall(expr, CallICState::METHOD);
2745 }
2746
2747
2748 void FullCodeGenerator::EmitCall(Call* expr, CallICState::CallType call_type) {
2749 // Load the arguments.
2750 ZoneList<Expression*>* args = expr->arguments();
2751 int arg_count = args->length();
2752 { PreservePositionScope scope(masm()->positions_recorder());
2753 for (int i = 0; i < arg_count; i++) {
2754 VisitForStackValue(args->at(i));
2755 }
2756 }
2757
2758 // Record source position of the IC call.
2759 SetSourcePosition(expr->position());
2760 Handle<Code> ic = CallIC::initialize_stub(
2761 isolate(), arg_count, call_type);
2762 __ li(a3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
2763 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2764 // Don't assign a type feedback id to the IC, since type feedback is provided
2765 // by the vector above.
2766 CallIC(ic);
2767 RecordJSReturnSite(expr);
2768 // Restore context register.
2769 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2770 context()->DropAndPlug(1, v0);
2771 }
2772
2773
2774 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2775 // a7: copy of the first argument or undefined if it doesn't exist.
2776 if (arg_count > 0) {
2777 __ ld(a7, MemOperand(sp, arg_count * kPointerSize));
2778 } else {
2779 __ LoadRoot(a7, Heap::kUndefinedValueRootIndex);
2780 }
2781
2782 // a6: the function calling eval, loaded from the current frame.
2783 __ ld(a6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2784
2785 // a5: the receiver of the enclosing function.
2786 int receiver_offset = 2 + info_->scope()->num_parameters();
2787 __ ld(a5, MemOperand(fp, receiver_offset * kPointerSize));
2788
2789 // a4: the strict mode.
2790 __ li(a4, Operand(Smi::FromInt(strict_mode())));
2791
2792 // a1: the start position of the scope the call resides in.
2793 __ li(a1, Operand(Smi::FromInt(scope()->start_position())));
2794
2795 // Do the runtime call.
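// Six arguments in total: the copy of the callee pushed by VisitCall, plus
// the five values pushed below.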
2796 __ Push(a7);
2797 __ Push(a6, a5, a4, a1);
2798 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
2799 }
2800
2801
2802 void FullCodeGenerator::VisitCall(Call* expr) {
2803 #ifdef DEBUG
2804 // We want to verify that RecordJSReturnSite gets called on all paths
2805 // through this function. Avoid early returns.
2806 expr->return_is_recorded_ = false;
2807 #endif
2808
2809 Comment cmnt(masm_, "[ Call");
2810 Expression* callee = expr->expression();
2811 Call::CallType call_type = expr->GetCallType(isolate());
2812
2813 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2814 // In a call to eval, we first call Runtime::kResolvePossiblyDirectEval
2815 // to resolve the function we need to call and the receiver of the
2816 // call. Then we call the resolved function using the given
2817 // arguments.
2818 ZoneList<Expression*>* args = expr->arguments();
2819 int arg_count = args->length();
2820
2821 { PreservePositionScope pos_scope(masm()->positions_recorder());
2822 VisitForStackValue(callee);
2823 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2824 __ push(a2); // Reserved receiver slot.
2825
2826 // Push the arguments.
2827 for (int i = 0; i < arg_count; i++) {
2828 VisitForStackValue(args->at(i));
2829 }
2830
2831 // Push a copy of the function (found below the arguments) and
2832 // resolve eval.
2833 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2834 __ push(a1);
2835 EmitResolvePossiblyDirectEval(arg_count);
2836
2837 // The runtime call returns a pair of values in v0 (function) and
2838 // v1 (receiver). Touch up the stack with the right values.
2839 __ sd(v0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2840 __ sd(v1, MemOperand(sp, arg_count * kPointerSize));
2841 }
2842 // Record source position for debugger.
2843 SetSourcePosition(expr->position());
2844 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2845 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2846 __ CallStub(&stub);
2847 RecordJSReturnSite(expr);
2848 // Restore context register.
2849 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2850 context()->DropAndPlug(1, v0);
2851 } else if (call_type == Call::GLOBAL_CALL) {
2852 EmitCallWithLoadIC(expr);
2853 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2854 // Call to a lookup slot (dynamically introduced variable).
2855 VariableProxy* proxy = callee->AsVariableProxy();
2856 Label slow, done;
2857
2858 { PreservePositionScope scope(masm()->positions_recorder());
2859 // Generate code for loading from variables potentially shadowed
2860 // by eval-introduced variables.
2861 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
2862 }
2863
2864 __ bind(&slow);
2865 // Call the runtime to find the function to call (returned in v0)
2866 // and the object holding it (returned in v1).
2867 DCHECK(!context_register().is(a2));
2868 __ li(a2, Operand(proxy->name()));
2869 __ Push(context_register(), a2);
2870 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2871 __ Push(v0, v1); // Function, receiver.
2872
2873 // If fast case code has been generated, emit code to push the
2874 // function and receiver and have the slow path jump around this
2875 // code.
2876 if (done.is_linked()) {
2877 Label call;
2878 __ Branch(&call);
2879 __ bind(&done);
2880 // Push function.
2881 __ push(v0);
2882 // The receiver is implicitly the global receiver. Indicate this
2883 // by passing undefined to the call function stub.
2884 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2885 __ push(a1);
2886 __ bind(&call);
2887 }
2888
2889 // The receiver is either the global receiver or an object found
2890 // by LoadContextSlot.
2891 EmitCall(expr);
2892 } else if (call_type == Call::PROPERTY_CALL) {
2893 Property* property = callee->AsProperty();
2894 bool is_named_call = property->key()->IsPropertyName();
2895 // super.x() is handled in EmitCallWithLoadIC.
2896 if (property->IsSuperAccess() && is_named_call) {
2897 EmitSuperCallWithLoadIC(expr);
2898 } else {
2899 {
2900 PreservePositionScope scope(masm()->positions_recorder());
2901 VisitForStackValue(property->obj());
2902 }
2903 if (is_named_call) {
2904 EmitCallWithLoadIC(expr);
2905 } else {
2906 EmitKeyedCallWithLoadIC(expr, property->key());
2907 }
2908 }
2909 } else {
2910 DCHECK(call_type == Call::OTHER_CALL);
2911 // Call to an arbitrary expression not handled specially above.
2912 { PreservePositionScope scope(masm()->positions_recorder());
2913 VisitForStackValue(callee);
2914 }
2915 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2916 __ push(a1);
2917 // Emit function call.
2918 EmitCall(expr);
2919 }
2920
2921 #ifdef DEBUG
2922 // RecordJSReturnSite should have been called.
2923 DCHECK(expr->return_is_recorded_);
2924 #endif
2925 }
2926
2927
2928 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2929 Comment cmnt(masm_, "[ CallNew");
2930 // According to ECMA-262, section 11.2.2, page 44, the function
2931 // expression in new calls must be evaluated before the
2932 // arguments.
2933
2934 // Push constructor on the stack. If it's not a function it's used as
2935 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2936 // ignored.
2937 VisitForStackValue(expr->expression());
2938
2939 // Push the arguments ("left-to-right") on the stack.
2940 ZoneList<Expression*>* args = expr->arguments();
2941 int arg_count = args->length();
2942 for (int i = 0; i < arg_count; i++) {
2943 VisitForStackValue(args->at(i));
2944 }
2945 // Call the construct call builtin that handles allocation and
2946 // constructor invocation.
2947 SetSourcePosition(expr->position());
2948
2949 // Load function and argument count into a1 and a0.
2950 __ li(a0, Operand(arg_count));
2951 __ ld(a1, MemOperand(sp, arg_count * kPointerSize));
2952
2953 // Record call targets in unoptimized code.
2954 if (FLAG_pretenuring_call_new) {
2955 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2956 DCHECK(expr->AllocationSiteFeedbackSlot() ==
2957 expr->CallNewFeedbackSlot() + 1);
2958 }
2959
2960 __ li(a2, FeedbackVector());
2961 __ li(a3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
2962
2963 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2964 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2965 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2966 context()->Plug(v0);
2967 }
2968
2969
2970 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2971 ZoneList<Expression*>* args = expr->arguments();
2972 DCHECK(args->length() == 1);
2973
2974 VisitForAccumulatorValue(args->at(0));
2975
2976 Label materialize_true, materialize_false;
2977 Label* if_true = NULL;
2978 Label* if_false = NULL;
2979 Label* fall_through = NULL;
2980 context()->PrepareTest(&materialize_true, &materialize_false,
2981 &if_true, &if_false, &fall_through);
2982
2983 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2984 __ SmiTst(v0, a4);
2985 Split(eq, a4, Operand(zero_reg), if_true, if_false, fall_through);
2986
2987 context()->Plug(if_true, if_false);
2988 }
2989
2990
2991 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2992 ZoneList<Expression*>* args = expr->arguments();
2993 DCHECK(args->length() == 1);
2994
2995 VisitForAccumulatorValue(args->at(0));
2996
2997 Label materialize_true, materialize_false;
2998 Label* if_true = NULL;
2999 Label* if_false = NULL;
3000 Label* fall_through = NULL;
3001 context()->PrepareTest(&materialize_true, &materialize_false,
3002 &if_true, &if_false, &fall_through);
3003
3004 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3005 __ NonNegativeSmiTst(v0, at);
3006 Split(eq, at, Operand(zero_reg), if_true, if_false, fall_through);
3007
3008 context()->Plug(if_true, if_false);
3009 }
3010
3011
3012 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
3013 ZoneList<Expression*>* args = expr->arguments();
3014 DCHECK(args->length() == 1);
3015
3016 VisitForAccumulatorValue(args->at(0));
3017
3018 Label materialize_true, materialize_false;
3019 Label* if_true = NULL;
3020 Label* if_false = NULL;
3021 Label* fall_through = NULL;
3022 context()->PrepareTest(&materialize_true, &materialize_false,
3023 &if_true, &if_false, &fall_through);
3024
3025 __ JumpIfSmi(v0, if_false);
3026 __ LoadRoot(at, Heap::kNullValueRootIndex);
3027 __ Branch(if_true, eq, v0, Operand(at));
3028 __ ld(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
3029 // Undetectable objects behave like undefined when tested with typeof.
3030 __ lbu(a1, FieldMemOperand(a2, Map::kBitFieldOffset));
3031 __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3032 __ Branch(if_false, ne, at, Operand(zero_reg));
3033 __ lbu(a1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
3034 __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
3035 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3036 Split(le, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE),
3037 if_true, if_false, fall_through);
3038
3039 context()->Plug(if_true, if_false);
3040 }
3041
3042
3043 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
3044 ZoneList<Expression*>* args = expr->arguments();
3045 DCHECK(args->length() == 1);
3046
3047 VisitForAccumulatorValue(args->at(0));
3048
3049 Label materialize_true, materialize_false;
3050 Label* if_true = NULL;
3051 Label* if_false = NULL;
3052 Label* fall_through = NULL;
3053 context()->PrepareTest(&materialize_true, &materialize_false,
3054 &if_true, &if_false, &fall_through);
3055
3056 __ JumpIfSmi(v0, if_false);
3057 __ GetObjectType(v0, a1, a1);
3058 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3059 Split(ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE),
3060 if_true, if_false, fall_through);
3061
3062 context()->Plug(if_true, if_false);
3063 }
3064
3065
3066 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
3067 ZoneList<Expression*>* args = expr->arguments();
3068 DCHECK(args->length() == 1);
3069
3070 VisitForAccumulatorValue(args->at(0));
3071
3072 Label materialize_true, materialize_false;
3073 Label* if_true = NULL;
3074 Label* if_false = NULL;
3075 Label* fall_through = NULL;
3076 context()->PrepareTest(&materialize_true, &materialize_false,
3077 &if_true, &if_false, &fall_through);
3078
3079 __ JumpIfSmi(v0, if_false);
3080 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3081 __ lbu(a1, FieldMemOperand(a1, Map::kBitFieldOffset));
3082 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3083 __ And(at, a1, Operand(1 << Map::kIsUndetectable));
3084 Split(ne, at, Operand(zero_reg), if_true, if_false, fall_through);
3085
3086 context()->Plug(if_true, if_false);
3087 }
3088
3089
3090 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3091 CallRuntime* expr) {
3092 ZoneList<Expression*>* args = expr->arguments();
3093 DCHECK(args->length() == 1);
3094
3095 VisitForAccumulatorValue(args->at(0));
3096
3097 Label materialize_true, materialize_false, skip_lookup;
3098 Label* if_true = NULL;
3099 Label* if_false = NULL;
3100 Label* fall_through = NULL;
3101 context()->PrepareTest(&materialize_true, &materialize_false,
3102 &if_true, &if_false, &fall_through);
3103
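  // If the map already has the kStringWrapperSafeForDefaultValueOf bit set,
  // skip straight to the prototype check. Otherwise scan the own descriptors:
  // a "valueOf" property means false; if none is found, set the bit in the map
  // so later checks take the fast path, then verify that the prototype is the
  // unmodified String prototype.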
3104 __ AssertNotSmi(v0);
3105
3106 __ ld(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
3107 __ lbu(a4, FieldMemOperand(a1, Map::kBitField2Offset));
3108 __ And(a4, a4, 1 << Map::kStringWrapperSafeForDefaultValueOf);
3109 __ Branch(&skip_lookup, ne, a4, Operand(zero_reg));
3110
3111 // Check for fast case object. Generate false result for slow case object.
3112 __ ld(a2, FieldMemOperand(v0, JSObject::kPropertiesOffset));
3113 __ ld(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3114 __ LoadRoot(a4, Heap::kHashTableMapRootIndex);
3115 __ Branch(if_false, eq, a2, Operand(a4));
3116
3117 // Look for valueOf name in the descriptor array, and indicate false if
3118 // found. Since we omit an enumeration index check, if it is added via a
3119 // transition that shares its descriptor array, this is a false positive.
3120 Label entry, loop, done;
3121
3122 // Skip loop if no descriptors are valid.
3123 __ NumberOfOwnDescriptors(a3, a1);
3124 __ Branch(&done, eq, a3, Operand(zero_reg));
3125
3126 __ LoadInstanceDescriptors(a1, a4);
3127 // a4: descriptor array.
3128 // a3: valid entries in the descriptor array.
3129 STATIC_ASSERT(kSmiTag == 0);
3130 STATIC_ASSERT(kSmiTagSize == 1);
3131   // Not needed on MIPS64, where kPointerSize is 8:
3132   // STATIC_ASSERT(kPointerSize == 4);
3133 __ li(at, Operand(DescriptorArray::kDescriptorSize));
3134 __ Dmul(a3, a3, at);
3135 // Calculate location of the first key name.
3136 __ Daddu(a4, a4, Operand(DescriptorArray::kFirstOffset - kHeapObjectTag));
3137 // Calculate the end of the descriptor array.
3138 __ mov(a2, a4);
3139 __ dsll(a5, a3, kPointerSizeLog2);
3140 __ Daddu(a2, a2, a5);
3141
3142 // Loop through all the keys in the descriptor array. If one of these is the
3143 // string "valueOf" the result is false.
3144 // The use of a6 to store the valueOf string assumes that it is not otherwise
3145 // used in the loop below.
3146 __ li(a6, Operand(isolate()->factory()->value_of_string()));
3147 __ jmp(&entry);
3148 __ bind(&loop);
3149 __ ld(a3, MemOperand(a4, 0));
3150 __ Branch(if_false, eq, a3, Operand(a6));
3151 __ Daddu(a4, a4, Operand(DescriptorArray::kDescriptorSize * kPointerSize));
3152 __ bind(&entry);
3153 __ Branch(&loop, ne, a4, Operand(a2));
3154
3155 __ bind(&done);
3156
3157 // Set the bit in the map to indicate that there is no local valueOf field.
3158 __ lbu(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3159 __ Or(a2, a2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
3160 __ sb(a2, FieldMemOperand(a1, Map::kBitField2Offset));
3161
3162 __ bind(&skip_lookup);
3163
3164   // If a valueOf property is not found on the object, check that its
3165   // prototype is the unmodified String prototype. If not, the result is false.
3166 __ ld(a2, FieldMemOperand(a1, Map::kPrototypeOffset));
3167 __ JumpIfSmi(a2, if_false);
3168 __ ld(a2, FieldMemOperand(a2, HeapObject::kMapOffset));
3169 __ ld(a3, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3170 __ ld(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
3171 __ ld(a3, ContextOperand(a3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3172 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3173 Split(eq, a2, Operand(a3), if_true, if_false, fall_through);
3174
3175 context()->Plug(if_true, if_false);
3176 }
3177
3178
3179 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3180 ZoneList<Expression*>* args = expr->arguments();
3181 DCHECK(args->length() == 1);
3182
3183 VisitForAccumulatorValue(args->at(0));
3184
3185 Label materialize_true, materialize_false;
3186 Label* if_true = NULL;
3187 Label* if_false = NULL;
3188 Label* fall_through = NULL;
3189 context()->PrepareTest(&materialize_true, &materialize_false,
3190 &if_true, &if_false, &fall_through);
3191
3192 __ JumpIfSmi(v0, if_false);
3193 __ GetObjectType(v0, a1, a2);
3194 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3195 __ Branch(if_true, eq, a2, Operand(JS_FUNCTION_TYPE));
3196 __ Branch(if_false);
3197
3198 context()->Plug(if_true, if_false);
3199 }
3200
3201
3202 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3203 ZoneList<Expression*>* args = expr->arguments();
3204 DCHECK(args->length() == 1);
3205
3206 VisitForAccumulatorValue(args->at(0));
3207
3208 Label materialize_true, materialize_false;
3209 Label* if_true = NULL;
3210 Label* if_false = NULL;
3211 Label* fall_through = NULL;
3212 context()->PrepareTest(&materialize_true, &materialize_false,
3213 &if_true, &if_false, &fall_through);
3214
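  // Minus zero is the heap number with the 64-bit pattern 0x8000000000000000,
  // so compare the exponent word against 0x80000000 and, if it matches,
  // require the mantissa word to be zero.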
3215 __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
3216 __ lwu(a2, FieldMemOperand(v0, HeapNumber::kExponentOffset));
3217 __ lwu(a1, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
3218 __ li(a4, 0x80000000);
3219 Label not_nan;
3220   __ Branch(&not_nan, ne, a2, Operand(a4));
3221 __ mov(a4, zero_reg);
3222 __ mov(a2, a1);
3223   __ bind(&not_nan);
3224
3225 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3226 Split(eq, a2, Operand(a4), if_true, if_false, fall_through);
3227
3228 context()->Plug(if_true, if_false);
3229 }
3230
3231
3232 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3233 ZoneList<Expression*>* args = expr->arguments();
3234 DCHECK(args->length() == 1);
3235
3236 VisitForAccumulatorValue(args->at(0));
3237
3238 Label materialize_true, materialize_false;
3239 Label* if_true = NULL;
3240 Label* if_false = NULL;
3241 Label* fall_through = NULL;
3242 context()->PrepareTest(&materialize_true, &materialize_false,
3243 &if_true, &if_false, &fall_through);
3244
3245 __ JumpIfSmi(v0, if_false);
3246 __ GetObjectType(v0, a1, a1);
3247 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3248 Split(eq, a1, Operand(JS_ARRAY_TYPE),
3249 if_true, if_false, fall_through);
3250
3251 context()->Plug(if_true, if_false);
3252 }
3253
3254
3255 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3256 ZoneList<Expression*>* args = expr->arguments();
3257 DCHECK(args->length() == 1);
3258
3259 VisitForAccumulatorValue(args->at(0));
3260
3261 Label materialize_true, materialize_false;
3262 Label* if_true = NULL;
3263 Label* if_false = NULL;
3264 Label* fall_through = NULL;
3265 context()->PrepareTest(&materialize_true, &materialize_false,
3266 &if_true, &if_false, &fall_through);
3267
3268 __ JumpIfSmi(v0, if_false);
3269 __ GetObjectType(v0, a1, a1);
3270 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3271 Split(eq, a1, Operand(JS_REGEXP_TYPE), if_true, if_false, fall_through);
3272
3273 context()->Plug(if_true, if_false);
3274 }
3275
3276
3277 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3278 DCHECK(expr->arguments()->length() == 0);
3279
3280 Label materialize_true, materialize_false;
3281 Label* if_true = NULL;
3282 Label* if_false = NULL;
3283 Label* fall_through = NULL;
3284 context()->PrepareTest(&materialize_true, &materialize_false,
3285 &if_true, &if_false, &fall_through);
3286
3287 // Get the frame pointer for the calling frame.
3288 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3289
3290 // Skip the arguments adaptor frame if it exists.
3291 Label check_frame_marker;
3292 __ ld(a1, MemOperand(a2, StandardFrameConstants::kContextOffset));
3293 __ Branch(&check_frame_marker, ne,
3294 a1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3295 __ ld(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
3296
3297 // Check the marker in the calling frame.
3298 __ bind(&check_frame_marker);
3299 __ ld(a1, MemOperand(a2, StandardFrameConstants::kMarkerOffset));
3300 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3301 Split(eq, a1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)),
3302 if_true, if_false, fall_through);
3303
3304 context()->Plug(if_true, if_false);
3305 }
3306
3307
3308 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3309 ZoneList<Expression*>* args = expr->arguments();
3310 DCHECK(args->length() == 2);
3311
3312 // Load the two objects into registers and perform the comparison.
3313 VisitForStackValue(args->at(0));
3314 VisitForAccumulatorValue(args->at(1));
3315
3316 Label materialize_true, materialize_false;
3317 Label* if_true = NULL;
3318 Label* if_false = NULL;
3319 Label* fall_through = NULL;
3320 context()->PrepareTest(&materialize_true, &materialize_false,
3321 &if_true, &if_false, &fall_through);
3322
3323 __ pop(a1);
3324 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3325 Split(eq, v0, Operand(a1), if_true, if_false, fall_through);
3326
3327 context()->Plug(if_true, if_false);
3328 }
3329
3330
3331 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3332 ZoneList<Expression*>* args = expr->arguments();
3333 DCHECK(args->length() == 1);
3334
3335 // ArgumentsAccessStub expects the key in a1 and the formal
3336 // parameter count in a0.
3337 VisitForAccumulatorValue(args->at(0));
3338 __ mov(a1, v0);
3339 __ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3340 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3341 __ CallStub(&stub);
3342 context()->Plug(v0);
3343 }
3344
3345
3346 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3347 DCHECK(expr->arguments()->length() == 0);
3348 Label exit;
3349 // Get the number of formal parameters.
3350 __ li(v0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
3351
3352 // Check if the calling frame is an arguments adaptor frame.
3353 __ ld(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
3354 __ ld(a3, MemOperand(a2, StandardFrameConstants::kContextOffset));
3355 __ Branch(&exit, ne, a3,
3356 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3357
3358 // Arguments adaptor case: Read the arguments length from the
3359 // adaptor frame.
3360 __ ld(v0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
3361
3362 __ bind(&exit);
3363 context()->Plug(v0);
3364 }
3365
3366
3367 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3368 ZoneList<Expression*>* args = expr->arguments();
3369 DCHECK(args->length() == 1);
3370 Label done, null, function, non_function_constructor;
3371
3372 VisitForAccumulatorValue(args->at(0));
3373
3374 // If the object is a smi, we return null.
3375 __ JumpIfSmi(v0, &null);
3376
3377 // Check that the object is a JS object but take special care of JS
3378 // functions to make sure they have 'Function' as their class.
3379 // Assume that there are only two callable types, and one of them is at
3380 // either end of the type range for JS object types. Saves extra comparisons.
3381 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3382 __ GetObjectType(v0, v0, a1); // Map is now in v0.
3383 __ Branch(&null, lt, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3384
3385 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3386 FIRST_SPEC_OBJECT_TYPE + 1);
3387 __ Branch(&function, eq, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
3388
3389 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3390 LAST_SPEC_OBJECT_TYPE - 1);
3391 __ Branch(&function, eq, a1, Operand(LAST_SPEC_OBJECT_TYPE));
3392 // Assume that there is no larger type.
3393 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3394
3395 // Check if the constructor in the map is a JS function.
3396 __ ld(v0, FieldMemOperand(v0, Map::kConstructorOffset));
3397 __ GetObjectType(v0, a1, a1);
3398 __ Branch(&non_function_constructor, ne, a1, Operand(JS_FUNCTION_TYPE));
3399
3400 // v0 now contains the constructor function. Grab the
3401 // instance class name from there.
3402 __ ld(v0, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset));
3403 __ ld(v0, FieldMemOperand(v0, SharedFunctionInfo::kInstanceClassNameOffset));
3404 __ Branch(&done);
3405
3406 // Functions have class 'Function'.
3407 __ bind(&function);
3408 __ LoadRoot(v0, Heap::kFunction_stringRootIndex);
3409 __ jmp(&done);
3410
3411 // Objects with a non-function constructor have class 'Object'.
3412 __ bind(&non_function_constructor);
3413 __ LoadRoot(v0, Heap::kObject_stringRootIndex);
3414 __ jmp(&done);
3415
3416 // Non-JS objects have class null.
3417 __ bind(&null);
3418 __ LoadRoot(v0, Heap::kNullValueRootIndex);
3419
3420 // All done.
3421 __ bind(&done);
3422
3423 context()->Plug(v0);
3424 }
3425
3426
3427 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3428 // Load the arguments on the stack and call the stub.
3429 SubStringStub stub(isolate());
3430 ZoneList<Expression*>* args = expr->arguments();
3431 DCHECK(args->length() == 3);
3432 VisitForStackValue(args->at(0));
3433 VisitForStackValue(args->at(1));
3434 VisitForStackValue(args->at(2));
3435 __ CallStub(&stub);
3436 context()->Plug(v0);
3437 }
3438
3439
3440 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3441 // Load the arguments on the stack and call the stub.
3442 RegExpExecStub stub(isolate());
3443 ZoneList<Expression*>* args = expr->arguments();
3444 DCHECK(args->length() == 4);
3445 VisitForStackValue(args->at(0));
3446 VisitForStackValue(args->at(1));
3447 VisitForStackValue(args->at(2));
3448 VisitForStackValue(args->at(3));
3449 __ CallStub(&stub);
3450 context()->Plug(v0);
3451 }
3452
3453
3454 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3455 ZoneList<Expression*>* args = expr->arguments();
3456 DCHECK(args->length() == 1);
3457
3458 VisitForAccumulatorValue(args->at(0)); // Load the object.
3459
3460 Label done;
3461 // If the object is a smi return the object.
3462 __ JumpIfSmi(v0, &done);
3463 // If the object is not a value type, return the object.
3464 __ GetObjectType(v0, a1, a1);
3465 __ Branch(&done, ne, a1, Operand(JS_VALUE_TYPE));
3466
3467 __ ld(v0, FieldMemOperand(v0, JSValue::kValueOffset));
3468
3469 __ bind(&done);
3470 context()->Plug(v0);
3471 }
3472
3473
3474 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3475 ZoneList<Expression*>* args = expr->arguments();
3476 DCHECK(args->length() == 2);
3477 DCHECK_NE(NULL, args->at(1)->AsLiteral());
3478 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3479
3480 VisitForAccumulatorValue(args->at(0)); // Load the object.
3481
3482 Label runtime, done, not_date_object;
3483 Register object = v0;
3484 Register result = v0;
3485 Register scratch0 = t1;
3486 Register scratch1 = a1;
3487
3488   __ JumpIfSmi(object, &not_date_object);
3489 __ GetObjectType(object, scratch1, scratch1);
3490   __ Branch(&not_date_object, ne, scratch1, Operand(JS_DATE_TYPE));
3491
3492 if (index->value() == 0) {
3493 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset));
3494 __ jmp(&done);
3495 } else {
3496 if (index->value() < JSDate::kFirstUncachedField) {
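      // Cached fields are only valid while the isolate's date cache stamp
      // matches the stamp stored in the JSDate object; on a mismatch fall
      // through to the runtime call below to recompute the field.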
3497 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3498 __ li(scratch1, Operand(stamp));
3499 __ ld(scratch1, MemOperand(scratch1));
3500 __ ld(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3501 __ Branch(&runtime, ne, scratch1, Operand(scratch0));
3502 __ ld(result, FieldMemOperand(object, JSDate::kValueOffset +
3503 kPointerSize * index->value()));
3504 __ jmp(&done);
3505 }
3506 __ bind(&runtime);
3507 __ PrepareCallCFunction(2, scratch1);
3508 __ li(a1, Operand(index));
3509 __ Move(a0, object);
3510 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3511 __ jmp(&done);
3512 }
3513
3514   __ bind(&not_date_object);
3515 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3516 __ bind(&done);
3517 context()->Plug(v0);
3518 }
3519
3520
3521 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3522 ZoneList<Expression*>* args = expr->arguments();
3523 DCHECK_EQ(3, args->length());
3524
3525 Register string = v0;
3526 Register index = a1;
3527 Register value = a2;
3528
3529 VisitForStackValue(args->at(0)); // index
3530 VisitForStackValue(args->at(1)); // value
3531 VisitForAccumulatorValue(args->at(2)); // string
3532 __ Pop(index, value);
3533
3534 if (FLAG_debug_code) {
3535 __ SmiTst(value, at);
3536 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3537 __ SmiTst(index, at);
3538 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3539 __ SmiUntag(index, index);
3540 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3541 Register scratch = t1;
3542 __ EmitSeqStringSetCharCheck(
3543 string, index, value, scratch, one_byte_seq_type);
3544 __ SmiTag(index, index);
3545 }
3546
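  // Untag the value and index, then store the byte at
  // string + SeqOneByteString::kHeaderSize - kHeapObjectTag + index.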
3547 __ SmiUntag(value, value);
3548 __ Daddu(at,
3549 string,
3550 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3551 __ SmiUntag(index);
3552 __ Daddu(at, at, index);
3553 __ sb(value, MemOperand(at));
3554 context()->Plug(string);
3555 }
3556
3557
3558 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3559 ZoneList<Expression*>* args = expr->arguments();
3560 DCHECK_EQ(3, args->length());
3561
3562 Register string = v0;
3563 Register index = a1;
3564 Register value = a2;
3565
3566 VisitForStackValue(args->at(0)); // index
3567 VisitForStackValue(args->at(1)); // value
3568 VisitForAccumulatorValue(args->at(2)); // string
3569 __ Pop(index, value);
3570
3571 if (FLAG_debug_code) {
3572 __ SmiTst(value, at);
3573 __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
3574 __ SmiTst(index, at);
3575 __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
3576 __ SmiUntag(index, index);
3577 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3578 Register scratch = t1;
3579 __ EmitSeqStringSetCharCheck(
3580 string, index, value, scratch, two_byte_seq_type);
3581 __ SmiTag(index, index);
3582 }
3583
3584 __ SmiUntag(value, value);
3585 __ Daddu(at,
3586 string,
3587 Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
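  // The index is a smi (payload in the upper 32 bits); shifting right by 31
  // both untags it and scales it by two bytes per character.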
3588 __ dsra(index, index, 32 - 1);
3589 __ Daddu(at, at, index);
3590 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
3591 __ sh(value, MemOperand(at));
3592 context()->Plug(string);
3593 }
3594
3595
3596 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3597 // Load the arguments on the stack and call the runtime function.
3598 ZoneList<Expression*>* args = expr->arguments();
3599 DCHECK(args->length() == 2);
3600 VisitForStackValue(args->at(0));
3601 VisitForStackValue(args->at(1));
3602 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3603 __ CallStub(&stub);
3604 context()->Plug(v0);
3605 }
3606
3607
3608 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3609 ZoneList<Expression*>* args = expr->arguments();
3610 DCHECK(args->length() == 2);
3611
3612 VisitForStackValue(args->at(0)); // Load the object.
3613 VisitForAccumulatorValue(args->at(1)); // Load the value.
3614 __ pop(a1); // v0 = value. a1 = object.
3615
3616 Label done;
3617 // If the object is a smi, return the value.
3618 __ JumpIfSmi(a1, &done);
3619
3620 // If the object is not a value type, return the value.
3621 __ GetObjectType(a1, a2, a2);
3622 __ Branch(&done, ne, a2, Operand(JS_VALUE_TYPE));
3623
3624 // Store the value.
3625 __ sd(v0, FieldMemOperand(a1, JSValue::kValueOffset));
3626 // Update the write barrier. Save the value as it will be
3627 // overwritten by the write barrier code and is needed afterward.
3628 __ mov(a2, v0);
3629 __ RecordWriteField(
3630 a1, JSValue::kValueOffset, a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
3631
3632 __ bind(&done);
3633 context()->Plug(v0);
3634 }
3635
3636
3637 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3638 ZoneList<Expression*>* args = expr->arguments();
3639 DCHECK_EQ(args->length(), 1);
3640
3641 // Load the argument into a0 and call the stub.
3642 VisitForAccumulatorValue(args->at(0));
3643 __ mov(a0, result_register());
3644
3645 NumberToStringStub stub(isolate());
3646 __ CallStub(&stub);
3647 context()->Plug(v0);
3648 }
3649
3650
3651 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3652 ZoneList<Expression*>* args = expr->arguments();
3653 DCHECK(args->length() == 1);
3654
3655 VisitForAccumulatorValue(args->at(0));
3656
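  // v0 holds the char code; the generator materializes the one-character
  // string in a1, which is what gets plugged into the context below.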
3657 Label done;
3658 StringCharFromCodeGenerator generator(v0, a1);
3659 generator.GenerateFast(masm_);
3660 __ jmp(&done);
3661
3662 NopRuntimeCallHelper call_helper;
3663 generator.GenerateSlow(masm_, call_helper);
3664
3665 __ bind(&done);
3666 context()->Plug(a1);
3667 }
3668
3669
3670 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3671 ZoneList<Expression*>* args = expr->arguments();
3672 DCHECK(args->length() == 2);
3673
3674 VisitForStackValue(args->at(0));
3675 VisitForAccumulatorValue(args->at(1));
3676 __ mov(a0, result_register());
3677
3678 Register object = a1;
3679 Register index = a0;
3680 Register result = v0;
3681
3682 __ pop(object);
3683
3684 Label need_conversion;
3685 Label index_out_of_range;
3686 Label done;
3687 StringCharCodeAtGenerator generator(object,
3688 index,
3689 result,
3690 &need_conversion,
3691 &need_conversion,
3692 &index_out_of_range,
3693 STRING_INDEX_IS_NUMBER);
3694 generator.GenerateFast(masm_);
3695 __ jmp(&done);
3696
3697 __ bind(&index_out_of_range);
3698 // When the index is out of range, the spec requires us to return
3699 // NaN.
3700 __ LoadRoot(result, Heap::kNanValueRootIndex);
3701 __ jmp(&done);
3702
3703 __ bind(&need_conversion);
3704 // Load the undefined value into the result register, which will
3705 // trigger conversion.
3706 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3707 __ jmp(&done);
3708
3709 NopRuntimeCallHelper call_helper;
3710 generator.GenerateSlow(masm_, call_helper);
3711
3712 __ bind(&done);
3713 context()->Plug(result);
3714 }
3715
3716
3717 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3718 ZoneList<Expression*>* args = expr->arguments();
3719 DCHECK(args->length() == 2);
3720
3721 VisitForStackValue(args->at(0));
3722 VisitForAccumulatorValue(args->at(1));
3723 __ mov(a0, result_register());
3724
3725 Register object = a1;
3726 Register index = a0;
3727 Register scratch = a3;
3728 Register result = v0;
3729
3730 __ pop(object);
3731
3732 Label need_conversion;
3733 Label index_out_of_range;
3734 Label done;
3735 StringCharAtGenerator generator(object,
3736 index,
3737 scratch,
3738 result,
3739 &need_conversion,
3740 &need_conversion,
3741 &index_out_of_range,
3742 STRING_INDEX_IS_NUMBER);
3743 generator.GenerateFast(masm_);
3744 __ jmp(&done);
3745
3746 __ bind(&index_out_of_range);
3747 // When the index is out of range, the spec requires us to return
3748 // the empty string.
3749 __ LoadRoot(result, Heap::kempty_stringRootIndex);
3750 __ jmp(&done);
3751
3752 __ bind(&need_conversion);
3753 // Move smi zero into the result register, which will trigger
3754 // conversion.
3755 __ li(result, Operand(Smi::FromInt(0)));
3756 __ jmp(&done);
3757
3758 NopRuntimeCallHelper call_helper;
3759 generator.GenerateSlow(masm_, call_helper);
3760
3761 __ bind(&done);
3762 context()->Plug(result);
3763 }
3764
3765
3766 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3767 ZoneList<Expression*>* args = expr->arguments();
3768 DCHECK_EQ(2, args->length());
3769 VisitForStackValue(args->at(0));
3770 VisitForAccumulatorValue(args->at(1));
3771
3772 __ pop(a1);
3773 __ mov(a0, result_register()); // StringAddStub requires args in a0, a1.
3774 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3775 __ CallStub(&stub);
3776 context()->Plug(v0);
3777 }
3778
3779
3780 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3781 ZoneList<Expression*>* args = expr->arguments();
3782 DCHECK_EQ(2, args->length());
3783
3784 VisitForStackValue(args->at(0));
3785 VisitForStackValue(args->at(1));
3786
3787 StringCompareStub stub(isolate());
3788 __ CallStub(&stub);
3789 context()->Plug(v0);
3790 }
3791
3792
3793 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3794 ZoneList<Expression*>* args = expr->arguments();
3795 DCHECK(args->length() >= 2);
3796
3797 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3798 for (int i = 0; i < arg_count + 1; i++) {
3799 VisitForStackValue(args->at(i));
3800 }
3801 VisitForAccumulatorValue(args->last()); // Function.
3802
3803 Label runtime, done;
3804 // Check for non-function argument (including proxy).
3805 __ JumpIfSmi(v0, &runtime);
3806 __ GetObjectType(v0, a1, a1);
3807 __ Branch(&runtime, ne, a1, Operand(JS_FUNCTION_TYPE));
3808
3809 // InvokeFunction requires the function in a1. Move it in there.
3810 __ mov(a1, result_register());
3811 ParameterCount count(arg_count);
3812 __ InvokeFunction(a1, count, CALL_FUNCTION, NullCallWrapper());
3813 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3814 __ jmp(&done);
3815
3816 __ bind(&runtime);
3817 __ push(v0);
3818 __ CallRuntime(Runtime::kCall, args->length());
3819 __ bind(&done);
3820
3821 context()->Plug(v0);
3822 }
3823
3824
3825 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3826 RegExpConstructResultStub stub(isolate());
3827 ZoneList<Expression*>* args = expr->arguments();
3828 DCHECK(args->length() == 3);
3829 VisitForStackValue(args->at(0));
3830 VisitForStackValue(args->at(1));
3831 VisitForAccumulatorValue(args->at(2));
3832 __ mov(a0, result_register());
3833 __ pop(a1);
3834 __ pop(a2);
3835 __ CallStub(&stub);
3836 context()->Plug(v0);
3837 }
3838
3839
3840 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3841 ZoneList<Expression*>* args = expr->arguments();
3842 DCHECK_EQ(2, args->length());
3843
3844 DCHECK_NE(NULL, args->at(0)->AsLiteral());
3845 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3846
3847 Handle<FixedArray> jsfunction_result_caches(
3848 isolate()->native_context()->jsfunction_result_caches());
3849 if (jsfunction_result_caches->length() <= cache_id) {
3850 __ Abort(kAttemptToUseUndefinedCache);
3851 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
3852 context()->Plug(v0);
3853 return;
3854 }
3855
3856 VisitForAccumulatorValue(args->at(1));
3857
3858 Register key = v0;
3859 Register cache = a1;
3860 __ ld(cache, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
3861 __ ld(cache, FieldMemOperand(cache, GlobalObject::kNativeContextOffset));
3862 __ ld(cache,
3863 ContextOperand(
3864 cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3865 __ ld(cache,
3866 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3867
3868
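  // Fast path: probe only the entry the cache finger points at. Load the key
  // at the finger index and, if it matches, the value in the following slot;
  // otherwise fall back to the runtime lookup.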
3869 Label done, not_found;
3870 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3871 __ ld(a2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3872 // a2 now holds finger offset as a smi.
3873 __ Daddu(a3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3874 // a3 now points to the start of fixed array elements.
3875 __ SmiScale(at, a2, kPointerSizeLog2);
3876 __ daddu(a3, a3, at);
3877 // a3 now points to key of indexed element of cache.
3878 __ ld(a2, MemOperand(a3));
3879   __ Branch(&not_found, ne, key, Operand(a2));
3880
3881 __ ld(v0, MemOperand(a3, kPointerSize));
3882 __ Branch(&done);
3883
3884   __ bind(&not_found);
3885 // Call runtime to perform the lookup.
3886 __ Push(cache, key);
3887 __ CallRuntime(Runtime::kGetFromCache, 2);
3888
3889 __ bind(&done);
3890 context()->Plug(v0);
3891 }
3892
3893
3894 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3895 ZoneList<Expression*>* args = expr->arguments();
3896 VisitForAccumulatorValue(args->at(0));
3897
3898 Label materialize_true, materialize_false;
3899 Label* if_true = NULL;
3900 Label* if_false = NULL;
3901 Label* fall_through = NULL;
3902 context()->PrepareTest(&materialize_true, &materialize_false,
3903 &if_true, &if_false, &fall_through);
3904
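  // A string caches an array index in its hash field exactly when the bits
  // selected by kContainsCachedArrayIndexMask are all zero.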
3905 __ lwu(a0, FieldMemOperand(v0, String::kHashFieldOffset));
3906 __ And(a0, a0, Operand(String::kContainsCachedArrayIndexMask));
3907
3908 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3909 Split(eq, a0, Operand(zero_reg), if_true, if_false, fall_through);
3910
3911 context()->Plug(if_true, if_false);
3912 }
3913
3914
3915 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3916 ZoneList<Expression*>* args = expr->arguments();
3917 DCHECK(args->length() == 1);
3918 VisitForAccumulatorValue(args->at(0));
3919
3920 __ AssertString(v0);
3921
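  // IndexFromHash decodes the array index cached in the hash field and leaves
  // it in v0 as a smi.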
3922 __ lwu(v0, FieldMemOperand(v0, String::kHashFieldOffset));
3923 __ IndexFromHash(v0, v0);
3924
3925 context()->Plug(v0);
3926 }
3927
3928
3929 void FullCodeGenerator::EmitFastOneByteArrayJoin(CallRuntime* expr) {
3930 Label bailout, done, one_char_separator, long_separator,
3931 non_trivial_array, not_size_one_array, loop,
3932 empty_separator_loop, one_char_separator_loop,
3933 one_char_separator_loop_entry, long_separator_loop;
3934 ZoneList<Expression*>* args = expr->arguments();
3935 DCHECK(args->length() == 2);
3936 VisitForStackValue(args->at(1));
3937 VisitForAccumulatorValue(args->at(0));
3938
3939 // All aliases of the same register have disjoint lifetimes.
3940 Register array = v0;
3941 Register elements = no_reg; // Will be v0.
3942 Register result = no_reg; // Will be v0.
3943 Register separator = a1;
3944 Register array_length = a2;
3945 Register result_pos = no_reg; // Will be a2.
3946 Register string_length = a3;
3947 Register string = a4;
3948 Register element = a5;
3949 Register elements_end = a6;
3950 Register scratch1 = a7;
3951 Register scratch2 = t1;
3952 Register scratch3 = t0;
3953
3954 // Separator operand is on the stack.
3955 __ pop(separator);
3956
3957 // Check that the array is a JSArray.
3958 __ JumpIfSmi(array, &bailout);
3959 __ GetObjectType(array, scratch1, scratch2);
3960 __ Branch(&bailout, ne, scratch2, Operand(JS_ARRAY_TYPE));
3961
3962 // Check that the array has fast elements.
3963 __ CheckFastElements(scratch1, scratch2, &bailout);
3964
3965 // If the array has length zero, return the empty string.
3966 __ ld(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
3967 __ SmiUntag(array_length);
3968 __ Branch(&non_trivial_array, ne, array_length, Operand(zero_reg));
3969 __ LoadRoot(v0, Heap::kempty_stringRootIndex);
3970 __ Branch(&done);
3971
3972 __ bind(&non_trivial_array);
3973
3974 // Get the FixedArray containing array's elements.
3975 elements = array;
3976 __ ld(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3977 array = no_reg; // End of array's live range.
3978
3979 // Check that all array elements are sequential one-byte strings, and
3980 // accumulate the sum of their lengths, as a smi-encoded value.
3981 __ mov(string_length, zero_reg);
3982 __ Daddu(element,
3983 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3984 __ dsll(elements_end, array_length, kPointerSizeLog2);
3985 __ Daddu(elements_end, element, elements_end);
3986 // Loop condition: while (element < elements_end).
3987 // Live values in registers:
3988 // elements: Fixed array of strings.
3989 // array_length: Length of the fixed array of strings (not smi)
3990 // separator: Separator string
3991 // string_length: Accumulated sum of string lengths (smi).
3992 // element: Current array element.
3993 // elements_end: Array end.
3994 if (generate_debug_code_) {
3995 __ Assert(gt, kNoEmptyArraysHereInEmitFastOneByteArrayJoin, array_length,
3996 Operand(zero_reg));
3997 }
3998 __ bind(&loop);
3999 __ ld(string, MemOperand(element));
4000 __ Daddu(element, element, kPointerSize);
4001 __ JumpIfSmi(string, &bailout);
4002 __ ld(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
4003 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4004 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4005 __ ld(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
4006 __ AdduAndCheckForOverflow(string_length, string_length, scratch1, scratch3);
4007 __ BranchOnOverflow(&bailout, scratch3);
4008 __ Branch(&loop, lt, element, Operand(elements_end));
4009
4010 // If array_length is 1, return elements[0], a string.
4011   __ Branch(&not_size_one_array, ne, array_length, Operand(1));
4012 __ ld(v0, FieldMemOperand(elements, FixedArray::kHeaderSize));
4013 __ Branch(&done);
4014
4015   __ bind(&not_size_one_array);
4016
4017 // Live values in registers:
4018 // separator: Separator string
4019 // array_length: Length of the array.
4020 // string_length: Sum of string lengths (smi).
4021 // elements: FixedArray of strings.
4022
4023 // Check that the separator is a flat one-byte string.
4024 __ JumpIfSmi(separator, &bailout);
4025 __ ld(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
4026 __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
4027 __ JumpIfInstanceTypeIsNotSequentialOneByte(scratch1, scratch2, &bailout);
4028
4029 // Add (separator length times array_length) - separator length to the
4030 // string_length to get the length of the result string. array_length is not
4031 // smi but the other values are, so the result is a smi.
4032 __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4033 __ Dsubu(string_length, string_length, Operand(scratch1));
4034 __ SmiUntag(scratch1);
4035 __ Dmul(scratch2, array_length, scratch1);
4036 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
4037 // zero.
4038 __ dsra32(scratch1, scratch2, 0);
4039   __ Branch(&bailout, ne, scratch1, Operand(zero_reg));
4040 __ SmiUntag(string_length);
4041 __ AdduAndCheckForOverflow(string_length, string_length, scratch2, scratch3);
4042 __ BranchOnOverflow(&bailout, scratch3);
4043
4044 // Get first element in the array to free up the elements register to be used
4045 // for the result.
4046 __ Daddu(element,
4047 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
4048 result = elements; // End of live range for elements.
4049 elements = no_reg;
4050 // Live values in registers:
4051 // element: First array element
4052 // separator: Separator string
4053 // string_length: Length of result string (not smi)
4054 // array_length: Length of the array.
4055 __ AllocateOneByteString(result, string_length, scratch1, scratch2,
4056 elements_end, &bailout);
4057 // Prepare for looping. Set up elements_end to end of the array. Set
4058 // result_pos to the position of the result where to write the first
4059 // character.
4060 __ dsll(elements_end, array_length, kPointerSizeLog2);
4061 __ Daddu(elements_end, element, elements_end);
4062 result_pos = array_length; // End of live range for array_length.
4063 array_length = no_reg;
4064 __ Daddu(result_pos,
4065 result,
4066 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4067
4068 // Check the length of the separator.
4069 __ ld(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
4070 __ li(at, Operand(Smi::FromInt(1)));
4071 __ Branch(&one_char_separator, eq, scratch1, Operand(at));
4072 __ Branch(&long_separator, gt, scratch1, Operand(at));
4073
4074 // Empty separator case.
4075 __ bind(&empty_separator_loop);
4076 // Live values in registers:
4077 // result_pos: the position to which we are currently copying characters.
4078 // element: Current array element.
4079 // elements_end: Array end.
4080
4081 // Copy next array element to the result.
4082 __ ld(string, MemOperand(element));
4083 __ Daddu(element, element, kPointerSize);
4084 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4085 __ SmiUntag(string_length);
4086 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4087 __ CopyBytes(string, result_pos, string_length, scratch1);
4088 // End while (element < elements_end).
4089 __ Branch(&empty_separator_loop, lt, element, Operand(elements_end));
4090 DCHECK(result.is(v0));
4091 __ Branch(&done);
4092
4093 // One-character separator case.
4094 __ bind(&one_char_separator);
4095 // Replace separator with its one-byte character value.
4096 __ lbu(separator, FieldMemOperand(separator, SeqOneByteString::kHeaderSize));
4097 // Jump into the loop after the code that copies the separator, so the first
4098 // element is not preceded by a separator.
4099 __ jmp(&one_char_separator_loop_entry);
4100
4101 __ bind(&one_char_separator_loop);
4102 // Live values in registers:
4103 // result_pos: the position to which we are currently copying characters.
4104 // element: Current array element.
4105 // elements_end: Array end.
4106 // separator: Single separator one-byte char (in lower byte).
4107
4108 // Copy the separator character to the result.
4109 __ sb(separator, MemOperand(result_pos));
4110 __ Daddu(result_pos, result_pos, 1);
4111
4112 // Copy next array element to the result.
4113 __ bind(&one_char_separator_loop_entry);
4114 __ ld(string, MemOperand(element));
4115 __ Daddu(element, element, kPointerSize);
4116 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4117 __ SmiUntag(string_length);
4118 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4119 __ CopyBytes(string, result_pos, string_length, scratch1);
4120 // End while (element < elements_end).
4121 __ Branch(&one_char_separator_loop, lt, element, Operand(elements_end));
4122 DCHECK(result.is(v0));
4123 __ Branch(&done);
4124
4125 // Long separator case (separator is more than one character). Entry is at the
4126 // label long_separator below.
4127 __ bind(&long_separator_loop);
4128 // Live values in registers:
4129 // result_pos: the position to which we are currently copying characters.
4130 // element: Current array element.
4131 // elements_end: Array end.
4132 // separator: Separator string.
4133
4134 // Copy the separator to the result.
4135 __ ld(string_length, FieldMemOperand(separator, String::kLengthOffset));
4136 __ SmiUntag(string_length);
4137 __ Daddu(string,
4138 separator,
4139 Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
4140 __ CopyBytes(string, result_pos, string_length, scratch1);
4141
4142 __ bind(&long_separator);
4143 __ ld(string, MemOperand(element));
4144 __ Daddu(element, element, kPointerSize);
4145 __ ld(string_length, FieldMemOperand(string, String::kLengthOffset));
4146 __ SmiUntag(string_length);
4147 __ Daddu(string, string, SeqOneByteString::kHeaderSize - kHeapObjectTag);
4148 __ CopyBytes(string, result_pos, string_length, scratch1);
4149 // End while (element < elements_end).
4150 __ Branch(&long_separator_loop, lt, element, Operand(elements_end));
4151 DCHECK(result.is(v0));
4152 __ Branch(&done);
4153
4154 __ bind(&bailout);
4155 __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
4156 __ bind(&done);
4157 context()->Plug(v0);
4158 }
4159
4160
4161 void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
4162 DCHECK(expr->arguments()->length() == 0);
4163 ExternalReference debug_is_active =
4164 ExternalReference::debug_is_active_address(isolate());
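  // The flag is a single byte in the isolate; load it and smi-tag it so the
  // result is the smi 0 or 1.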
4165 __ li(at, Operand(debug_is_active));
4166 __ lbu(v0, MemOperand(at));
4167 __ SmiTag(v0);
4168 context()->Plug(v0);
4169 }
4170
4171
4172 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
4173 if (expr->function() != NULL &&
4174 expr->function()->intrinsic_type == Runtime::INLINE) {
4175 Comment cmnt(masm_, "[ InlineRuntimeCall");
4176 EmitInlineRuntimeCall(expr);
4177 return;
4178 }
4179
4180 Comment cmnt(masm_, "[ CallRuntime");
4181 ZoneList<Expression*>* args = expr->arguments();
4182 int arg_count = args->length();
4183
4184 if (expr->is_jsruntime()) {
4185 // Push the builtins object as the receiver.
4186 Register receiver = LoadDescriptor::ReceiverRegister();
4187 __ ld(receiver, GlobalObjectOperand());
4188 __ ld(receiver, FieldMemOperand(receiver, GlobalObject::kBuiltinsOffset));
4189 __ push(receiver);
4190
4191 // Load the function from the receiver.
4192 __ li(LoadDescriptor::NameRegister(), Operand(expr->name()));
4193 if (FLAG_vector_ics) {
4194 __ li(VectorLoadICDescriptor::SlotRegister(),
4195 Operand(Smi::FromInt(expr->CallRuntimeFeedbackSlot())));
4196 CallLoadIC(NOT_CONTEXTUAL);
4197 } else {
4198 CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
4199 }
4200
4201 // Push the target function under the receiver.
4202 __ ld(at, MemOperand(sp, 0));
4203 __ push(at);
4204 __ sd(v0, MemOperand(sp, kPointerSize));
4205
4206 // Push the arguments ("left-to-right").
4207 int arg_count = args->length();
4208 for (int i = 0; i < arg_count; i++) {
4209 VisitForStackValue(args->at(i));
4210 }
4211
4212 // Record source position of the IC call.
4213 SetSourcePosition(expr->position());
4214 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
4215 __ ld(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
4216 __ CallStub(&stub);
4217
4218 // Restore context register.
4219 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4220
4221 context()->DropAndPlug(1, v0);
4222 } else {
4223 // Push the arguments ("left-to-right").
4224 for (int i = 0; i < arg_count; i++) {
4225 VisitForStackValue(args->at(i));
4226 }
4227
4228 // Call the C runtime function.
4229 __ CallRuntime(expr->function(), arg_count);
4230 context()->Plug(v0);
4231 }
4232 }
4233
4234
4235 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
4236 switch (expr->op()) {
4237 case Token::DELETE: {
4238 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
4239 Property* property = expr->expression()->AsProperty();
4240 VariableProxy* proxy = expr->expression()->AsVariableProxy();
4241
4242 if (property != NULL) {
4243 VisitForStackValue(property->obj());
4244 VisitForStackValue(property->key());
4245 __ li(a1, Operand(Smi::FromInt(strict_mode())));
4246 __ push(a1);
4247 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4248 context()->Plug(v0);
4249 } else if (proxy != NULL) {
4250 Variable* var = proxy->var();
4251 // Delete of an unqualified identifier is disallowed in strict mode
4252 // but "delete this" is allowed.
4253 DCHECK(strict_mode() == SLOPPY || var->is_this());
4254 if (var->IsUnallocated()) {
4255 __ ld(a2, GlobalObjectOperand());
4256 __ li(a1, Operand(var->name()));
4257 __ li(a0, Operand(Smi::FromInt(SLOPPY)));
4258 __ Push(a2, a1, a0);
4259 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
4260 context()->Plug(v0);
4261 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
4262 // Result of deleting non-global, non-dynamic variables is false.
4263 // The subexpression does not have side effects.
4264 context()->Plug(var->is_this());
4265 } else {
4266 // Non-global variable. Call the runtime to try to delete from the
4267 // context where the variable was introduced.
4268 DCHECK(!context_register().is(a2));
4269 __ li(a2, Operand(var->name()));
4270 __ Push(context_register(), a2);
4271 __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
4272 context()->Plug(v0);
4273 }
4274 } else {
4275 // Result of deleting non-property, non-variable reference is true.
4276 // The subexpression may have side effects.
4277 VisitForEffect(expr->expression());
4278 context()->Plug(true);
4279 }
4280 break;
4281 }
4282
4283 case Token::VOID: {
4284 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
4285 VisitForEffect(expr->expression());
4286 context()->Plug(Heap::kUndefinedValueRootIndex);
4287 break;
4288 }
4289
4290 case Token::NOT: {
4291 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
4292 if (context()->IsEffect()) {
4293 // Unary NOT has no side effects so it's only necessary to visit the
4294 // subexpression. Match the optimizing compiler by not branching.
4295 VisitForEffect(expr->expression());
4296 } else if (context()->IsTest()) {
4297 const TestContext* test = TestContext::cast(context());
4298 // The labels are swapped for the recursive call.
4299 VisitForControl(expr->expression(),
4300 test->false_label(),
4301 test->true_label(),
4302 test->fall_through());
4303 context()->Plug(test->true_label(), test->false_label());
4304 } else {
4305 // We handle value contexts explicitly rather than simply visiting
4306 // for control and plugging the control flow into the context,
4307 // because we need to prepare a pair of extra administrative AST ids
4308 // for the optimizing compiler.
4309 DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
4310 Label materialize_true, materialize_false, done;
4311 VisitForControl(expr->expression(),
4312 &materialize_false,
4313 &materialize_true,
4314 &materialize_true);
4315 __ bind(&materialize_true);
4316 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
4317 __ LoadRoot(v0, Heap::kTrueValueRootIndex);
4318 if (context()->IsStackValue()) __ push(v0);
4319 __ jmp(&done);
4320 __ bind(&materialize_false);
4321 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
4322 __ LoadRoot(v0, Heap::kFalseValueRootIndex);
4323 if (context()->IsStackValue()) __ push(v0);
4324 __ bind(&done);
4325 }
4326 break;
4327 }
4328
4329 case Token::TYPEOF: {
4330 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
4331 { StackValueContext context(this);
4332 VisitForTypeofValue(expr->expression());
4333 }
4334 __ CallRuntime(Runtime::kTypeof, 1);
4335 context()->Plug(v0);
4336 break;
4337 }
4338
4339 default:
4340 UNREACHABLE();
4341 }
4342 }
4343
4344
4345 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
4346 DCHECK(expr->expression()->IsValidReferenceExpression());
4347
4348 Comment cmnt(masm_, "[ CountOperation");
4349 SetSourcePosition(expr->position());
4350
4351 // Expression can only be a property, a global or a (parameter or local)
4352 // slot.
4353 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
4354 LhsKind assign_type = VARIABLE;
4355 Property* prop = expr->expression()->AsProperty();
4356 // In case of a property we use the uninitialized expression context
4357 // of the key to detect a named property.
4358 if (prop != NULL) {
4359 assign_type =
4360 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
4361 }
4362
4363 // Evaluate expression and get value.
4364 if (assign_type == VARIABLE) {
4365 DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
4366 AccumulatorValueContext context(this);
4367 EmitVariableLoad(expr->expression()->AsVariableProxy());
4368 } else {
4369 // Reserve space for result of postfix operation.
4370 if (expr->is_postfix() && !context()->IsEffect()) {
4371 __ li(at, Operand(Smi::FromInt(0)));
4372 __ push(at);
4373 }
4374 if (assign_type == NAMED_PROPERTY) {
4375 // Put the object both on the stack and in the register.
4376 VisitForStackValue(prop->obj());
4377 __ ld(LoadDescriptor::ReceiverRegister(), MemOperand(sp, 0));
4378 EmitNamedPropertyLoad(prop);
4379 } else {
4380 VisitForStackValue(prop->obj());
4381 VisitForStackValue(prop->key());
4382 __ ld(LoadDescriptor::ReceiverRegister(),
4383 MemOperand(sp, 1 * kPointerSize));
4384 __ ld(LoadDescriptor::NameRegister(), MemOperand(sp, 0));
4385 EmitKeyedPropertyLoad(prop);
4386 }
4387 }
4388
4389 // We need a second deoptimization point after loading the value
4390   // in case evaluating the property load may have a side effect.
4391 if (assign_type == VARIABLE) {
4392 PrepareForBailout(expr->expression(), TOS_REG);
4393 } else {
4394 PrepareForBailoutForId(prop->LoadId(), TOS_REG);
4395 }
4396
4397 // Inline smi case if we are in a loop.
4398 Label stub_call, done;
4399 JumpPatchSite patch_site(masm_);
4400
4401 int count_value = expr->op() == Token::INC ? 1 : -1;
4402 __ mov(a0, v0);
4403 if (ShouldInlineSmiCase(expr->op())) {
4404 Label slow;
4405 patch_site.EmitJumpIfNotSmi(v0, &slow);
4406
4407 // Save result for postfix expressions.
4408 if (expr->is_postfix()) {
4409 if (!context()->IsEffect()) {
4410 // Save the result on the stack. If we have a named or keyed property
4411 // we store the result under the receiver that is currently on top
4412 // of the stack.
4413 switch (assign_type) {
4414 case VARIABLE:
4415 __ push(v0);
4416 break;
4417 case NAMED_PROPERTY:
4418 __ sd(v0, MemOperand(sp, kPointerSize));
4419 break;
4420 case KEYED_PROPERTY:
4421 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4422 break;
4423 }
4424 }
4425 }
4426
4427 Register scratch1 = a1;
4428 Register scratch2 = a4;
4429 __ li(scratch1, Operand(Smi::FromInt(count_value)));
4430 __ AdduAndCheckForOverflow(v0, v0, scratch1, scratch2);
4431 __ BranchOnNoOverflow(&done, scratch2);
4432 // Call stub. Undo operation first.
4433 __ Move(v0, a0);
4434 __ jmp(&stub_call);
4435 __ bind(&slow);
4436 }
4437 ToNumberStub convert_stub(isolate());
4438 __ CallStub(&convert_stub);
4439
4440 // Save result for postfix expressions.
4441 if (expr->is_postfix()) {
4442 if (!context()->IsEffect()) {
4443 // Save the result on the stack. If we have a named or keyed property
4444 // we store the result under the receiver that is currently on top
4445 // of the stack.
4446 switch (assign_type) {
4447 case VARIABLE:
4448 __ push(v0);
4449 break;
4450 case NAMED_PROPERTY:
4451 __ sd(v0, MemOperand(sp, kPointerSize));
4452 break;
4453 case KEYED_PROPERTY:
4454 __ sd(v0, MemOperand(sp, 2 * kPointerSize));
4455 break;
4456 }
4457 }
4458 }
4459
4460 __ bind(&stub_call);
4461 __ mov(a1, v0);
4462 __ li(a0, Operand(Smi::FromInt(count_value)));
4463
4464 // Record position before stub call.
4465 SetSourcePosition(expr->position());
4466
4467 Handle<Code> code =
4468 CodeFactory::BinaryOpIC(isolate(), Token::ADD, NO_OVERWRITE).code();
4469 CallIC(code, expr->CountBinOpFeedbackId());
4470 patch_site.EmitPatchInfo();
4471 __ bind(&done);
4472
4473 // Store the value returned in v0.
4474 switch (assign_type) {
4475 case VARIABLE:
4476 if (expr->is_postfix()) {
4477 { EffectContext context(this);
4478 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4479 Token::ASSIGN);
4480 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4481 context.Plug(v0);
4482 }
4483           // For all contexts except EffectContext we have the result on
4484           // top of the stack.
4485 if (!context()->IsEffect()) {
4486 context()->PlugTOS();
4487 }
4488 } else {
4489 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4490 Token::ASSIGN);
4491 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4492 context()->Plug(v0);
4493 }
4494 break;
4495 case NAMED_PROPERTY: {
4496 __ mov(StoreDescriptor::ValueRegister(), result_register());
4497 __ li(StoreDescriptor::NameRegister(),
4498 Operand(prop->key()->AsLiteral()->value()));
4499 __ pop(StoreDescriptor::ReceiverRegister());
4500 CallStoreIC(expr->CountStoreFeedbackId());
4501 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4502 if (expr->is_postfix()) {
4503 if (!context()->IsEffect()) {
4504 context()->PlugTOS();
4505 }
4506 } else {
4507 context()->Plug(v0);
4508 }
4509 break;
4510 }
4511 case KEYED_PROPERTY: {
4512 __ mov(StoreDescriptor::ValueRegister(), result_register());
4513 __ Pop(StoreDescriptor::ReceiverRegister(),
4514 StoreDescriptor::NameRegister());
4515 Handle<Code> ic =
4516 CodeFactory::KeyedStoreIC(isolate(), strict_mode()).code();
4517 CallIC(ic, expr->CountStoreFeedbackId());
4518 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4519 if (expr->is_postfix()) {
4520 if (!context()->IsEffect()) {
4521 context()->PlugTOS();
4522 }
4523 } else {
4524 context()->Plug(v0);
4525 }
4526 break;
4527 }
4528 }
4529 }
4530
4531
4532 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4533 DCHECK(!context()->IsEffect());
4534 DCHECK(!context()->IsTest());
4535 VariableProxy* proxy = expr->AsVariableProxy();
4536 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4537 Comment cmnt(masm_, "[ Global variable");
4538 __ ld(LoadDescriptor::ReceiverRegister(), GlobalObjectOperand());
4539 __ li(LoadDescriptor::NameRegister(), Operand(proxy->name()));
4540 if (FLAG_vector_ics) {
4541 __ li(VectorLoadICDescriptor::SlotRegister(),
4542 Operand(Smi::FromInt(proxy->VariableFeedbackSlot())));
4543 }
4544 // Use a regular load, not a contextual load, to avoid a reference
4545 // error.
4546 CallLoadIC(NOT_CONTEXTUAL);
4547 PrepareForBailout(expr, TOS_REG);
4548 context()->Plug(v0);
4549 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4550 Comment cmnt(masm_, "[ Lookup slot");
4551 Label done, slow;
4552
4553 // Generate code for loading from variables potentially shadowed
4554 // by eval-introduced variables.
4555 EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);
4556
4557 __ bind(&slow);
4558 __ li(a0, Operand(proxy->name()));
4559 __ Push(cp, a0);
4560 __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
4561 PrepareForBailout(expr, TOS_REG);
4562 __ bind(&done);
4563
4564 context()->Plug(v0);
4565 } else {
4566 // This expression cannot throw a reference error at the top level.
4567 VisitInDuplicateContext(expr);
4568 }
4569 }
4570
4571 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4572 Expression* sub_expr,
4573 Handle<String> check) {
4574 Label materialize_true, materialize_false;
4575 Label* if_true = NULL;
4576 Label* if_false = NULL;
4577 Label* fall_through = NULL;
4578 context()->PrepareTest(&materialize_true, &materialize_false,
4579 &if_true, &if_false, &fall_through);
4580
4581 { AccumulatorValueContext context(this);
4582 VisitForTypeofValue(sub_expr);
4583 }
4584 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4585
4586 Factory* factory = isolate()->factory();
4587 if (String::Equals(check, factory->number_string())) {
4588 __ JumpIfSmi(v0, if_true);
4589 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
4590 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
4591 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4592 } else if (String::Equals(check, factory->string_string())) {
4593 __ JumpIfSmi(v0, if_false);
4594 // Check for undetectable objects => false.
4595 __ GetObjectType(v0, v0, a1);
4596 __ Branch(if_false, ge, a1, Operand(FIRST_NONSTRING_TYPE));
4597 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4598 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4599 Split(eq, a1, Operand(zero_reg),
4600 if_true, if_false, fall_through);
4601 } else if (String::Equals(check, factory->symbol_string())) {
4602 __ JumpIfSmi(v0, if_false);
4603 __ GetObjectType(v0, v0, a1);
4604 Split(eq, a1, Operand(SYMBOL_TYPE), if_true, if_false, fall_through);
4605 } else if (String::Equals(check, factory->boolean_string())) {
4606 __ LoadRoot(at, Heap::kTrueValueRootIndex);
4607 __ Branch(if_true, eq, v0, Operand(at));
4608 __ LoadRoot(at, Heap::kFalseValueRootIndex);
4609 Split(eq, v0, Operand(at), if_true, if_false, fall_through);
4610 } else if (String::Equals(check, factory->undefined_string())) {
4611 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
4612 __ Branch(if_true, eq, v0, Operand(at));
4613 __ JumpIfSmi(v0, if_false);
4614 // Check for undetectable objects => true.
4615 __ ld(v0, FieldMemOperand(v0, HeapObject::kMapOffset));
4616 __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
4617 __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
4618 Split(ne, a1, Operand(zero_reg), if_true, if_false, fall_through);
4619 } else if (String::Equals(check, factory->function_string())) {
4620 __ JumpIfSmi(v0, if_false);
4621 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4622 __ GetObjectType(v0, v0, a1);
4623 __ Branch(if_true, eq, a1, Operand(JS_FUNCTION_TYPE));
4624 Split(eq, a1, Operand(JS_FUNCTION_PROXY_TYPE),
4625 if_true, if_false, fall_through);
4626 } else if (String::Equals(check, factory->object_string())) {
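    // typeof yields "object" for null and for non-callable spec objects that
    // are not undetectable; callables and undetectable objects fail the
    // checks below.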
    __ JumpIfSmi(v0, if_false);
    __ LoadRoot(at, Heap::kNullValueRootIndex);
    __ Branch(if_true, eq, v0, Operand(at));
    // Check for JS objects => true.
    __ GetObjectType(v0, v0, a1);
    __ Branch(if_false, lt, a1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
    __ lbu(a1, FieldMemOperand(v0, Map::kInstanceTypeOffset));
    __ Branch(if_false, gt, a1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
    // Check for undetectable objects => false.
    __ lbu(a1, FieldMemOperand(v0, Map::kBitFieldOffset));
    __ And(a1, a1, Operand(1 << Map::kIsUndetectable));
    Split(eq, a1, Operand(zero_reg), if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


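// Token::IN and Token::INSTANCEOF are dispatched to a builtin and a stub
// respectively; every other comparison operator goes through the CompareIC,
// with an inlined fast path for the case where both operands are smis.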
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ LoadRoot(a4, Heap::kTrueValueRootIndex);
      Split(eq, v0, Operand(a4), if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      // The stub returns 0 for true.
      Split(eq, v0, Operand(zero_reg), if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ mov(a0, result_register());
      __ pop(a1);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ Or(a2, a0, Operand(a1));
        patch_site.EmitJumpIfNotSmi(a2, &slow_case);
        Split(cc, a1, Operand(a0), if_true, if_false, NULL);
        __ bind(&slow_case);
      }
      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CodeFactory::CompareIC(isolate(), op).code();
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
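      // The compare IC leaves its result in v0; comparing that result against
      // zero with the original condition cc yields the comparison outcome.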
      Split(cc, v0, Operand(zero_reg), if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


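// Compare a value against null or undefined. For strict equality the root
// constant is compared directly; otherwise the CompareNilIC is called, which
// returns a non-zero value when the comparison holds.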
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ mov(a0, result_register());
  if (expr->op() == Token::EQ_STRICT) {
    Heap::RootListIndex nil_value = nil == kNullValue ?
        Heap::kNullValueRootIndex :
        Heap::kUndefinedValueRootIndex;
    __ LoadRoot(a1, nil_value);
    Split(eq, a0, Operand(a1), if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    Split(ne, v0, Operand(zero_reg), if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ ld(v0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(v0);
}


Register FullCodeGenerator::result_register() {
  return v0;
}


Register FullCodeGenerator::context_register() {
  return cp;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK(IsAligned(frame_offset, kPointerSize));
  __ sd(value, MemOperand(fp, frame_offset));
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ ld(dst, ContextOperand(cp, context_index));
}


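// Push the closure that a newly allocated context should record as its
// owner; which closure to use depends on the kind of declaration scope.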
void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ li(at, Operand(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts created by a call to eval have the same closure as the
    // context calling eval, not the anonymous closure containing the eval
    // code. Fetch it from the context.
    __ ld(at, ContextOperand(cp, Context::CLOSURE_INDEX));
  } else {
    DCHECK(declaration_scope->is_function_scope());
    __ ld(at, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }
  __ push(at);
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

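// On entry to a finally block the code below saves the result register, the
// return address (cooked into an smi-encoded offset from the start of the
// code object, so only a tagged value lives on the stack), and the pending
// message state. ExitFinallyBlock restores them in reverse order.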
void FullCodeGenerator::EnterFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Store result register while executing finally block.
  __ push(result_register());
  // Cook return address in link register to stack (smi encoded Code* delta).
  __ Dsubu(a1, ra, Operand(masm_->CodeObject()));
  __ SmiTag(a1);

  // Store the cooked return address while executing the finally block.
  __ push(a1);

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ ld(a1, MemOperand(at));
  __ push(a1);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ li(at, Operand(has_pending_message));
  __ ld(a1, MemOperand(at));
  __ SmiTag(a1);
  __ push(a1);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ li(at, Operand(pending_message_script));
  __ ld(a1, MemOperand(at));
  __ push(a1);
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(a1));
  // Restore pending message from stack.
  __ pop(a1);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ li(at, Operand(pending_message_script));
  __ sd(a1, MemOperand(at));

  __ pop(a1);
  __ SmiUntag(a1);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ li(at, Operand(has_pending_message));
  __ sd(a1, MemOperand(at));

  __ pop(a1);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ li(at, Operand(pending_message_obj));
  __ sd(a1, MemOperand(at));

  // Restore the cooked return address from the stack.
  __ pop(a1);

  // Restore the result register from the stack.
  __ pop(result_register());

  // Uncook the return address and return.
  __ SmiUntag(a1);
  __ Daddu(at, a1, Operand(masm_->CodeObject()));
  __ Jump(at);
}


#undef __

#define __ ACCESS_MASM(masm())

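// Unwind towards the enclosing try-finally handler and run the finally code
// before the non-local transfer (break, continue or return) continues.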
FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ ld(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
    __ sd(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ PopTryHandler();
  __ Call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}


#undef __


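// A back edge site is the eight-instruction sequence shown below, with pc
// pointing just past it: the branch-condition instruction therefore lives at
// pc - 8 * kInstrSize and the load of the call target starts at
// pc - 6 * kInstrSize. Only the condition instruction is rewritten here; the
// call target is retargeted separately via Assembler::set_target_address_at.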
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 8 * kInstrSize;
  CodePatcher patcher(branch_address, 1);

  switch (target_state) {
    case INTERRUPT:
      // slt at, a3, zero_reg (in case of count based interrupts)
      // beq at, zero_reg, ok
      // lui t9, <interrupt stub address> upper
      // ori t9, <interrupt stub address> middle
      // dsll t9, t9, 16
      // ori t9, <interrupt stub address> lower
      // jalr t9
      // nop
      // ok-label ----- pc_after points here
      patcher.masm()->slt(at, a3, zero_reg);
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      // addiu at, zero_reg, 1
      // beq at, zero_reg, ok  ;; Not changed
      // lui t9, <on-stack replacement address> upper
      // ori t9, <on-stack replacement address> middle
      // dsll t9, t9, 16
      // ori t9, <on-stack replacement address> lower
      // jalr t9  ;; Not changed
      // nop  ;; Not changed
      // ok-label ----- pc_after points here
      patcher.masm()->daddiu(at, zero_reg, 1);
      break;
  }
  Address pc_immediate_load_address = pc - 6 * kInstrSize;
  // Replace the stack check address in the load-immediate (6-instr sequence)
  // with the entry address of the replacement code.
  Assembler::set_target_address_at(pc_immediate_load_address,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, pc_immediate_load_address, replacement_code);
}


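// The inverse of PatchAt: the instruction at the branch address distinguishes
// an unpatched interrupt check (slt) from a patched site (daddiu), and the
// loaded call target tells the two patched states apart.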
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  static const int kInstrSize = Assembler::kInstrSize;
  Address branch_address = pc - 8 * kInstrSize;
  Address pc_immediate_load_address = pc - 6 * kInstrSize;

  DCHECK(Assembler::IsBeq(Assembler::instr_at(pc - 7 * kInstrSize)));
  if (!Assembler::IsAddImmediate(Assembler::instr_at(branch_address))) {
    DCHECK(reinterpret_cast<uint64_t>(
               Assembler::target_address_at(pc_immediate_load_address)) ==
           reinterpret_cast<uint64_t>(
               isolate->builtins()->InterruptCheck()->entry()));
    return INTERRUPT;
  }

  DCHECK(Assembler::IsAddImmediate(Assembler::instr_at(branch_address)));

  if (reinterpret_cast<uint64_t>(
          Assembler::target_address_at(pc_immediate_load_address)) ==
      reinterpret_cast<uint64_t>(
          isolate->builtins()->OnStackReplacement()->entry())) {
    return ON_STACK_REPLACEMENT;
  }

  DCHECK(reinterpret_cast<uint64_t>(
             Assembler::target_address_at(pc_immediate_load_address)) ==
         reinterpret_cast<uint64_t>(
             isolate->builtins()->OsrAfterStackCheck()->entry()));
  return OSR_AFTER_STACK_CHECK;
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS64