1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "v8.h"
29
30 #if defined(V8_TARGET_ARCH_ARM)
31
32 #include "code-stubs.h"
33 #include "codegen.h"
34 #include "compiler.h"
35 #include "debug.h"
36 #include "full-codegen.h"
37 #include "isolate-inl.h"
38 #include "parser.h"
39 #include "scopes.h"
40 #include "stub-cache.h"
41
42 #include "arm/code-stubs-arm.h"
43 #include "arm/macro-assembler-arm.h"
44
45 namespace v8 {
46 namespace internal {
47
48 #define __ ACCESS_MASM(masm_)
49
50
51 // A patch site is a location in the generated code that can be patched later.
52 // This class has a number of methods to emit the patchable code and the
53 // method EmitPatchInfo to record a marker back to the patchable code. This
54 // marker is a cmp rx, #yyy instruction; x * 0x00000fff + yyy (using the raw
55 // 12 bit immediate value) is the delta from the pc to the first instruction
56 // of the patchable code.
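// Illustrative example (numbers are hypothetical): with the 12 bit mask
// 0x00000fff, a delta of 70 instructions would be encoded by EmitPatchInfo as
//   register code: 70 / 0xfff = 0   -> r0
//   raw immediate: 70 % 0xfff = 70
// i.e. the marker is "cmp r0, #70", from which the patcher recovers the delta
// as 0 * 0xfff + 70.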
57 class JumpPatchSite BASE_EMBEDDED {
58 public:
59 explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
60 #ifdef DEBUG
61 info_emitted_ = false;
62 #endif
63 }
64
65 ~JumpPatchSite() {
66 ASSERT(patch_site_.is_bound() == info_emitted_);
67 }
68
69 // When initially emitting this code, ensure that a jump is always generated
70 // to skip the inlined smi code.
71 void EmitJumpIfNotSmi(Register reg, Label* target) {
72 ASSERT(!patch_site_.is_bound() && !info_emitted_);
73 Assembler::BlockConstPoolScope block_const_pool(masm_);
74 __ bind(&patch_site_);
75 __ cmp(reg, Operand(reg));
76 // Don't use b(al, ...) as that might emit the constant pool right after the
77 // branch. After patching, when the branch is no longer unconditional,
78 // execution can continue into the constant pool.
79 __ b(eq, target); // Always taken before patched.
80 }
81
82 // When initially emitting this code, ensure that a jump is never generated
83 // to skip the inlined smi code.
84 void EmitJumpIfSmi(Register reg, Label* target) {
85 ASSERT(!patch_site_.is_bound() && !info_emitted_);
86 Assembler::BlockConstPoolScope block_const_pool(masm_);
87 __ bind(&patch_site_);
88 __ cmp(reg, Operand(reg));
89 __ b(ne, target); // Never taken before patched.
90 }
91
92 void EmitPatchInfo() {
93 if (patch_site_.is_bound()) {
94 int delta_to_patch_site = masm_->InstructionsGeneratedSince(&patch_site_);
95 Register reg;
96 reg.set_code(delta_to_patch_site / kOff12Mask);
97 __ cmp_raw_immediate(reg, delta_to_patch_site % kOff12Mask);
98 #ifdef DEBUG
99 info_emitted_ = true;
100 #endif
101 } else {
102 __ nop(); // Signals no inlined code.
103 }
104 }
105
106 private:
107 MacroAssembler* masm_;
108 Label patch_site_;
109 #ifdef DEBUG
110 bool info_emitted_;
111 #endif
112 };
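// Typical usage (see VisitSwitchStatement below): construct a JumpPatchSite,
// emit EmitJumpIfNotSmi() (or EmitJumpIfSmi()) in front of the inlined smi
// code, and call EmitPatchInfo() right after the corresponding IC call so the
// marker points back at the patchable branch.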
113
114
115 // TODO(jkummerow): Obsolete as soon as x64 is updated. Remove.
116 int FullCodeGenerator::self_optimization_header_size() {
117 UNREACHABLE();
118 return 24;
119 }
120
121
122 // Generate code for a JS function. On entry to the function the receiver
123 // and arguments have been pushed on the stack left to right. The actual
124 // argument count matches the formal parameter count expected by the
125 // function.
126 //
127 // The live registers are:
128 // o r1: the JS function object being called (i.e., ourselves)
129 // o cp: our context
130 // o fp: our caller's frame pointer
131 // o sp: stack pointer
132 // o lr: return address
133 //
134 // The function builds a JS frame. Please see JavaScriptFrameConstants in
135 // frames-arm.h for its layout.
136 void FullCodeGenerator::Generate() {
137 CompilationInfo* info = info_;
138 handler_table_ =
139 isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
140 profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
141 Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
142 SetFunctionPosition(function());
143 Comment cmnt(masm_, "[ function compiled by full code generator");
144
145 #ifdef DEBUG
146 if (strlen(FLAG_stop_at) > 0 &&
147 info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
148 __ stop("stop-at");
149 }
150 #endif
151
152 // Strict mode functions and builtins need to replace the receiver
153 // with undefined when called as functions (without an explicit
154 // receiver object). r5 is zero for method calls and non-zero for
155 // function calls.
156 if (!info->is_classic_mode() || info->is_native()) {
157 Label ok;
158 __ cmp(r5, Operand(0));
159 __ b(eq, &ok);
160 int receiver_offset = info->scope()->num_parameters() * kPointerSize;
161 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
162 __ str(r2, MemOperand(sp, receiver_offset));
163 __ bind(&ok);
164 }
165
166 // Open a frame scope to indicate that there is a frame on the stack. The
167 // MANUAL indicates that the scope shouldn't actually generate code to set up
168 // the frame (that is done below).
169 FrameScope frame_scope(masm_, StackFrame::MANUAL);
170
171 int locals_count = info->scope()->num_stack_slots();
172
173 __ Push(lr, fp, cp, r1);
174 if (locals_count > 0) {
175 // Load undefined value here, so the value is ready for the loop
176 // below.
177 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
178 }
179 // Adjust fp to point to caller's fp.
180 __ add(fp, sp, Operand(2 * kPointerSize));
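// Rough sketch of the frame at this point (see JavaScriptFrameConstants in
// frames-arm.h for the authoritative layout):
//   fp + 8 and up : parameters and, above them, the receiver (caller's frame)
//   fp + 4        : return address (lr)
//   fp + 0        : caller's fp
//   fp - 4        : context (cp)
//   fp - 8        : JS function (r1)
//   fp - 12 down  : stack-allocated locals, pushed below.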
181
182 { Comment cmnt(masm_, "[ Allocate locals");
183 for (int i = 0; i < locals_count; i++) {
184 __ push(ip);
185 }
186 }
187
188 bool function_in_register = true;
189
190 // Possibly allocate a local context.
191 int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
192 if (heap_slots > 0) {
193 Comment cmnt(masm_, "[ Allocate local context");
194 // Argument to NewContext is the function, which is in r1.
195 __ push(r1);
196 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
197 FastNewContextStub stub(heap_slots);
198 __ CallStub(&stub);
199 } else {
200 __ CallRuntime(Runtime::kNewFunctionContext, 1);
201 }
202 function_in_register = false;
203 // Context is returned in both r0 and cp. It replaces the context
204 // passed to us. It's saved on the stack and kept live in cp.
205 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
206 // Copy any necessary parameters into the context.
207 int num_parameters = info->scope()->num_parameters();
208 for (int i = 0; i < num_parameters; i++) {
209 Variable* var = scope()->parameter(i);
210 if (var->IsContextSlot()) {
211 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
212 (num_parameters - 1 - i) * kPointerSize;
213 // Load parameter from stack.
214 __ ldr(r0, MemOperand(fp, parameter_offset));
215 // Store it in the context.
216 MemOperand target = ContextOperand(cp, var->index());
217 __ str(r0, target);
218
219 // Update the write barrier.
220 __ RecordWriteContextSlot(
221 cp, target.offset(), r0, r3, kLRHasBeenSaved, kDontSaveFPRegs);
222 }
223 }
224 }
225
226 Variable* arguments = scope()->arguments();
227 if (arguments != NULL) {
228 // Function uses arguments object.
229 Comment cmnt(masm_, "[ Allocate arguments object");
230 if (!function_in_register) {
231 // Load this again, if it's used by the local context below.
232 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
233 } else {
234 __ mov(r3, r1);
235 }
236 // Receiver is just before the parameters on the caller's stack.
237 int num_parameters = info->scope()->num_parameters();
238 int offset = num_parameters * kPointerSize;
239 __ add(r2, fp,
240 Operand(StandardFrameConstants::kCallerSPOffset + offset));
241 __ mov(r1, Operand(Smi::FromInt(num_parameters)));
242 __ Push(r3, r2, r1);
243
244 // Arguments to ArgumentsAccessStub:
245 // function, receiver address, parameter count.
246 // The stub will rewrite the receiver and parameter count if the previous
247 // stack frame was an arguments adaptor frame.
248 ArgumentsAccessStub::Type type;
249 if (!is_classic_mode()) {
250 type = ArgumentsAccessStub::NEW_STRICT;
251 } else if (function()->has_duplicate_parameters()) {
252 type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
253 } else {
254 type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
255 }
256 ArgumentsAccessStub stub(type);
257 __ CallStub(&stub);
258
259 SetVar(arguments, r0, r1, r2);
260 }
261
262 if (FLAG_trace) {
263 __ CallRuntime(Runtime::kTraceEnter, 0);
264 }
265
266 // Visit the declarations and body unless there is an illegal
267 // redeclaration.
268 if (scope()->HasIllegalRedeclaration()) {
269 Comment cmnt(masm_, "[ Declarations");
270 scope()->VisitIllegalRedeclaration(this);
271
272 } else {
273 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
274 { Comment cmnt(masm_, "[ Declarations");
275 // For named function expressions, declare the function name as a
276 // constant.
277 if (scope()->is_function_scope() && scope()->function() != NULL) {
278 VariableProxy* proxy = scope()->function();
279 ASSERT(proxy->var()->mode() == CONST ||
280 proxy->var()->mode() == CONST_HARMONY);
281 ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
282 EmitDeclaration(proxy, proxy->var()->mode(), NULL);
283 }
284 VisitDeclarations(scope()->declarations());
285 }
286
287 { Comment cmnt(masm_, "[ Stack check");
288 PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
289 Label ok;
290 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
291 __ cmp(sp, Operand(ip));
292 __ b(hs, &ok);
293 StackCheckStub stub;
294 __ CallStub(&stub);
295 __ bind(&ok);
296 }
297
298 { Comment cmnt(masm_, "[ Body");
299 ASSERT(loop_depth() == 0);
300 VisitStatements(function()->body());
301 ASSERT(loop_depth() == 0);
302 }
303 }
304
305 // Always emit a 'return undefined' in case control fell off the end of
306 // the body.
307 { Comment cmnt(masm_, "[ return <undefined>;");
308 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
309 }
310 EmitReturnSequence();
311
312 // Force emit the constant pool, so it doesn't get emitted in the middle
313 // of the stack check table.
314 masm()->CheckConstPool(true, false);
315 }
316
317
318 void FullCodeGenerator::ClearAccumulator() {
319 __ mov(r0, Operand(Smi::FromInt(0)));
320 }
321
322
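// The profiling counter is a smi kept in the profiling_counter_ cell. Back
// edges and returns decrement it by a weight; when it goes negative, the
// 'b(pl, ...)' guards at the call sites fall through into an InterruptStub or
// self-optimization call, after which the counter is reset.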
323 void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
324 __ mov(r2, Operand(profiling_counter_));
325 __ ldr(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
326 __ sub(r3, r3, Operand(Smi::FromInt(delta)), SetCC);
327 __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
328 }
329
330
331 void FullCodeGenerator::EmitProfilingCounterReset() {
332 int reset_value = FLAG_interrupt_budget;
333 if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
334 // Self-optimization is a one-off thing: if it fails, don't try again.
335 reset_value = Smi::kMaxValue;
336 }
337 if (isolate()->IsDebuggerActive()) {
338 // Detect debug break requests as soon as possible.
339 reset_value = 10;
340 }
341 __ mov(r2, Operand(profiling_counter_));
342 __ mov(r3, Operand(Smi::FromInt(reset_value)));
343 __ str(r3, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset));
344 }
345
346
347 static const int kMaxBackEdgeWeight = 127;
348 static const int kBackEdgeDistanceDivisor = 142;
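// Illustrative example: a back edge whose target is 1000 bytes of code away
// gets weight Min(127, Max(1, 1000 / 142)) == 7, so each time it is taken the
// profiling counter is decremented by 7 instead of 1.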
349
350
351 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
352 Label* back_edge_target) {
353 Comment cmnt(masm_, "[ Stack check");
354 Label ok;
355
356 if (FLAG_count_based_interrupts) {
357 int weight = 1;
358 if (FLAG_weighted_back_edges) {
359 ASSERT(back_edge_target->is_bound());
360 int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
361 weight = Min(kMaxBackEdgeWeight,
362 Max(1, distance / kBackEdgeDistanceDivisor));
363 }
364 EmitProfilingCounterDecrement(weight);
365 __ b(pl, &ok);
366 InterruptStub stub;
367 __ CallStub(&stub);
368 } else {
369 __ LoadRoot(ip, Heap::kStackLimitRootIndex);
370 __ cmp(sp, Operand(ip));
371 __ b(hs, &ok);
372 StackCheckStub stub;
373 __ CallStub(&stub);
374 }
375
376 // Record a mapping of this PC offset to the OSR id. This is used to find
377 // the AST id from the unoptimized code in order to use it as a key into
378 // the deoptimization input data found in the optimized code.
379 RecordStackCheck(stmt->OsrEntryId());
380
381 if (FLAG_count_based_interrupts) {
382 EmitProfilingCounterReset();
383 }
384
385 __ bind(&ok);
386 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
387 // Record a mapping of the OSR id to this PC. This is used if the OSR
388 // entry becomes the target of a bailout. We don't expect it to be, but
389 // we want it to work if it is.
390 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
391 }
392
393
394 void FullCodeGenerator::EmitReturnSequence() {
395 Comment cmnt(masm_, "[ Return sequence");
396 if (return_label_.is_bound()) {
397 __ b(&return_label_);
398 } else {
399 __ bind(&return_label_);
400 if (FLAG_trace) {
401 // Push the return value on the stack as the parameter.
402 // Runtime::TraceExit returns its parameter in r0.
403 __ push(r0);
404 __ CallRuntime(Runtime::kTraceExit, 1);
405 }
406 if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
407 // Pretend that the exit is a backwards jump to the entry.
408 int weight = 1;
409 if (info_->ShouldSelfOptimize()) {
410 weight = FLAG_interrupt_budget / FLAG_self_opt_count;
411 } else if (FLAG_weighted_back_edges) {
412 int distance = masm_->pc_offset();
413 weight = Min(kMaxBackEdgeWeight,
414 Max(1, distance / kBackEdgeDistanceDivisor));
415 }
416 EmitProfilingCounterDecrement(weight);
417 Label ok;
418 __ b(pl, &ok);
419 __ push(r0);
420 if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
421 __ ldr(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
422 __ push(r2);
423 __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
424 } else {
425 InterruptStub stub;
426 __ CallStub(&stub);
427 }
428 __ pop(r0);
429 EmitProfilingCounterReset();
430 __ bind(&ok);
431 }
432
433 #ifdef DEBUG
434 // Add a label for checking the size of the code used for returning.
435 Label check_exit_codesize;
436 masm_->bind(&check_exit_codesize);
437 #endif
438 // Make sure that the constant pool is not emitted inside of the return
439 // sequence.
440 { Assembler::BlockConstPoolScope block_const_pool(masm_);
441 // Here we use masm_-> instead of the __ macro to prevent the code coverage
442 // tool from instrumenting this code, as we rely on the exact code size here.
443 int32_t sp_delta = (info_->scope()->num_parameters() + 1) * kPointerSize;
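// Illustrative example: a function declared with 2 parameters pops
// 3 * kPointerSize bytes here -- the receiver plus the 2 parameters.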
444 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
445 __ RecordJSReturn();
446 masm_->mov(sp, fp);
447 masm_->ldm(ia_w, sp, fp.bit() | lr.bit());
448 masm_->add(sp, sp, Operand(sp_delta));
449 masm_->Jump(lr);
450 }
451
452 #ifdef DEBUG
453 // Check that the size of the code used for returning is large enough
454 // for the debugger's requirements.
455 ASSERT(Assembler::kJSReturnSequenceInstructions <=
456 masm_->InstructionsGeneratedSince(&check_exit_codesize));
457 #endif
458 }
459 }
460
461
462 void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
463 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
464 }
465
466
467 void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
468 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
469 codegen()->GetVar(result_register(), var);
470 }
471
472
473 void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
474 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
475 codegen()->GetVar(result_register(), var);
476 __ push(result_register());
477 }
478
479
480 void FullCodeGenerator::TestContext::Plug(Variable* var) const {
481 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
482 // For simplicity we always test the accumulator register.
483 codegen()->GetVar(result_register(), var);
484 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
485 codegen()->DoTest(this);
486 }
487
488
489 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
490 }
491
492
493 void FullCodeGenerator::AccumulatorValueContext::Plug(
494 Heap::RootListIndex index) const {
495 __ LoadRoot(result_register(), index);
496 }
497
498
499 void FullCodeGenerator::StackValueContext::Plug(
500 Heap::RootListIndex index) const {
501 __ LoadRoot(result_register(), index);
502 __ push(result_register());
503 }
504
505
506 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
507 codegen()->PrepareForBailoutBeforeSplit(condition(),
508 true,
509 true_label_,
510 false_label_);
511 if (index == Heap::kUndefinedValueRootIndex ||
512 index == Heap::kNullValueRootIndex ||
513 index == Heap::kFalseValueRootIndex) {
514 if (false_label_ != fall_through_) __ b(false_label_);
515 } else if (index == Heap::kTrueValueRootIndex) {
516 if (true_label_ != fall_through_) __ b(true_label_);
517 } else {
518 __ LoadRoot(result_register(), index);
519 codegen()->DoTest(this);
520 }
521 }
522
523
524 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
525 }
526
527
528 void FullCodeGenerator::AccumulatorValueContext::Plug(
529 Handle<Object> lit) const {
530 __ mov(result_register(), Operand(lit));
531 }
532
533
534 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
535 // Immediates cannot be pushed directly.
536 __ mov(result_register(), Operand(lit));
537 __ push(result_register());
538 }
539
540
541 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
542 codegen()->PrepareForBailoutBeforeSplit(condition(),
543 true,
544 true_label_,
545 false_label_);
546 ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
547 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
548 if (false_label_ != fall_through_) __ b(false_label_);
549 } else if (lit->IsTrue() || lit->IsJSObject()) {
550 if (true_label_ != fall_through_) __ b(true_label_);
551 } else if (lit->IsString()) {
552 if (String::cast(*lit)->length() == 0) {
553 if (false_label_ != fall_through_) __ b(false_label_);
554 } else {
555 if (true_label_ != fall_through_) __ b(true_label_);
556 }
557 } else if (lit->IsSmi()) {
558 if (Smi::cast(*lit)->value() == 0) {
559 if (false_label_ != fall_through_) __ b(false_label_);
560 } else {
561 if (true_label_ != fall_through_) __ b(true_label_);
562 }
563 } else {
564 // For simplicity we always test the accumulator register.
565 __ mov(result_register(), Operand(lit));
566 codegen()->DoTest(this);
567 }
568 }
569
570
571 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
572 Register reg) const {
573 ASSERT(count > 0);
574 __ Drop(count);
575 }
576
577
578 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
579 int count,
580 Register reg) const {
581 ASSERT(count > 0);
582 __ Drop(count);
583 __ Move(result_register(), reg);
584 }
585
586
587 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
588 Register reg) const {
589 ASSERT(count > 0);
590 if (count > 1) __ Drop(count - 1);
591 __ str(reg, MemOperand(sp, 0));
592 }
593
594
595 void FullCodeGenerator::TestContext::DropAndPlug(int count,
596 Register reg) const {
597 ASSERT(count > 0);
598 // For simplicity we always test the accumulator register.
599 __ Drop(count);
600 __ Move(result_register(), reg);
601 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
602 codegen()->DoTest(this);
603 }
604
605
606 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
607 Label* materialize_false) const {
608 ASSERT(materialize_true == materialize_false);
609 __ bind(materialize_true);
610 }
611
612
613 void FullCodeGenerator::AccumulatorValueContext::Plug(
614 Label* materialize_true,
615 Label* materialize_false) const {
616 Label done;
617 __ bind(materialize_true);
618 __ LoadRoot(result_register(), Heap::kTrueValueRootIndex);
619 __ jmp(&done);
620 __ bind(materialize_false);
621 __ LoadRoot(result_register(), Heap::kFalseValueRootIndex);
622 __ bind(&done);
623 }
624
625
626 void FullCodeGenerator::StackValueContext::Plug(
627 Label* materialize_true,
628 Label* materialize_false) const {
629 Label done;
630 __ bind(materialize_true);
631 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
632 __ push(ip);
633 __ jmp(&done);
634 __ bind(materialize_false);
635 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
636 __ push(ip);
637 __ bind(&done);
638 }
639
640
641 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
642 Label* materialize_false) const {
643 ASSERT(materialize_true == true_label_);
644 ASSERT(materialize_false == false_label_);
645 }
646
647
648 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
649 }
650
651
652 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
653 Heap::RootListIndex value_root_index =
654 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
655 __ LoadRoot(result_register(), value_root_index);
656 }
657
658
659 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
660 Heap::RootListIndex value_root_index =
661 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
662 __ LoadRoot(ip, value_root_index);
663 __ push(ip);
664 }
665
666
667 void FullCodeGenerator::TestContext::Plug(bool flag) const {
668 codegen()->PrepareForBailoutBeforeSplit(condition(),
669 true,
670 true_label_,
671 false_label_);
672 if (flag) {
673 if (true_label_ != fall_through_) __ b(true_label_);
674 } else {
675 if (false_label_ != fall_through_) __ b(false_label_);
676 }
677 }
678
679
680 void FullCodeGenerator::DoTest(Expression* condition,
681 Label* if_true,
682 Label* if_false,
683 Label* fall_through) {
684 if (CpuFeatures::IsSupported(VFP3)) {
685 ToBooleanStub stub(result_register());
686 __ CallStub(&stub);
687 __ tst(result_register(), result_register());
688 } else {
689 // Call the runtime to find the boolean value of the source and then
690 // translate it into control flow to the pair of labels.
691 __ push(result_register());
692 __ CallRuntime(Runtime::kToBool, 1);
693 __ LoadRoot(ip, Heap::kFalseValueRootIndex);
694 __ cmp(r0, ip);
695 }
696 Split(ne, if_true, if_false, fall_through);
697 }
698
699
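// Emit the branches for a test: branch to if_true when 'cond' holds and to
// if_false otherwise, omitting whichever branch falls through into the code
// that follows.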
700 void FullCodeGenerator::Split(Condition cond,
701 Label* if_true,
702 Label* if_false,
703 Label* fall_through) {
704 if (if_false == fall_through) {
705 __ b(cond, if_true);
706 } else if (if_true == fall_through) {
707 __ b(NegateCondition(cond), if_false);
708 } else {
709 __ b(cond, if_true);
710 __ b(if_false);
711 }
712 }
713
714
715 MemOperand FullCodeGenerator::StackOperand(Variable* var) {
716 ASSERT(var->IsStackAllocated());
717 // Offset is negative because higher indexes are at lower addresses.
718 int offset = -var->index() * kPointerSize;
719 // Adjust by a (parameter or local) base offset.
720 if (var->IsParameter()) {
721 offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
722 } else {
723 offset += JavaScriptFrameConstants::kLocal0Offset;
724 }
725 return MemOperand(fp, offset);
726 }
727
728
729 MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
730 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
731 if (var->IsContextSlot()) {
732 int context_chain_length = scope()->ContextChainLength(var->scope());
733 __ LoadContext(scratch, context_chain_length);
734 return ContextOperand(scratch, var->index());
735 } else {
736 return StackOperand(var);
737 }
738 }
739
740
741 void FullCodeGenerator::GetVar(Register dest, Variable* var) {
742 // Use destination as scratch.
743 MemOperand location = VarOperand(var, dest);
744 __ ldr(dest, location);
745 }
746
747
748 void FullCodeGenerator::SetVar(Variable* var,
749 Register src,
750 Register scratch0,
751 Register scratch1) {
752 ASSERT(var->IsContextSlot() || var->IsStackAllocated());
753 ASSERT(!scratch0.is(src));
754 ASSERT(!scratch0.is(scratch1));
755 ASSERT(!scratch1.is(src));
756 MemOperand location = VarOperand(var, scratch0);
757 __ str(src, location);
758
759 // Emit the write barrier code if the location is in the heap.
760 if (var->IsContextSlot()) {
761 __ RecordWriteContextSlot(scratch0,
762 location.offset(),
763 src,
764 scratch1,
765 kLRHasBeenSaved,
766 kDontSaveFPRegs);
767 }
768 }
769
770
771 void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
772 bool should_normalize,
773 Label* if_true,
774 Label* if_false) {
775 // Only prepare for bailouts before splits if we're in a test
776 // context. Otherwise, we let the Visit function deal with the
777 // preparation to avoid preparing with the same AST id twice.
778 if (!context()->IsTest() || !info_->IsOptimizable()) return;
779
780 Label skip;
781 if (should_normalize) __ b(&skip);
782 PrepareForBailout(expr, TOS_REG);
783 if (should_normalize) {
784 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
785 __ cmp(r0, ip);
786 Split(eq, if_true, if_false, NULL);
787 __ bind(&skip);
788 }
789 }
790
791
792 void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
793 VariableMode mode,
794 FunctionLiteral* function) {
795 // If it was not possible to allocate the variable at compile time, we
796 // need to "declare" it at runtime to make sure it actually exists in the
797 // local context.
798 Variable* variable = proxy->var();
799 bool binding_needs_init = (function == NULL) &&
800 (mode == CONST || mode == CONST_HARMONY || mode == LET);
801 switch (variable->location()) {
802 case Variable::UNALLOCATED:
803 ++global_count_;
804 break;
805
806 case Variable::PARAMETER:
807 case Variable::LOCAL:
808 if (function != NULL) {
809 Comment cmnt(masm_, "[ Declaration");
810 VisitForAccumulatorValue(function);
811 __ str(result_register(), StackOperand(variable));
812 } else if (binding_needs_init) {
813 Comment cmnt(masm_, "[ Declaration");
814 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
815 __ str(ip, StackOperand(variable));
816 }
817 break;
818
819 case Variable::CONTEXT:
820 // The variable in the decl always resides in the current function
821 // context.
822 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
823 if (FLAG_debug_code) {
824 // Check that we're not inside a with or catch context.
825 __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
826 __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
827 __ Check(ne, "Declaration in with context.");
828 __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
829 __ Check(ne, "Declaration in catch context.");
830 }
831 if (function != NULL) {
832 Comment cmnt(masm_, "[ Declaration");
833 VisitForAccumulatorValue(function);
834 __ str(result_register(), ContextOperand(cp, variable->index()));
835 int offset = Context::SlotOffset(variable->index());
836 // We know that we have written a function, which is not a smi.
837 __ RecordWriteContextSlot(cp,
838 offset,
839 result_register(),
840 r2,
841 kLRHasBeenSaved,
842 kDontSaveFPRegs,
843 EMIT_REMEMBERED_SET,
844 OMIT_SMI_CHECK);
845 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
846 } else if (binding_needs_init) {
847 Comment cmnt(masm_, "[ Declaration");
848 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
849 __ str(ip, ContextOperand(cp, variable->index()));
850 // No write barrier since the_hole_value is in old space.
851 PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
852 }
853 break;
854
855 case Variable::LOOKUP: {
856 Comment cmnt(masm_, "[ Declaration");
857 __ mov(r2, Operand(variable->name()));
858 // Declaration nodes are always introduced in one of four modes.
859 ASSERT(mode == VAR ||
860 mode == CONST ||
861 mode == CONST_HARMONY ||
862 mode == LET);
863 PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
864 ? READ_ONLY : NONE;
865 __ mov(r1, Operand(Smi::FromInt(attr)));
866 // Push initial value, if any.
867 // Note: For variables we must not push an initial value (such as
868 // 'undefined') because we may have a (legal) redeclaration and we
869 // must not destroy the current value.
870 if (function != NULL) {
871 __ Push(cp, r2, r1);
872 // Push initial value for function declaration.
873 VisitForStackValue(function);
874 } else if (binding_needs_init) {
875 __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
876 __ Push(cp, r2, r1, r0);
877 } else {
878 __ mov(r0, Operand(Smi::FromInt(0))); // Indicates no initial value.
879 __ Push(cp, r2, r1, r0);
880 }
881 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
882 break;
883 }
884 }
885 }
886
887
888 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
889 // Call the runtime to declare the globals.
890 // The context is the first argument.
891 __ mov(r1, Operand(pairs));
892 __ mov(r0, Operand(Smi::FromInt(DeclareGlobalsFlags())));
893 __ Push(cp, r1, r0);
894 __ CallRuntime(Runtime::kDeclareGlobals, 3);
895 // Return value is ignored.
896 }
897
898
899 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
900 Comment cmnt(masm_, "[ SwitchStatement");
901 Breakable nested_statement(this, stmt);
902 SetStatementPosition(stmt);
903
904 // Keep the switch value on the stack until a case matches.
905 VisitForStackValue(stmt->tag());
906 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
907
908 ZoneList<CaseClause*>* clauses = stmt->cases();
909 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
910
911 Label next_test; // Recycled for each test.
912 // Compile all the tests with branches to their bodies.
913 for (int i = 0; i < clauses->length(); i++) {
914 CaseClause* clause = clauses->at(i);
915 clause->body_target()->Unuse();
916
917 // The default is not a test, but remember it as final fall through.
918 if (clause->is_default()) {
919 default_clause = clause;
920 continue;
921 }
922
923 Comment cmnt(masm_, "[ Case comparison");
924 __ bind(&next_test);
925 next_test.Unuse();
926
927 // Compile the label expression.
928 VisitForAccumulatorValue(clause->label());
929
930 // Perform the comparison as if via '==='.
931 __ ldr(r1, MemOperand(sp, 0)); // Switch value.
932 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
933 JumpPatchSite patch_site(masm_);
934 if (inline_smi_code) {
935 Label slow_case;
936 __ orr(r2, r1, r0);
937 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
938
939 __ cmp(r1, r0);
940 __ b(ne, &next_test);
941 __ Drop(1); // Switch value is no longer needed.
942 __ b(clause->body_target());
943 __ bind(&slow_case);
944 }
945
946 // Record position before stub call for type feedback.
947 SetSourcePosition(clause->position());
948 Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
949 CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
950 patch_site.EmitPatchInfo();
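// The patch info records the marker described at the top of this file. Until
// the site is patched, the branch emitted by EmitJumpIfNotSmi above always
// takes the slow case, so the inlined smi comparison only becomes live once
// the site has been patched.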
951
952 __ cmp(r0, Operand(0));
953 __ b(ne, &next_test);
954 __ Drop(1); // Switch value is no longer needed.
955 __ b(clause->body_target());
956 }
957
958 // Discard the test value and jump to the default if present, otherwise to
959 // the end of the statement.
960 __ bind(&next_test);
961 __ Drop(1); // Switch value is no longer needed.
962 if (default_clause == NULL) {
963 __ b(nested_statement.break_label());
964 } else {
965 __ b(default_clause->body_target());
966 }
967
968 // Compile all the case bodies.
969 for (int i = 0; i < clauses->length(); i++) {
970 Comment cmnt(masm_, "[ Case body");
971 CaseClause* clause = clauses->at(i);
972 __ bind(clause->body_target());
973 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
974 VisitStatements(clause->statements());
975 }
976
977 __ bind(nested_statement.break_label());
978 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
979 }
980
981
982 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
983 Comment cmnt(masm_, "[ ForInStatement");
984 SetStatementPosition(stmt);
985
986 Label loop, exit;
987 ForIn loop_statement(this, stmt);
988 increment_loop_depth();
989
990 // Get the object to enumerate over. Both SpiderMonkey and JSC
991 // ignore null and undefined in contrast to the specification; see
992 // ECMA-262 section 12.6.4.
993 VisitForAccumulatorValue(stmt->enumerable());
994 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
995 __ cmp(r0, ip);
996 __ b(eq, &exit);
997 Register null_value = r5;
998 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
999 __ cmp(r0, null_value);
1000 __ b(eq, &exit);
1001
1002 PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);
1003
1004 // Convert the object to a JS object.
1005 Label convert, done_convert;
1006 __ JumpIfSmi(r0, &convert);
1007 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
1008 __ b(ge, &done_convert);
1009 __ bind(&convert);
1010 __ push(r0);
1011 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1012 __ bind(&done_convert);
1013 __ push(r0);
1014
1015 // Check for proxies.
1016 Label call_runtime;
1017 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1018 __ CompareObjectType(r0, r1, r1, LAST_JS_PROXY_TYPE);
1019 __ b(le, &call_runtime);
1020
1021 // Check cache validity in generated code. This is a fast case for
1022 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
1023 // guarantee cache validity, call the runtime system to check cache
1024 // validity or get the property names in a fixed array.
1025 __ CheckEnumCache(null_value, &call_runtime);
1026
1027 // The enum cache is valid. Load the map of the object being
1028 // iterated over and use the cache for the iteration.
1029 Label use_cache;
1030 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
1031 __ b(&use_cache);
1032
1033 // Get the set of properties to enumerate.
1034 __ bind(&call_runtime);
1035 __ push(r0); // Duplicate the enumerable object on the stack.
1036 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
1037
1038 // If we got a map from the runtime call, we can do a fast
1039 // modification check. Otherwise, we got a fixed array, and we have
1040 // to do a slow check.
1041 Label fixed_array;
1042 __ mov(r2, r0);
1043 __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
1044 __ LoadRoot(ip, Heap::kMetaMapRootIndex);
1045 __ cmp(r1, ip);
1046 __ b(ne, &fixed_array);
1047
1048 // We got a map in register r0. Get the enumeration cache from it.
1049 __ bind(&use_cache);
1050 __ LoadInstanceDescriptors(r0, r1);
1051 __ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset));
1052 __ ldr(r2, FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset));
1053
1054 // Set up the four remaining stack slots.
1055 __ push(r0); // Map.
1056 __ ldr(r1, FieldMemOperand(r2, FixedArray::kLengthOffset));
1057 __ mov(r0, Operand(Smi::FromInt(0)));
1058 // Push enumeration cache, enumeration cache length (as smi) and zero.
1059 __ Push(r2, r1, r0);
1060 __ jmp(&loop);
1061
1062 // We got a fixed array in register r0. Iterate through that.
1063 Label non_proxy;
1064 __ bind(&fixed_array);
1065
1066 Handle<JSGlobalPropertyCell> cell =
1067 isolate()->factory()->NewJSGlobalPropertyCell(
1068 Handle<Object>(
1069 Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
1070 RecordTypeFeedbackCell(stmt->PrepareId(), cell);
1071 __ LoadHeapObject(r1, cell);
1072 __ mov(r2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
1073 __ str(r2, FieldMemOperand(r1, JSGlobalPropertyCell::kValueOffset));
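// The cell was created above holding the fast-case marker; reaching this
// point means the generic (fixed array) path was taken, so record the
// slow-case marker instead.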
1074
1075 __ mov(r1, Operand(Smi::FromInt(1))); // Smi indicates slow check
1076 __ ldr(r2, MemOperand(sp, 0 * kPointerSize)); // Get enumerated object
1077 STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
1078 __ CompareObjectType(r2, r3, r3, LAST_JS_PROXY_TYPE);
1079 __ b(gt, &non_proxy);
1080 __ mov(r1, Operand(Smi::FromInt(0))); // Zero indicates proxy
1081 __ bind(&non_proxy);
1082 __ Push(r1, r0); // Smi and array
1083 __ ldr(r1, FieldMemOperand(r0, FixedArray::kLengthOffset));
1084 __ mov(r0, Operand(Smi::FromInt(0)));
1085 __ Push(r1, r0); // Fixed array length (as smi) and initial index.
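// Loop state on the stack from here on (top to bottom):
//   sp + 0  : current index (smi)
//   sp + 4  : length of the fixed array or enum cache (smi)
//   sp + 8  : fixed array or enum cache holding the keys
//   sp + 12 : expected map, or a smi flag in the permanent slow case
//   sp + 16 : the enumerable object itself
// These five slots are dropped at break_label below.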
1086
1087 // Generate code for doing the condition check.
1088 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1089 __ bind(&loop);
1090 // Load the current count to r0, load the length to r1.
1091 __ Ldrd(r0, r1, MemOperand(sp, 0 * kPointerSize));
1092 __ cmp(r0, r1); // Compare to the array length.
1093 __ b(hs, loop_statement.break_label());
1094
1095 // Get the current entry of the array into register r3.
1096 __ ldr(r2, MemOperand(sp, 2 * kPointerSize));
1097 __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1098 __ ldr(r3, MemOperand(r2, r0, LSL, kPointerSizeLog2 - kSmiTagSize));
1099
1100 // Get the expected map from the stack (or, in the permanent slow
1101 // case, a smi) into register r2.
1102 __ ldr(r2, MemOperand(sp, 3 * kPointerSize));
1103
1104 // Check if the expected map still matches that of the enumerable.
1105 // If not, we may have to filter the key.
1106 Label update_each;
1107 __ ldr(r1, MemOperand(sp, 4 * kPointerSize));
1108 __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
1109 __ cmp(r4, Operand(r2));
1110 __ b(eq, &update_each);
1111
1112 // For proxies, no filtering is done.
1113 // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
1114 __ cmp(r2, Operand(Smi::FromInt(0)));
1115 __ b(eq, &update_each);
1116
1117 // Convert the entry to a string or (smi) 0 if it isn't a property
1118 // any more. If the property has been removed while iterating, we
1119 // just skip it.
1120 __ push(r1); // Enumerable.
1121 __ push(r3); // Current entry.
1122 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1123 __ mov(r3, Operand(r0), SetCC);
1124 __ b(eq, loop_statement.continue_label());
1125
1126 // Update the 'each' property or variable from the possibly filtered
1127 // entry in register r3.
1128 __ bind(&update_each);
1129 __ mov(result_register(), r3);
1130 // Perform the assignment as if via '='.
1131 { EffectContext context(this);
1132 EmitAssignment(stmt->each());
1133 }
1134
1135 // Generate code for the body of the loop.
1136 Visit(stmt->body());
1137
1138 // Generate code for going to the next element by incrementing
1139 // the index (smi) stored on top of the stack.
1140 __ bind(loop_statement.continue_label());
1141 __ pop(r0);
1142 __ add(r0, r0, Operand(Smi::FromInt(1)));
1143 __ push(r0);
1144
1145 EmitStackCheck(stmt, &loop);
1146 __ b(&loop);
1147
1148 // Remove the pointers stored on the stack.
1149 __ bind(loop_statement.break_label());
1150 __ Drop(5);
1151
1152 // Exit and decrement the loop depth.
1153 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1154 __ bind(&exit);
1155 decrement_loop_depth();
1156 }
1157
1158
1159 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1160 bool pretenure) {
1161 // Use the fast case closure allocation code that allocates in new
1162 // space for nested functions that don't need literals cloning. If
1163 // we're running with the --always-opt or the --prepare-always-opt
1164 // flag, we need to use the runtime function so that the new function
1165 // we are creating here gets a chance to have its code optimized and
1166 // doesn't just get a copy of the existing unoptimized code.
1167 if (!FLAG_always_opt &&
1168 !FLAG_prepare_always_opt &&
1169 !pretenure &&
1170 scope()->is_function_scope() &&
1171 info->num_literals() == 0) {
1172 FastNewClosureStub stub(info->language_mode());
1173 __ mov(r0, Operand(info));
1174 __ push(r0);
1175 __ CallStub(&stub);
1176 } else {
1177 __ mov(r0, Operand(info));
1178 __ LoadRoot(r1, pretenure ? Heap::kTrueValueRootIndex
1179 : Heap::kFalseValueRootIndex);
1180 __ Push(cp, r0, r1);
1181 __ CallRuntime(Runtime::kNewClosure, 3);
1182 }
1183 context()->Plug(r0);
1184 }
1185
1186
1187 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1188 Comment cmnt(masm_, "[ VariableProxy");
1189 EmitVariableLoad(expr);
1190 }
1191
1192
1193 void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
1194 TypeofState typeof_state,
1195 Label* slow) {
1196 Register current = cp;
1197 Register next = r1;
1198 Register temp = r2;
1199
1200 Scope* s = scope();
1201 while (s != NULL) {
1202 if (s->num_heap_slots() > 0) {
1203 if (s->calls_non_strict_eval()) {
1204 // Check that extension is NULL.
1205 __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
1206 __ tst(temp, temp);
1207 __ b(ne, slow);
1208 }
1209 // Load next context in chain.
1210 __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
1211 // Walk the rest of the chain without clobbering cp.
1212 current = next;
1213 }
1214 // If no outer scope calls eval, we do not need to check more
1215 // context extensions.
1216 if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
1217 s = s->outer_scope();
1218 }
1219
1220 if (s->is_eval_scope()) {
1221 Label loop, fast;
1222 if (!current.is(next)) {
1223 __ Move(next, current);
1224 }
1225 __ bind(&loop);
1226 // Terminate at global context.
1227 __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
1228 __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
1229 __ cmp(temp, ip);
1230 __ b(eq, &fast);
1231 // Check that extension is NULL.
1232 __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
1233 __ tst(temp, temp);
1234 __ b(ne, slow);
1235 // Load next context in chain.
1236 __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
1237 __ b(&loop);
1238 __ bind(&fast);
1239 }
1240
1241 __ ldr(r0, GlobalObjectOperand());
1242 __ mov(r2, Operand(var->name()));
1243 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1244 ? RelocInfo::CODE_TARGET
1245 : RelocInfo::CODE_TARGET_CONTEXT;
1246 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1247 CallIC(ic, mode);
1248 }
1249
1250
1251 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
1252 Label* slow) {
1253 ASSERT(var->IsContextSlot());
1254 Register context = cp;
1255 Register next = r3;
1256 Register temp = r4;
1257
1258 for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
1259 if (s->num_heap_slots() > 0) {
1260 if (s->calls_non_strict_eval()) {
1261 // Check that extension is NULL.
1262 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1263 __ tst(temp, temp);
1264 __ b(ne, slow);
1265 }
1266 __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
1267 // Walk the rest of the chain without clobbering cp.
1268 context = next;
1269 }
1270 }
1271 // Check that last extension is NULL.
1272 __ ldr(temp, ContextOperand(context, Context::EXTENSION_INDEX));
1273 __ tst(temp, temp);
1274 __ b(ne, slow);
1275
1276 // This function is used only for loads, not stores, so it's safe to
1277 // return a cp-based operand (the write barrier cannot be allowed to
1278 // destroy the cp register).
1279 return ContextOperand(context, var->index());
1280 }
1281
1282
1283 void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
1284 TypeofState typeof_state,
1285 Label* slow,
1286 Label* done) {
1287 // Generate fast-case code for variables that might be shadowed by
1288 // eval-introduced variables. Eval is used a lot without
1289 // introducing variables. In those cases, we do not want to
1290 // perform a runtime call for all variables in the scope
1291 // containing the eval.
1292 if (var->mode() == DYNAMIC_GLOBAL) {
1293 EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
1294 __ jmp(done);
1295 } else if (var->mode() == DYNAMIC_LOCAL) {
1296 Variable* local = var->local_if_not_shadowed();
1297 __ ldr(r0, ContextSlotOperandCheckExtensions(local, slow));
1298 if (local->mode() == CONST ||
1299 local->mode() == CONST_HARMONY ||
1300 local->mode() == LET) {
1301 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1302 if (local->mode() == CONST) {
1303 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1304 } else { // LET || CONST_HARMONY
1305 __ b(ne, done);
1306 __ mov(r0, Operand(var->name()));
1307 __ push(r0);
1308 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1309 }
1310 }
1311 __ jmp(done);
1312 }
1313 }
1314
1315
1316 void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
1317 // Record position before possible IC call.
1318 SetSourcePosition(proxy->position());
1319 Variable* var = proxy->var();
1320
1321 // Three cases: global variables, lookup variables, and all other types of
1322 // variables.
1323 switch (var->location()) {
1324 case Variable::UNALLOCATED: {
1325 Comment cmnt(masm_, "Global variable");
1326 // Use inline caching. Variable name is passed in r2 and the global
1327 // object (receiver) in r0.
1328 __ ldr(r0, GlobalObjectOperand());
1329 __ mov(r2, Operand(var->name()));
1330 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1331 CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1332 context()->Plug(r0);
1333 break;
1334 }
1335
1336 case Variable::PARAMETER:
1337 case Variable::LOCAL:
1338 case Variable::CONTEXT: {
1339 Comment cmnt(masm_, var->IsContextSlot()
1340 ? "Context variable"
1341 : "Stack variable");
1342 if (var->binding_needs_init()) {
1343 // var->scope() may be NULL when the proxy is located in eval code and
1344 // refers to a potential outside binding. Currently those bindings are
1345 // always looked up dynamically, i.e. in that case the invariant
1346 // var->location() == LOOKUP
1347 // always holds.
1348 ASSERT(var->scope() != NULL);
1349
1350 // Check if the binding really needs an initialization check. The check
1351 // can be skipped in the following situation: we have a LET or CONST
1352 // binding in harmony mode, both the Variable and the VariableProxy have
1353 // the same declaration scope (i.e. they are both in global code, in the
1354 // same function or in the same eval code) and the VariableProxy is in
1355 // the source physically located after the initializer of the variable.
1356 //
1357 // We cannot skip any initialization checks for CONST in non-harmony
1358 // mode because const variables may be declared but never initialized:
1359 // if (false) { const x; }; var y = x;
1360 //
1361 // The condition on the declaration scopes is a conservative check for
1362 // nested functions that access a binding and are called before the
1363 // binding is initialized:
1364 // function() { f(); let x = 1; function f() { x = 2; } }
1365 //
1366 bool skip_init_check;
1367 if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
1368 skip_init_check = false;
1369 } else {
1370 // Check that we always have a valid source position.
1371 ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
1372 ASSERT(proxy->position() != RelocInfo::kNoPosition);
1373 skip_init_check = var->mode() != CONST &&
1374 var->initializer_position() < proxy->position();
1375 }
1376
1377 if (!skip_init_check) {
1378 // Let and const need a read barrier.
1379 GetVar(r0, var);
1380 __ CompareRoot(r0, Heap::kTheHoleValueRootIndex);
1381 if (var->mode() == LET || var->mode() == CONST_HARMONY) {
1382 // Throw a reference error when using an uninitialized let/const
1383 // binding in harmony mode.
1384 Label done;
1385 __ b(ne, &done);
1386 __ mov(r0, Operand(var->name()));
1387 __ push(r0);
1388 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1389 __ bind(&done);
1390 } else {
1391 // Uninitialized const bindings outside of harmony mode are unholed.
1392 ASSERT(var->mode() == CONST);
1393 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex, eq);
1394 }
1395 context()->Plug(r0);
1396 break;
1397 }
1398 }
1399 context()->Plug(var);
1400 break;
1401 }
1402
1403 case Variable::LOOKUP: {
1404 Label done, slow;
1405 // Generate code for loading from variables potentially shadowed
1406 // by eval-introduced variables.
1407 EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
1408 __ bind(&slow);
1409 Comment cmnt(masm_, "Lookup variable");
1410 __ mov(r1, Operand(var->name()));
1411 __ Push(cp, r1); // Context and name.
1412 __ CallRuntime(Runtime::kLoadContextSlot, 2);
1413 __ bind(&done);
1414 context()->Plug(r0);
1415 }
1416 }
1417 }
1418
1419
1420 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1421 Comment cmnt(masm_, "[ RegExpLiteral");
1422 Label materialized;
1423 // Registers will be used as follows:
1424 // r5 = materialized value (RegExp literal)
1425 // r4 = JS function, literals array
1426 // r3 = literal index
1427 // r2 = RegExp pattern
1428 // r1 = RegExp flags
1429 // r0 = RegExp literal clone
1430 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1431 __ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
1432 int literal_offset =
1433 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1434 __ ldr(r5, FieldMemOperand(r4, literal_offset));
1435 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1436 __ cmp(r5, ip);
1437 __ b(ne, &materialized);
1438
1439 // Create regexp literal using runtime function.
1440 // Result will be in r0.
1441 __ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
1442 __ mov(r2, Operand(expr->pattern()));
1443 __ mov(r1, Operand(expr->flags()));
1444 __ Push(r4, r3, r2, r1);
1445 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1446 __ mov(r5, r0);
1447
1448 __ bind(&materialized);
1449 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1450 Label allocated, runtime_allocate;
1451 __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
1452 __ jmp(&allocated);
1453
1454 __ bind(&runtime_allocate);
1455 __ push(r5);
1456 __ mov(r0, Operand(Smi::FromInt(size)));
1457 __ push(r0);
1458 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1459 __ pop(r5);
1460
1461 __ bind(&allocated);
1462 // After this, registers are used as follows:
1463 // r0: Newly allocated regexp.
1464 // r5: Materialized regexp.
1465 // r2: temp.
1466 __ CopyFields(r0, r5, r2.bit(), size / kPointerSize);
1467 context()->Plug(r0);
1468 }
1469
1470
1471 void FullCodeGenerator::EmitAccessor(Expression* expression) {
1472 if (expression == NULL) {
1473 __ LoadRoot(r1, Heap::kNullValueRootIndex);
1474 __ push(r1);
1475 } else {
1476 VisitForStackValue(expression);
1477 }
1478 }
1479
1480
1481 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1482 Comment cmnt(masm_, "[ ObjectLiteral");
1483 Handle<FixedArray> constant_properties = expr->constant_properties();
1484 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1485 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1486 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1487 __ mov(r1, Operand(constant_properties));
1488 int flags = expr->fast_elements()
1489 ? ObjectLiteral::kFastElements
1490 : ObjectLiteral::kNoFlags;
1491 flags |= expr->has_function()
1492 ? ObjectLiteral::kHasFunction
1493 : ObjectLiteral::kNoFlags;
1494 __ mov(r0, Operand(Smi::FromInt(flags)));
1495 __ Push(r3, r2, r1, r0);
1496 int properties_count = constant_properties->length() / 2;
1497 if (expr->depth() > 1) {
1498 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1499 } else if (flags != ObjectLiteral::kFastElements ||
1500 properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
1501 __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
1502 } else {
1503 FastCloneShallowObjectStub stub(properties_count);
1504 __ CallStub(&stub);
1505 }
1506
1507 // If result_saved is true the result is on top of the stack. If
1508 // result_saved is false the result is in r0.
1509 bool result_saved = false;
1510
1511 // Mark all computed expressions that are bound to a key that
1512 // is shadowed by a later occurrence of the same key. For the
1513 // marked expressions, no store code is emitted.
1514 expr->CalculateEmitStore();
1515
1516 AccessorTable accessor_table(isolate()->zone());
1517 for (int i = 0; i < expr->properties()->length(); i++) {
1518 ObjectLiteral::Property* property = expr->properties()->at(i);
1519 if (property->IsCompileTimeValue()) continue;
1520
1521 Literal* key = property->key();
1522 Expression* value = property->value();
1523 if (!result_saved) {
1524 __ push(r0); // Save result on stack
1525 result_saved = true;
1526 }
1527 switch (property->kind()) {
1528 case ObjectLiteral::Property::CONSTANT:
1529 UNREACHABLE();
1530 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1531 ASSERT(!CompileTimeValue::IsCompileTimeValue(property->value()));
1532 // Fall through.
1533 case ObjectLiteral::Property::COMPUTED:
1534 if (key->handle()->IsSymbol()) {
1535 if (property->emit_store()) {
1536 VisitForAccumulatorValue(value);
1537 __ mov(r2, Operand(key->handle()));
1538 __ ldr(r1, MemOperand(sp));
1539 Handle<Code> ic = is_classic_mode()
1540 ? isolate()->builtins()->StoreIC_Initialize()
1541 : isolate()->builtins()->StoreIC_Initialize_Strict();
1542 CallIC(ic, RelocInfo::CODE_TARGET, key->id());
1543 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1544 } else {
1545 VisitForEffect(value);
1546 }
1547 break;
1548 }
1549 // Fall through.
1550 case ObjectLiteral::Property::PROTOTYPE:
1551 // Duplicate receiver on stack.
1552 __ ldr(r0, MemOperand(sp));
1553 __ push(r0);
1554 VisitForStackValue(key);
1555 VisitForStackValue(value);
1556 if (property->emit_store()) {
1557 __ mov(r0, Operand(Smi::FromInt(NONE))); // PropertyAttributes
1558 __ push(r0);
1559 __ CallRuntime(Runtime::kSetProperty, 4);
1560 } else {
1561 __ Drop(3);
1562 }
1563 break;
1564 case ObjectLiteral::Property::GETTER:
1565 accessor_table.lookup(key)->second->getter = value;
1566 break;
1567 case ObjectLiteral::Property::SETTER:
1568 accessor_table.lookup(key)->second->setter = value;
1569 break;
1570 }
1571 }
1572
1573 // Emit code to define accessors, using only a single call to the runtime for
1574 // each pair of corresponding getters and setters.
1575 for (AccessorTable::Iterator it = accessor_table.begin();
1576 it != accessor_table.end();
1577 ++it) {
1578 __ ldr(r0, MemOperand(sp)); // Duplicate receiver.
1579 __ push(r0);
1580 VisitForStackValue(it->first);
1581 EmitAccessor(it->second->getter);
1582 EmitAccessor(it->second->setter);
1583 __ mov(r0, Operand(Smi::FromInt(NONE)));
1584 __ push(r0);
1585 __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
1586 }
1587
1588 if (expr->has_function()) {
1589 ASSERT(result_saved);
1590 __ ldr(r0, MemOperand(sp));
1591 __ push(r0);
1592 __ CallRuntime(Runtime::kToFastProperties, 1);
1593 }
1594
1595 if (result_saved) {
1596 context()->PlugTOS();
1597 } else {
1598 context()->Plug(r0);
1599 }
1600 }
1601
1602
1603 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1604 Comment cmnt(masm_, "[ ArrayLiteral");
1605
1606 ZoneList<Expression*>* subexprs = expr->values();
1607 int length = subexprs->length();
1608 Handle<FixedArray> constant_elements = expr->constant_elements();
1609 ASSERT_EQ(2, constant_elements->length());
1610 ElementsKind constant_elements_kind =
1611 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1612 bool has_fast_elements = constant_elements_kind == FAST_ELEMENTS;
1613 Handle<FixedArrayBase> constant_elements_values(
1614 FixedArrayBase::cast(constant_elements->get(1)));
1615
1616 __ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1617 __ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
1618 __ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
1619 __ mov(r1, Operand(constant_elements));
1620 __ Push(r3, r2, r1);
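  // Choose the creation path: copy-on-write boilerplate elements can use the
  // COW clone stub, deep or oversized literals go to the runtime, and the
  // remaining shallow literals use the regular clone stub.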
1621 if (has_fast_elements && constant_elements_values->map() ==
1622 isolate()->heap()->fixed_cow_array_map()) {
1623 FastCloneShallowArrayStub stub(
1624 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
1625 __ CallStub(&stub);
1626 __ IncrementCounter(
1627 isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
1628 } else if (expr->depth() > 1) {
1629 __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1630 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1631 __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1632 } else {
1633 ASSERT(constant_elements_kind == FAST_ELEMENTS ||
1634 constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
1635 FLAG_smi_only_arrays);
1636 FastCloneShallowArrayStub::Mode mode = has_fast_elements
1637 ? FastCloneShallowArrayStub::CLONE_ELEMENTS
1638 : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
1639 FastCloneShallowArrayStub stub(mode, length);
1640 __ CallStub(&stub);
1641 }
1642
1643 bool result_saved = false; // Is the result saved to the stack?
1644
1645 // Emit code to evaluate all the non-constant subexpressions and to store
1646 // them into the newly cloned array.
1647 for (int i = 0; i < length; i++) {
1648 Expression* subexpr = subexprs->at(i);
1649 // If the subexpression is a literal or a simple materialized literal it
1650 // is already set in the cloned array.
1651 if (subexpr->AsLiteral() != NULL ||
1652 CompileTimeValue::IsCompileTimeValue(subexpr)) {
1653 continue;
1654 }
1655
1656 if (!result_saved) {
1657 __ push(r0);
1658 result_saved = true;
1659 }
1660 VisitForAccumulatorValue(subexpr);
1661
1662 if (constant_elements_kind == FAST_ELEMENTS) {
1663 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1664 __ ldr(r6, MemOperand(sp)); // Copy of array literal.
1665 __ ldr(r1, FieldMemOperand(r6, JSObject::kElementsOffset));
1666 __ str(result_register(), FieldMemOperand(r1, offset));
1667 // Update the write barrier for the array store.
1668 __ RecordWriteField(r1, offset, result_register(), r2,
1669 kLRHasBeenSaved, kDontSaveFPRegs,
1670 EMIT_REMEMBERED_SET, INLINE_SMI_CHECK);
1671 } else {
1672 __ ldr(r1, MemOperand(sp)); // Copy of array literal.
1673 __ ldr(r2, FieldMemOperand(r1, JSObject::kMapOffset));
1674 __ mov(r3, Operand(Smi::FromInt(i)));
1675 __ mov(r4, Operand(Smi::FromInt(expr->literal_index())));
1676 StoreArrayLiteralElementStub stub;
1677 __ CallStub(&stub);
1678 }
1679
1680 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1681 }
1682
1683 if (result_saved) {
1684 context()->PlugTOS();
1685 } else {
1686 context()->Plug(r0);
1687 }
1688 }
1689
1690
1691 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1692 Comment cmnt(masm_, "[ Assignment");
1693 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1694 // on the left-hand side.
1695 if (!expr->target()->IsValidLeftHandSide()) {
1696 VisitForEffect(expr->target());
1697 return;
1698 }
1699
1700 // Left-hand side can only be a property, a global or a (parameter or local)
1701 // slot.
1702 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1703 LhsKind assign_type = VARIABLE;
1704 Property* property = expr->target()->AsProperty();
1705 if (property != NULL) {
1706 assign_type = (property->key()->IsPropertyName())
1707 ? NAMED_PROPERTY
1708 : KEYED_PROPERTY;
1709 }
1710
1711 // Evaluate LHS expression.
1712 switch (assign_type) {
1713 case VARIABLE:
1714 // Nothing to do here.
1715 break;
1716 case NAMED_PROPERTY:
1717 if (expr->is_compound()) {
1718 // We need the receiver both on the stack and in the accumulator.
1719 VisitForAccumulatorValue(property->obj());
1720 __ push(result_register());
1721 } else {
1722 VisitForStackValue(property->obj());
1723 }
1724 break;
1725 case KEYED_PROPERTY:
1726 if (expr->is_compound()) {
1727 VisitForStackValue(property->obj());
1728 VisitForAccumulatorValue(property->key());
1729 __ ldr(r1, MemOperand(sp, 0));
1730 __ push(r0);
1731 } else {
1732 VisitForStackValue(property->obj());
1733 VisitForStackValue(property->key());
1734 }
1735 break;
1736 }
1737
1738 // For compound assignments we need another deoptimization point after the
1739 // variable/property load.
1740 if (expr->is_compound()) {
1741 { AccumulatorValueContext context(this);
1742 switch (assign_type) {
1743 case VARIABLE:
1744 EmitVariableLoad(expr->target()->AsVariableProxy());
1745 PrepareForBailout(expr->target(), TOS_REG);
1746 break;
1747 case NAMED_PROPERTY:
1748 EmitNamedPropertyLoad(property);
1749 PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1750 break;
1751 case KEYED_PROPERTY:
1752 EmitKeyedPropertyLoad(property);
1753 PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1754 break;
1755 }
1756 }
1757
1758 Token::Value op = expr->binary_op();
1759 __ push(r0); // Left operand goes on the stack.
1760 VisitForAccumulatorValue(expr->value());
1761
1762 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1763 ? OVERWRITE_RIGHT
1764 : NO_OVERWRITE;
1765 SetSourcePosition(expr->position() + 1);
1766 AccumulatorValueContext context(this);
1767 if (ShouldInlineSmiCase(op)) {
1768 EmitInlineSmiBinaryOp(expr->binary_operation(),
1769 op,
1770 mode,
1771 expr->target(),
1772 expr->value());
1773 } else {
1774 EmitBinaryOp(expr->binary_operation(), op, mode);
1775 }
1776
1777 // Deoptimization point in case the binary operation may have side effects.
1778 PrepareForBailout(expr->binary_operation(), TOS_REG);
1779 } else {
1780 VisitForAccumulatorValue(expr->value());
1781 }
1782
1783 // Record source position before possible IC call.
1784 SetSourcePosition(expr->position());
1785
1786 // Store the value.
1787 switch (assign_type) {
1788 case VARIABLE:
1789 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1790 expr->op());
1791 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1792 context()->Plug(r0);
1793 break;
1794 case NAMED_PROPERTY:
1795 EmitNamedPropertyAssignment(expr);
1796 break;
1797 case KEYED_PROPERTY:
1798 EmitKeyedPropertyAssignment(expr);
1799 break;
1800 }
1801 }
1802
1803
1804 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1805 SetSourcePosition(prop->position());
1806 Literal* key = prop->key()->AsLiteral();
1807 __ mov(r2, Operand(key->handle()));
1808 // Call load IC. It has arguments receiver and property name in r0 and r2.
1809 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1810 CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
1811 }
1812
1813
1814 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1815 SetSourcePosition(prop->position());
1816 // Call keyed load IC. It has arguments key and receiver in r0 and r1.
1817 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1818 CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
1819 }
1820
1821
1822 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1823 Token::Value op,
1824 OverwriteMode mode,
1825 Expression* left_expr,
1826 Expression* right_expr) {
1827 Label done, smi_case, stub_call;
1828
1829 Register scratch1 = r2;
1830 Register scratch2 = r3;
1831
1832 // Get the arguments.
1833 Register left = r1;
1834 Register right = r0;
1835 __ pop(left);
1836
1837 // Perform combined smi check on both operands.
1838 __ orr(scratch1, left, Operand(right));
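  // Since kSmiTag == 0, or'ing the operands leaves the tag bit set iff at
  // least one of them is not a smi, so a single test covers both operands.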
1839 STATIC_ASSERT(kSmiTag == 0);
1840 JumpPatchSite patch_site(masm_);
1841 patch_site.EmitJumpIfSmi(scratch1, &smi_case);
1842
1843 __ bind(&stub_call);
1844 BinaryOpStub stub(op, mode);
1845 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1846 patch_site.EmitPatchInfo();
1847 __ jmp(&done);
1848
1849 __ bind(&smi_case);
1850 // Smi case. This code works the same way as the smi-smi case in the type
1851 // recording binary operation stub, see
1852 // BinaryOpStub::GenerateSmiSmiOperation for comments.
1853 switch (op) {
1854 case Token::SAR:
1855 __ b(&stub_call);
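      // Note: the unconditional branch above sends SAR (and, below, SHL and
      // SHR) straight to the stub, so the inline shift sequence that follows
      // is not currently executed.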
1856 __ GetLeastBitsFromSmi(scratch1, right, 5);
1857 __ mov(right, Operand(left, ASR, scratch1));
1858 __ bic(right, right, Operand(kSmiTagMask));
1859 break;
1860 case Token::SHL: {
1861 __ b(&stub_call);
1862 __ SmiUntag(scratch1, left);
1863 __ GetLeastBitsFromSmi(scratch2, right, 5);
1864 __ mov(scratch1, Operand(scratch1, LSL, scratch2));
1865 __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
1866 __ b(mi, &stub_call);
1867 __ SmiTag(right, scratch1);
1868 break;
1869 }
1870 case Token::SHR: {
1871 __ b(&stub_call);
1872 __ SmiUntag(scratch1, left);
1873 __ GetLeastBitsFromSmi(scratch2, right, 5);
1874 __ mov(scratch1, Operand(scratch1, LSR, scratch2));
1875 __ tst(scratch1, Operand(0xc0000000));
1876 __ b(ne, &stub_call);
1877 __ SmiTag(right, scratch1);
1878 break;
1879 }
1880 case Token::ADD:
1881 __ add(scratch1, left, Operand(right), SetCC);
1882 __ b(vs, &stub_call);
1883 __ mov(right, scratch1);
1884 break;
1885 case Token::SUB:
1886 __ sub(scratch1, left, Operand(right), SetCC);
1887 __ b(vs, &stub_call);
1888 __ mov(right, scratch1);
1889 break;
1890 case Token::MUL: {
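      // smull produces a 64-bit product; the result fits in a smi only if the
      // high word is the sign extension of the low word. A zero product also
      // checks the operand signs so that -0 falls back to the stub instead of
      // being returned as smi 0.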
1891 __ SmiUntag(ip, right);
1892 __ smull(scratch1, scratch2, left, ip);
1893 __ mov(ip, Operand(scratch1, ASR, 31));
1894 __ cmp(ip, Operand(scratch2));
1895 __ b(ne, &stub_call);
1896 __ cmp(scratch1, Operand(0));
1897 __ mov(right, Operand(scratch1), LeaveCC, ne);
1898 __ b(ne, &done);
1899 __ add(scratch2, right, Operand(left), SetCC);
1900 __ mov(right, Operand(Smi::FromInt(0)), LeaveCC, pl);
1901 __ b(mi, &stub_call);
1902 break;
1903 }
1904 case Token::BIT_OR:
1905 __ orr(right, left, Operand(right));
1906 break;
1907 case Token::BIT_AND:
1908 __ and_(right, left, Operand(right));
1909 break;
1910 case Token::BIT_XOR:
1911 __ eor(right, left, Operand(right));
1912 break;
1913 default:
1914 UNREACHABLE();
1915 }
1916
1917 __ bind(&done);
1918 context()->Plug(r0);
1919 }
1920
1921
1922 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
1923 Token::Value op,
1924 OverwriteMode mode) {
1925 __ pop(r1);
1926 BinaryOpStub stub(op, mode);
1927 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
1928 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1929 patch_site.EmitPatchInfo();
1930 context()->Plug(r0);
1931 }
1932
1933
1934 void FullCodeGenerator::EmitAssignment(Expression* expr) {
1935 // Invalid left-hand sides are rewritten to have a 'throw
1936 // ReferenceError' on the left-hand side.
1937 if (!expr->IsValidLeftHandSide()) {
1938 VisitForEffect(expr);
1939 return;
1940 }
1941
1942 // Left-hand side can only be a property, a global or a (parameter or local)
1943 // slot.
1944 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1945 LhsKind assign_type = VARIABLE;
1946 Property* prop = expr->AsProperty();
1947 if (prop != NULL) {
1948 assign_type = (prop->key()->IsPropertyName())
1949 ? NAMED_PROPERTY
1950 : KEYED_PROPERTY;
1951 }
1952
1953 switch (assign_type) {
1954 case VARIABLE: {
1955 Variable* var = expr->AsVariableProxy()->var();
1956 EffectContext context(this);
1957 EmitVariableAssignment(var, Token::ASSIGN);
1958 break;
1959 }
1960 case NAMED_PROPERTY: {
1961 __ push(r0); // Preserve value.
1962 VisitForAccumulatorValue(prop->obj());
1963 __ mov(r1, r0);
1964 __ pop(r0); // Restore value.
1965 __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
1966 Handle<Code> ic = is_classic_mode()
1967 ? isolate()->builtins()->StoreIC_Initialize()
1968 : isolate()->builtins()->StoreIC_Initialize_Strict();
1969 CallIC(ic);
1970 break;
1971 }
1972 case KEYED_PROPERTY: {
1973 __ push(r0); // Preserve value.
1974 VisitForStackValue(prop->obj());
1975 VisitForAccumulatorValue(prop->key());
1976 __ mov(r1, r0);
1977 __ pop(r2);
1978 __ pop(r0); // Restore value.
1979 Handle<Code> ic = is_classic_mode()
1980 ? isolate()->builtins()->KeyedStoreIC_Initialize()
1981 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
1982 CallIC(ic);
1983 break;
1984 }
1985 }
1986 context()->Plug(r0);
1987 }
1988
1989
1990 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1991 Token::Value op) {
1992 if (var->IsUnallocated()) {
1993 // Global var, const, or let.
1994 __ mov(r2, Operand(var->name()));
1995 __ ldr(r1, GlobalObjectOperand());
1996 Handle<Code> ic = is_classic_mode()
1997 ? isolate()->builtins()->StoreIC_Initialize()
1998 : isolate()->builtins()->StoreIC_Initialize_Strict();
1999 CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
2000
2001 } else if (op == Token::INIT_CONST) {
2002 // Const initializers need a write barrier.
2003 ASSERT(!var->IsParameter()); // No const parameters.
2004 if (var->IsStackLocal()) {
2005 Label skip;
2006 __ ldr(r1, StackOperand(var));
2007 __ CompareRoot(r1, Heap::kTheHoleValueRootIndex);
2008 __ b(ne, &skip);
2009 __ str(result_register(), StackOperand(var));
2010 __ bind(&skip);
2011 } else {
2012 ASSERT(var->IsContextSlot() || var->IsLookupSlot());
2013 // Like var declarations, const declarations are hoisted to function
2014 // scope. However, unlike var initializers, const initializers are
2015 // able to drill a hole to that function context, even from inside a
2016 // 'with' context. We thus bypass the normal static scope lookup for
2017 // var->IsContextSlot().
2018 __ push(r0);
2019 __ mov(r0, Operand(var->name()));
2020 __ Push(cp, r0); // Context and name.
2021 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
2022 }
2023
2024 } else if (var->mode() == LET && op != Token::INIT_LET) {
2025 // Non-initializing assignment to let variable needs a write barrier.
2026 if (var->IsLookupSlot()) {
2027 __ push(r0); // Value.
2028 __ mov(r1, Operand(var->name()));
2029 __ mov(r0, Operand(Smi::FromInt(language_mode())));
2030 __ Push(cp, r1, r0); // Context, name, strict mode.
2031 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2032 } else {
2033 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2034 Label assign;
2035 MemOperand location = VarOperand(var, r1);
2036 __ ldr(r3, location);
2037 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex);
2038 __ b(ne, &assign);
2039 __ mov(r3, Operand(var->name()));
2040 __ push(r3);
2041 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2042 // Perform the assignment.
2043 __ bind(&assign);
2044 __ str(result_register(), location);
2045 if (var->IsContextSlot()) {
2046 // RecordWrite may destroy all its register arguments.
2047 __ mov(r3, result_register());
2048 int offset = Context::SlotOffset(var->index());
2049 __ RecordWriteContextSlot(
2050 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2051 }
2052 }
2053
2054 } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
2055 // Assignment to var or initializing assignment to let/const
2056 // in harmony mode.
2057 if (var->IsStackAllocated() || var->IsContextSlot()) {
2058 MemOperand location = VarOperand(var, r1);
2059 if (FLAG_debug_code && op == Token::INIT_LET) {
2060 // Check for an uninitialized let binding.
2061 __ ldr(r2, location);
2062 __ CompareRoot(r2, Heap::kTheHoleValueRootIndex);
2063 __ Check(eq, "Let binding re-initialization.");
2064 }
2065 // Perform the assignment.
2066 __ str(r0, location);
2067 if (var->IsContextSlot()) {
2068 __ mov(r3, r0);
2069 int offset = Context::SlotOffset(var->index());
2070 __ RecordWriteContextSlot(
2071 r1, offset, r3, r2, kLRHasBeenSaved, kDontSaveFPRegs);
2072 }
2073 } else {
2074 ASSERT(var->IsLookupSlot());
2075 __ push(r0); // Value.
2076 __ mov(r1, Operand(var->name()));
2077 __ mov(r0, Operand(Smi::FromInt(language_mode())));
2078 __ Push(cp, r1, r0); // Context, name, strict mode.
2079 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2080 }
2081 }
2082 // Non-initializing assignments to consts are ignored.
2083 }
2084
2085
2086 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2087 // Assignment to a property, using a named store IC.
2088 Property* prop = expr->target()->AsProperty();
2089 ASSERT(prop != NULL);
2090 ASSERT(prop->key()->AsLiteral() != NULL);
2091
2092 // If the assignment starts a block of assignments to the same object,
2093 // change to slow case to avoid the quadratic behavior of repeatedly
2094 // adding fast properties.
2095 if (expr->starts_initialization_block()) {
2096 __ push(result_register());
2097 __ ldr(ip, MemOperand(sp, kPointerSize)); // Receiver is now under value.
2098 __ push(ip);
2099 __ CallRuntime(Runtime::kToSlowProperties, 1);
2100 __ pop(result_register());
2101 }
2102
2103 // Record source code position before IC call.
2104 SetSourcePosition(expr->position());
2105 __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
2106 // Load the receiver into r1. Leave a copy on the stack if needed for
2107 // turning the receiver into fast case.
2108 if (expr->ends_initialization_block()) {
2109 __ ldr(r1, MemOperand(sp));
2110 } else {
2111 __ pop(r1);
2112 }
2113
2114 Handle<Code> ic = is_classic_mode()
2115 ? isolate()->builtins()->StoreIC_Initialize()
2116 : isolate()->builtins()->StoreIC_Initialize_Strict();
2117 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2118
2119 // If the assignment ends an initialization block, revert to fast case.
2120 if (expr->ends_initialization_block()) {
2121 __ push(r0); // Result of assignment, saved even if not needed.
2122 // Receiver is under the result value.
2123 __ ldr(ip, MemOperand(sp, kPointerSize));
2124 __ push(ip);
2125 __ CallRuntime(Runtime::kToFastProperties, 1);
2126 __ pop(r0);
2127 __ Drop(1);
2128 }
2129 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2130 context()->Plug(r0);
2131 }
2132
2133
2134 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2135 // Assignment to a property, using a keyed store IC.
2136
2137 // If the assignment starts a block of assignments to the same object,
2138 // change to slow case to avoid the quadratic behavior of repeatedly
2139 // adding fast properties.
2140 if (expr->starts_initialization_block()) {
2141 __ push(result_register());
2142 // Receiver is now under the key and value.
2143 __ ldr(ip, MemOperand(sp, 2 * kPointerSize));
2144 __ push(ip);
2145 __ CallRuntime(Runtime::kToSlowProperties, 1);
2146 __ pop(result_register());
2147 }
2148
2149 // Record source code position before IC call.
2150 SetSourcePosition(expr->position());
2151 __ pop(r1); // Key.
2152 // Load the receiver into r2. Leave a copy on the stack if needed for
2153 // turning the receiver into fast case.
2154 if (expr->ends_initialization_block()) {
2155 __ ldr(r2, MemOperand(sp));
2156 } else {
2157 __ pop(r2);
2158 }
2159
2160 Handle<Code> ic = is_classic_mode()
2161 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2162 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2163 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2164
2165 // If the assignment ends an initialization block, revert to fast case.
2166 if (expr->ends_initialization_block()) {
2167 __ push(r0); // Result of assignment, saved even if not needed.
2168 // Receiver is under the result value.
2169 __ ldr(ip, MemOperand(sp, kPointerSize));
2170 __ push(ip);
2171 __ CallRuntime(Runtime::kToFastProperties, 1);
2172 __ pop(r0);
2173 __ Drop(1);
2174 }
2175 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2176 context()->Plug(r0);
2177 }
2178
2179
2180 void FullCodeGenerator::VisitProperty(Property* expr) {
2181 Comment cmnt(masm_, "[ Property");
2182 Expression* key = expr->key();
2183
2184 if (key->IsPropertyName()) {
2185 VisitForAccumulatorValue(expr->obj());
2186 EmitNamedPropertyLoad(expr);
2187 context()->Plug(r0);
2188 } else {
2189 VisitForStackValue(expr->obj());
2190 VisitForAccumulatorValue(expr->key());
2191 __ pop(r1);
2192 EmitKeyedPropertyLoad(expr);
2193 context()->Plug(r0);
2194 }
2195 }
2196
2197
2198 void FullCodeGenerator::CallIC(Handle<Code> code,
2199 RelocInfo::Mode rmode,
2200 unsigned ast_id) {
2201 ic_total_count_++;
2202 __ Call(code, rmode, ast_id);
2203 }
2204
2205 void FullCodeGenerator::EmitCallWithIC(Call* expr,
2206 Handle<Object> name,
2207 RelocInfo::Mode mode) {
2208 // Code common for calls using the IC.
2209 ZoneList<Expression*>* args = expr->arguments();
2210 int arg_count = args->length();
2211 { PreservePositionScope scope(masm()->positions_recorder());
2212 for (int i = 0; i < arg_count; i++) {
2213 VisitForStackValue(args->at(i));
2214 }
2215 __ mov(r2, Operand(name));
2216 }
2217 // Record source position for debugger.
2218 SetSourcePosition(expr->position());
2219 // Call the IC initialization code.
2220 Handle<Code> ic =
2221 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
2222 CallIC(ic, mode, expr->id());
2223 RecordJSReturnSite(expr);
2224 // Restore context register.
2225 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2226 context()->Plug(r0);
2227 }
2228
2229
2230 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2231 Expression* key) {
2232 // Load the key.
2233 VisitForAccumulatorValue(key);
2234
2235 // Swap the name of the function and the receiver on the stack to follow
2236 // the calling convention for call ICs.
2237 __ pop(r1);
2238 __ push(r0);
2239 __ push(r1);
2240
2241 // Code common for calls using the IC.
2242 ZoneList<Expression*>* args = expr->arguments();
2243 int arg_count = args->length();
2244 { PreservePositionScope scope(masm()->positions_recorder());
2245 for (int i = 0; i < arg_count; i++) {
2246 VisitForStackValue(args->at(i));
2247 }
2248 }
2249 // Record source position for debugger.
2250 SetSourcePosition(expr->position());
2251 // Call the IC initialization code.
2252 Handle<Code> ic =
2253 isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
2254 __ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key.
2255 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2256 RecordJSReturnSite(expr);
2257 // Restore context register.
2258 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2259 context()->DropAndPlug(1, r0); // Drop the key still on the stack.
2260 }
2261
2262
2263 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
2264 // Code common for calls using the call stub.
2265 ZoneList<Expression*>* args = expr->arguments();
2266 int arg_count = args->length();
2267 { PreservePositionScope scope(masm()->positions_recorder());
2268 for (int i = 0; i < arg_count; i++) {
2269 VisitForStackValue(args->at(i));
2270 }
2271 }
2272 // Record source position for debugger.
2273 SetSourcePosition(expr->position());
2274 CallFunctionStub stub(arg_count, flags);
2275 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2276 __ CallStub(&stub);
2277 RecordJSReturnSite(expr);
2278 // Restore context register.
2279 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2280 context()->DropAndPlug(1, r0);
2281 }
2282
2283
2284 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2285 // Push copy of the first argument or undefined if it doesn't exist.
2286 if (arg_count > 0) {
2287 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2288 } else {
2289 __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
2290 }
2291 __ push(r1);
2292
2293 // Push the receiver of the enclosing function.
2294 int receiver_offset = 2 + info_->scope()->num_parameters();
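  // The receiver slot sits above the saved fp/lr pair and the formal
  // parameters in the caller's frame (see StandardFrameConstants).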
2295 __ ldr(r1, MemOperand(fp, receiver_offset * kPointerSize));
2296 __ push(r1);
2297 // Push the language mode.
2298 __ mov(r1, Operand(Smi::FromInt(language_mode())));
2299 __ push(r1);
2300
2301 // Push the start position of the scope the call resides in.
2302 __ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
2303 __ push(r1);
2304
2305 // Do the runtime call.
2306 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2307 }
2308
2309
2310 void FullCodeGenerator::VisitCall(Call* expr) {
2311 #ifdef DEBUG
2312 // We want to verify that RecordJSReturnSite gets called on all paths
2313 // through this function. Avoid early returns.
2314 expr->return_is_recorded_ = false;
2315 #endif
2316
2317 Comment cmnt(masm_, "[ Call");
2318 Expression* callee = expr->expression();
2319 VariableProxy* proxy = callee->AsVariableProxy();
2320 Property* property = callee->AsProperty();
2321
2322 if (proxy != NULL && proxy->var()->is_possibly_eval()) {
2323 // In a call to eval, we first call %ResolvePossiblyDirectEval to
2324 // resolve the function we need to call and the receiver of the
2325 // call. Then we call the resolved function using the given
2326 // arguments.
2327 ZoneList<Expression*>* args = expr->arguments();
2328 int arg_count = args->length();
2329
2330 { PreservePositionScope pos_scope(masm()->positions_recorder());
2331 VisitForStackValue(callee);
2332 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
2333 __ push(r2); // Reserved receiver slot.
2334
2335 // Push the arguments.
2336 for (int i = 0; i < arg_count; i++) {
2337 VisitForStackValue(args->at(i));
2338 }
2339
2340 // Push a copy of the function (found below the arguments) and
2341 // resolve eval.
2342 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2343 __ push(r1);
2344 EmitResolvePossiblyDirectEval(arg_count);
2345
2346 // The runtime call returns a pair of values in r0 (function) and
2347 // r1 (receiver). Touch up the stack with the right values.
2348 __ str(r0, MemOperand(sp, (arg_count + 1) * kPointerSize));
2349 __ str(r1, MemOperand(sp, arg_count * kPointerSize));
2350 }
2351
2352 // Record source position for debugger.
2353 SetSourcePosition(expr->position());
2354 CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
2355 __ ldr(r1, MemOperand(sp, (arg_count + 1) * kPointerSize));
2356 __ CallStub(&stub);
2357 RecordJSReturnSite(expr);
2358 // Restore context register.
2359 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2360 context()->DropAndPlug(1, r0);
2361 } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
2362 // Push global object as receiver for the call IC.
2363 __ ldr(r0, GlobalObjectOperand());
2364 __ push(r0);
2365 EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
2366 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
2367 // Call to a lookup slot (dynamically introduced variable).
2368 Label slow, done;
2369
2370 { PreservePositionScope scope(masm()->positions_recorder());
2371 // Generate code for loading from variables potentially shadowed
2372 // by eval-introduced variables.
2373 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2374 }
2375
2376 __ bind(&slow);
2377 // Call the runtime to find the function to call (returned in r0)
2378 // and the object holding it (returned in r1).
2379 __ push(context_register());
2380 __ mov(r2, Operand(proxy->name()));
2381 __ push(r2);
2382 __ CallRuntime(Runtime::kLoadContextSlot, 2);
2383 __ Push(r0, r1); // Function, receiver.
2384
2385 // If fast case code has been generated, emit code to push the
2386 // function and receiver and have the slow path jump around this
2387 // code.
2388 if (done.is_linked()) {
2389 Label call;
2390 __ b(&call);
2391 __ bind(&done);
2392 // Push function.
2393 __ push(r0);
2394 // The receiver is implicitly the global receiver. Indicate this
2395 // by passing the hole to the call function stub.
2396 __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
2397 __ push(r1);
2398 __ bind(&call);
2399 }
2400
2401 // The receiver is either the global receiver or an object found
2402 // by LoadContextSlot. That object could be the hole if the
2403 // receiver is implicitly the global object.
2404 EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
2405 } else if (property != NULL) {
2406 { PreservePositionScope scope(masm()->positions_recorder());
2407 VisitForStackValue(property->obj());
2408 }
2409 if (property->key()->IsPropertyName()) {
2410 EmitCallWithIC(expr,
2411 property->key()->AsLiteral()->handle(),
2412 RelocInfo::CODE_TARGET);
2413 } else {
2414 EmitKeyedCallWithIC(expr, property->key());
2415 }
2416 } else {
2417 // Call to an arbitrary expression not handled specially above.
2418 { PreservePositionScope scope(masm()->positions_recorder());
2419 VisitForStackValue(callee);
2420 }
2421 // Load global receiver object.
2422 __ ldr(r1, GlobalObjectOperand());
2423 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
2424 __ push(r1);
2425 // Emit function call.
2426 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2427 }
2428
2429 #ifdef DEBUG
2430 // RecordJSReturnSite should have been called.
2431 ASSERT(expr->return_is_recorded_);
2432 #endif
2433 }
2434
2435
2436 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2437 Comment cmnt(masm_, "[ CallNew");
2438 // According to ECMA-262, section 11.2.2, page 44, the function
2439 // expression in new calls must be evaluated before the
2440 // arguments.
2441
2442 // Push constructor on the stack. If it's not a function it's used as
2443 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2444 // ignored.
2445 VisitForStackValue(expr->expression());
2446
2447 // Push the arguments ("left-to-right") on the stack.
2448 ZoneList<Expression*>* args = expr->arguments();
2449 int arg_count = args->length();
2450 for (int i = 0; i < arg_count; i++) {
2451 VisitForStackValue(args->at(i));
2452 }
2453
2454 // Call the construct call builtin that handles allocation and
2455 // constructor invocation.
2456 SetSourcePosition(expr->position());
2457
2458 // Load function and argument count into r1 and r0.
2459 __ mov(r0, Operand(arg_count));
2460 __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
2461
2462 // Record call targets in unoptimized code, but not in the snapshot.
2463 CallFunctionFlags flags;
2464 if (!Serializer::enabled()) {
2465 flags = RECORD_CALL_TARGET;
2466 Handle<Object> uninitialized =
2467 TypeFeedbackCells::UninitializedSentinel(isolate());
2468 Handle<JSGlobalPropertyCell> cell =
2469 isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2470 RecordTypeFeedbackCell(expr->id(), cell);
2471 __ mov(r2, Operand(cell));
2472 } else {
2473 flags = NO_CALL_FUNCTION_FLAGS;
2474 }
2475
2476 CallConstructStub stub(flags);
2477 __ Call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2478 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2479 context()->Plug(r0);
2480 }
2481
2482
2483 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2484 ZoneList<Expression*>* args = expr->arguments();
2485 ASSERT(args->length() == 1);
2486
2487 VisitForAccumulatorValue(args->at(0));
2488
2489 Label materialize_true, materialize_false;
2490 Label* if_true = NULL;
2491 Label* if_false = NULL;
2492 Label* fall_through = NULL;
2493 context()->PrepareTest(&materialize_true, &materialize_false,
2494 &if_true, &if_false, &fall_through);
2495
2496 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2497 __ tst(r0, Operand(kSmiTagMask));
2498 Split(eq, if_true, if_false, fall_through);
2499
2500 context()->Plug(if_true, if_false);
2501 }
2502
2503
2504 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2505 ZoneList<Expression*>* args = expr->arguments();
2506 ASSERT(args->length() == 1);
2507
2508 VisitForAccumulatorValue(args->at(0));
2509
2510 Label materialize_true, materialize_false;
2511 Label* if_true = NULL;
2512 Label* if_false = NULL;
2513 Label* fall_through = NULL;
2514 context()->PrepareTest(&materialize_true, &materialize_false,
2515 &if_true, &if_false, &fall_through);
2516
2517 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
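  // A non-negative smi has both the smi tag bit and the sign bit clear, so a
  // single tst checks both conditions.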
2518 __ tst(r0, Operand(kSmiTagMask | 0x80000000));
2519 Split(eq, if_true, if_false, fall_through);
2520
2521 context()->Plug(if_true, if_false);
2522 }
2523
2524
2525 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2526 ZoneList<Expression*>* args = expr->arguments();
2527 ASSERT(args->length() == 1);
2528
2529 VisitForAccumulatorValue(args->at(0));
2530
2531 Label materialize_true, materialize_false;
2532 Label* if_true = NULL;
2533 Label* if_false = NULL;
2534 Label* fall_through = NULL;
2535 context()->PrepareTest(&materialize_true, &materialize_false,
2536 &if_true, &if_false, &fall_through);
2537
2538 __ JumpIfSmi(r0, if_false);
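  // null is explicitly treated as an object by this intrinsic.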
2539 __ LoadRoot(ip, Heap::kNullValueRootIndex);
2540 __ cmp(r0, ip);
2541 __ b(eq, if_true);
2542 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
2543 // Undetectable objects behave like undefined when tested with typeof.
2544 __ ldrb(r1, FieldMemOperand(r2, Map::kBitFieldOffset));
2545 __ tst(r1, Operand(1 << Map::kIsUndetectable));
2546 __ b(ne, if_false);
2547 __ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
2548 __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
2549 __ b(lt, if_false);
2550 __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
2551 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2552 Split(le, if_true, if_false, fall_through);
2553
2554 context()->Plug(if_true, if_false);
2555 }
2556
2557
2558 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2559 ZoneList<Expression*>* args = expr->arguments();
2560 ASSERT(args->length() == 1);
2561
2562 VisitForAccumulatorValue(args->at(0));
2563
2564 Label materialize_true, materialize_false;
2565 Label* if_true = NULL;
2566 Label* if_false = NULL;
2567 Label* fall_through = NULL;
2568 context()->PrepareTest(&materialize_true, &materialize_false,
2569 &if_true, &if_false, &fall_through);
2570
2571 __ JumpIfSmi(r0, if_false);
2572 __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
2573 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2574 Split(ge, if_true, if_false, fall_through);
2575
2576 context()->Plug(if_true, if_false);
2577 }
2578
2579
2580 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2581 ZoneList<Expression*>* args = expr->arguments();
2582 ASSERT(args->length() == 1);
2583
2584 VisitForAccumulatorValue(args->at(0));
2585
2586 Label materialize_true, materialize_false;
2587 Label* if_true = NULL;
2588 Label* if_false = NULL;
2589 Label* fall_through = NULL;
2590 context()->PrepareTest(&materialize_true, &materialize_false,
2591 &if_true, &if_false, &fall_through);
2592
2593 __ JumpIfSmi(r0, if_false);
2594 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2595 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
2596 __ tst(r1, Operand(1 << Map::kIsUndetectable));
2597 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2598 Split(ne, if_true, if_false, fall_through);
2599
2600 context()->Plug(if_true, if_false);
2601 }
2602
2603
2604 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2605 CallRuntime* expr) {
2606 ZoneList<Expression*>* args = expr->arguments();
2607 ASSERT(args->length() == 1);
2608
2609 VisitForAccumulatorValue(args->at(0));
2610
2611 Label materialize_true, materialize_false;
2612 Label* if_true = NULL;
2613 Label* if_false = NULL;
2614 Label* fall_through = NULL;
2615 context()->PrepareTest(&materialize_true, &materialize_false,
2616 &if_true, &if_false, &fall_through);
2617
2618 if (FLAG_debug_code) __ AbortIfSmi(r0);
2619
2620 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
2621 __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
2622 __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
2623 __ b(ne, if_true);
2624
2625 // Check for fast case object. Generate false result for slow case object.
2626 __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
2627 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
2628 __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
2629 __ cmp(r2, ip);
2630 __ b(eq, if_false);
2631
2632 // Look for valueOf symbol in the descriptor array, and indicate false if
2633 // found. The type is not checked, so if it is a transition it is a false
2634 // negative.
2635 __ LoadInstanceDescriptors(r1, r4);
2636 __ ldr(r3, FieldMemOperand(r4, FixedArray::kLengthOffset));
2637 // r4: descriptor array
2638 // r3: length of descriptor array
2639 // Calculate the end of the descriptor array.
2640 STATIC_ASSERT(kSmiTag == 0);
2641 STATIC_ASSERT(kSmiTagSize == 1);
2642 STATIC_ASSERT(kPointerSize == 4);
2643 __ add(r2, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2644 __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
2645
2646 // Calculate location of the first key name.
2647 __ add(r4,
2648 r4,
2649 Operand(FixedArray::kHeaderSize - kHeapObjectTag +
2650 DescriptorArray::kFirstIndex * kPointerSize));
2651 // Loop through all the keys in the descriptor array. If one of these is the
2652 // symbol valueOf the result is false.
2653 Label entry, loop;
2654 // The use of ip to store the valueOf symbol assumes that it is not otherwise
2655 // used in the loop below.
2656 __ mov(ip, Operand(FACTORY->value_of_symbol()));
2657 __ jmp(&entry);
2658 __ bind(&loop);
2659 __ ldr(r3, MemOperand(r4, 0));
2660 __ cmp(r3, ip);
2661 __ b(eq, if_false);
2662 __ add(r4, r4, Operand(kPointerSize));
2663 __ bind(&entry);
2664 __ cmp(r4, Operand(r2));
2665 __ b(ne, &loop);
2666
2667 // If a valueOf property is not found on the object, check that its
2668 // prototype is the unmodified String prototype. If not, the result is false.
2669 __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
2670 __ JumpIfSmi(r2, if_false);
2671 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
2672 __ ldr(r3, ContextOperand(cp, Context::GLOBAL_INDEX));
2673 __ ldr(r3, FieldMemOperand(r3, GlobalObject::kGlobalContextOffset));
2674 __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2675 __ cmp(r2, r3);
2676 __ b(ne, if_false);
2677
2678 // Set the bit in the map to indicate that it has been checked safe for
2679 // default valueOf, and set the result to true.
2680 __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
2681 __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
2682 __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
2683 __ jmp(if_true);
2684
2685 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2686 context()->Plug(if_true, if_false);
2687 }
2688
2689
2690 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2691 ZoneList<Expression*>* args = expr->arguments();
2692 ASSERT(args->length() == 1);
2693
2694 VisitForAccumulatorValue(args->at(0));
2695
2696 Label materialize_true, materialize_false;
2697 Label* if_true = NULL;
2698 Label* if_false = NULL;
2699 Label* fall_through = NULL;
2700 context()->PrepareTest(&materialize_true, &materialize_false,
2701 &if_true, &if_false, &fall_through);
2702
2703 __ JumpIfSmi(r0, if_false);
2704 __ CompareObjectType(r0, r1, r2, JS_FUNCTION_TYPE);
2705 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2706 Split(eq, if_true, if_false, fall_through);
2707
2708 context()->Plug(if_true, if_false);
2709 }
2710
2711
2712 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2713 ZoneList<Expression*>* args = expr->arguments();
2714 ASSERT(args->length() == 1);
2715
2716 VisitForAccumulatorValue(args->at(0));
2717
2718 Label materialize_true, materialize_false;
2719 Label* if_true = NULL;
2720 Label* if_false = NULL;
2721 Label* fall_through = NULL;
2722 context()->PrepareTest(&materialize_true, &materialize_false,
2723 &if_true, &if_false, &fall_through);
2724
2725 __ JumpIfSmi(r0, if_false);
2726 __ CompareObjectType(r0, r1, r1, JS_ARRAY_TYPE);
2727 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2728 Split(eq, if_true, if_false, fall_through);
2729
2730 context()->Plug(if_true, if_false);
2731 }
2732
2733
2734 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2735 ZoneList<Expression*>* args = expr->arguments();
2736 ASSERT(args->length() == 1);
2737
2738 VisitForAccumulatorValue(args->at(0));
2739
2740 Label materialize_true, materialize_false;
2741 Label* if_true = NULL;
2742 Label* if_false = NULL;
2743 Label* fall_through = NULL;
2744 context()->PrepareTest(&materialize_true, &materialize_false,
2745 &if_true, &if_false, &fall_through);
2746
2747 __ JumpIfSmi(r0, if_false);
2748 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE);
2749 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2750 Split(eq, if_true, if_false, fall_through);
2751
2752 context()->Plug(if_true, if_false);
2753 }
2754
2755
2756
2757 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
2758 ASSERT(expr->arguments()->length() == 0);
2759
2760 Label materialize_true, materialize_false;
2761 Label* if_true = NULL;
2762 Label* if_false = NULL;
2763 Label* fall_through = NULL;
2764 context()->PrepareTest(&materialize_true, &materialize_false,
2765 &if_true, &if_false, &fall_through);
2766
2767 // Get the frame pointer for the calling frame.
2768 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2769
2770 // Skip the arguments adaptor frame if it exists.
2771 Label check_frame_marker;
2772 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kContextOffset));
2773 __ cmp(r1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2774 __ b(ne, &check_frame_marker);
2775 __ ldr(r2, MemOperand(r2, StandardFrameConstants::kCallerFPOffset));
2776
2777 // Check the marker in the calling frame.
2778 __ bind(&check_frame_marker);
2779 __ ldr(r1, MemOperand(r2, StandardFrameConstants::kMarkerOffset));
2780 __ cmp(r1, Operand(Smi::FromInt(StackFrame::CONSTRUCT)));
2781 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2782 Split(eq, if_true, if_false, fall_through);
2783
2784 context()->Plug(if_true, if_false);
2785 }
2786
2787
2788 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
2789 ZoneList<Expression*>* args = expr->arguments();
2790 ASSERT(args->length() == 2);
2791
2792 // Load the two objects into registers and perform the comparison.
2793 VisitForStackValue(args->at(0));
2794 VisitForAccumulatorValue(args->at(1));
2795
2796 Label materialize_true, materialize_false;
2797 Label* if_true = NULL;
2798 Label* if_false = NULL;
2799 Label* fall_through = NULL;
2800 context()->PrepareTest(&materialize_true, &materialize_false,
2801 &if_true, &if_false, &fall_through);
2802
2803 __ pop(r1);
2804 __ cmp(r0, r1);
2805 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2806 Split(eq, if_true, if_false, fall_through);
2807
2808 context()->Plug(if_true, if_false);
2809 }
2810
2811
2812 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
2813 ZoneList<Expression*>* args = expr->arguments();
2814 ASSERT(args->length() == 1);
2815
2816 // ArgumentsAccessStub expects the key in r1 and the formal
2817 // parameter count in r0.
2818 VisitForAccumulatorValue(args->at(0));
2819 __ mov(r1, r0);
2820 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
2821 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2822 __ CallStub(&stub);
2823 context()->Plug(r0);
2824 }
2825
2826
2827 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
2828 ASSERT(expr->arguments()->length() == 0);
2829 Label exit;
2830 // Get the number of formal parameters.
2831 __ mov(r0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
2832
2833 // Check if the calling frame is an arguments adaptor frame.
2834 __ ldr(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2835 __ ldr(r3, MemOperand(r2, StandardFrameConstants::kContextOffset));
2836 __ cmp(r3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2837 __ b(ne, &exit);
2838
2839 // Arguments adaptor case: Read the arguments length from the
2840 // adaptor frame.
2841 __ ldr(r0, MemOperand(r2, ArgumentsAdaptorFrameConstants::kLengthOffset));
2842
2843 __ bind(&exit);
2844 context()->Plug(r0);
2845 }
2846
2847
2848 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2849 ZoneList<Expression*>* args = expr->arguments();
2850 ASSERT(args->length() == 1);
2851 Label done, null, function, non_function_constructor;
2852
2853 VisitForAccumulatorValue(args->at(0));
2854
2855 // If the object is a smi, we return null.
2856 __ JumpIfSmi(r0, &null);
2857
2858 // Check that the object is a JS object but take special care of JS
2859 // functions to make sure they have 'Function' as their class.
2860 // Assume that there are only two callable types, and one of them is at
2861 // either end of the type range for JS object types. Saves extra comparisons.
2862 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
2863 __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
2864 // Map is now in r0.
2865 __ b(lt, &null);
2866 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2867 FIRST_SPEC_OBJECT_TYPE + 1);
2868 __ b(eq, &function);
2869
2870 __ cmp(r1, Operand(LAST_SPEC_OBJECT_TYPE));
2871 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2872 LAST_SPEC_OBJECT_TYPE - 1);
2873 __ b(eq, &function);
2874 // Assume that there is no larger type.
2875 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
2876
2877 // Check if the constructor in the map is a JS function.
2878 __ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
2879 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_TYPE);
2880 __ b(ne, &non_function_constructor);
2881
2882 // r0 now contains the constructor function. Grab the
2883 // instance class name from there.
2884 __ ldr(r0, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
2885 __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kInstanceClassNameOffset));
2886 __ b(&done);
2887
2888 // Functions have class 'Function'.
2889 __ bind(&function);
2890 __ LoadRoot(r0, Heap::kfunction_class_symbolRootIndex);
2891 __ jmp(&done);
2892
2893 // Objects with a non-function constructor have class 'Object'.
2894 __ bind(&non_function_constructor);
2895 __ LoadRoot(r0, Heap::kObject_symbolRootIndex);
2896 __ jmp(&done);
2897
2898 // Non-JS objects have class null.
2899 __ bind(&null);
2900 __ LoadRoot(r0, Heap::kNullValueRootIndex);
2901
2902 // All done.
2903 __ bind(&done);
2904
2905 context()->Plug(r0);
2906 }
2907
2908
2909 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
2910 // Conditionally generate a log call.
2911 // Args:
2912 // 0 (literal string): The type of logging (corresponds to the flags).
2913 // This is used to determine whether or not to generate the log call.
2914 // 1 (string): Format string. Access the string at argument index 2
2915 // with '%2s' (see Logger::LogRuntime for all the formats).
2916 // 2 (array): Arguments to the format string.
2917 ZoneList<Expression*>* args = expr->arguments();
2918 ASSERT_EQ(args->length(), 3);
2919 if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
2920 VisitForStackValue(args->at(1));
2921 VisitForStackValue(args->at(2));
2922 __ CallRuntime(Runtime::kLog, 2);
2923 }
2924
2925 // Finally, we're expected to leave a value on the top of the stack.
2926 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2927 context()->Plug(r0);
2928 }
2929
2930
2931 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
2932 ASSERT(expr->arguments()->length() == 0);
2933 Label slow_allocate_heapnumber;
2934 Label heapnumber_allocated;
2935
2936 __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
2937 __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber);
2938 __ jmp(&heapnumber_allocated);
2939
2940 __ bind(&slow_allocate_heapnumber);
2941 // Allocate a heap number.
2942 __ CallRuntime(Runtime::kNumberAlloc, 0);
2943 __ mov(r4, Operand(r0));
2944
2945 __ bind(&heapnumber_allocated);
2946
2947 // Convert 32 random bits in r0 to 0.(32 random bits) in a double
2948 // by computing:
2949 // (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20).
2950 if (CpuFeatures::IsSupported(VFP3)) {
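    // With VFP3 the random bits are turned into a double entirely in
    // registers; without it, a C helper fills in the heap number instead.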
2951 __ PrepareCallCFunction(1, r0);
2952 __ ldr(r0, ContextOperand(context_register(), Context::GLOBAL_INDEX));
2953 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
2954 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
2955
2956 CpuFeatures::Scope scope(VFP3);
2957 // 0x41300000 is the top half of 1.0 x 2^20 as a double.
2958 // Create this constant using mov/orr to avoid PC relative load.
2959 __ mov(r1, Operand(0x41000000));
2960 __ orr(r1, r1, Operand(0x300000));
2961 // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
2962 __ vmov(d7, r0, r1);
2963 // Move 0x4130000000000000 to VFP.
2964 __ mov(r0, Operand(0, RelocInfo::NONE));
2965 __ vmov(d8, r0, r1);
2966 // Subtract and store the result in the heap number.
2967 __ vsub(d7, d7, d8);
2968 __ sub(r0, r4, Operand(kHeapObjectTag));
2969 __ vstr(d7, r0, HeapNumber::kValueOffset);
2970 __ mov(r0, r4);
2971 } else {
2972 __ PrepareCallCFunction(2, r0);
2973 __ ldr(r1, ContextOperand(context_register(), Context::GLOBAL_INDEX));
2974 __ mov(r0, Operand(r4));
2975 __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalContextOffset));
2976 __ CallCFunction(
2977 ExternalReference::fill_heap_number_with_random_function(isolate()), 2);
2978 }
2979
2980 context()->Plug(r0);
2981 }
2982
2983
2984 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
2985 // Load the arguments on the stack and call the stub.
2986 SubStringStub stub;
2987 ZoneList<Expression*>* args = expr->arguments();
2988 ASSERT(args->length() == 3);
2989 VisitForStackValue(args->at(0));
2990 VisitForStackValue(args->at(1));
2991 VisitForStackValue(args->at(2));
2992 __ CallStub(&stub);
2993 context()->Plug(r0);
2994 }
2995
2996
2997 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
2998 // Load the arguments on the stack and call the stub.
2999 RegExpExecStub stub;
3000 ZoneList<Expression*>* args = expr->arguments();
3001 ASSERT(args->length() == 4);
3002 VisitForStackValue(args->at(0));
3003 VisitForStackValue(args->at(1));
3004 VisitForStackValue(args->at(2));
3005 VisitForStackValue(args->at(3));
3006 __ CallStub(&stub);
3007 context()->Plug(r0);
3008 }
3009
3010
3011 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3012 ZoneList<Expression*>* args = expr->arguments();
3013 ASSERT(args->length() == 1);
3014 VisitForAccumulatorValue(args->at(0)); // Load the object.
3015
3016 Label done;
3017 // If the object is a smi return the object.
3018 __ JumpIfSmi(r0, &done);
3019 // If the object is not a value type, return the object.
3020 __ CompareObjectType(r0, r1, r1, JS_VALUE_TYPE);
3021 __ b(ne, &done);
3022 __ ldr(r0, FieldMemOperand(r0, JSValue::kValueOffset));
3023
3024 __ bind(&done);
3025 context()->Plug(r0);
3026 }
3027
3028
3029 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3030 ZoneList<Expression*>* args = expr->arguments();
3031 ASSERT(args->length() == 2);
3032 ASSERT_NE(NULL, args->at(1)->AsLiteral());
3033 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
3034
3035 VisitForAccumulatorValue(args->at(0)); // Load the object.
3036
3037 Label runtime, done;
3038 Register object = r0;
3039 Register result = r0;
3040 Register scratch0 = r9;
3041 Register scratch1 = r1;
3042
3043 #ifdef DEBUG
3044 __ AbortIfSmi(object);
3045 __ CompareObjectType(object, scratch1, scratch1, JS_DATE_TYPE);
3046 __ Assert(eq, "Trying to get date field from non-date.");
3047 #endif
3048
3049 if (index->value() == 0) {
3050 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset));
3051 } else {
3052 if (index->value() < JSDate::kFirstUncachedField) {
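      // Cached fields can be read directly as long as the object's cache
      // stamp matches the isolate's current date cache stamp; otherwise fall
      // through to the runtime helper.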
3053 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3054 __ mov(scratch1, Operand(stamp));
3055 __ ldr(scratch1, MemOperand(scratch1));
3056 __ ldr(scratch0, FieldMemOperand(object, JSDate::kCacheStampOffset));
3057 __ cmp(scratch1, scratch0);
3058 __ b(ne, &runtime);
3059 __ ldr(result, FieldMemOperand(object, JSDate::kValueOffset +
3060 kPointerSize * index->value()));
3061 __ jmp(&done);
3062 }
3063 __ bind(&runtime);
3064 __ PrepareCallCFunction(2, scratch1);
3065 __ mov(r1, Operand(index));
3066 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3067 __ bind(&done);
3068 }
3069 context()->Plug(r0);
3070 }
3071
3072
3073 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3074 // Load the arguments on the stack and call the runtime function.
3075 ZoneList<Expression*>* args = expr->arguments();
3076 ASSERT(args->length() == 2);
3077 VisitForStackValue(args->at(0));
3078 VisitForStackValue(args->at(1));
3079 if (CpuFeatures::IsSupported(VFP3)) {
3080 MathPowStub stub(MathPowStub::ON_STACK);
3081 __ CallStub(&stub);
3082 } else {
3083 __ CallRuntime(Runtime::kMath_pow, 2);
3084 }
3085 context()->Plug(r0);
3086 }
3087
3088
3089 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3090 ZoneList<Expression*>* args = expr->arguments();
3091 ASSERT(args->length() == 2);
3092 VisitForStackValue(args->at(0)); // Load the object.
3093 VisitForAccumulatorValue(args->at(1)); // Load the value.
3094 __ pop(r1); // r0 = value. r1 = object.
3095
3096 Label done;
3097 // If the object is a smi, return the value.
3098 __ JumpIfSmi(r1, &done);
3099
3100 // If the object is not a value type, return the value.
3101 __ CompareObjectType(r1, r2, r2, JS_VALUE_TYPE);
3102 __ b(ne, &done);
3103
3104 // Store the value.
3105 __ str(r0, FieldMemOperand(r1, JSValue::kValueOffset));
3106 // Update the write barrier. Save the value as it will be
3107 // overwritten by the write barrier code and is needed afterward.
3108 __ mov(r2, r0);
3109 __ RecordWriteField(
3110 r1, JSValue::kValueOffset, r2, r3, kLRHasBeenSaved, kDontSaveFPRegs);
3111
3112 __ bind(&done);
3113 context()->Plug(r0);
3114 }
3115
3116
3117 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3118 ZoneList<Expression*>* args = expr->arguments();
3119 ASSERT_EQ(args->length(), 1);
3120 // Load the argument on the stack and call the stub.
3121 VisitForStackValue(args->at(0));
3122
3123 NumberToStringStub stub;
3124 __ CallStub(&stub);
3125 context()->Plug(r0);
3126 }
3127
3128
3129 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3130 ZoneList<Expression*>* args = expr->arguments();
3131 ASSERT(args->length() == 1);
3132 VisitForAccumulatorValue(args->at(0));
3133
3134 Label done;
3135 StringCharFromCodeGenerator generator(r0, r1);
3136 generator.GenerateFast(masm_);
3137 __ jmp(&done);
3138
3139 NopRuntimeCallHelper call_helper;
3140 generator.GenerateSlow(masm_, call_helper);
3141
3142 __ bind(&done);
3143 context()->Plug(r1);
3144 }
3145
3146
3147 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3148 ZoneList<Expression*>* args = expr->arguments();
3149 ASSERT(args->length() == 2);
3150 VisitForStackValue(args->at(0));
3151 VisitForAccumulatorValue(args->at(1));
3152
3153 Register object = r1;
3154 Register index = r0;
3155 Register result = r3;
3156
3157 __ pop(object);
3158
3159 Label need_conversion;
3160 Label index_out_of_range;
3161 Label done;
3162 StringCharCodeAtGenerator generator(object,
3163 index,
3164 result,
3165 &need_conversion,
3166 &need_conversion,
3167 &index_out_of_range,
3168 STRING_INDEX_IS_NUMBER);
3169 generator.GenerateFast(masm_);
3170 __ jmp(&done);
3171
3172 __ bind(&index_out_of_range);
3173 // When the index is out of range, the spec requires us to return
3174 // NaN.
3175 __ LoadRoot(result, Heap::kNanValueRootIndex);
3176 __ jmp(&done);
3177
3178 __ bind(&need_conversion);
3179 // Load the undefined value into the result register, which will
3180 // trigger conversion.
3181 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
3182 __ jmp(&done);
3183
3184 NopRuntimeCallHelper call_helper;
3185 generator.GenerateSlow(masm_, call_helper);
3186
3187 __ bind(&done);
3188 context()->Plug(result);
3189 }
3190
3191
3192 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3193 ZoneList<Expression*>* args = expr->arguments();
3194 ASSERT(args->length() == 2);
3195 VisitForStackValue(args->at(0));
3196 VisitForAccumulatorValue(args->at(1));
3197
3198 Register object = r1;
3199 Register index = r0;
3200 Register scratch = r3;
3201 Register result = r0;
3202
3203 __ pop(object);
3204
3205 Label need_conversion;
3206 Label index_out_of_range;
3207 Label done;
3208 StringCharAtGenerator generator(object,
3209 index,
3210 scratch,
3211 result,
3212 &need_conversion,
3213 &need_conversion,
3214 &index_out_of_range,
3215 STRING_INDEX_IS_NUMBER);
3216 generator.GenerateFast(masm_);
3217 __ jmp(&done);
3218
3219 __ bind(&index_out_of_range);
3220 // When the index is out of range, the spec requires us to return
3221 // the empty string.
3222 __ LoadRoot(result, Heap::kEmptyStringRootIndex);
3223 __ jmp(&done);
3224
3225 __ bind(&need_conversion);
3226 // Move smi zero into the result register, which will trigger
3227 // conversion.
3228 __ mov(result, Operand(Smi::FromInt(0)));
3229 __ jmp(&done);
3230
3231 NopRuntimeCallHelper call_helper;
3232 generator.GenerateSlow(masm_, call_helper);
3233
3234 __ bind(&done);
3235 context()->Plug(result);
3236 }
3237
3238
3239 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3240 ZoneList<Expression*>* args = expr->arguments();
3241 ASSERT_EQ(2, args->length());
3242 VisitForStackValue(args->at(0));
3243 VisitForStackValue(args->at(1));
3244
3245 StringAddStub stub(NO_STRING_ADD_FLAGS);
3246 __ CallStub(&stub);
3247 context()->Plug(r0);
3248 }
3249
3250
3251 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3252 ZoneList<Expression*>* args = expr->arguments();
3253 ASSERT_EQ(2, args->length());
3254 VisitForStackValue(args->at(0));
3255 VisitForStackValue(args->at(1));
3256
3257 StringCompareStub stub;
3258 __ CallStub(&stub);
3259 context()->Plug(r0);
3260 }
3261
3262
3263 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
3264 // Load the argument on the stack and call the stub.
3265 TranscendentalCacheStub stub(TranscendentalCache::SIN,
3266 TranscendentalCacheStub::TAGGED);
3267 ZoneList<Expression*>* args = expr->arguments();
3268 ASSERT(args->length() == 1);
3269 VisitForStackValue(args->at(0));
3270 __ CallStub(&stub);
3271 context()->Plug(r0);
3272 }
3273
3274
3275 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
3276 // Load the argument on the stack and call the stub.
3277 TranscendentalCacheStub stub(TranscendentalCache::COS,
3278 TranscendentalCacheStub::TAGGED);
3279 ZoneList<Expression*>* args = expr->arguments();
3280 ASSERT(args->length() == 1);
3281 VisitForStackValue(args->at(0));
3282 __ CallStub(&stub);
3283 context()->Plug(r0);
3284 }
3285
3286
3287 void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
3288 // Load the argument on the stack and call the stub.
3289 TranscendentalCacheStub stub(TranscendentalCache::TAN,
3290 TranscendentalCacheStub::TAGGED);
3291 ZoneList<Expression*>* args = expr->arguments();
3292 ASSERT(args->length() == 1);
3293 VisitForStackValue(args->at(0));
3294 __ CallStub(&stub);
3295 context()->Plug(r0);
3296 }
3297
3298
3299 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3300 // Load the argument on the stack and call the stub.
3301 TranscendentalCacheStub stub(TranscendentalCache::LOG,
3302 TranscendentalCacheStub::TAGGED);
3303 ZoneList<Expression*>* args = expr->arguments();
3304 ASSERT(args->length() == 1);
3305 VisitForStackValue(args->at(0));
3306 __ CallStub(&stub);
3307 context()->Plug(r0);
3308 }
3309
3310
3311 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3312 // Load the argument on the stack and call the runtime function.
3313 ZoneList<Expression*>* args = expr->arguments();
3314 ASSERT(args->length() == 1);
3315 VisitForStackValue(args->at(0));
3316 __ CallRuntime(Runtime::kMath_sqrt, 1);
3317 context()->Plug(r0);
3318 }
3319
3320
3321 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3322 ZoneList<Expression*>* args = expr->arguments();
3323 ASSERT(args->length() >= 2);
3324
3325 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3326 for (int i = 0; i < arg_count + 1; i++) {
3327 VisitForStackValue(args->at(i));
3328 }
3329 VisitForAccumulatorValue(args->last()); // Function.
3330
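  // At this point the stack holds the receiver and the arg_count call
  // arguments (pushed left to right), and r0 holds the function, which may
  // still be a proxy.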
3331 // Check for proxy.
3332 Label proxy, done;
3333 __ CompareObjectType(r0, r1, r1, JS_FUNCTION_PROXY_TYPE);
3334 __ b(eq, &proxy);
3335
3336 // InvokeFunction requires the function in r1. Move it in there.
3337 __ mov(r1, result_register());
3338 ParameterCount count(arg_count);
3339 __ InvokeFunction(r1, count, CALL_FUNCTION,
3340 NullCallWrapper(), CALL_AS_METHOD);
3341 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3342 __ jmp(&done);
3343
3344 __ bind(&proxy);
3345 __ push(r0);
3346 __ CallRuntime(Runtime::kCall, args->length());
3347 __ bind(&done);
3348
3349 context()->Plug(r0);
3350 }
3351
3352
3353 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3354 RegExpConstructResultStub stub;
3355 ZoneList<Expression*>* args = expr->arguments();
3356 ASSERT(args->length() == 3);
3357 VisitForStackValue(args->at(0));
3358 VisitForStackValue(args->at(1));
3359 VisitForStackValue(args->at(2));
3360 __ CallStub(&stub);
3361 context()->Plug(r0);
3362 }
3363
3364
3365 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3366 ZoneList<Expression*>* args = expr->arguments();
3367 ASSERT_EQ(2, args->length());
3368 ASSERT_NE(NULL, args->at(0)->AsLiteral());
3369 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
3370
3371 Handle<FixedArray> jsfunction_result_caches(
3372 isolate()->global_context()->jsfunction_result_caches());
3373 if (jsfunction_result_caches->length() <= cache_id) {
3374 __ Abort("Attempt to use undefined cache.");
3375 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3376 context()->Plug(r0);
3377 return;
3378 }
3379
3380 VisitForAccumulatorValue(args->at(1));
3381
3382 Register key = r0;
3383 Register cache = r1;
3384 __ ldr(cache, ContextOperand(cp, Context::GLOBAL_INDEX));
3385 __ ldr(cache, FieldMemOperand(cache, GlobalObject::kGlobalContextOffset));
3386 __ ldr(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3387 __ ldr(cache,
3388 FieldMemOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3389
3390
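  // The cache is a FixedArray of (key, value) pairs with a "finger" that
  // records the offset of the most recently hit key. The fast path below
  // only checks the pair under the finger; a miss falls through to the
  // runtime, which does the full lookup and moves the finger. (Summary of
  // the code below; see JSFunctionResultCache for the authoritative layout.)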
3391 Label done, not_found;
3393 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3394 __ ldr(r2, FieldMemOperand(cache, JSFunctionResultCache::kFingerOffset));
3395 // r2 now holds finger offset as a smi.
3396 __ add(r3, cache, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3397 // r3 now points to the start of fixed array elements.
3398 __ ldr(r2, MemOperand(r3, r2, LSL, kPointerSizeLog2 - kSmiTagSize, PreIndex));
3399 // Note side effect of PreIndex: r3 now points to the key of the pair.
3400 __ cmp(key, r2);
3401 __ b(ne, &not_found);
3402
3403 __ ldr(r0, MemOperand(r3, kPointerSize));
3404 __ b(&done);
3405
3406 __ bind(&not_found);
3407 // Call runtime to perform the lookup.
3408 __ Push(cache, key);
3409 __ CallRuntime(Runtime::kGetFromCache, 2);
3410
3411 __ bind(&done);
3412 context()->Plug(r0);
3413 }
3414
3415
3416 void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
3417 ZoneList<Expression*>* args = expr->arguments();
3418 ASSERT_EQ(2, args->length());
3419
3420 Register right = r0;
3421 Register left = r1;
3422 Register tmp = r2;
3423 Register tmp2 = r3;
3424
3425 VisitForStackValue(args->at(0));
3426 VisitForAccumulatorValue(args->at(1));
3427 __ pop(left);
3428
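  // Two regexps are treated as equivalent when they are the same object, or
  // when both are JS_REGEXP_TYPE objects that share a map and the same data
  // fixed array (which carries the pattern and flags). The checks below
  // implement exactly that, in order.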
3429 Label done, fail, ok;
3430 __ cmp(left, Operand(right));
3431 __ b(eq, &ok);
3432 // Fail if either is a non-HeapObject.
3433 __ and_(tmp, left, Operand(right));
3434 __ JumpIfSmi(tmp, &fail);
3435 __ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
3436 __ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
3437 __ cmp(tmp2, Operand(JS_REGEXP_TYPE));
3438 __ b(ne, &fail);
3439 __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
3440 __ cmp(tmp, Operand(tmp2));
3441 __ b(ne, &fail);
3442 __ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
3443 __ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
3444 __ cmp(tmp, tmp2);
3445 __ b(eq, &ok);
3446 __ bind(&fail);
3447 __ LoadRoot(r0, Heap::kFalseValueRootIndex);
3448 __ jmp(&done);
3449 __ bind(&ok);
3450 __ LoadRoot(r0, Heap::kTrueValueRootIndex);
3451 __ bind(&done);
3452
3453 context()->Plug(r0);
3454 }
3455
3456
3457 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3458 ZoneList<Expression*>* args = expr->arguments();
3459 VisitForAccumulatorValue(args->at(0));
3460
3461 Label materialize_true, materialize_false;
3462 Label* if_true = NULL;
3463 Label* if_false = NULL;
3464 Label* fall_through = NULL;
3465 context()->PrepareTest(&materialize_true, &materialize_false,
3466 &if_true, &if_false, &fall_through);
3467
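  // A string's hash field caches an array index when the bits covered by
  // String::kContainsCachedArrayIndexMask are all zero, so "eq" after the
  // tst below means "has a cached array index".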
3468 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
3469 __ tst(r0, Operand(String::kContainsCachedArrayIndexMask));
3470 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3471 Split(eq, if_true, if_false, fall_through);
3472
3473 context()->Plug(if_true, if_false);
3474 }
3475
3476
3477 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3478 ZoneList<Expression*>* args = expr->arguments();
3479 ASSERT(args->length() == 1);
3480 VisitForAccumulatorValue(args->at(0));
3481
3482 if (FLAG_debug_code) {
3483 __ AbortIfNotString(r0);
3484 }
3485
3486 __ ldr(r0, FieldMemOperand(r0, String::kHashFieldOffset));
3487 __ IndexFromHash(r0, r0);
3488
3489 context()->Plug(r0);
3490 }
3491
3492
3493 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3494 Label bailout, done, one_char_separator, long_separator,
3495 non_trivial_array, not_size_one_array, loop,
3496 empty_separator_loop, one_char_separator_loop,
3497 one_char_separator_loop_entry, long_separator_loop;
3498 ZoneList<Expression*>* args = expr->arguments();
3499 ASSERT(args->length() == 2);
3500 VisitForStackValue(args->at(1));
3501 VisitForAccumulatorValue(args->at(0));
3502
3503 // All aliases of the same register have disjoint lifetimes.
3504 Register array = r0;
3505 Register elements = no_reg; // Will be r0.
3506 Register result = no_reg; // Will be r0.
3507 Register separator = r1;
3508 Register array_length = r2;
3509 Register result_pos = no_reg; // Will be r2
3510 Register string_length = r3;
3511 Register string = r4;
3512 Register element = r5;
3513 Register elements_end = r6;
3514 Register scratch1 = r7;
3515 Register scratch2 = r9;
3516
3517 // Separator operand is on the stack.
3518 __ pop(separator);
3519
3520 // Check that the array is a JSArray.
3521 __ JumpIfSmi(array, &bailout);
3522 __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
3523 __ b(ne, &bailout);
3524
3525 // Check that the array has fast elements.
3526 __ CheckFastElements(scratch1, scratch2, &bailout);
3527
3528 // If the array has length zero, return the empty string.
3529 __ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
3530 __ SmiUntag(array_length, SetCC);
3531 __ b(ne, &non_trivial_array);
3532 __ LoadRoot(r0, Heap::kEmptyStringRootIndex);
3533 __ b(&done);
3534
3535 __ bind(&non_trivial_array);
3536
3537 // Get the FixedArray containing array's elements.
3538 elements = array;
3539 __ ldr(elements, FieldMemOperand(array, JSArray::kElementsOffset));
3540 array = no_reg; // End of array's live range.
3541
3542 // Check that all array elements are sequential ASCII strings, and
3543 // accumulate the sum of their lengths, as a smi-encoded value.
3544 __ mov(string_length, Operand(0));
3545 __ add(element,
3546 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3547 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3548 // Loop condition: while (element < elements_end).
3549 // Live values in registers:
3550 // elements: Fixed array of strings.
3551 // array_length: Length of the fixed array of strings (not smi)
3552 // separator: Separator string
3553 // string_length: Accumulated sum of string lengths (smi).
3554 // element: Current array element.
3555 // elements_end: Array end.
3556 if (FLAG_debug_code) {
3557 __ cmp(array_length, Operand(0));
3558 __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin");
3559 }
3560 __ bind(&loop);
3561 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3562 __ JumpIfSmi(string, &bailout);
3563 __ ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
3564 __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3565 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3566 __ ldr(scratch1, FieldMemOperand(string, SeqAsciiString::kLengthOffset));
3567 __ add(string_length, string_length, Operand(scratch1));
3568 __ b(vs, &bailout);
3569 __ cmp(element, elements_end);
3570 __ b(lt, &loop);
3571
3572 // If array_length is 1, return elements[0], a string.
3573 __ cmp(array_length, Operand(1));
3574 __ b(ne, &not_size_one_array);
3575 __ ldr(r0, FieldMemOperand(elements, FixedArray::kHeaderSize));
3576 __ b(&done);
3577
3578 __ bind(&not_size_one_array);
3579
3580 // Live values in registers:
3581 // separator: Separator string
3582 // array_length: Length of the array.
3583 // string_length: Sum of string lengths (smi).
3584 // elements: FixedArray of strings.
3585
3586 // Check that the separator is a flat ASCII string.
3587 __ JumpIfSmi(separator, &bailout);
3588 __ ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
3589 __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
3590 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
3591
3592 // Add (separator length times array_length) - separator length to the
3593 // string_length to get the length of the result string. array_length is not a
3594 // smi but the other values are, so the result is a smi.
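  // In other words (with lengths as plain integers):
  //   result_length = sum_of_element_lengths
  //                 + (array_length - 1) * separator_length
  // computed below as (sum - sep_len) + sep_len * array_length.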
3595 __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
3596 __ sub(string_length, string_length, Operand(scratch1));
3597 __ smull(scratch2, ip, array_length, scratch1);
3598 // Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
3599 // zero.
3600 __ cmp(ip, Operand(0));
3601 __ b(ne, &bailout);
3602 __ tst(scratch2, Operand(0x80000000));
3603 __ b(ne, &bailout);
3604 __ add(string_length, string_length, Operand(scratch2));
3605 __ b(vs, &bailout);
3606 __ SmiUntag(string_length);
3607
3608 // Get first element in the array to free up the elements register to be used
3609 // for the result.
3610 __ add(element,
3611 elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
3612 result = elements; // End of live range for elements.
3613 elements = no_reg;
3614 // Live values in registers:
3615 // element: First array element
3616 // separator: Separator string
3617 // string_length: Length of result string (not smi)
3618 // array_length: Length of the array.
3619 __ AllocateAsciiString(result,
3620 string_length,
3621 scratch1,
3622 scratch2,
3623 elements_end,
3624 &bailout);
3625 // Prepare for looping. Set up elements_end to point at the end of the
3626 // array, and set result_pos to the position in the result string where the
3627 // first character will be written.
3628 __ add(elements_end, element, Operand(array_length, LSL, kPointerSizeLog2));
3629 result_pos = array_length; // End of live range for array_length.
3630 array_length = no_reg;
3631 __ add(result_pos,
3632 result,
3633 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3634
3635 // Check the length of the separator.
3636 __ ldr(scratch1, FieldMemOperand(separator, SeqAsciiString::kLengthOffset));
3637 __ cmp(scratch1, Operand(Smi::FromInt(1)));
3638 __ b(eq, &one_char_separator);
3639 __ b(gt, &long_separator);
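  // Three copy loops follow, chosen by separator length:
  //   length == 0: append the elements back to back,
  //   length == 1: store the single separator byte between elements,
  //   length  > 1: CopyBytes the separator string between elements.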
3640
3641 // Empty separator case
3642 __ bind(&empty_separator_loop);
3643 // Live values in registers:
3644 // result_pos: the position to which we are currently copying characters.
3645 // element: Current array element.
3646 // elements_end: Array end.
3647
3648 // Copy next array element to the result.
3649 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3650 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
3651 __ SmiUntag(string_length);
3652 __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3653 __ CopyBytes(string, result_pos, string_length, scratch1);
3654 __ cmp(element, elements_end);
3655 __ b(lt, &empty_separator_loop); // End while (element < elements_end).
3656 ASSERT(result.is(r0));
3657 __ b(&done);
3658
3659 // One-character separator case
3660 __ bind(&one_char_separator);
3661 // Replace separator with its ASCII character value.
3662 __ ldrb(separator, FieldMemOperand(separator, SeqAsciiString::kHeaderSize));
3663 // Jump into the loop after the code that copies the separator, so the first
3664 // element is not preceded by a separator
3665 __ jmp(&one_char_separator_loop_entry);
3666
3667 __ bind(&one_char_separator_loop);
3668 // Live values in registers:
3669 // result_pos: the position to which we are currently copying characters.
3670 // element: Current array element.
3671 // elements_end: Array end.
3672 // separator: Single separator ASCII char (in lower byte).
3673
3674 // Copy the separator character to the result.
3675 __ strb(separator, MemOperand(result_pos, 1, PostIndex));
3676
3677 // Copy next array element to the result.
3678 __ bind(&one_char_separator_loop_entry);
3679 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3680 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
3681 __ SmiUntag(string_length);
3682 __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3683 __ CopyBytes(string, result_pos, string_length, scratch1);
3684 __ cmp(element, elements_end);
3685 __ b(lt, &one_char_separator_loop); // End while (element < elements_end).
3686 ASSERT(result.is(r0));
3687 __ b(&done);
3688
3689 // Long separator case (separator is more than one character). Entry is at the
3690 // label long_separator below.
3691 __ bind(&long_separator_loop);
3692 // Live values in registers:
3693 // result_pos: the position to which we are currently copying characters.
3694 // element: Current array element.
3695 // elements_end: Array end.
3696 // separator: Separator string.
3697
3698 // Copy the separator to the result.
3699 __ ldr(string_length, FieldMemOperand(separator, String::kLengthOffset));
3700 __ SmiUntag(string_length);
3701 __ add(string,
3702 separator,
3703 Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3704 __ CopyBytes(string, result_pos, string_length, scratch1);
3705
3706 __ bind(&long_separator);
3707 __ ldr(string, MemOperand(element, kPointerSize, PostIndex));
3708 __ ldr(string_length, FieldMemOperand(string, String::kLengthOffset));
3709 __ SmiUntag(string_length);
3710 __ add(string, string, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag));
3711 __ CopyBytes(string, result_pos, string_length, scratch1);
3712 __ cmp(element, elements_end);
3713 __ b(lt, &long_separator_loop); // End while (element < elements_end).
3714 ASSERT(result.is(r0));
3715 __ b(&done);
3716
3717 __ bind(&bailout);
3718 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
3719 __ bind(&done);
3720 context()->Plug(r0);
3721 }
3722
3723
3724 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3725 Handle<String> name = expr->name();
3726 if (name->length() > 0 && name->Get(0) == '_') {
3727 Comment cmnt(masm_, "[ InlineRuntimeCall");
3728 EmitInlineRuntimeCall(expr);
3729 return;
3730 }
3731
3732 Comment cmnt(masm_, "[ CallRuntime");
3733 ZoneList<Expression*>* args = expr->arguments();
3734
3735 if (expr->is_jsruntime()) {
3736 // Prepare for calling JS runtime function.
3737 __ ldr(r0, GlobalObjectOperand());
3738 __ ldr(r0, FieldMemOperand(r0, GlobalObject::kBuiltinsOffset));
3739 __ push(r0);
3740 }
3741
3742 // Push the arguments ("left-to-right").
3743 int arg_count = args->length();
3744 for (int i = 0; i < arg_count; i++) {
3745 VisitForStackValue(args->at(i));
3746 }
3747
3748 if (expr->is_jsruntime()) {
3749 // Call the JS runtime function.
3750 __ mov(r2, Operand(expr->name()));
3751 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
3752 Handle<Code> ic =
3753 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
3754 CallIC(ic, mode, expr->id());
3755 // Restore context register.
3756 __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
3757 } else {
3758 // Call the C runtime function.
3759 __ CallRuntime(expr->function(), arg_count);
3760 }
3761 context()->Plug(r0);
3762 }
3763
3764
3765 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3766 switch (expr->op()) {
3767 case Token::DELETE: {
3768 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3769 Property* property = expr->expression()->AsProperty();
3770 VariableProxy* proxy = expr->expression()->AsVariableProxy();
3771
3772 if (property != NULL) {
3773 VisitForStackValue(property->obj());
3774 VisitForStackValue(property->key());
3775 StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
3776 ? kNonStrictMode : kStrictMode;
3777 __ mov(r1, Operand(Smi::FromInt(strict_mode_flag)));
3778 __ push(r1);
3779 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3780 context()->Plug(r0);
3781 } else if (proxy != NULL) {
3782 Variable* var = proxy->var();
3783 // Delete of an unqualified identifier is disallowed in strict mode
3784 // but "delete this" is allowed.
3785 ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
3786 if (var->IsUnallocated()) {
3787 __ ldr(r2, GlobalObjectOperand());
3788 __ mov(r1, Operand(var->name()));
3789 __ mov(r0, Operand(Smi::FromInt(kNonStrictMode)));
3790 __ Push(r2, r1, r0);
3791 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3792 context()->Plug(r0);
3793 } else if (var->IsStackAllocated() || var->IsContextSlot()) {
3794 // Result of deleting non-global, non-dynamic variables is false.
3795 // The subexpression does not have side effects.
3796 context()->Plug(var->is_this());
3797 } else {
3798 // Non-global variable. Call the runtime to try to delete from the
3799 // context where the variable was introduced.
3800 __ push(context_register());
3801 __ mov(r2, Operand(var->name()));
3802 __ push(r2);
3803 __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3804 context()->Plug(r0);
3805 }
3806 } else {
3807 // Result of deleting non-property, non-variable reference is true.
3808 // The subexpression may have side effects.
3809 VisitForEffect(expr->expression());
3810 context()->Plug(true);
3811 }
3812 break;
3813 }
3814
3815 case Token::VOID: {
3816 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3817 VisitForEffect(expr->expression());
3818 context()->Plug(Heap::kUndefinedValueRootIndex);
3819 break;
3820 }
3821
3822 case Token::NOT: {
3823 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3824 if (context()->IsEffect()) {
3825 // Unary NOT has no side effects so it's only necessary to visit the
3826 // subexpression. Match the optimizing compiler by not branching.
3827 VisitForEffect(expr->expression());
3828 } else if (context()->IsTest()) {
3829 const TestContext* test = TestContext::cast(context());
3830 // The labels are swapped for the recursive call.
3831 VisitForControl(expr->expression(),
3832 test->false_label(),
3833 test->true_label(),
3834 test->fall_through());
3835 context()->Plug(test->true_label(), test->false_label());
3836 } else {
3837 // We handle value contexts explicitly rather than simply visiting
3838 // for control and plugging the control flow into the context,
3839 // because we need to prepare a pair of extra administrative AST ids
3840 // for the optimizing compiler.
3841 ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
3842 Label materialize_true, materialize_false, done;
3843 VisitForControl(expr->expression(),
3844 &materialize_false,
3845 &materialize_true,
3846 &materialize_true);
3847 __ bind(&materialize_true);
3848 PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
3849 __ LoadRoot(r0, Heap::kTrueValueRootIndex);
3850 if (context()->IsStackValue()) __ push(r0);
3851 __ jmp(&done);
3852 __ bind(&materialize_false);
3853 PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
3854 __ LoadRoot(r0, Heap::kFalseValueRootIndex);
3855 if (context()->IsStackValue()) __ push(r0);
3856 __ bind(&done);
3857 }
3858 break;
3859 }
3860
3861 case Token::TYPEOF: {
3862 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3863 { StackValueContext context(this);
3864 VisitForTypeofValue(expr->expression());
3865 }
3866 __ CallRuntime(Runtime::kTypeof, 1);
3867 context()->Plug(r0);
3868 break;
3869 }
3870
3871 case Token::ADD: {
3872 Comment cmt(masm_, "[ UnaryOperation (ADD)");
3873 VisitForAccumulatorValue(expr->expression());
3874 Label no_conversion;
3875 __ JumpIfSmi(result_register(), &no_conversion);
3876 ToNumberStub convert_stub;
3877 __ CallStub(&convert_stub);
3878 __ bind(&no_conversion);
3879 context()->Plug(result_register());
3880 break;
3881 }
3882
3883 case Token::SUB:
3884 EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
3885 break;
3886
3887 case Token::BIT_NOT:
3888 EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
3889 break;
3890
3891 default:
3892 UNREACHABLE();
3893 }
3894 }
3895
3896
3897 void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
3898 const char* comment) {
3899 // TODO(svenpanne): Allowing format strings in Comment would be nice here...
3900 Comment cmt(masm_, comment);
3901 bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
3902 UnaryOverwriteMode overwrite =
3903 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
3904 UnaryOpStub stub(expr->op(), overwrite);
3905 // UnaryOpStub expects the argument to be in the
3906 // accumulator register r0.
3907 VisitForAccumulatorValue(expr->expression());
3908 SetSourcePosition(expr->position());
3909 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
3910 context()->Plug(r0);
3911 }
3912
3913
3914 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3915 Comment cmnt(masm_, "[ CountOperation");
3916 SetSourcePosition(expr->position());
3917
3918 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
3919 // as the left-hand side.
3920 if (!expr->expression()->IsValidLeftHandSide()) {
3921 VisitForEffect(expr->expression());
3922 return;
3923 }
3924
3925 // Expression can only be a property, a global or a (parameter or local)
3926 // slot.
3927 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
3928 LhsKind assign_type = VARIABLE;
3929 Property* prop = expr->expression()->AsProperty();
3930 // In case of a property we use the uninitialized expression context
3931 // of the key to detect a named property.
3932 if (prop != NULL) {
3933 assign_type =
3934 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
3935 }
3936
3937 // Evaluate expression and get value.
3938 if (assign_type == VARIABLE) {
3939 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
3940 AccumulatorValueContext context(this);
3941 EmitVariableLoad(expr->expression()->AsVariableProxy());
3942 } else {
3943 // Reserve space for result of postfix operation.
3944 if (expr->is_postfix() && !context()->IsEffect()) {
3945 __ mov(ip, Operand(Smi::FromInt(0)));
3946 __ push(ip);
3947 }
3948 if (assign_type == NAMED_PROPERTY) {
3949 // Put the object both on the stack and in the accumulator.
3950 VisitForAccumulatorValue(prop->obj());
3951 __ push(r0);
3952 EmitNamedPropertyLoad(prop);
3953 } else {
3954 VisitForStackValue(prop->obj());
3955 VisitForAccumulatorValue(prop->key());
3956 __ ldr(r1, MemOperand(sp, 0));
3957 __ push(r0);
3958 EmitKeyedPropertyLoad(prop);
3959 }
3960 }
3961
3962 // We need a second deoptimization point after loading the value
3963 // in case evaluating the property load may have a side effect.
3964 if (assign_type == VARIABLE) {
3965 PrepareForBailout(expr->expression(), TOS_REG);
3966 } else {
3967 PrepareForBailoutForId(expr->CountId(), TOS_REG);
3968 }
3969
3970 // Call ToNumber only if operand is not a smi.
3971 Label no_conversion;
3972 __ JumpIfSmi(r0, &no_conversion);
3973 ToNumberStub convert_stub;
3974 __ CallStub(&convert_stub);
3975 __ bind(&no_conversion);
3976
3977 // Save result for postfix expressions.
3978 if (expr->is_postfix()) {
3979 if (!context()->IsEffect()) {
3980 // Save the result on the stack. If we have a named or keyed property
3981 // we store the result under the receiver that is currently on top
3982 // of the stack.
3983 switch (assign_type) {
3984 case VARIABLE:
3985 __ push(r0);
3986 break;
3987 case NAMED_PROPERTY:
3988 __ str(r0, MemOperand(sp, kPointerSize));
3989 break;
3990 case KEYED_PROPERTY:
3991 __ str(r0, MemOperand(sp, 2 * kPointerSize));
3992 break;
3993 }
3994 }
3995 }
3996
3997
3998 // Inline smi case if we are in a loop.
3999 Label stub_call, done;
4000 JumpPatchSite patch_site(masm_);
4001
4002 int count_value = expr->op() == Token::INC ? 1 : -1;
4003 if (ShouldInlineSmiCase(expr->op())) {
4004 __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
4005 __ b(vs, &stub_call);
4006 // We could eliminate this smi check if we split the code at
4007 // the first smi check before calling ToNumber.
4008 patch_site.EmitJumpIfSmi(r0, &done);
4009
4010 __ bind(&stub_call);
4011 // Call stub. Undo operation first.
4012 __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
4013 }
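  // Rough shape of the inline path above (illustrative pseudo-code only):
  //   r0 += Smi(count_value);
  //   if (overflow) goto stub_call;
  //   if (r0 is a smi) goto done;        // patchable check, see JumpPatchSite
  //   stub_call: r0 -= Smi(count_value); // undo, then fall into the stub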
4014 __ mov(r1, Operand(Smi::FromInt(count_value)));
4015
4016 // Record position before stub call.
4017 SetSourcePosition(expr->position());
4018
4019 BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
4020 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
4021 patch_site.EmitPatchInfo();
4022 __ bind(&done);
4023
4024 // Store the value returned in r0.
4025 switch (assign_type) {
4026 case VARIABLE:
4027 if (expr->is_postfix()) {
4028 { EffectContext context(this);
4029 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4030 Token::ASSIGN);
4031 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4032 context.Plug(r0);
4033 }
4034 // For all contexts except EffectContext we have the result on
4035 // top of the stack.
4036 if (!context()->IsEffect()) {
4037 context()->PlugTOS();
4038 }
4039 } else {
4040 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4041 Token::ASSIGN);
4042 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4043 context()->Plug(r0);
4044 }
4045 break;
4046 case NAMED_PROPERTY: {
4047 __ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
4048 __ pop(r1);
4049 Handle<Code> ic = is_classic_mode()
4050 ? isolate()->builtins()->StoreIC_Initialize()
4051 : isolate()->builtins()->StoreIC_Initialize_Strict();
4052 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4053 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4054 if (expr->is_postfix()) {
4055 if (!context()->IsEffect()) {
4056 context()->PlugTOS();
4057 }
4058 } else {
4059 context()->Plug(r0);
4060 }
4061 break;
4062 }
4063 case KEYED_PROPERTY: {
4064 __ pop(r1); // Key.
4065 __ pop(r2); // Receiver.
4066 Handle<Code> ic = is_classic_mode()
4067 ? isolate()->builtins()->KeyedStoreIC_Initialize()
4068 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
4069 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4070 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4071 if (expr->is_postfix()) {
4072 if (!context()->IsEffect()) {
4073 context()->PlugTOS();
4074 }
4075 } else {
4076 context()->Plug(r0);
4077 }
4078 break;
4079 }
4080 }
4081 }
4082
4083
4084 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
4085 ASSERT(!context()->IsEffect());
4086 ASSERT(!context()->IsTest());
4087 VariableProxy* proxy = expr->AsVariableProxy();
4088 if (proxy != NULL && proxy->var()->IsUnallocated()) {
4089 Comment cmnt(masm_, "Global variable");
4090 __ ldr(r0, GlobalObjectOperand());
4091 __ mov(r2, Operand(proxy->name()));
4092 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
4093 // Use a regular load, not a contextual load, to avoid a reference
4094 // error.
4095 CallIC(ic);
4096 PrepareForBailout(expr, TOS_REG);
4097 context()->Plug(r0);
4098 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
4099 Label done, slow;
4100
4101 // Generate code for loading from variables potentially shadowed
4102 // by eval-introduced variables.
4103 EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);
4104
4105 __ bind(&slow);
4106 __ mov(r0, Operand(proxy->name()));
4107 __ Push(cp, r0);
4108 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
4109 PrepareForBailout(expr, TOS_REG);
4110 __ bind(&done);
4111
4112 context()->Plug(r0);
4113 } else {
4114 // This expression cannot throw a reference error at the top level.
4115 VisitInDuplicateContext(expr);
4116 }
4117 }
4118
4119
4120 void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
4121 Expression* sub_expr,
4122 Handle<String> check) {
4123 Label materialize_true, materialize_false;
4124 Label* if_true = NULL;
4125 Label* if_false = NULL;
4126 Label* fall_through = NULL;
4127 context()->PrepareTest(&materialize_true, &materialize_false,
4128 &if_true, &if_false, &fall_through);
4129
4130 { AccumulatorValueContext context(this);
4131 VisitForTypeofValue(sub_expr);
4132 }
4133 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4134
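  // Each branch below checks the map or instance type of the value in r0
  // directly, so the typeof string is never materialized; literals that
  // match no known typeof result simply jump to if_false.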
4135 if (check->Equals(isolate()->heap()->number_symbol())) {
4136 __ JumpIfSmi(r0, if_true);
4137 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
4138 __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
4139 __ cmp(r0, ip);
4140 Split(eq, if_true, if_false, fall_through);
4141 } else if (check->Equals(isolate()->heap()->string_symbol())) {
4142 __ JumpIfSmi(r0, if_false);
4143 // Check for undetectable objects => false.
4144 __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
4145 __ b(ge, if_false);
4146 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4147 __ tst(r1, Operand(1 << Map::kIsUndetectable));
4148 Split(eq, if_true, if_false, fall_through);
4149 } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
4150 __ CompareRoot(r0, Heap::kTrueValueRootIndex);
4151 __ b(eq, if_true);
4152 __ CompareRoot(r0, Heap::kFalseValueRootIndex);
4153 Split(eq, if_true, if_false, fall_through);
4154 } else if (FLAG_harmony_typeof &&
4155 check->Equals(isolate()->heap()->null_symbol())) {
4156 __ CompareRoot(r0, Heap::kNullValueRootIndex);
4157 Split(eq, if_true, if_false, fall_through);
4158 } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
4159 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
4160 __ b(eq, if_true);
4161 __ JumpIfSmi(r0, if_false);
4162 // Check for undetectable objects => true.
4163 __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
4164 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4165 __ tst(r1, Operand(1 << Map::kIsUndetectable));
4166 Split(ne, if_true, if_false, fall_through);
4167
4168 } else if (check->Equals(isolate()->heap()->function_symbol())) {
4169 __ JumpIfSmi(r0, if_false);
4170 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
4171 __ CompareObjectType(r0, r0, r1, JS_FUNCTION_TYPE);
4172 __ b(eq, if_true);
4173 __ cmp(r1, Operand(JS_FUNCTION_PROXY_TYPE));
4174 Split(eq, if_true, if_false, fall_through);
4175 } else if (check->Equals(isolate()->heap()->object_symbol())) {
4176 __ JumpIfSmi(r0, if_false);
4177 if (!FLAG_harmony_typeof) {
4178 __ CompareRoot(r0, Heap::kNullValueRootIndex);
4179 __ b(eq, if_true);
4180 }
4181 // Check for JS objects => true.
4182 __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
4183 __ b(lt, if_false);
4184 __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
4185 __ b(gt, if_false);
4186 // Check for undetectable objects => false.
4187 __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
4188 __ tst(r1, Operand(1 << Map::kIsUndetectable));
4189 Split(eq, if_true, if_false, fall_through);
4190 } else {
4191 if (if_false != fall_through) __ jmp(if_false);
4192 }
4193 context()->Plug(if_true, if_false);
4194 }
4195
4196
4197 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4198 Comment cmnt(masm_, "[ CompareOperation");
4199 SetSourcePosition(expr->position());
4200
4201 // First we try a fast inlined version of the compare when one of
4202 // the operands is a literal.
4203 if (TryLiteralCompare(expr)) return;
4204
4205 // Always perform the comparison for its control flow. Pack the result
4206 // into the expression's context after the comparison is performed.
4207 Label materialize_true, materialize_false;
4208 Label* if_true = NULL;
4209 Label* if_false = NULL;
4210 Label* fall_through = NULL;
4211 context()->PrepareTest(&materialize_true, &materialize_false,
4212 &if_true, &if_false, &fall_through);
4213
4214 Token::Value op = expr->op();
4215 VisitForStackValue(expr->left());
4216 switch (op) {
4217 case Token::IN:
4218 VisitForStackValue(expr->right());
4219 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4220 PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
4221 __ LoadRoot(ip, Heap::kTrueValueRootIndex);
4222 __ cmp(r0, ip);
4223 Split(eq, if_true, if_false, fall_through);
4224 break;
4225
4226 case Token::INSTANCEOF: {
4227 VisitForStackValue(expr->right());
4228 InstanceofStub stub(InstanceofStub::kNoFlags);
4229 __ CallStub(&stub);
4230 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4231 // The stub returns 0 for true.
4232 __ tst(r0, r0);
4233 Split(eq, if_true, if_false, fall_through);
4234 break;
4235 }
4236
4237 default: {
4238 VisitForAccumulatorValue(expr->right());
4239 Condition cond = eq;
4240 switch (op) {
4241 case Token::EQ_STRICT:
4242 case Token::EQ:
4243 cond = eq;
4244 break;
4245 case Token::LT:
4246 cond = lt;
4247 break;
4248 case Token::GT:
4249 cond = gt;
4250 break;
4251 case Token::LTE:
4252 cond = le;
4253 break;
4254 case Token::GTE:
4255 cond = ge;
4256 break;
4257 case Token::IN:
4258 case Token::INSTANCEOF:
4259 default:
4260 UNREACHABLE();
4261 }
4262 __ pop(r1);
4263
4264 bool inline_smi_code = ShouldInlineSmiCase(op);
4265 JumpPatchSite patch_site(masm_);
4266 if (inline_smi_code) {
4267 Label slow_case;
4268 __ orr(r2, r0, Operand(r1));
4269 patch_site.EmitJumpIfNotSmi(r2, &slow_case);
4270 __ cmp(r1, r0);
4271 Split(cond, if_true, if_false, NULL);
4272 __ bind(&slow_case);
4273 }
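      // Before patching, the jump emitted by the patch site always skips the
      // inlined comparison, so every compare goes through the IC; once type
      // feedback shows smi operands the site is patched into a real smi
      // check and smi-smi compares stay on the fast path above.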
4274
4275 // Record position and call the compare IC.
4276 SetSourcePosition(expr->position());
4277 Handle<Code> ic = CompareIC::GetUninitialized(op);
4278 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
4279 patch_site.EmitPatchInfo();
4280 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4281 __ cmp(r0, Operand(0));
4282 Split(cond, if_true, if_false, fall_through);
4283 }
4284 }
4285
4286 // Convert the result of the comparison into one expected for this
4287 // expression's context.
4288 context()->Plug(if_true, if_false);
4289 }
4290
4291
4292 void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
4293 Expression* sub_expr,
4294 NilValue nil) {
4295 Label materialize_true, materialize_false;
4296 Label* if_true = NULL;
4297 Label* if_false = NULL;
4298 Label* fall_through = NULL;
4299 context()->PrepareTest(&materialize_true, &materialize_false,
4300 &if_true, &if_false, &fall_through);
4301
4302 VisitForAccumulatorValue(sub_expr);
4303 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
4304 Heap::RootListIndex nil_value = nil == kNullValue ?
4305 Heap::kNullValueRootIndex :
4306 Heap::kUndefinedValueRootIndex;
4307 __ LoadRoot(r1, nil_value);
4308 __ cmp(r0, r1);
4309 if (expr->op() == Token::EQ_STRICT) {
4310 Split(eq, if_true, if_false, fall_through);
4311 } else {
4312 Heap::RootListIndex other_nil_value = nil == kNullValue ?
4313 Heap::kUndefinedValueRootIndex :
4314 Heap::kNullValueRootIndex;
4315 __ b(eq, if_true);
4316 __ LoadRoot(r1, other_nil_value);
4317 __ cmp(r0, r1);
4318 __ b(eq, if_true);
4319 __ JumpIfSmi(r0, if_false);
4320 // It can be an undetectable object.
4321 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
4322 __ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
4323 __ and_(r1, r1, Operand(1 << Map::kIsUndetectable));
4324 __ cmp(r1, Operand(1 << Map::kIsUndetectable));
4325 Split(eq, if_true, if_false, fall_through);
4326 }
4327 context()->Plug(if_true, if_false);
4328 }
4329
4330
4331 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4332 __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4333 context()->Plug(r0);
4334 }
4335
4336
4337 Register FullCodeGenerator::result_register() {
4338 return r0;
4339 }
4340
4341
4342 Register FullCodeGenerator::context_register() {
4343 return cp;
4344 }
4345
4346
4347 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4348 ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4349 __ str(value, MemOperand(fp, frame_offset));
4350 }
4351
4352
4353 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4354 __ ldr(dst, ContextOperand(cp, context_index));
4355 }
4356
4357
4358 void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
4359 Scope* declaration_scope = scope()->DeclarationScope();
4360 if (declaration_scope->is_global_scope()) {
4361 // Contexts nested in the global context have a canonical empty function
4362 // as their closure, not the anonymous closure containing the global
4363 // code. Pass a smi sentinel and let the runtime look up the empty
4364 // function.
4365 __ mov(ip, Operand(Smi::FromInt(0)));
4366 } else if (declaration_scope->is_eval_scope()) {
4367 // Contexts created by a call to eval have the same closure as the
4368 // context calling eval, not the anonymous closure containing the eval
4369 // code. Fetch it from the context.
4370 __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
4371 } else {
4372 ASSERT(declaration_scope->is_function_scope());
4373 __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
4374 }
4375 __ push(ip);
4376 }
4377
4378
4379 // ----------------------------------------------------------------------------
4380 // Non-local control flow support.
4381
4382 void FullCodeGenerator::EnterFinallyBlock() {
4383 ASSERT(!result_register().is(r1));
4384 // Store result register while executing finally block.
4385 __ push(result_register());
4386 // Cook the return address in lr onto the stack (smi-encoded Code* delta).
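  // The cooked value is (lr - start of the code object) converted to a smi
  // (doubled below), so what lives on the stack is a GC-safe offset rather
  // than a raw return address.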
4387 __ sub(r1, lr, Operand(masm_->CodeObject()));
4388 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
4389 STATIC_ASSERT(kSmiTag == 0);
4390 __ add(r1, r1, Operand(r1)); // Convert to smi.
4391 __ push(r1);
4392 }
4393
4394
4395 void FullCodeGenerator::ExitFinallyBlock() {
4396 ASSERT(!result_register().is(r1));
4397 // Restore result register from stack.
4398 __ pop(r1);
4399 // Uncook return address and return.
4400 __ pop(result_register());
4401 ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
4402 __ mov(r1, Operand(r1, ASR, 1)); // Un-smi-tag value.
4403 __ add(pc, r1, Operand(masm_->CodeObject()));
4404 }
4405
4406
4407 #undef __
4408
4409 #define __ ACCESS_MASM(masm())
4410
4411 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
4412 int* stack_depth,
4413 int* context_length) {
4414 // The macros used here must preserve the result register.
4415
4416 // Because the handler block contains the context of the finally
4417 // code, we can restore it directly from there for the finally code
4418 // rather than iteratively unwinding contexts via their previous
4419 // links.
4420 __ Drop(*stack_depth); // Down to the handler block.
4421 if (*context_length > 0) {
4422 // Restore the context to its dedicated register and the stack.
4423 __ ldr(cp, MemOperand(sp, StackHandlerConstants::kContextOffset));
4424 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
4425 }
4426 __ PopTryHandler();
4427 __ bl(finally_entry_);
4428
4429 *stack_depth = 0;
4430 *context_length = 0;
4431 return previous_;
4432 }
4433
4434
4435 #undef __
4436
4437 } } // namespace v8::internal
4438
4439 #endif // V8_TARGET_ARCH_ARM
4440