1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "v8.h"
29
30 #if defined(V8_TARGET_ARCH_X64)
31
32 #include "code-stubs.h"
33 #include "codegen.h"
34 #include "compiler.h"
35 #include "debug.h"
36 #include "full-codegen.h"
37 #include "parser.h"
38 #include "scopes.h"
39 #include "stub-cache.h"
40
41 namespace v8 {
42 namespace internal {
43
44 #define __ ACCESS_MASM(masm_)
45
46
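// A patch site is a location in the generated code that the inline-cache
// machinery can later patch. EmitJumpIfNotSmi/EmitJumpIfSmi emit a testb
// (which always clears the carry flag) followed by a short jump on
// carry/not_carry, and EmitPatchInfo records the distance back to that
// jump so the IC code can find it and rewrite jc to jz (and jnc to jnz),
// turning the dummy branch into a real smi check.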
47 class JumpPatchSite BASE_EMBEDDED {
48 public:
49   explicit JumpPatchSite(MacroAssembler* masm)
50 : masm_(masm) {
51 #ifdef DEBUG
52 info_emitted_ = false;
53 #endif
54 }
55
56   ~JumpPatchSite() {
57 ASSERT(patch_site_.is_bound() == info_emitted_);
58 }
59
60   void EmitJumpIfNotSmi(Register reg, NearLabel* target) {
61 __ testb(reg, Immediate(kSmiTagMask));
62 EmitJump(not_carry, target); // Always taken before patched.
63 }
64
65   void EmitJumpIfSmi(Register reg, NearLabel* target) {
66 __ testb(reg, Immediate(kSmiTagMask));
67 EmitJump(carry, target); // Never taken before patched.
68 }
69
70   void EmitPatchInfo() {
71 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
72 ASSERT(is_int8(delta_to_patch_site));
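    // The testl below serves only as a marker: its 8-bit immediate encodes
    // the distance back to the patch site so the IC patching code can
    // locate the jump emitted by EmitJump.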
73 __ testl(rax, Immediate(delta_to_patch_site));
74 #ifdef DEBUG
75 info_emitted_ = true;
76 #endif
77 }
78
79   bool is_bound() const { return patch_site_.is_bound(); }
80
81 private:
82 // jc will be patched with jz, jnc will become jnz.
83   void EmitJump(Condition cc, NearLabel* target) {
84 ASSERT(!patch_site_.is_bound() && !info_emitted_);
85 ASSERT(cc == carry || cc == not_carry);
86 __ bind(&patch_site_);
87 __ j(cc, target);
88 }
89
90 MacroAssembler* masm_;
91 Label patch_site_;
92 #ifdef DEBUG
93 bool info_emitted_;
94 #endif
95 };
96
97
98 // Generate code for a JS function. On entry to the function the receiver
99 // and arguments have been pushed on the stack left to right, with the
100 // return address on top of them. The actual argument count matches the
101 // formal parameter count expected by the function.
102 //
103 // The live registers are:
104 // o rdi: the JS function object being called (i.e. ourselves)
105 // o rsi: our context
106 // o rbp: our caller's frame pointer
107 // o rsp: stack pointer (pointing to return address)
108 //
109 // The function builds a JS frame. Please see JavaScriptFrameConstants in
110 // frames-x64.h for its layout.
111 void FullCodeGenerator::Generate(CompilationInfo* info) {
112 ASSERT(info_ == NULL);
113 info_ = info;
114 SetFunctionPosition(function());
115 Comment cmnt(masm_, "[ function compiled by full code generator");
116
117 #ifdef DEBUG
118 if (strlen(FLAG_stop_at) > 0 &&
119 info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
120 __ int3();
121 }
122 #endif
123 __ push(rbp); // Caller's frame pointer.
124 __ movq(rbp, rsp);
125 __ push(rsi); // Callee's context.
126 __ push(rdi); // Callee's JS Function.
127
128 { Comment cmnt(masm_, "[ Allocate locals");
129 int locals_count = scope()->num_stack_slots();
130 if (locals_count == 1) {
131 __ PushRoot(Heap::kUndefinedValueRootIndex);
132 } else if (locals_count > 1) {
133 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
134 for (int i = 0; i < locals_count; i++) {
135 __ push(rdx);
136 }
137 }
138 }
139
140 bool function_in_register = true;
141
142 // Possibly allocate a local context.
143 int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
144 if (heap_slots > 0) {
145 Comment cmnt(masm_, "[ Allocate local context");
146 // Argument to NewContext is the function, which is still in rdi.
147 __ push(rdi);
148 if (heap_slots <= FastNewContextStub::kMaximumSlots) {
149 FastNewContextStub stub(heap_slots);
150 __ CallStub(&stub);
151 } else {
152 __ CallRuntime(Runtime::kNewContext, 1);
153 }
154 function_in_register = false;
155 // Context is returned in both rax and rsi. It replaces the context
156 // passed to us. It's saved in the stack and kept live in rsi.
157 __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
158
159 // Copy any necessary parameters into the context.
160 int num_parameters = scope()->num_parameters();
161 for (int i = 0; i < num_parameters; i++) {
162 Slot* slot = scope()->parameter(i)->AsSlot();
163 if (slot != NULL && slot->type() == Slot::CONTEXT) {
164 int parameter_offset = StandardFrameConstants::kCallerSPOffset +
165 (num_parameters - 1 - i) * kPointerSize;
166 // Load parameter from stack.
167 __ movq(rax, Operand(rbp, parameter_offset));
168 // Store it in the context.
169 int context_offset = Context::SlotOffset(slot->index());
170 __ movq(Operand(rsi, context_offset), rax);
171 // Update the write barrier. This clobbers all involved
172 // registers, so we have to use a third register to avoid
173 // clobbering rsi.
174 __ movq(rcx, rsi);
175 __ RecordWrite(rcx, context_offset, rax, rbx);
176 }
177 }
178 }
179
180 // Possibly allocate an arguments object.
181 Variable* arguments = scope()->arguments();
182 if (arguments != NULL) {
183 // Arguments object must be allocated after the context object, in
184 // case the "arguments" or ".arguments" variables are in the context.
185 Comment cmnt(masm_, "[ Allocate arguments object");
186 if (function_in_register) {
187 __ push(rdi);
188 } else {
189 __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
190 }
191 // The receiver is just before the parameters on the caller's stack.
192 int offset = scope()->num_parameters() * kPointerSize;
193 __ lea(rdx,
194 Operand(rbp, StandardFrameConstants::kCallerSPOffset + offset));
195 __ push(rdx);
196 __ Push(Smi::FromInt(scope()->num_parameters()));
197 // Arguments to ArgumentsAccessStub:
198 // function, receiver address, parameter count.
199 // The stub will rewrite receiver and parameter count if the previous
200 // stack frame was an arguments adapter frame.
201 ArgumentsAccessStub stub(
202 is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT
203 : ArgumentsAccessStub::NEW_NON_STRICT);
204 __ CallStub(&stub);
205
206 Variable* arguments_shadow = scope()->arguments_shadow();
207 if (arguments_shadow != NULL) {
208 // Store new arguments object in both "arguments" and ".arguments" slots.
209 __ movq(rcx, rax);
210 Move(arguments_shadow->AsSlot(), rcx, rbx, rdx);
211 }
212 Move(arguments->AsSlot(), rax, rbx, rdx);
213 }
214
215 if (FLAG_trace) {
216 __ CallRuntime(Runtime::kTraceEnter, 0);
217 }
218
219 // Visit the declarations and body unless there is an illegal
220 // redeclaration.
221 if (scope()->HasIllegalRedeclaration()) {
222 Comment cmnt(masm_, "[ Declarations");
223 scope()->VisitIllegalRedeclaration(this);
224 } else {
225 { Comment cmnt(masm_, "[ Declarations");
226 // For named function expressions, declare the function name as a
227 // constant.
228 if (scope()->is_function_scope() && scope()->function() != NULL) {
229 EmitDeclaration(scope()->function(), Variable::CONST, NULL);
230 }
231 VisitDeclarations(scope()->declarations());
232 }
233
234 { Comment cmnt(masm_, "[ Stack check");
235 PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
236 NearLabel ok;
237 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
238 __ j(above_equal, &ok);
239 StackCheckStub stub;
240 __ CallStub(&stub);
241 __ bind(&ok);
242 }
243
244 { Comment cmnt(masm_, "[ Body");
245 ASSERT(loop_depth() == 0);
246 VisitStatements(function()->body());
247 ASSERT(loop_depth() == 0);
248 }
249 }
250
251 // Always emit a 'return undefined' in case control fell off the end of
252 // the body.
253 { Comment cmnt(masm_, "[ return <undefined>;");
254 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
255 EmitReturnSequence();
256 }
257 }
258
259
260 void FullCodeGenerator::ClearAccumulator() {
261 __ Set(rax, 0);
262 }
263
264
265 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
266 Comment cmnt(masm_, "[ Stack check");
267 NearLabel ok;
268 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
269 __ j(above_equal, &ok);
270 StackCheckStub stub;
271 __ CallStub(&stub);
272 // Record a mapping of this PC offset to the OSR id. This is used to find
273 // the AST id from the unoptimized code in order to use it as a key into
274 // the deoptimization input data found in the optimized code.
275 RecordStackCheck(stmt->OsrEntryId());
276
277 // Loop stack checks can be patched to perform on-stack replacement. In
278 // order to decide whether or not to perform OSR we embed the loop depth
279 // in a test instruction after the call so we can extract it from the OSR
280 // builtin.
281 ASSERT(loop_depth() > 0);
282 __ testl(rax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
283
284 __ bind(&ok);
285 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
286 // Record a mapping of the OSR id to this PC. This is used if the OSR
287 // entry becomes the target of a bailout. We don't expect it to be, but
288 // we want it to work if it is.
289 PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
290 }
291
292
293 void FullCodeGenerator::EmitReturnSequence() {
294 Comment cmnt(masm_, "[ Return sequence");
295 if (return_label_.is_bound()) {
296 __ jmp(&return_label_);
297 } else {
298 __ bind(&return_label_);
299 if (FLAG_trace) {
300 __ push(rax);
301 __ CallRuntime(Runtime::kTraceExit, 1);
302 }
303 #ifdef DEBUG
304 // Add a label for checking the size of the code used for returning.
305 Label check_exit_codesize;
306 masm_->bind(&check_exit_codesize);
307 #endif
308 CodeGenerator::RecordPositions(masm_, function()->end_position() - 1);
309 __ RecordJSReturn();
310 // Do not use the leave instruction here because it is too short to
311 // patch with the code required by the debugger.
312 __ movq(rsp, rbp);
313 __ pop(rbp);
314
315 int arguments_bytes = (scope()->num_parameters() + 1) * kPointerSize;
316 __ Ret(arguments_bytes, rcx);
317
318 #ifdef ENABLE_DEBUGGER_SUPPORT
319 // Add padding that will be overwritten by a debugger breakpoint. We
320 // have just generated at least 7 bytes: "movq rsp, rbp; pop rbp; ret k"
321 // (3 + 1 + 3).
322 const int kPadding = Assembler::kJSReturnSequenceLength - 7;
323 for (int i = 0; i < kPadding; ++i) {
324 masm_->int3();
325 }
326 // Check that the size of the code used for returning is large enough
327 // for the debugger's requirements.
328 ASSERT(Assembler::kJSReturnSequenceLength <=
329 masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
330 #endif
331 }
332 }
333
334
335 void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
336 }
337
338
339 void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const {
340 MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register());
341 __ movq(result_register(), slot_operand);
342 }
343
344
345 void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
346 MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register());
347 __ push(slot_operand);
348 }
349
350
351 void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
352 codegen()->Move(result_register(), slot);
353 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
354 codegen()->DoTest(true_label_, false_label_, fall_through_);
355 }
356
357
358 void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
359 }
360
361
362 void FullCodeGenerator::AccumulatorValueContext::Plug(
363 Heap::RootListIndex index) const {
364 __ LoadRoot(result_register(), index);
365 }
366
367
368 void FullCodeGenerator::StackValueContext::Plug(
369 Heap::RootListIndex index) const {
370 __ PushRoot(index);
371 }
372
373
374 void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
375 codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
376 true,
377 true_label_,
378 false_label_);
379 if (index == Heap::kUndefinedValueRootIndex ||
380 index == Heap::kNullValueRootIndex ||
381 index == Heap::kFalseValueRootIndex) {
382 if (false_label_ != fall_through_) __ jmp(false_label_);
383 } else if (index == Heap::kTrueValueRootIndex) {
384 if (true_label_ != fall_through_) __ jmp(true_label_);
385 } else {
386 __ LoadRoot(result_register(), index);
387 codegen()->DoTest(true_label_, false_label_, fall_through_);
388 }
389 }
390
391
392 void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
393 }
394
395
396 void FullCodeGenerator::AccumulatorValueContext::Plug(
397 Handle<Object> lit) const {
398 __ Move(result_register(), lit);
399 }
400
401
402 void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
403 __ Push(lit);
404 }
405
406
407 void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
408 codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
409 true,
410 true_label_,
411 false_label_);
412 ASSERT(!lit->IsUndetectableObject()); // There are no undetectable literals.
413 if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
414 if (false_label_ != fall_through_) __ jmp(false_label_);
415 } else if (lit->IsTrue() || lit->IsJSObject()) {
416 if (true_label_ != fall_through_) __ jmp(true_label_);
417 } else if (lit->IsString()) {
418 if (String::cast(*lit)->length() == 0) {
419 if (false_label_ != fall_through_) __ jmp(false_label_);
420 } else {
421 if (true_label_ != fall_through_) __ jmp(true_label_);
422 }
423 } else if (lit->IsSmi()) {
424 if (Smi::cast(*lit)->value() == 0) {
425 if (false_label_ != fall_through_) __ jmp(false_label_);
426 } else {
427 if (true_label_ != fall_through_) __ jmp(true_label_);
428 }
429 } else {
430 // For simplicity we always test the accumulator register.
431 __ Move(result_register(), lit);
432 codegen()->DoTest(true_label_, false_label_, fall_through_);
433 }
434 }
435
436
437 void FullCodeGenerator::EffectContext::DropAndPlug(int count,
438 Register reg) const {
439 ASSERT(count > 0);
440 __ Drop(count);
441 }
442
443
444 void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
445 int count,
446 Register reg) const {
447 ASSERT(count > 0);
448 __ Drop(count);
449 __ Move(result_register(), reg);
450 }
451
452
453 void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
454 Register reg) const {
455 ASSERT(count > 0);
456 if (count > 1) __ Drop(count - 1);
457 __ movq(Operand(rsp, 0), reg);
458 }
459
460
461 void FullCodeGenerator::TestContext::DropAndPlug(int count,
462 Register reg) const {
463 ASSERT(count > 0);
464 // For simplicity we always test the accumulator register.
465 __ Drop(count);
466 __ Move(result_register(), reg);
467 codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
468 codegen()->DoTest(true_label_, false_label_, fall_through_);
469 }
470
471
472 void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
473 Label* materialize_false) const {
474 ASSERT(materialize_true == materialize_false);
475 __ bind(materialize_true);
476 }
477
478
479 void FullCodeGenerator::AccumulatorValueContext::Plug(
480 Label* materialize_true,
481 Label* materialize_false) const {
482 NearLabel done;
483 __ bind(materialize_true);
484 __ Move(result_register(), isolate()->factory()->true_value());
485 __ jmp(&done);
486 __ bind(materialize_false);
487 __ Move(result_register(), isolate()->factory()->false_value());
488 __ bind(&done);
489 }
490
491
492 void FullCodeGenerator::StackValueContext::Plug(
493 Label* materialize_true,
494 Label* materialize_false) const {
495 NearLabel done;
496 __ bind(materialize_true);
497 __ Push(isolate()->factory()->true_value());
498 __ jmp(&done);
499 __ bind(materialize_false);
500 __ Push(isolate()->factory()->false_value());
501 __ bind(&done);
502 }
503
504
505 void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
506 Label* materialize_false) const {
507 ASSERT(materialize_true == true_label_);
508 ASSERT(materialize_false == false_label_);
509 }
510
511
512 void FullCodeGenerator::EffectContext::Plug(bool flag) const {
513 }
514
515
516 void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
517 Heap::RootListIndex value_root_index =
518 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
519 __ LoadRoot(result_register(), value_root_index);
520 }
521
522
523 void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
524 Heap::RootListIndex value_root_index =
525 flag ? Heap::kTrueValueRootIndex : Heap::kFalseValueRootIndex;
526 __ PushRoot(value_root_index);
527 }
528
529
530 void FullCodeGenerator::TestContext::Plug(bool flag) const {
531 codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
532 true,
533 true_label_,
534 false_label_);
535 if (flag) {
536 if (true_label_ != fall_through_) __ jmp(true_label_);
537 } else {
538 if (false_label_ != fall_through_) __ jmp(false_label_);
539 }
540 }
541
542
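// Branches on the value in the accumulator interpreted as a boolean:
// undefined, false and the smi zero go to if_false, true and non-zero smis
// go to if_true, and all remaining values fall back to the generic
// ToBooleanStub.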
543 void FullCodeGenerator::DoTest(Label* if_true,
544 Label* if_false,
545 Label* fall_through) {
546 // Emit the inlined tests assumed by the stub.
547 __ CompareRoot(result_register(), Heap::kUndefinedValueRootIndex);
548 __ j(equal, if_false);
549 __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
550 __ j(equal, if_true);
551 __ CompareRoot(result_register(), Heap::kFalseValueRootIndex);
552 __ j(equal, if_false);
553 STATIC_ASSERT(kSmiTag == 0);
554 __ Cmp(result_register(), Smi::FromInt(0));
555 __ j(equal, if_false);
556 Condition is_smi = masm_->CheckSmi(result_register());
557 __ j(is_smi, if_true);
558
559 // Call the ToBoolean stub for all other cases.
560 ToBooleanStub stub;
561 __ push(result_register());
562 __ CallStub(&stub);
563 __ testq(rax, rax);
564
565 // The stub returns nonzero for true.
566 Split(not_zero, if_true, if_false, fall_through);
567 }
568
569
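// Branches on condition cc to if_true or if_false, omitting the jump to
// whichever label is the fall-through.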
570 void FullCodeGenerator::Split(Condition cc,
571 Label* if_true,
572 Label* if_false,
573 Label* fall_through) {
574 if (if_false == fall_through) {
575 __ j(cc, if_true);
576 } else if (if_true == fall_through) {
577 __ j(NegateCondition(cc), if_false);
578 } else {
579 __ j(cc, if_true);
580 __ jmp(if_false);
581 }
582 }
583
584
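// Returns a memory operand addressing the given slot: frame-relative for
// parameters and locals, or relative to |scratch| (which receives the
// owning context) for context slots. LOOKUP slots are never handled here.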
585 MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
586 switch (slot->type()) {
587 case Slot::PARAMETER:
588 case Slot::LOCAL:
589 return Operand(rbp, SlotOffset(slot));
590 case Slot::CONTEXT: {
591 int context_chain_length =
592 scope()->ContextChainLength(slot->var()->scope());
593 __ LoadContext(scratch, context_chain_length);
594 return ContextOperand(scratch, slot->index());
595 }
596 case Slot::LOOKUP:
597 UNREACHABLE();
598 }
599 UNREACHABLE();
600 return Operand(rax, 0);
601 }
602
603
604 void FullCodeGenerator::Move(Register destination, Slot* source) {
605 MemOperand location = EmitSlotSearch(source, destination);
606 __ movq(destination, location);
607 }
608
609
610 void FullCodeGenerator::Move(Slot* dst,
611 Register src,
612 Register scratch1,
613 Register scratch2) {
614 ASSERT(dst->type() != Slot::LOOKUP); // Not yet implemented.
615 ASSERT(!scratch1.is(src) && !scratch2.is(src));
616 MemOperand location = EmitSlotSearch(dst, scratch1);
617 __ movq(location, src);
618 // Emit the write barrier code if the location is in the heap.
619 if (dst->type() == Slot::CONTEXT) {
620 int offset = FixedArray::kHeaderSize + dst->index() * kPointerSize;
621 __ RecordWrite(scratch1, offset, src, scratch2);
622 }
623 }
624
625
626 void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
627 bool should_normalize,
628 Label* if_true,
629 Label* if_false) {
630 // Only prepare for bailouts before splits if we're in a test
631 // context. Otherwise, we let the Visit function deal with the
632 // preparation to avoid preparing with the same AST id twice.
633 if (!context()->IsTest() || !info_->IsOptimizable()) return;
634
635 NearLabel skip;
636 if (should_normalize) __ jmp(&skip);
637
638 ForwardBailoutStack* current = forward_bailout_stack_;
639 while (current != NULL) {
640 PrepareForBailout(current->expr(), state);
641 current = current->parent();
642 }
643
644 if (should_normalize) {
645 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
646 Split(equal, if_true, if_false, NULL);
647 __ bind(&skip);
648 }
649 }
650
651
652 void FullCodeGenerator::EmitDeclaration(Variable* variable,
653 Variable::Mode mode,
654 FunctionLiteral* function) {
655 Comment cmnt(masm_, "[ Declaration");
656 ASSERT(variable != NULL); // Must have been resolved.
657 Slot* slot = variable->AsSlot();
658 Property* prop = variable->AsProperty();
659
660 if (slot != NULL) {
661 switch (slot->type()) {
662 case Slot::PARAMETER:
663 case Slot::LOCAL:
664 if (mode == Variable::CONST) {
665 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
666 __ movq(Operand(rbp, SlotOffset(slot)), kScratchRegister);
667 } else if (function != NULL) {
668 VisitForAccumulatorValue(function);
669 __ movq(Operand(rbp, SlotOffset(slot)), result_register());
670 }
671 break;
672
673 case Slot::CONTEXT:
674 // We bypass the general EmitSlotSearch because we know more about
675 // this specific context.
676
677 // The variable in the decl always resides in the current context.
678 ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
679 if (FLAG_debug_code) {
680 // Check if we have the correct context pointer.
681 __ movq(rbx, ContextOperand(rsi, Context::FCONTEXT_INDEX));
682 __ cmpq(rbx, rsi);
683 __ Check(equal, "Unexpected declaration in current context.");
684 }
685 if (mode == Variable::CONST) {
686 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
687 __ movq(ContextOperand(rsi, slot->index()), kScratchRegister);
688 // No write barrier since the hole value is in old space.
689 } else if (function != NULL) {
690 VisitForAccumulatorValue(function);
691 __ movq(ContextOperand(rsi, slot->index()), result_register());
692 int offset = Context::SlotOffset(slot->index());
693 __ movq(rbx, rsi);
694 __ RecordWrite(rbx, offset, result_register(), rcx);
695 }
696 break;
697
698 case Slot::LOOKUP: {
699 __ push(rsi);
700 __ Push(variable->name());
701 // Declaration nodes are always introduced in one of two modes.
702 ASSERT(mode == Variable::VAR || mode == Variable::CONST);
703 PropertyAttributes attr = (mode == Variable::VAR) ? NONE : READ_ONLY;
704 __ Push(Smi::FromInt(attr));
705 // Push initial value, if any.
706 // Note: For variables we must not push an initial value (such as
707 // 'undefined') because we may have a (legal) redeclaration and we
708 // must not destroy the current value.
709 if (mode == Variable::CONST) {
710 __ PushRoot(Heap::kTheHoleValueRootIndex);
711 } else if (function != NULL) {
712 VisitForStackValue(function);
713 } else {
714 __ Push(Smi::FromInt(0)); // no initial value!
715 }
716 __ CallRuntime(Runtime::kDeclareContextSlot, 4);
717 break;
718 }
719 }
720
721 } else if (prop != NULL) {
722 if (function != NULL || mode == Variable::CONST) {
723 // We are declaring a function or constant that rewrites to a
724 // property. Use (keyed) IC to set the initial value. We
725 // cannot visit the rewrite because it's shared and we risk
726 // recording duplicate AST IDs for bailouts from optimized code.
727 ASSERT(prop->obj()->AsVariableProxy() != NULL);
728 { AccumulatorValueContext for_object(this);
729 EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
730 }
731 if (function != NULL) {
732 __ push(rax);
733 VisitForAccumulatorValue(function);
734 __ pop(rdx);
735 } else {
736 __ movq(rdx, rax);
737 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
738 }
739 ASSERT(prop->key()->AsLiteral() != NULL &&
740 prop->key()->AsLiteral()->handle()->IsSmi());
741 __ Move(rcx, prop->key()->AsLiteral()->handle());
742
743 Handle<Code> ic = is_strict_mode()
744 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
745 : isolate()->builtins()->KeyedStoreIC_Initialize();
746 EmitCallIC(ic, RelocInfo::CODE_TARGET);
747 }
748 }
749 }
750
751
752 void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
753 EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun());
754 }
755
756
757 void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
758 // Call the runtime to declare the globals.
759 __ push(rsi); // The context is the first argument.
760 __ Push(pairs);
761 __ Push(Smi::FromInt(is_eval() ? 1 : 0));
762 __ Push(Smi::FromInt(strict_mode_flag()));
763 __ CallRuntime(Runtime::kDeclareGlobals, 4);
764 // Return value is ignored.
765 }
766
767
768 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
769 Comment cmnt(masm_, "[ SwitchStatement");
770 Breakable nested_statement(this, stmt);
771 SetStatementPosition(stmt);
772
773 // Keep the switch value on the stack until a case matches.
774 VisitForStackValue(stmt->tag());
775 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
776
777 ZoneList<CaseClause*>* clauses = stmt->cases();
778 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
779
780 Label next_test; // Recycled for each test.
781 // Compile all the tests with branches to their bodies.
782 for (int i = 0; i < clauses->length(); i++) {
783 CaseClause* clause = clauses->at(i);
784 clause->body_target()->Unuse();
785
786 // The default is not a test, but remember it as final fall through.
787 if (clause->is_default()) {
788 default_clause = clause;
789 continue;
790 }
791
792 Comment cmnt(masm_, "[ Case comparison");
793 __ bind(&next_test);
794 next_test.Unuse();
795
796 // Compile the label expression.
797 VisitForAccumulatorValue(clause->label());
798
799 // Perform the comparison as if via '==='.
800 __ movq(rdx, Operand(rsp, 0)); // Switch value.
801 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
802 JumpPatchSite patch_site(masm_);
803 if (inline_smi_code) {
804 NearLabel slow_case;
805 __ movq(rcx, rdx);
806 __ or_(rcx, rax);
807 patch_site.EmitJumpIfNotSmi(rcx, &slow_case);
808
809 __ cmpq(rdx, rax);
810 __ j(not_equal, &next_test);
811 __ Drop(1); // Switch value is no longer needed.
812 __ jmp(clause->body_target());
813 __ bind(&slow_case);
814 }
815
816 // Record position before stub call for type feedback.
817 SetSourcePosition(clause->position());
818 Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
819 EmitCallIC(ic, &patch_site);
820
821 __ testq(rax, rax);
822 __ j(not_equal, &next_test);
823 __ Drop(1); // Switch value is no longer needed.
824 __ jmp(clause->body_target());
825 }
826
827 // Discard the test value and jump to the default if present, otherwise to
828 // the end of the statement.
829 __ bind(&next_test);
830 __ Drop(1); // Switch value is no longer needed.
831 if (default_clause == NULL) {
832 __ jmp(nested_statement.break_target());
833 } else {
834 __ jmp(default_clause->body_target());
835 }
836
837 // Compile all the case bodies.
838 for (int i = 0; i < clauses->length(); i++) {
839 Comment cmnt(masm_, "[ Case body");
840 CaseClause* clause = clauses->at(i);
841 __ bind(clause->body_target());
842 PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
843 VisitStatements(clause->statements());
844 }
845
846 __ bind(nested_statement.break_target());
847 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
848 }
849
850
851 void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
852 Comment cmnt(masm_, "[ ForInStatement");
853 SetStatementPosition(stmt);
854
855 Label loop, exit;
856 ForIn loop_statement(this, stmt);
857 increment_loop_depth();
858
859 // Get the object to enumerate over. Both SpiderMonkey and JSC
860 // ignore null and undefined in contrast to the specification; see
861 // ECMA-262 section 12.6.4.
862 VisitForAccumulatorValue(stmt->enumerable());
863 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
864 __ j(equal, &exit);
865 Register null_value = rdi;
866 __ LoadRoot(null_value, Heap::kNullValueRootIndex);
867 __ cmpq(rax, null_value);
868 __ j(equal, &exit);
869
870 // Convert the object to a JS object.
871 Label convert, done_convert;
872 __ JumpIfSmi(rax, &convert);
873 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx);
874 __ j(above_equal, &done_convert);
875 __ bind(&convert);
876 __ push(rax);
877 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
878 __ bind(&done_convert);
879 __ push(rax);
880
881 // Check cache validity in generated code. This is a fast case for
882 // the JSObject::IsSimpleEnum cache validity checks. If we cannot
883 // guarantee cache validity, call the runtime system to check cache
884 // validity or get the property names in a fixed array.
885 Label next, call_runtime;
886 Register empty_fixed_array_value = r8;
887 __ LoadRoot(empty_fixed_array_value, Heap::kEmptyFixedArrayRootIndex);
888 Register empty_descriptor_array_value = r9;
889 __ LoadRoot(empty_descriptor_array_value,
890 Heap::kEmptyDescriptorArrayRootIndex);
891 __ movq(rcx, rax);
892 __ bind(&next);
893
894 // Check that there are no elements. Register rcx contains the
895 // current JS object we've reached through the prototype chain.
896 __ cmpq(empty_fixed_array_value,
897 FieldOperand(rcx, JSObject::kElementsOffset));
898 __ j(not_equal, &call_runtime);
899
900 // Check that instance descriptors are not empty so that we can
901 // check for an enum cache. Leave the map in rbx for the subsequent
902 // prototype load.
903 __ movq(rbx, FieldOperand(rcx, HeapObject::kMapOffset));
904 __ movq(rdx, FieldOperand(rbx, Map::kInstanceDescriptorsOffset));
905 __ cmpq(rdx, empty_descriptor_array_value);
906 __ j(equal, &call_runtime);
907
908 // Check that there is an enum cache in the non-empty instance
909 // descriptors (rdx). This is the case if the next enumeration
910 // index field does not contain a smi.
911 __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumerationIndexOffset));
912 __ JumpIfSmi(rdx, &call_runtime);
913
914 // For all objects but the receiver, check that the cache is empty.
915 NearLabel check_prototype;
916 __ cmpq(rcx, rax);
917 __ j(equal, &check_prototype);
918 __ movq(rdx, FieldOperand(rdx, DescriptorArray::kEnumCacheBridgeCacheOffset));
919 __ cmpq(rdx, empty_fixed_array_value);
920 __ j(not_equal, &call_runtime);
921
922 // Load the prototype from the map and loop if non-null.
923 __ bind(&check_prototype);
924 __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
925 __ cmpq(rcx, null_value);
926 __ j(not_equal, &next);
927
928 // The enum cache is valid. Load the map of the object being
929 // iterated over and use the cache for the iteration.
930 NearLabel use_cache;
931 __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
932 __ jmp(&use_cache);
933
934 // Get the set of properties to enumerate.
935 __ bind(&call_runtime);
936 __ push(rax); // Duplicate the enumerable object on the stack.
937 __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
938
939 // If we got a map from the runtime call, we can do a fast
940 // modification check. Otherwise, we got a fixed array, and we have
941 // to do a slow check.
942 NearLabel fixed_array;
943 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
944 Heap::kMetaMapRootIndex);
945 __ j(not_equal, &fixed_array);
946
947 // We got a map in register rax. Get the enumeration cache from it.
948 __ bind(&use_cache);
949 __ movq(rcx, FieldOperand(rax, Map::kInstanceDescriptorsOffset));
950 __ movq(rcx, FieldOperand(rcx, DescriptorArray::kEnumerationIndexOffset));
951 __ movq(rdx, FieldOperand(rcx, DescriptorArray::kEnumCacheBridgeCacheOffset));
952
953 // Set up the four remaining stack slots.
954 __ push(rax); // Map.
955 __ push(rdx); // Enumeration cache.
956 __ movq(rax, FieldOperand(rdx, FixedArray::kLengthOffset));
957 __ push(rax); // Enumeration cache length (as smi).
958 __ Push(Smi::FromInt(0)); // Initial index.
959 __ jmp(&loop);
960
961 // We got a fixed array in register rax. Iterate through that.
962 __ bind(&fixed_array);
963 __ Push(Smi::FromInt(0)); // Map (0) - force slow check.
964 __ push(rax);
965 __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset));
966 __ push(rax); // Fixed array length (as smi).
967 __ Push(Smi::FromInt(0)); // Initial index.
968
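  // At this point the for-in state on the stack is, from the bottom up:
  // the enumerable object, the map (or smi 0 in the slow case), the enum
  // cache (or fixed array of keys), its length (smi), and the current
  // index (smi). These are the five slots removed at break_target below.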
969 // Generate code for doing the condition check.
970 __ bind(&loop);
971 __ movq(rax, Operand(rsp, 0 * kPointerSize)); // Get the current index.
972 __ cmpq(rax, Operand(rsp, 1 * kPointerSize)); // Compare to the array length.
973 __ j(above_equal, loop_statement.break_target());
974
975 // Get the current entry of the array into register rbx.
976 __ movq(rbx, Operand(rsp, 2 * kPointerSize));
977 SmiIndex index = masm()->SmiToIndex(rax, rax, kPointerSizeLog2);
978 __ movq(rbx, FieldOperand(rbx,
979 index.reg,
980 index.scale,
981 FixedArray::kHeaderSize));
982
983 // Get the expected map from the stack or a zero map in the
984 // permanent slow case into register rdx.
985 __ movq(rdx, Operand(rsp, 3 * kPointerSize));
986
987 // Check if the expected map still matches that of the enumerable.
988 // If not, we have to filter the key.
989 NearLabel update_each;
990 __ movq(rcx, Operand(rsp, 4 * kPointerSize));
991 __ cmpq(rdx, FieldOperand(rcx, HeapObject::kMapOffset));
992 __ j(equal, &update_each);
993
994 // Convert the entry to a string or null if it isn't a property
995 // anymore. If the property has been removed while iterating, we
996 // just skip it.
997 __ push(rcx); // Enumerable.
998 __ push(rbx); // Current entry.
999 __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
1000 __ Cmp(rax, Smi::FromInt(0));
1001 __ j(equal, loop_statement.continue_target());
1002 __ movq(rbx, rax);
1003
1004 // Update the 'each' property or variable from the possibly filtered
1005 // entry in register rbx.
1006 __ bind(&update_each);
1007 __ movq(result_register(), rbx);
1008 // Perform the assignment as if via '='.
1009 { EffectContext context(this);
1010 EmitAssignment(stmt->each(), stmt->AssignmentId());
1011 }
1012
1013 // Generate code for the body of the loop.
1014 Visit(stmt->body());
1015
1016 // Generate code for going to the next element by incrementing the
1017 // index (smi) stored on top of the stack.
1018 __ bind(loop_statement.continue_target());
1019 __ SmiAddConstant(Operand(rsp, 0 * kPointerSize), Smi::FromInt(1));
1020
1021 EmitStackCheck(stmt);
1022 __ jmp(&loop);
1023
1024 // Remove the pointers stored on the stack.
1025 __ bind(loop_statement.break_target());
1026 __ addq(rsp, Immediate(5 * kPointerSize));
1027
1028 // Exit and decrement the loop depth.
1029 __ bind(&exit);
1030 decrement_loop_depth();
1031 }
1032
1033
1034 void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
1035 bool pretenure) {
1036 // Use the fast case closure allocation code that allocates in new
1037 // space for nested functions that don't need literals cloning. If
1038 // we're running with the --always-opt or the --prepare-always-opt
1039 // flag, we need to use the runtime function so that the new function
1040 // we are creating here gets a chance to have its code optimized and
1041 // doesn't just get a copy of the existing unoptimized code.
1042 if (!FLAG_always_opt &&
1043 !FLAG_prepare_always_opt &&
1044 !pretenure &&
1045 scope()->is_function_scope() &&
1046 info->num_literals() == 0) {
1047 FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
1048 __ Push(info);
1049 __ CallStub(&stub);
1050 } else {
1051 __ push(rsi);
1052 __ Push(info);
1053 __ Push(pretenure
1054 ? isolate()->factory()->true_value()
1055 : isolate()->factory()->false_value());
1056 __ CallRuntime(Runtime::kNewClosure, 3);
1057 }
1058 context()->Plug(rax);
1059 }
1060
1061
1062 void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
1063 Comment cmnt(masm_, "[ VariableProxy");
1064 EmitVariableLoad(expr->var());
1065 }
1066
1067
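// Loads a dynamically-scoped global variable: walks the context chain,
// checking in every scope that calls eval that no context extension object
// has been introduced (jumping to |slow| if one is found), and then loads
// the variable with a contextual load IC.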
1068 void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
1069 Slot* slot,
1070 TypeofState typeof_state,
1071 Label* slow) {
1072 Register context = rsi;
1073 Register temp = rdx;
1074
1075 Scope* s = scope();
1076 while (s != NULL) {
1077 if (s->num_heap_slots() > 0) {
1078 if (s->calls_eval()) {
1079 // Check that extension is NULL.
1080 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
1081 Immediate(0));
1082 __ j(not_equal, slow);
1083 }
1084 // Load next context in chain.
1085 __ movq(temp, ContextOperand(context, Context::CLOSURE_INDEX));
1086 __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
1087 // Walk the rest of the chain without clobbering rsi.
1088 context = temp;
1089 }
1090 // If no outer scope calls eval, we do not need to check more
1091 // context extensions. If we have reached an eval scope, we check
1092 // all extensions from this point.
1093 if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
1094 s = s->outer_scope();
1095 }
1096
1097 if (s != NULL && s->is_eval_scope()) {
1098 // Loop up the context chain. There is no frame effect so it is
1099 // safe to use raw labels here.
1100 NearLabel next, fast;
1101 if (!context.is(temp)) {
1102 __ movq(temp, context);
1103 }
1104 // Load map for comparison into register, outside loop.
1105 __ LoadRoot(kScratchRegister, Heap::kGlobalContextMapRootIndex);
1106 __ bind(&next);
1107 // Terminate at global context.
1108 __ cmpq(kScratchRegister, FieldOperand(temp, HeapObject::kMapOffset));
1109 __ j(equal, &fast);
1110 // Check that extension is NULL.
1111 __ cmpq(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
1112 __ j(not_equal, slow);
1113 // Load next context in chain.
1114 __ movq(temp, ContextOperand(temp, Context::CLOSURE_INDEX));
1115 __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
1116 __ jmp(&next);
1117 __ bind(&fast);
1118 }
1119
1120 // All extension objects were empty and it is safe to use a global
1121 // load IC call.
1122 __ movq(rax, GlobalObjectOperand());
1123 __ Move(rcx, slot->var()->name());
1124 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1125 RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
1126 ? RelocInfo::CODE_TARGET
1127 : RelocInfo::CODE_TARGET_CONTEXT;
1128 EmitCallIC(ic, mode);
1129 }
1130
1131
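// Like EmitSlotSearch for context slots, but additionally checks on the way
// to the slot's context that no eval-introduced extension object is present,
// jumping to |slow| if one is found.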
1132 MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
1133 Slot* slot,
1134 Label* slow) {
1135 ASSERT(slot->type() == Slot::CONTEXT);
1136 Register context = rsi;
1137 Register temp = rbx;
1138
1139 for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
1140 if (s->num_heap_slots() > 0) {
1141 if (s->calls_eval()) {
1142 // Check that extension is NULL.
1143 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX),
1144 Immediate(0));
1145 __ j(not_equal, slow);
1146 }
1147 __ movq(temp, ContextOperand(context, Context::CLOSURE_INDEX));
1148 __ movq(temp, FieldOperand(temp, JSFunction::kContextOffset));
1149 // Walk the rest of the chain without clobbering rsi.
1150 context = temp;
1151 }
1152 }
1153 // Check that last extension is NULL.
1154 __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
1155 __ j(not_equal, slow);
1156
1157 // This function is used only for loads, not stores, so it's safe to
1158 // return an rsi-based operand (the write barrier cannot be allowed to
1159 // destroy the rsi register).
1160 return ContextOperand(context, slot->index());
1161 }
1162
1163
1164 void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
1165 Slot* slot,
1166 TypeofState typeof_state,
1167 Label* slow,
1168 Label* done) {
1169 // Generate fast-case code for variables that might be shadowed by
1170 // eval-introduced variables. Eval is used a lot without
1171 // introducing variables. In those cases, we do not want to
1172 // perform a runtime call for all variables in the scope
1173 // containing the eval.
1174 if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
1175 EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow);
1176 __ jmp(done);
1177 } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
1178 Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
1179 Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
1180 if (potential_slot != NULL) {
1181 // Generate fast case for locals that rewrite to slots.
1182 __ movq(rax,
1183 ContextSlotOperandCheckExtensions(potential_slot, slow));
1184 if (potential_slot->var()->mode() == Variable::CONST) {
1185 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1186 __ j(not_equal, done);
1187 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1188 }
1189 __ jmp(done);
1190 } else if (rewrite != NULL) {
1191 // Generate fast case for calls of an argument function.
1192 Property* property = rewrite->AsProperty();
1193 if (property != NULL) {
1194 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1195 Literal* key_literal = property->key()->AsLiteral();
1196 if (obj_proxy != NULL &&
1197 key_literal != NULL &&
1198 obj_proxy->IsArguments() &&
1199 key_literal->handle()->IsSmi()) {
1200 // Load arguments object if there are no eval-introduced
1201 // variables. Then load the argument from the arguments
1202 // object using keyed load.
1203 __ movq(rdx,
1204 ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
1205 slow));
1206 __ Move(rax, key_literal->handle());
1207 Handle<Code> ic =
1208 isolate()->builtins()->KeyedLoadIC_Initialize();
1209 EmitCallIC(ic, RelocInfo::CODE_TARGET);
1210 __ jmp(done);
1211 }
1212 }
1213 }
1214 }
1215 }
1216
1217
1218 void FullCodeGenerator::EmitVariableLoad(Variable* var) {
1219 // Four cases: non-this global variables, lookup slots, all other
1220 // types of slots, and parameters that rewrite to explicit property
1221 // accesses on the arguments object.
1222 Slot* slot = var->AsSlot();
1223 Property* property = var->AsProperty();
1224
1225 if (var->is_global() && !var->is_this()) {
1226 Comment cmnt(masm_, "Global variable");
1227 // Use inline caching. Variable name is passed in rcx and the global
1228 // object on the stack.
1229 __ Move(rcx, var->name());
1230 __ movq(rax, GlobalObjectOperand());
1231 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1232 EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1233 context()->Plug(rax);
1234
1235 } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
1236 Label done, slow;
1237
1238 // Generate code for loading from variables potentially shadowed
1239 // by eval-introduced variables.
1240 EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done);
1241
1242 __ bind(&slow);
1243 Comment cmnt(masm_, "Lookup slot");
1244 __ push(rsi); // Context.
1245 __ Push(var->name());
1246 __ CallRuntime(Runtime::kLoadContextSlot, 2);
1247 __ bind(&done);
1248
1249 context()->Plug(rax);
1250
1251 } else if (slot != NULL) {
1252 Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
1253 ? "Context slot"
1254 : "Stack slot");
1255 if (var->mode() == Variable::CONST) {
1256 // Constants may be the hole value if they have not been initialized.
1257 // Unhole them.
1258 NearLabel done;
1259 MemOperand slot_operand = EmitSlotSearch(slot, rax);
1260 __ movq(rax, slot_operand);
1261 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
1262 __ j(not_equal, &done);
1263 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
1264 __ bind(&done);
1265 context()->Plug(rax);
1266 } else {
1267 context()->Plug(slot);
1268 }
1269
1270 } else {
1271 Comment cmnt(masm_, "Rewritten parameter");
1272 ASSERT_NOT_NULL(property);
1273 // Rewritten parameter accesses are of the form "slot[literal]".
1274
1275 // Assert that the object is in a slot.
1276 Variable* object_var = property->obj()->AsVariableProxy()->AsVariable();
1277 ASSERT_NOT_NULL(object_var);
1278 Slot* object_slot = object_var->AsSlot();
1279 ASSERT_NOT_NULL(object_slot);
1280
1281 // Load the object.
1282 MemOperand object_loc = EmitSlotSearch(object_slot, rax);
1283 __ movq(rdx, object_loc);
1284
1285 // Assert that the key is a smi.
1286 Literal* key_literal = property->key()->AsLiteral();
1287 ASSERT_NOT_NULL(key_literal);
1288 ASSERT(key_literal->handle()->IsSmi());
1289
1290 // Load the key.
1291 __ Move(rax, key_literal->handle());
1292
1293 // Do a keyed property load.
1294 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1295 EmitCallIC(ic, RelocInfo::CODE_TARGET);
1296 context()->Plug(rax);
1297 }
1298 }
1299
1300
1301 void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
1302 Comment cmnt(masm_, "[ RegExpLiteral");
1303 Label materialized;
1304 // Registers will be used as follows:
1305 // rdi = JS function.
1306 // rcx = literals array.
1307 // rbx = regexp literal.
1308 // rax = regexp literal clone.
1309 __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1310 __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
1311 int literal_offset =
1312 FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
1313 __ movq(rbx, FieldOperand(rcx, literal_offset));
1314 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
1315 __ j(not_equal, &materialized);
1316
1317 // Create regexp literal using runtime function
1318 // Result will be in rax.
1319 __ push(rcx);
1320 __ Push(Smi::FromInt(expr->literal_index()));
1321 __ Push(expr->pattern());
1322 __ Push(expr->flags());
1323 __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
1324 __ movq(rbx, rax);
1325
1326 __ bind(&materialized);
1327 int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
1328 Label allocated, runtime_allocate;
1329 __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
1330 __ jmp(&allocated);
1331
1332 __ bind(&runtime_allocate);
1333 __ push(rbx);
1334 __ Push(Smi::FromInt(size));
1335 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
1336 __ pop(rbx);
1337
1338 __ bind(&allocated);
1339 // Copy the content into the newly allocated memory.
1340 // (Unroll copy loop once for better throughput).
1341 for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
1342 __ movq(rdx, FieldOperand(rbx, i));
1343 __ movq(rcx, FieldOperand(rbx, i + kPointerSize));
1344 __ movq(FieldOperand(rax, i), rdx);
1345 __ movq(FieldOperand(rax, i + kPointerSize), rcx);
1346 }
1347 if ((size % (2 * kPointerSize)) != 0) {
1348 __ movq(rdx, FieldOperand(rbx, size - kPointerSize));
1349 __ movq(FieldOperand(rax, size - kPointerSize), rdx);
1350 }
1351 context()->Plug(rax);
1352 }
1353
1354
1355 void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
1356 Comment cmnt(masm_, "[ ObjectLiteral");
1357 __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1358 __ push(FieldOperand(rdi, JSFunction::kLiteralsOffset));
1359 __ Push(Smi::FromInt(expr->literal_index()));
1360 __ Push(expr->constant_properties());
1361 int flags = expr->fast_elements()
1362 ? ObjectLiteral::kFastElements
1363 : ObjectLiteral::kNoFlags;
1364 flags |= expr->has_function()
1365 ? ObjectLiteral::kHasFunction
1366 : ObjectLiteral::kNoFlags;
1367 __ Push(Smi::FromInt(flags));
1368 if (expr->depth() > 1) {
1369 __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
1370 } else {
1371 __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
1372 }
1373
1374 // If result_saved is true the result is on top of the stack. If
1375 // result_saved is false the result is in rax.
1376 bool result_saved = false;
1377
1378 // Mark all computed expressions that are bound to a key that
1379 // is shadowed by a later occurrence of the same key. For the
1380 // marked expressions, no store code is emitted.
1381 expr->CalculateEmitStore();
1382
1383 for (int i = 0; i < expr->properties()->length(); i++) {
1384 ObjectLiteral::Property* property = expr->properties()->at(i);
1385 if (property->IsCompileTimeValue()) continue;
1386
1387 Literal* key = property->key();
1388 Expression* value = property->value();
1389 if (!result_saved) {
1390 __ push(rax); // Save result on the stack
1391 result_saved = true;
1392 }
1393 switch (property->kind()) {
1394 case ObjectLiteral::Property::CONSTANT:
1395 UNREACHABLE();
1396 case ObjectLiteral::Property::MATERIALIZED_LITERAL:
1397 ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
1398 // Fall through.
1399 case ObjectLiteral::Property::COMPUTED:
1400 if (key->handle()->IsSymbol()) {
1401 if (property->emit_store()) {
1402 VisitForAccumulatorValue(value);
1403 __ Move(rcx, key->handle());
1404 __ movq(rdx, Operand(rsp, 0));
1405 Handle<Code> ic = is_strict_mode()
1406 ? isolate()->builtins()->StoreIC_Initialize_Strict()
1407 : isolate()->builtins()->StoreIC_Initialize();
1408 EmitCallIC(ic, RelocInfo::CODE_TARGET);
1409 PrepareForBailoutForId(key->id(), NO_REGISTERS);
1410 } else {
1411 VisitForEffect(value);
1412 }
1413 break;
1414 }
1415 // Fall through.
1416 case ObjectLiteral::Property::PROTOTYPE:
1417 __ push(Operand(rsp, 0)); // Duplicate receiver.
1418 VisitForStackValue(key);
1419 VisitForStackValue(value);
1420 if (property->emit_store()) {
1421 __ Push(Smi::FromInt(NONE)); // PropertyAttributes
1422 __ CallRuntime(Runtime::kSetProperty, 4);
1423 } else {
1424 __ Drop(3);
1425 }
1426 break;
1427 case ObjectLiteral::Property::SETTER:
1428 case ObjectLiteral::Property::GETTER:
1429 __ push(Operand(rsp, 0)); // Duplicate receiver.
1430 VisitForStackValue(key);
1431 __ Push(property->kind() == ObjectLiteral::Property::SETTER ?
1432 Smi::FromInt(1) :
1433 Smi::FromInt(0));
1434 VisitForStackValue(value);
1435 __ CallRuntime(Runtime::kDefineAccessor, 4);
1436 break;
1437 }
1438 }
1439
1440 if (expr->has_function()) {
1441 ASSERT(result_saved);
1442 __ push(Operand(rsp, 0));
1443 __ CallRuntime(Runtime::kToFastProperties, 1);
1444 }
1445
1446 if (result_saved) {
1447 context()->PlugTOS();
1448 } else {
1449 context()->Plug(rax);
1450 }
1451 }
1452
1453
1454 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1455 Comment cmnt(masm_, "[ ArrayLiteral");
1456
1457 ZoneList<Expression*>* subexprs = expr->values();
1458 int length = subexprs->length();
1459
1460 __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
1461 __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
1462 __ Push(Smi::FromInt(expr->literal_index()));
1463 __ Push(expr->constant_elements());
1464 if (expr->constant_elements()->map() ==
1465 isolate()->heap()->fixed_cow_array_map()) {
1466 FastCloneShallowArrayStub stub(
1467 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
1468 __ CallStub(&stub);
1469 __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
1470 } else if (expr->depth() > 1) {
1471 __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1472 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1473 __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1474 } else {
1475 FastCloneShallowArrayStub stub(
1476 FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
1477 __ CallStub(&stub);
1478 }
1479
1480 bool result_saved = false; // Is the result saved to the stack?
1481
1482 // Emit code to evaluate all the non-constant subexpressions and to store
1483 // them into the newly cloned array.
1484 for (int i = 0; i < length; i++) {
1485 Expression* subexpr = subexprs->at(i);
1486 // If the subexpression is a literal or a simple materialized literal it
1487 // is already set in the cloned array.
1488 if (subexpr->AsLiteral() != NULL ||
1489 CompileTimeValue::IsCompileTimeValue(subexpr)) {
1490 continue;
1491 }
1492
1493 if (!result_saved) {
1494 __ push(rax);
1495 result_saved = true;
1496 }
1497 VisitForAccumulatorValue(subexpr);
1498
1499 // Store the subexpression value in the array's elements.
1500 __ movq(rbx, Operand(rsp, 0)); // Copy of array literal.
1501 __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
1502 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1503 __ movq(FieldOperand(rbx, offset), result_register());
1504
1505 // Update the write barrier for the array store.
1506 __ RecordWrite(rbx, offset, result_register(), rcx);
1507
1508 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1509 }
1510
1511 if (result_saved) {
1512 context()->PlugTOS();
1513 } else {
1514 context()->Plug(rax);
1515 }
1516 }
1517
1518
1519 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1520 Comment cmnt(masm_, "[ Assignment");
1521 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1522 // on the left-hand side.
1523 if (!expr->target()->IsValidLeftHandSide()) {
1524 VisitForEffect(expr->target());
1525 return;
1526 }
1527
1528 // Left-hand side can only be a property, a global or a (parameter or local)
1529 // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
1530 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1531 LhsKind assign_type = VARIABLE;
1532 Property* property = expr->target()->AsProperty();
1533 if (property != NULL) {
1534 assign_type = (property->key()->IsPropertyName())
1535 ? NAMED_PROPERTY
1536 : KEYED_PROPERTY;
1537 }
1538
1539 // Evaluate LHS expression.
1540 switch (assign_type) {
1541 case VARIABLE:
1542 // Nothing to do here.
1543 break;
1544 case NAMED_PROPERTY:
1545 if (expr->is_compound()) {
1546 // We need the receiver both on the stack and in the accumulator.
1547 VisitForAccumulatorValue(property->obj());
1548 __ push(result_register());
1549 } else {
1550 VisitForStackValue(property->obj());
1551 }
1552 break;
1553 case KEYED_PROPERTY: {
1554 if (expr->is_compound()) {
1555 if (property->is_arguments_access()) {
1556 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1557 MemOperand slot_operand =
1558 EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx);
1559 __ push(slot_operand);
1560 __ Move(rax, property->key()->AsLiteral()->handle());
1561 } else {
1562 VisitForStackValue(property->obj());
1563 VisitForAccumulatorValue(property->key());
1564 }
1565 __ movq(rdx, Operand(rsp, 0));
1566 __ push(rax);
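          // The receiver is now in rdx and the key in rax, which is the
          // layout the keyed load IC below expects; the pushed key stays on
          // the stack for the keyed store at the end of the assignment.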
1567 } else {
1568 if (property->is_arguments_access()) {
1569 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1570 MemOperand slot_operand =
1571 EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx);
1572 __ push(slot_operand);
1573 __ Push(property->key()->AsLiteral()->handle());
1574 } else {
1575 VisitForStackValue(property->obj());
1576 VisitForStackValue(property->key());
1577 }
1578 }
1579 break;
1580 }
1581 }
1582
1583 // For compound assignments we need another deoptimization point after the
1584 // variable/property load.
1585 if (expr->is_compound()) {
1586 { AccumulatorValueContext context(this);
1587 switch (assign_type) {
1588 case VARIABLE:
1589 EmitVariableLoad(expr->target()->AsVariableProxy()->var());
1590 PrepareForBailout(expr->target(), TOS_REG);
1591 break;
1592 case NAMED_PROPERTY:
1593 EmitNamedPropertyLoad(property);
1594 PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1595 break;
1596 case KEYED_PROPERTY:
1597 EmitKeyedPropertyLoad(property);
1598 PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1599 break;
1600 }
1601 }
1602
1603 Token::Value op = expr->binary_op();
1604 __ push(rax); // Left operand goes on the stack.
1605 VisitForAccumulatorValue(expr->value());
1606
1607 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1608 ? OVERWRITE_RIGHT
1609 : NO_OVERWRITE;
1610 SetSourcePosition(expr->position() + 1);
1611 AccumulatorValueContext context(this);
1612 if (ShouldInlineSmiCase(op)) {
1613 EmitInlineSmiBinaryOp(expr,
1614 op,
1615 mode,
1616 expr->target(),
1617 expr->value());
1618 } else {
1619 EmitBinaryOp(op, mode);
1620 }
1621 // Deoptimization point in case the binary operation may have side effects.
1622 PrepareForBailout(expr->binary_operation(), TOS_REG);
1623 } else {
1624 VisitForAccumulatorValue(expr->value());
1625 }
1626
1627 // Record source position before possible IC call.
1628 SetSourcePosition(expr->position());
1629
1630 // Store the value.
1631 switch (assign_type) {
1632 case VARIABLE:
1633 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1634 expr->op());
1635 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1636 context()->Plug(rax);
1637 break;
1638 case NAMED_PROPERTY:
1639 EmitNamedPropertyAssignment(expr);
1640 break;
1641 case KEYED_PROPERTY:
1642 EmitKeyedPropertyAssignment(expr);
1643 break;
1644 }
1645 }
1646
1647
1648 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1649 SetSourcePosition(prop->position());
1650 Literal* key = prop->key()->AsLiteral();
1651 __ Move(rcx, key->handle());
1652 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1653 EmitCallIC(ic, RelocInfo::CODE_TARGET);
1654 }
1655
1656
1657 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1658 SetSourcePosition(prop->position());
1659 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1660 EmitCallIC(ic, RelocInfo::CODE_TARGET);
1661 }
1662
1663
1664 void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
1665 Token::Value op,
1666 OverwriteMode mode,
1667 Expression* left,
1668 Expression* right) {
1669 // Do combined smi check of the operands. Left operand is on the
1670 // stack (popped into rdx). Right operand is in rax but moved into
1671 // rcx to make the shifts easier.
1672 NearLabel done, stub_call, smi_case;
1673 __ pop(rdx);
1674 __ movq(rcx, rax);
1675 __ or_(rax, rdx);
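  // A smi has a clear tag bit, so or-ing the operands leaves the tag bit
  // clear exactly when both operands are smis; a single smi test then
  // covers both values.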
1676 JumpPatchSite patch_site(masm_);
1677 patch_site.EmitJumpIfSmi(rax, &smi_case);
1678
1679 __ bind(&stub_call);
1680 __ movq(rax, rcx);
1681 TypeRecordingBinaryOpStub stub(op, mode);
1682 EmitCallIC(stub.GetCode(), &patch_site);
1683 __ jmp(&done);
1684
1685 __ bind(&smi_case);
1686 switch (op) {
1687 case Token::SAR:
1688 __ SmiShiftArithmeticRight(rax, rdx, rcx);
1689 break;
1690 case Token::SHL:
1691 __ SmiShiftLeft(rax, rdx, rcx);
1692 break;
1693 case Token::SHR:
1694 __ SmiShiftLogicalRight(rax, rdx, rcx, &stub_call);
1695 break;
1696 case Token::ADD:
1697 __ SmiAdd(rax, rdx, rcx, &stub_call);
1698 break;
1699 case Token::SUB:
1700 __ SmiSub(rax, rdx, rcx, &stub_call);
1701 break;
1702 case Token::MUL:
1703 __ SmiMul(rax, rdx, rcx, &stub_call);
1704 break;
1705 case Token::BIT_OR:
1706 __ SmiOr(rax, rdx, rcx);
1707 break;
1708 case Token::BIT_AND:
1709 __ SmiAnd(rax, rdx, rcx);
1710 break;
1711 case Token::BIT_XOR:
1712 __ SmiXor(rax, rdx, rcx);
1713 break;
1714 default:
1715 UNREACHABLE();
1716 break;
1717 }
1718
1719 __ bind(&done);
1720 context()->Plug(rax);
1721 }
1722
1723
1724 void FullCodeGenerator::EmitBinaryOp(Token::Value op,
1725 OverwriteMode mode) {
1726 __ pop(rdx);
1727 TypeRecordingBinaryOpStub stub(op, mode);
1728 EmitCallIC(stub.GetCode(), NULL); // NULL signals no inlined smi code.
1729 context()->Plug(rax);
1730 }
1731
1732
1733 void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
1734 // Invalid left-hand sides are rewritten to have a 'throw
1735 // ReferenceError' on the left-hand side.
1736 if (!expr->IsValidLeftHandSide()) {
1737 VisitForEffect(expr);
1738 return;
1739 }
1740
1741 // Left-hand side can only be a property, a global or a (parameter or local)
1742 // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
1743 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1744 LhsKind assign_type = VARIABLE;
1745 Property* prop = expr->AsProperty();
1746 if (prop != NULL) {
1747 assign_type = (prop->key()->IsPropertyName())
1748 ? NAMED_PROPERTY
1749 : KEYED_PROPERTY;
1750 }
1751
1752 switch (assign_type) {
1753 case VARIABLE: {
1754 Variable* var = expr->AsVariableProxy()->var();
1755 EffectContext context(this);
1756 EmitVariableAssignment(var, Token::ASSIGN);
1757 break;
1758 }
1759 case NAMED_PROPERTY: {
1760 __ push(rax); // Preserve value.
1761 VisitForAccumulatorValue(prop->obj());
1762 __ movq(rdx, rax);
1763 __ pop(rax); // Restore value.
1764 __ Move(rcx, prop->key()->AsLiteral()->handle());
1765 Handle<Code> ic = is_strict_mode()
1766 ? isolate()->builtins()->StoreIC_Initialize_Strict()
1767 : isolate()->builtins()->StoreIC_Initialize();
1768 EmitCallIC(ic, RelocInfo::CODE_TARGET);
1769 break;
1770 }
1771 case KEYED_PROPERTY: {
1772 __ push(rax); // Preserve value.
1773 if (prop->is_synthetic()) {
1774 ASSERT(prop->obj()->AsVariableProxy() != NULL);
1775 ASSERT(prop->key()->AsLiteral() != NULL);
1776 { AccumulatorValueContext for_object(this);
1777 EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
1778 }
1779 __ movq(rdx, rax);
1780 __ Move(rcx, prop->key()->AsLiteral()->handle());
1781 } else {
1782 VisitForStackValue(prop->obj());
1783 VisitForAccumulatorValue(prop->key());
1784 __ movq(rcx, rax);
1785 __ pop(rdx);
1786 }
1787 __ pop(rax); // Restore value.
1788 Handle<Code> ic = is_strict_mode()
1789 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
1790 : isolate()->builtins()->KeyedStoreIC_Initialize();
1791 EmitCallIC(ic, RelocInfo::CODE_TARGET);
1792 break;
1793 }
1794 }
1795 PrepareForBailoutForId(bailout_ast_id, TOS_REG);
1796 context()->Plug(rax);
1797 }
1798
1799
1800 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1801 Token::Value op) {
1802 // Left-hand sides that rewrite to explicit property accesses do not reach
1803 // here.
1804 ASSERT(var != NULL);
1805 ASSERT(var->is_global() || var->AsSlot() != NULL);
1806
1807 if (var->is_global()) {
1808 ASSERT(!var->is_this());
1809 // Assignment to a global variable. Use inline caching for the
1810 // assignment. Right-hand-side value is passed in rax, variable name in
1811     // rcx, and the global object (the receiver) in rdx.
1812 __ Move(rcx, var->name());
1813 __ movq(rdx, GlobalObjectOperand());
1814 Handle<Code> ic = is_strict_mode()
1815 ? isolate()->builtins()->StoreIC_Initialize_Strict()
1816 : isolate()->builtins()->StoreIC_Initialize();
1817 EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1818
1819 } else if (op == Token::INIT_CONST) {
1820 // Like var declarations, const declarations are hoisted to function
1821 // scope. However, unlike var initializers, const initializers are able
1822 // to drill a hole to that function context, even from inside a 'with'
1823 // context. We thus bypass the normal static scope lookup.
1824 Slot* slot = var->AsSlot();
1825 Label skip;
1826 switch (slot->type()) {
1827 case Slot::PARAMETER:
1828 // No const parameters.
1829 UNREACHABLE();
1830 break;
1831 case Slot::LOCAL:
1832 __ movq(rdx, Operand(rbp, SlotOffset(slot)));
1833 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1834 __ j(not_equal, &skip);
1835 __ movq(Operand(rbp, SlotOffset(slot)), rax);
1836 break;
1837 case Slot::CONTEXT: {
1838 __ movq(rcx, ContextOperand(rsi, Context::FCONTEXT_INDEX));
1839 __ movq(rdx, ContextOperand(rcx, slot->index()));
1840 __ CompareRoot(rdx, Heap::kTheHoleValueRootIndex);
1841 __ j(not_equal, &skip);
1842 __ movq(ContextOperand(rcx, slot->index()), rax);
1843 int offset = Context::SlotOffset(slot->index());
1844         __ movq(rdx, rax);  // Preserve the stored value in rax.
1845 __ RecordWrite(rcx, offset, rdx, rbx);
1846 break;
1847 }
1848 case Slot::LOOKUP:
1849 __ push(rax);
1850 __ push(rsi);
1851 __ Push(var->name());
1852 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1853 break;
1854 }
1855 __ bind(&skip);
1856
1857 } else if (var->mode() != Variable::CONST) {
1858 // Perform the assignment for non-const variables. Const assignments
1859 // are simply skipped.
1860 Slot* slot = var->AsSlot();
1861 switch (slot->type()) {
1862 case Slot::PARAMETER:
1863 case Slot::LOCAL:
1864 // Perform the assignment.
1865 __ movq(Operand(rbp, SlotOffset(slot)), rax);
1866 break;
1867
1868 case Slot::CONTEXT: {
1869 MemOperand target = EmitSlotSearch(slot, rcx);
1870 // Perform the assignment and issue the write barrier.
1871 __ movq(target, rax);
1872 // The value of the assignment is in rax. RecordWrite clobbers its
1873 // register arguments.
1874 __ movq(rdx, rax);
1875 int offset = Context::SlotOffset(slot->index());
1876 __ RecordWrite(rcx, offset, rdx, rbx);
1877 break;
1878 }
1879
1880 case Slot::LOOKUP:
1881 // Call the runtime for the assignment.
1882 __ push(rax); // Value.
1883 __ push(rsi); // Context.
1884 __ Push(var->name());
1885 __ Push(Smi::FromInt(strict_mode_flag()));
1886 __ CallRuntime(Runtime::kStoreContextSlot, 4);
1887 break;
1888 }
1889 }
1890 }
1891
1892
1893 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
1894 // Assignment to a property, using a named store IC.
1895 Property* prop = expr->target()->AsProperty();
1896 ASSERT(prop != NULL);
1897 ASSERT(prop->key()->AsLiteral() != NULL);
1898
1899 // If the assignment starts a block of assignments to the same object,
1900 // change to slow case to avoid the quadratic behavior of repeatedly
1901 // adding fast properties.
1902 if (expr->starts_initialization_block()) {
1903 __ push(result_register());
1904 __ push(Operand(rsp, kPointerSize)); // Receiver is now under value.
1905 __ CallRuntime(Runtime::kToSlowProperties, 1);
1906 __ pop(result_register());
1907 }
1908
1909 // Record source code position before IC call.
1910 SetSourcePosition(expr->position());
1911 __ Move(rcx, prop->key()->AsLiteral()->handle());
1912 if (expr->ends_initialization_block()) {
1913 __ movq(rdx, Operand(rsp, 0));
1914 } else {
1915 __ pop(rdx);
1916 }
1917 Handle<Code> ic = is_strict_mode()
1918 ? isolate()->builtins()->StoreIC_Initialize_Strict()
1919 : isolate()->builtins()->StoreIC_Initialize();
1920 EmitCallIC(ic, RelocInfo::CODE_TARGET);
1921
1922 // If the assignment ends an initialization block, revert to fast case.
1923 if (expr->ends_initialization_block()) {
1924 __ push(rax); // Result of assignment, saved even if not needed.
1925 __ push(Operand(rsp, kPointerSize)); // Receiver is under value.
1926 __ CallRuntime(Runtime::kToFastProperties, 1);
1927 __ pop(rax);
1928 __ Drop(1);
1929 }
1930 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1931 context()->Plug(rax);
1932 }
1933
1934
1935 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
1936 // Assignment to a property, using a keyed store IC.
1937
1938 // If the assignment starts a block of assignments to the same object,
1939 // change to slow case to avoid the quadratic behavior of repeatedly
1940 // adding fast properties.
1941 if (expr->starts_initialization_block()) {
1942 __ push(result_register());
1943 // Receiver is now under the key and value.
1944 __ push(Operand(rsp, 2 * kPointerSize));
1945 __ CallRuntime(Runtime::kToSlowProperties, 1);
1946 __ pop(result_register());
1947 }
1948
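  // The keyed store IC takes the value in rax, the key in rcx, and the
  // receiver in rdx; the value is already in rax at this point.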
1949 __ pop(rcx);
1950 if (expr->ends_initialization_block()) {
1951 __ movq(rdx, Operand(rsp, 0)); // Leave receiver on the stack for later.
1952 } else {
1953 __ pop(rdx);
1954 }
1955 // Record source code position before IC call.
1956 SetSourcePosition(expr->position());
1957 Handle<Code> ic = is_strict_mode()
1958 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
1959 : isolate()->builtins()->KeyedStoreIC_Initialize();
1960 EmitCallIC(ic, RelocInfo::CODE_TARGET);
1961
1962 // If the assignment ends an initialization block, revert to fast case.
1963 if (expr->ends_initialization_block()) {
1964 __ pop(rdx);
1965 __ push(rax); // Result of assignment, saved even if not needed.
1966 __ push(rdx);
1967 __ CallRuntime(Runtime::kToFastProperties, 1);
1968 __ pop(rax);
1969 }
1970
1971 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1972 context()->Plug(rax);
1973 }
1974
1975
1976 void FullCodeGenerator::VisitProperty(Property* expr) {
1977 Comment cmnt(masm_, "[ Property");
1978 Expression* key = expr->key();
1979
1980 if (key->IsPropertyName()) {
1981 VisitForAccumulatorValue(expr->obj());
1982 EmitNamedPropertyLoad(expr);
1983 context()->Plug(rax);
1984 } else {
1985 VisitForStackValue(expr->obj());
1986 VisitForAccumulatorValue(expr->key());
1987 __ pop(rdx);
1988 EmitKeyedPropertyLoad(expr);
1989 context()->Plug(rax);
1990 }
1991 }
1992
1993
1994 void FullCodeGenerator::EmitCallWithIC(Call* expr,
1995 Handle<Object> name,
1996 RelocInfo::Mode mode) {
1997 // Code common for calls using the IC.
1998 ZoneList<Expression*>* args = expr->arguments();
1999 int arg_count = args->length();
2000 { PreservePositionScope scope(masm()->positions_recorder());
2001 for (int i = 0; i < arg_count; i++) {
2002 VisitForStackValue(args->at(i));
2003 }
2004 __ Move(rcx, name);
2005 }
2006 // Record source position for debugger.
2007 SetSourcePosition(expr->position());
2008 // Call the IC initialization code.
2009 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2010 Handle<Code> ic =
2011 ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
2012 EmitCallIC(ic, mode);
2013 RecordJSReturnSite(expr);
2014 // Restore context register.
2015 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2016 context()->Plug(rax);
2017 }
2018
2019
2020 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2021 Expression* key,
2022 RelocInfo::Mode mode) {
2023 // Load the key.
2024 VisitForAccumulatorValue(key);
2025
2026 // Swap the name of the function and the receiver on the stack to follow
2027 // the calling convention for call ICs.
2028 __ pop(rcx);
2029 __ push(rax);
2030 __ push(rcx);
2031
2032 // Load the arguments.
2033 ZoneList<Expression*>* args = expr->arguments();
2034 int arg_count = args->length();
2035 { PreservePositionScope scope(masm()->positions_recorder());
2036 for (int i = 0; i < arg_count; i++) {
2037 VisitForStackValue(args->at(i));
2038 }
2039 }
2040 // Record source position for debugger.
2041 SetSourcePosition(expr->position());
2042 // Call the IC initialization code.
2043 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2044 Handle<Code> ic =
2045 ISOLATE->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
2046 __ movq(rcx, Operand(rsp, (arg_count + 1) * kPointerSize)); // Key.
2047 EmitCallIC(ic, mode);
2048 RecordJSReturnSite(expr);
2049 // Restore context register.
2050 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2051 context()->DropAndPlug(1, rax); // Drop the key still on the stack.
2052 }
2053
2054
2055 void FullCodeGenerator::EmitCallWithStub(Call* expr) {
2056 // Code common for calls using the call stub.
2057 ZoneList<Expression*>* args = expr->arguments();
2058 int arg_count = args->length();
2059 { PreservePositionScope scope(masm()->positions_recorder());
2060 for (int i = 0; i < arg_count; i++) {
2061 VisitForStackValue(args->at(i));
2062 }
2063 }
2064 // Record source position for debugger.
2065 SetSourcePosition(expr->position());
2066 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2067 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
2068 __ CallStub(&stub);
2069 RecordJSReturnSite(expr);
2070 // Restore context register.
2071 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2072 // Discard the function left on TOS.
2073 context()->DropAndPlug(1, rax);
2074 }
2075
2076
2077 void FullCodeGenerator::EmitResolvePossiblyDirectEval(ResolveEvalFlag flag,
2078 int arg_count) {
2079 // Push copy of the first argument or undefined if it doesn't exist.
2080 if (arg_count > 0) {
2081 __ push(Operand(rsp, arg_count * kPointerSize));
2082 } else {
2083 __ PushRoot(Heap::kUndefinedValueRootIndex);
2084 }
2085
2086 // Push the receiver of the enclosing function and do runtime call.
2087 __ push(Operand(rbp, (2 + scope()->num_parameters()) * kPointerSize));
2088
2089 // Push the strict mode flag.
2090 __ Push(Smi::FromInt(strict_mode_flag()));
2091
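  // Together with the function pushed by the caller, the runtime call below
  // receives four arguments: the function, a copy of the first argument,
  // the enclosing receiver, and the strict mode flag.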
2092 __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP
2093 ? Runtime::kResolvePossiblyDirectEvalNoLookup
2094 : Runtime::kResolvePossiblyDirectEval, 4);
2095 }
2096
2097
2098 void FullCodeGenerator::VisitCall(Call* expr) {
2099 #ifdef DEBUG
2100 // We want to verify that RecordJSReturnSite gets called on all paths
2101 // through this function. Avoid early returns.
2102 expr->return_is_recorded_ = false;
2103 #endif
2104
2105 Comment cmnt(masm_, "[ Call");
2106 Expression* fun = expr->expression();
2107 Variable* var = fun->AsVariableProxy()->AsVariable();
2108
2109 if (var != NULL && var->is_possibly_eval()) {
2110 // In a call to eval, we first call %ResolvePossiblyDirectEval to
2111 // resolve the function we need to call and the receiver of the
2112 // call. Then we call the resolved function using the given
2113 // arguments.
2114 ZoneList<Expression*>* args = expr->arguments();
2115 int arg_count = args->length();
2116 { PreservePositionScope pos_scope(masm()->positions_recorder());
2117 VisitForStackValue(fun);
2118 __ PushRoot(Heap::kUndefinedValueRootIndex); // Reserved receiver slot.
2119
2120 // Push the arguments.
2121 for (int i = 0; i < arg_count; i++) {
2122 VisitForStackValue(args->at(i));
2123 }
2124
2125 // If we know that eval can only be shadowed by eval-introduced
2126 // variables we attempt to load the global eval function directly
2127 // in generated code. If we succeed, there is no need to perform a
2128 // context lookup in the runtime system.
2129 Label done;
2130 if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
2131 Label slow;
2132 EmitLoadGlobalSlotCheckExtensions(var->AsSlot(),
2133 NOT_INSIDE_TYPEOF,
2134 &slow);
2135 // Push the function and resolve eval.
2136 __ push(rax);
2137 EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
2138 __ jmp(&done);
2139 __ bind(&slow);
2140 }
2141
2142 // Push copy of the function (found below the arguments) and
2143 // resolve eval.
2144 __ push(Operand(rsp, (arg_count + 1) * kPointerSize));
2145 EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
2146 if (done.is_linked()) {
2147 __ bind(&done);
2148 }
2149
2150 // The runtime call returns a pair of values in rax (function) and
2151 // rdx (receiver). Touch up the stack with the right values.
2152 __ movq(Operand(rsp, (arg_count + 0) * kPointerSize), rdx);
2153 __ movq(Operand(rsp, (arg_count + 1) * kPointerSize), rax);
2154 }
2155 // Record source position for debugger.
2156 SetSourcePosition(expr->position());
2157 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2158 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
2159 __ CallStub(&stub);
2160 RecordJSReturnSite(expr);
2161 // Restore context register.
2162 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
2163 context()->DropAndPlug(1, rax);
2164 } else if (var != NULL && !var->is_this() && var->is_global()) {
2165 // Call to a global variable.
2166 // Push global object as receiver for the call IC lookup.
2167 __ push(GlobalObjectOperand());
2168 EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
2169 } else if (var != NULL && var->AsSlot() != NULL &&
2170 var->AsSlot()->type() == Slot::LOOKUP) {
2171 // Call to a lookup slot (dynamically introduced variable).
2172 Label slow, done;
2173
2174 { PreservePositionScope scope(masm()->positions_recorder());
2175 // Generate code for loading from variables potentially shadowed
2176 // by eval-introduced variables.
2177 EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
2178 NOT_INSIDE_TYPEOF,
2179 &slow,
2180 &done);
2181
2182 __ bind(&slow);
2183 }
2184 // Call the runtime to find the function to call (returned in rax)
2185 // and the object holding it (returned in rdx).
2186 __ push(context_register());
2187 __ Push(var->name());
2188 __ CallRuntime(Runtime::kLoadContextSlot, 2);
2189 __ push(rax); // Function.
2190 __ push(rdx); // Receiver.
2191
2192 // If fast case code has been generated, emit code to push the
2193 // function and receiver and have the slow path jump around this
2194 // code.
2195 if (done.is_linked()) {
2196 NearLabel call;
2197 __ jmp(&call);
2198 __ bind(&done);
2199 // Push function.
2200 __ push(rax);
2201 // Push global receiver.
2202 __ movq(rbx, GlobalObjectOperand());
2203 __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
2204 __ bind(&call);
2205 }
2206
2207 EmitCallWithStub(expr);
2208 } else if (fun->AsProperty() != NULL) {
2209 // Call to an object property.
2210 Property* prop = fun->AsProperty();
2211 Literal* key = prop->key()->AsLiteral();
2212 if (key != NULL && key->handle()->IsSymbol()) {
2213 // Call to a named property, use call IC.
2214 { PreservePositionScope scope(masm()->positions_recorder());
2215 VisitForStackValue(prop->obj());
2216 }
2217 EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
2218 } else {
2219 // Call to a keyed property.
2220       // For a synthetic property, use the keyed load IC followed by a function
2221       // call; for a regular property, use the keyed call IC.
2222 if (prop->is_synthetic()) {
2223 // Do not visit the object and key subexpressions (they are shared
2224 // by all occurrences of the same rewritten parameter).
2225 ASSERT(prop->obj()->AsVariableProxy() != NULL);
2226 ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
2227 Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
2228 MemOperand operand = EmitSlotSearch(slot, rdx);
2229 __ movq(rdx, operand);
2230
2231 ASSERT(prop->key()->AsLiteral() != NULL);
2232 ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
2233 __ Move(rax, prop->key()->AsLiteral()->handle());
2234
2235 // Record source code position for IC call.
2236 SetSourcePosition(prop->position());
2237
2238 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2239 EmitCallIC(ic, RelocInfo::CODE_TARGET);
2240 // Push result (function).
2241 __ push(rax);
2242 // Push Global receiver.
2243 __ movq(rcx, GlobalObjectOperand());
2244 __ push(FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
2245 EmitCallWithStub(expr);
2246 } else {
2247 { PreservePositionScope scope(masm()->positions_recorder());
2248 VisitForStackValue(prop->obj());
2249 }
2250 EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET);
2251 }
2252 }
2253 } else {
2254 { PreservePositionScope scope(masm()->positions_recorder());
2255 VisitForStackValue(fun);
2256 }
2257 // Load global receiver object.
2258 __ movq(rbx, GlobalObjectOperand());
2259 __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset));
2260 // Emit function call.
2261 EmitCallWithStub(expr);
2262 }
2263
2264 #ifdef DEBUG
2265 // RecordJSReturnSite should have been called.
2266 ASSERT(expr->return_is_recorded_);
2267 #endif
2268 }
2269
2270
2271 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2272 Comment cmnt(masm_, "[ CallNew");
2273 // According to ECMA-262, section 11.2.2, page 44, the function
2274 // expression in new calls must be evaluated before the
2275 // arguments.
2276
2277 // Push constructor on the stack. If it's not a function it's used as
2278 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2279 // ignored.
2280 VisitForStackValue(expr->expression());
2281
2282 // Push the arguments ("left-to-right") on the stack.
2283 ZoneList<Expression*>* args = expr->arguments();
2284 int arg_count = args->length();
2285 for (int i = 0; i < arg_count; i++) {
2286 VisitForStackValue(args->at(i));
2287 }
2288
2289 // Call the construct call builtin that handles allocation and
2290 // constructor invocation.
2291 SetSourcePosition(expr->position());
2292
2293 // Load function and argument count into rdi and rax.
2294 __ Set(rax, arg_count);
2295 __ movq(rdi, Operand(rsp, arg_count * kPointerSize));
2296
2297 Handle<Code> construct_builtin =
2298 isolate()->builtins()->JSConstructCall();
2299 __ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
2300 context()->Plug(rax);
2301 }
2302
2303
2304 void FullCodeGenerator::EmitIsSmi(ZoneList<Expression*>* args) {
2305 ASSERT(args->length() == 1);
2306
2307 VisitForAccumulatorValue(args->at(0));
2308
2309 Label materialize_true, materialize_false;
2310 Label* if_true = NULL;
2311 Label* if_false = NULL;
2312 Label* fall_through = NULL;
2313 context()->PrepareTest(&materialize_true, &materialize_false,
2314 &if_true, &if_false, &fall_through);
2315
2316 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2317 __ JumpIfSmi(rax, if_true);
2318 __ jmp(if_false);
2319
2320 context()->Plug(if_true, if_false);
2321 }
2322
2323
2324 void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) {
2325 ASSERT(args->length() == 1);
2326
2327 VisitForAccumulatorValue(args->at(0));
2328
2329 Label materialize_true, materialize_false;
2330 Label* if_true = NULL;
2331 Label* if_false = NULL;
2332 Label* fall_through = NULL;
2333 context()->PrepareTest(&materialize_true, &materialize_false,
2334 &if_true, &if_false, &fall_through);
2335
2336 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2337 Condition non_negative_smi = masm()->CheckNonNegativeSmi(rax);
2338 Split(non_negative_smi, if_true, if_false, fall_through);
2339
2340 context()->Plug(if_true, if_false);
2341 }
2342
2343
2344 void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
2345 ASSERT(args->length() == 1);
2346
2347 VisitForAccumulatorValue(args->at(0));
2348
2349 Label materialize_true, materialize_false;
2350 Label* if_true = NULL;
2351 Label* if_false = NULL;
2352 Label* fall_through = NULL;
2353 context()->PrepareTest(&materialize_true, &materialize_false,
2354 &if_true, &if_false, &fall_through);
2355
2356 __ JumpIfSmi(rax, if_false);
2357 __ CompareRoot(rax, Heap::kNullValueRootIndex);
2358 __ j(equal, if_true);
2359 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2360 // Undetectable objects behave like undefined when tested with typeof.
2361 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2362 Immediate(1 << Map::kIsUndetectable));
2363 __ j(not_zero, if_false);
2364 __ movzxbq(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
2365 __ cmpq(rbx, Immediate(FIRST_JS_OBJECT_TYPE));
2366 __ j(below, if_false);
2367 __ cmpq(rbx, Immediate(LAST_JS_OBJECT_TYPE));
2368 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2369 Split(below_equal, if_true, if_false, fall_through);
2370
2371 context()->Plug(if_true, if_false);
2372 }
2373
2374
2375 void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) {
2376 ASSERT(args->length() == 1);
2377
2378 VisitForAccumulatorValue(args->at(0));
2379
2380 Label materialize_true, materialize_false;
2381 Label* if_true = NULL;
2382 Label* if_false = NULL;
2383 Label* fall_through = NULL;
2384 context()->PrepareTest(&materialize_true, &materialize_false,
2385 &if_true, &if_false, &fall_through);
2386
2387 __ JumpIfSmi(rax, if_false);
2388 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rbx);
2389 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2390 Split(above_equal, if_true, if_false, fall_through);
2391
2392 context()->Plug(if_true, if_false);
2393 }
2394
2395
2396 void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) {
2397 ASSERT(args->length() == 1);
2398
2399 VisitForAccumulatorValue(args->at(0));
2400
2401 Label materialize_true, materialize_false;
2402 Label* if_true = NULL;
2403 Label* if_false = NULL;
2404 Label* fall_through = NULL;
2405 context()->PrepareTest(&materialize_true, &materialize_false,
2406 &if_true, &if_false, &fall_through);
2407
2408 __ JumpIfSmi(rax, if_false);
2409 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2410 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2411 Immediate(1 << Map::kIsUndetectable));
2412 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2413 Split(not_zero, if_true, if_false, fall_through);
2414
2415 context()->Plug(if_true, if_false);
2416 }
2417
2418
2419 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2420 ZoneList<Expression*>* args) {
2421 ASSERT(args->length() == 1);
2422
2423 VisitForAccumulatorValue(args->at(0));
2424
2425 Label materialize_true, materialize_false;
2426 Label* if_true = NULL;
2427 Label* if_false = NULL;
2428 Label* fall_through = NULL;
2429 context()->PrepareTest(&materialize_true, &materialize_false,
2430 &if_true, &if_false, &fall_through);
2431
2432 if (FLAG_debug_code) __ AbortIfSmi(rax);
2433
2434 // Check whether this map has already been checked to be safe for default
2435 // valueOf.
2436 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2437 __ testb(FieldOperand(rbx, Map::kBitField2Offset),
2438 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2439 __ j(not_zero, if_true);
2440
2441 // Check for fast case object. Generate false result for slow case object.
2442 __ movq(rcx, FieldOperand(rax, JSObject::kPropertiesOffset));
2443 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2444 __ CompareRoot(rcx, Heap::kHashTableMapRootIndex);
2445 __ j(equal, if_false);
2446
2447   // Look for the valueOf symbol in the descriptor array, and indicate false
2448   // if it is found. The descriptor type is not checked, so a transition named
2449   // valueOf also counts, giving a (harmless) false negative.
2450 __ movq(rbx, FieldOperand(rbx, Map::kInstanceDescriptorsOffset));
2451 __ movq(rcx, FieldOperand(rbx, FixedArray::kLengthOffset));
2452 // rbx: descriptor array
2453 // rcx: length of descriptor array
2454 // Calculate the end of the descriptor array.
2455 SmiIndex index = masm_->SmiToIndex(rdx, rcx, kPointerSizeLog2);
2456 __ lea(rcx,
2457 Operand(
2458 rbx, index.reg, index.scale, FixedArray::kHeaderSize));
2459 // Calculate location of the first key name.
2460 __ addq(rbx,
2461 Immediate(FixedArray::kHeaderSize +
2462 DescriptorArray::kFirstIndex * kPointerSize));
2463   // Loop through all the keys in the descriptor array. If one of them is the
2464   // symbol valueOf, the result is false.
2465 Label entry, loop;
2466 __ jmp(&entry);
2467 __ bind(&loop);
2468 __ movq(rdx, FieldOperand(rbx, 0));
2469 __ Cmp(rdx, FACTORY->value_of_symbol());
2470 __ j(equal, if_false);
2471 __ addq(rbx, Immediate(kPointerSize));
2472 __ bind(&entry);
2473 __ cmpq(rbx, rcx);
2474 __ j(not_equal, &loop);
2475
2476 // Reload map as register rbx was used as temporary above.
2477 __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
2478
2479   // If a valueOf property is not found on the object, check that its
2480   // prototype is the unmodified String prototype. If not, the result is false.
2481 __ movq(rcx, FieldOperand(rbx, Map::kPrototypeOffset));
2482 __ testq(rcx, Immediate(kSmiTagMask));
2483 __ j(zero, if_false);
2484 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
2485 __ movq(rdx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2486 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset));
2487 __ cmpq(rcx,
2488 ContextOperand(rdx, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2489 __ j(not_equal, if_false);
2490 // Set the bit in the map to indicate that it has been checked safe for
2491 // default valueOf and set true result.
2492 __ or_(FieldOperand(rbx, Map::kBitField2Offset),
2493 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2494 __ jmp(if_true);
2495
2496 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2497 context()->Plug(if_true, if_false);
2498 }
2499
2500
2501 void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
2502 ASSERT(args->length() == 1);
2503
2504 VisitForAccumulatorValue(args->at(0));
2505
2506 Label materialize_true, materialize_false;
2507 Label* if_true = NULL;
2508 Label* if_false = NULL;
2509 Label* fall_through = NULL;
2510 context()->PrepareTest(&materialize_true, &materialize_false,
2511 &if_true, &if_false, &fall_through);
2512
2513 __ JumpIfSmi(rax, if_false);
2514 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
2515 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2516 Split(equal, if_true, if_false, fall_through);
2517
2518 context()->Plug(if_true, if_false);
2519 }
2520
2521
2522 void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) {
2523 ASSERT(args->length() == 1);
2524
2525 VisitForAccumulatorValue(args->at(0));
2526
2527 Label materialize_true, materialize_false;
2528 Label* if_true = NULL;
2529 Label* if_false = NULL;
2530 Label* fall_through = NULL;
2531 context()->PrepareTest(&materialize_true, &materialize_false,
2532 &if_true, &if_false, &fall_through);
2533
2534 __ JumpIfSmi(rax, if_false);
2535 __ CmpObjectType(rax, JS_ARRAY_TYPE, rbx);
2536 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2537 Split(equal, if_true, if_false, fall_through);
2538
2539 context()->Plug(if_true, if_false);
2540 }
2541
2542
2543 void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) {
2544 ASSERT(args->length() == 1);
2545
2546 VisitForAccumulatorValue(args->at(0));
2547
2548 Label materialize_true, materialize_false;
2549 Label* if_true = NULL;
2550 Label* if_false = NULL;
2551 Label* fall_through = NULL;
2552 context()->PrepareTest(&materialize_true, &materialize_false,
2553 &if_true, &if_false, &fall_through);
2554
2555 __ JumpIfSmi(rax, if_false);
2556 __ CmpObjectType(rax, JS_REGEXP_TYPE, rbx);
2557 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2558 Split(equal, if_true, if_false, fall_through);
2559
2560 context()->Plug(if_true, if_false);
2561 }
2562
2563
2564
2565 void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) {
2566 ASSERT(args->length() == 0);
2567
2568 Label materialize_true, materialize_false;
2569 Label* if_true = NULL;
2570 Label* if_false = NULL;
2571 Label* fall_through = NULL;
2572 context()->PrepareTest(&materialize_true, &materialize_false,
2573 &if_true, &if_false, &fall_through);
2574
2575 // Get the frame pointer for the calling frame.
2576 __ movq(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2577
2578 // Skip the arguments adaptor frame if it exists.
2579 Label check_frame_marker;
2580 __ Cmp(Operand(rax, StandardFrameConstants::kContextOffset),
2581 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2582 __ j(not_equal, &check_frame_marker);
2583 __ movq(rax, Operand(rax, StandardFrameConstants::kCallerFPOffset));
2584
2585 // Check the marker in the calling frame.
2586 __ bind(&check_frame_marker);
2587 __ Cmp(Operand(rax, StandardFrameConstants::kMarkerOffset),
2588 Smi::FromInt(StackFrame::CONSTRUCT));
2589 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2590 Split(equal, if_true, if_false, fall_through);
2591
2592 context()->Plug(if_true, if_false);
2593 }
2594
2595
2596 void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) {
2597 ASSERT(args->length() == 2);
2598
2599 // Load the two objects into registers and perform the comparison.
2600 VisitForStackValue(args->at(0));
2601 VisitForAccumulatorValue(args->at(1));
2602
2603 Label materialize_true, materialize_false;
2604 Label* if_true = NULL;
2605 Label* if_false = NULL;
2606 Label* fall_through = NULL;
2607 context()->PrepareTest(&materialize_true, &materialize_false,
2608 &if_true, &if_false, &fall_through);
2609
2610 __ pop(rbx);
2611 __ cmpq(rax, rbx);
2612 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2613 Split(equal, if_true, if_false, fall_through);
2614
2615 context()->Plug(if_true, if_false);
2616 }
2617
2618
2619 void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) {
2620 ASSERT(args->length() == 1);
2621
2622 // ArgumentsAccessStub expects the key in rdx and the formal
2623 // parameter count in rax.
2624 VisitForAccumulatorValue(args->at(0));
2625 __ movq(rdx, rax);
2626 __ Move(rax, Smi::FromInt(scope()->num_parameters()));
2627 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2628 __ CallStub(&stub);
2629 context()->Plug(rax);
2630 }
2631
2632
2633 void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) {
2634 ASSERT(args->length() == 0);
2635
2636 NearLabel exit;
2637 // Get the number of formal parameters.
2638 __ Move(rax, Smi::FromInt(scope()->num_parameters()));
2639
2640 // Check if the calling frame is an arguments adaptor frame.
2641 __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2642 __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
2643 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2644 __ j(not_equal, &exit);
2645
2646 // Arguments adaptor case: Read the arguments length from the
2647 // adaptor frame.
2648 __ movq(rax, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2649
2650 __ bind(&exit);
2651 if (FLAG_debug_code) __ AbortIfNotSmi(rax);
2652 context()->Plug(rax);
2653 }
2654
2655
2656 void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
2657 ASSERT(args->length() == 1);
2658 Label done, null, function, non_function_constructor;
2659
2660 VisitForAccumulatorValue(args->at(0));
2661
2662 // If the object is a smi, we return null.
2663 __ JumpIfSmi(rax, &null);
2664
2665 // Check that the object is a JS object but take special care of JS
2666 // functions to make sure they have 'Function' as their class.
2667 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rax); // Map is now in rax.
2668 __ j(below, &null);
2669
2670 // As long as JS_FUNCTION_TYPE is the last instance type and it is
2671 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
2672 // LAST_JS_OBJECT_TYPE.
2673 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
2674 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
2675 __ CmpInstanceType(rax, JS_FUNCTION_TYPE);
2676 __ j(equal, &function);
2677
2678 // Check if the constructor in the map is a function.
2679 __ movq(rax, FieldOperand(rax, Map::kConstructorOffset));
2680 __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
2681 __ j(not_equal, &non_function_constructor);
2682
2683 // rax now contains the constructor function. Grab the
2684 // instance class name from there.
2685 __ movq(rax, FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset));
2686 __ movq(rax, FieldOperand(rax, SharedFunctionInfo::kInstanceClassNameOffset));
2687 __ jmp(&done);
2688
2689 // Functions have class 'Function'.
2690 __ bind(&function);
2691 __ Move(rax, isolate()->factory()->function_class_symbol());
2692 __ jmp(&done);
2693
2694 // Objects with a non-function constructor have class 'Object'.
2695 __ bind(&non_function_constructor);
2696 __ Move(rax, isolate()->factory()->Object_symbol());
2697 __ jmp(&done);
2698
2699 // Non-JS objects have class null.
2700 __ bind(&null);
2701 __ LoadRoot(rax, Heap::kNullValueRootIndex);
2702
2703 // All done.
2704 __ bind(&done);
2705
2706 context()->Plug(rax);
2707 }
2708
2709
2710 void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) {
2711 // Conditionally generate a log call.
2712 // Args:
2713 // 0 (literal string): The type of logging (corresponds to the flags).
2714 // This is used to determine whether or not to generate the log call.
2715 // 1 (string): Format string. Access the string at argument index 2
2716 // with '%2s' (see Logger::LogRuntime for all the formats).
2717 // 2 (array): Arguments to the format string.
2718 ASSERT_EQ(args->length(), 3);
2719 #ifdef ENABLE_LOGGING_AND_PROFILING
2720 if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
2721 VisitForStackValue(args->at(1));
2722 VisitForStackValue(args->at(2));
2723 __ CallRuntime(Runtime::kLog, 2);
2724 }
2725 #endif
2726 // Finally, we're expected to leave a value on the top of the stack.
2727 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
2728 context()->Plug(rax);
2729 }
2730
2731
2732 void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) {
2733 ASSERT(args->length() == 0);
2734
2735 Label slow_allocate_heapnumber;
2736 Label heapnumber_allocated;
2737
2738 __ AllocateHeapNumber(rbx, rcx, &slow_allocate_heapnumber);
2739 __ jmp(&heapnumber_allocated);
2740
2741 __ bind(&slow_allocate_heapnumber);
2742 // Allocate a heap number.
2743 __ CallRuntime(Runtime::kNumberAlloc, 0);
2744 __ movq(rbx, rax);
2745
2746 __ bind(&heapnumber_allocated);
2747
2748 // Return a random uint32 number in rax.
2749 // The fresh HeapNumber is in rbx, which is callee-save on both x64 ABIs.
2750 __ PrepareCallCFunction(1);
2751 #ifdef _WIN64
2752 __ LoadAddress(rcx, ExternalReference::isolate_address());
2753 #else
2754 __ LoadAddress(rdi, ExternalReference::isolate_address());
2755 #endif
2756 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
2757
2758 // Convert 32 random bits in rax to 0.(32 random bits) in a double
2759 // by computing:
2760   //  ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
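  // E.g. with random bits r, the xor below produces the double with bit
  // pattern 0x41300000 rrrrrrrr, i.e. (1 + r * 2^-52) * 2^20, and the
  // subtraction leaves r * 2^-32, a value in [0, 1).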
2761 __ movl(rcx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
2762 __ movd(xmm1, rcx);
2763 __ movd(xmm0, rax);
2764 __ cvtss2sd(xmm1, xmm1);
2765 __ xorpd(xmm0, xmm1);
2766 __ subsd(xmm0, xmm1);
2767 __ movsd(FieldOperand(rbx, HeapNumber::kValueOffset), xmm0);
2768
2769 __ movq(rax, rbx);
2770 context()->Plug(rax);
2771 }
2772
2773
2774 void FullCodeGenerator::EmitSubString(ZoneList<Expression*>* args) {
2775 // Load the arguments on the stack and call the stub.
2776 SubStringStub stub;
2777 ASSERT(args->length() == 3);
2778 VisitForStackValue(args->at(0));
2779 VisitForStackValue(args->at(1));
2780 VisitForStackValue(args->at(2));
2781 __ CallStub(&stub);
2782 context()->Plug(rax);
2783 }
2784
2785
2786 void FullCodeGenerator::EmitRegExpExec(ZoneList<Expression*>* args) {
2787 // Load the arguments on the stack and call the stub.
2788 RegExpExecStub stub;
2789 ASSERT(args->length() == 4);
2790 VisitForStackValue(args->at(0));
2791 VisitForStackValue(args->at(1));
2792 VisitForStackValue(args->at(2));
2793 VisitForStackValue(args->at(3));
2794 __ CallStub(&stub);
2795 context()->Plug(rax);
2796 }
2797
2798
2799 void FullCodeGenerator::EmitValueOf(ZoneList<Expression*>* args) {
2800 ASSERT(args->length() == 1);
2801
2802 VisitForAccumulatorValue(args->at(0)); // Load the object.
2803
2804 Label done;
2805 // If the object is a smi return the object.
2806 __ JumpIfSmi(rax, &done);
2807 // If the object is not a value type, return the object.
2808 __ CmpObjectType(rax, JS_VALUE_TYPE, rbx);
2809 __ j(not_equal, &done);
2810 __ movq(rax, FieldOperand(rax, JSValue::kValueOffset));
2811
2812 __ bind(&done);
2813 context()->Plug(rax);
2814 }
2815
2816
2817 void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) {
2818 // Load the arguments on the stack and call the runtime function.
2819 ASSERT(args->length() == 2);
2820 VisitForStackValue(args->at(0));
2821 VisitForStackValue(args->at(1));
2822 MathPowStub stub;
2823 __ CallStub(&stub);
2824 context()->Plug(rax);
2825 }
2826
2827
2828 void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) {
2829 ASSERT(args->length() == 2);
2830
2831 VisitForStackValue(args->at(0)); // Load the object.
2832 VisitForAccumulatorValue(args->at(1)); // Load the value.
2833 __ pop(rbx); // rax = value. rbx = object.
2834
2835 Label done;
2836 // If the object is a smi, return the value.
2837 __ JumpIfSmi(rbx, &done);
2838
2839 // If the object is not a value type, return the value.
2840 __ CmpObjectType(rbx, JS_VALUE_TYPE, rcx);
2841 __ j(not_equal, &done);
2842
2843 // Store the value.
2844 __ movq(FieldOperand(rbx, JSValue::kValueOffset), rax);
2845 // Update the write barrier. Save the value as it will be
2846 // overwritten by the write barrier code and is needed afterward.
2847 __ movq(rdx, rax);
2848 __ RecordWrite(rbx, JSValue::kValueOffset, rdx, rcx);
2849
2850 __ bind(&done);
2851 context()->Plug(rax);
2852 }
2853
2854
2855 void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) {
2856 ASSERT_EQ(args->length(), 1);
2857
2858 // Load the argument on the stack and call the stub.
2859 VisitForStackValue(args->at(0));
2860
2861 NumberToStringStub stub;
2862 __ CallStub(&stub);
2863 context()->Plug(rax);
2864 }
2865
2866
2867 void FullCodeGenerator::EmitStringCharFromCode(ZoneList<Expression*>* args) {
2868 ASSERT(args->length() == 1);
2869
2870 VisitForAccumulatorValue(args->at(0));
2871
2872 Label done;
2873 StringCharFromCodeGenerator generator(rax, rbx);
2874 generator.GenerateFast(masm_);
2875 __ jmp(&done);
2876
2877 NopRuntimeCallHelper call_helper;
2878 generator.GenerateSlow(masm_, call_helper);
2879
2880 __ bind(&done);
2881 context()->Plug(rbx);
2882 }
2883
2884
2885 void FullCodeGenerator::EmitStringCharCodeAt(ZoneList<Expression*>* args) {
2886 ASSERT(args->length() == 2);
2887
2888 VisitForStackValue(args->at(0));
2889 VisitForAccumulatorValue(args->at(1));
2890
2891 Register object = rbx;
2892 Register index = rax;
2893 Register scratch = rcx;
2894 Register result = rdx;
2895
2896 __ pop(object);
2897
2898 Label need_conversion;
2899 Label index_out_of_range;
2900 Label done;
2901 StringCharCodeAtGenerator generator(object,
2902 index,
2903 scratch,
2904 result,
2905 &need_conversion,
2906 &need_conversion,
2907 &index_out_of_range,
2908 STRING_INDEX_IS_NUMBER);
2909 generator.GenerateFast(masm_);
2910 __ jmp(&done);
2911
2912 __ bind(&index_out_of_range);
2913 // When the index is out of range, the spec requires us to return
2914 // NaN.
2915 __ LoadRoot(result, Heap::kNanValueRootIndex);
2916 __ jmp(&done);
2917
2918 __ bind(&need_conversion);
2919 // Move the undefined value into the result register, which will
2920 // trigger conversion.
2921 __ LoadRoot(result, Heap::kUndefinedValueRootIndex);
2922 __ jmp(&done);
2923
2924 NopRuntimeCallHelper call_helper;
2925 generator.GenerateSlow(masm_, call_helper);
2926
2927 __ bind(&done);
2928 context()->Plug(result);
2929 }
2930
2931
2932 void FullCodeGenerator::EmitStringCharAt(ZoneList<Expression*>* args) {
2933 ASSERT(args->length() == 2);
2934
2935 VisitForStackValue(args->at(0));
2936 VisitForAccumulatorValue(args->at(1));
2937
2938 Register object = rbx;
2939 Register index = rax;
2940 Register scratch1 = rcx;
2941 Register scratch2 = rdx;
2942 Register result = rax;
2943
2944 __ pop(object);
2945
2946 Label need_conversion;
2947 Label index_out_of_range;
2948 Label done;
2949 StringCharAtGenerator generator(object,
2950 index,
2951 scratch1,
2952 scratch2,
2953 result,
2954 &need_conversion,
2955 &need_conversion,
2956 &index_out_of_range,
2957 STRING_INDEX_IS_NUMBER);
2958 generator.GenerateFast(masm_);
2959 __ jmp(&done);
2960
2961 __ bind(&index_out_of_range);
2962 // When the index is out of range, the spec requires us to return
2963 // the empty string.
2964 __ LoadRoot(result, Heap::kEmptyStringRootIndex);
2965 __ jmp(&done);
2966
2967 __ bind(&need_conversion);
2968 // Move smi zero into the result register, which will trigger
2969 // conversion.
2970 __ Move(result, Smi::FromInt(0));
2971 __ jmp(&done);
2972
2973 NopRuntimeCallHelper call_helper;
2974 generator.GenerateSlow(masm_, call_helper);
2975
2976 __ bind(&done);
2977 context()->Plug(result);
2978 }
2979
2980
2981 void FullCodeGenerator::EmitStringAdd(ZoneList<Expression*>* args) {
2982 ASSERT_EQ(2, args->length());
2983
2984 VisitForStackValue(args->at(0));
2985 VisitForStackValue(args->at(1));
2986
2987 StringAddStub stub(NO_STRING_ADD_FLAGS);
2988 __ CallStub(&stub);
2989 context()->Plug(rax);
2990 }
2991
2992
2993 void FullCodeGenerator::EmitStringCompare(ZoneList<Expression*>* args) {
2994 ASSERT_EQ(2, args->length());
2995
2996 VisitForStackValue(args->at(0));
2997 VisitForStackValue(args->at(1));
2998
2999 StringCompareStub stub;
3000 __ CallStub(&stub);
3001 context()->Plug(rax);
3002 }
3003
3004
3005 void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
3006 // Load the argument on the stack and call the stub.
3007 TranscendentalCacheStub stub(TranscendentalCache::SIN,
3008 TranscendentalCacheStub::TAGGED);
3009 ASSERT(args->length() == 1);
3010 VisitForStackValue(args->at(0));
3011 __ CallStub(&stub);
3012 context()->Plug(rax);
3013 }
3014
3015
3016 void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
3017 // Load the argument on the stack and call the stub.
3018 TranscendentalCacheStub stub(TranscendentalCache::COS,
3019 TranscendentalCacheStub::TAGGED);
3020 ASSERT(args->length() == 1);
3021 VisitForStackValue(args->at(0));
3022 __ CallStub(&stub);
3023 context()->Plug(rax);
3024 }
3025
3026
3027 void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) {
3028 // Load the argument on the stack and call the stub.
3029 TranscendentalCacheStub stub(TranscendentalCache::LOG,
3030 TranscendentalCacheStub::TAGGED);
3031 ASSERT(args->length() == 1);
3032 VisitForStackValue(args->at(0));
3033 __ CallStub(&stub);
3034 context()->Plug(rax);
3035 }
3036
3037
3038 void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) {
3039 // Load the argument on the stack and call the runtime function.
3040 ASSERT(args->length() == 1);
3041 VisitForStackValue(args->at(0));
3042 __ CallRuntime(Runtime::kMath_sqrt, 1);
3043 context()->Plug(rax);
3044 }
3045
3046
3047 void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
3048 ASSERT(args->length() >= 2);
3049
3050 int arg_count = args->length() - 2; // For receiver and function.
3051 VisitForStackValue(args->at(0)); // Receiver.
3052 for (int i = 0; i < arg_count; i++) {
3053 VisitForStackValue(args->at(i + 1));
3054 }
3055 VisitForAccumulatorValue(args->at(arg_count + 1)); // Function.
3056
3057 // InvokeFunction requires function in rdi. Move it in there.
3058 if (!result_register().is(rdi)) __ movq(rdi, result_register());
3059 ParameterCount count(arg_count);
3060 __ InvokeFunction(rdi, count, CALL_FUNCTION);
3061 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3062 context()->Plug(rax);
3063 }
3064
3065
3066 void FullCodeGenerator::EmitRegExpConstructResult(ZoneList<Expression*>* args) {
3067 RegExpConstructResultStub stub;
3068 ASSERT(args->length() == 3);
3069 VisitForStackValue(args->at(0));
3070 VisitForStackValue(args->at(1));
3071 VisitForStackValue(args->at(2));
3072 __ CallStub(&stub);
3073 context()->Plug(rax);
3074 }
3075
3076
3077 void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
3078 ASSERT(args->length() == 3);
3079 VisitForStackValue(args->at(0));
3080 VisitForStackValue(args->at(1));
3081 VisitForStackValue(args->at(2));
3082 Label done;
3083 Label slow_case;
3084 Register object = rax;
3085 Register index_1 = rbx;
3086 Register index_2 = rcx;
3087 Register elements = rdi;
3088 Register temp = rdx;
3089 __ movq(object, Operand(rsp, 2 * kPointerSize));
3090 // Fetch the map and check if array is in fast case.
3091 // Check that object doesn't require security checks and
3092 // has no indexed interceptor.
3093 __ CmpObjectType(object, JS_ARRAY_TYPE, temp);
3094 __ j(not_equal, &slow_case);
3095 __ testb(FieldOperand(temp, Map::kBitFieldOffset),
3096 Immediate(KeyedLoadIC::kSlowCaseBitFieldMask));
3097 __ j(not_zero, &slow_case);
3098
3099 // Check the object's elements are in fast case and writable.
3100 __ movq(elements, FieldOperand(object, JSObject::kElementsOffset));
3101 __ CompareRoot(FieldOperand(elements, HeapObject::kMapOffset),
3102 Heap::kFixedArrayMapRootIndex);
3103 __ j(not_equal, &slow_case);
3104
3105 // Check that both indices are smis.
3106 __ movq(index_1, Operand(rsp, 1 * kPointerSize));
3107 __ movq(index_2, Operand(rsp, 0 * kPointerSize));
3108 __ JumpIfNotBothSmi(index_1, index_2, &slow_case);
3109
3110 // Check that both indices are valid.
3111 // The JSArray length field is a smi since the array is in fast case mode.
3112 __ movq(temp, FieldOperand(object, JSArray::kLengthOffset));
3113 __ SmiCompare(temp, index_1);
3114 __ j(below_equal, &slow_case);
3115 __ SmiCompare(temp, index_2);
3116 __ j(below_equal, &slow_case);
3117
3118 __ SmiToInteger32(index_1, index_1);
3119 __ SmiToInteger32(index_2, index_2);
3120   // Bring the element addresses into index_1 and index_2.
3121 __ lea(index_1, FieldOperand(elements, index_1, times_pointer_size,
3122 FixedArray::kHeaderSize));
3123 __ lea(index_2, FieldOperand(elements, index_2, times_pointer_size,
3124 FixedArray::kHeaderSize));
3125
3126 // Swap elements. Use object and temp as scratch registers.
3127 __ movq(object, Operand(index_1, 0));
3128 __ movq(temp, Operand(index_2, 0));
3129 __ movq(Operand(index_2, 0), object);
3130 __ movq(Operand(index_1, 0), temp);
3131
3132 Label new_space;
3133 __ InNewSpace(elements, temp, equal, &new_space);
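  // Note: stores into an elements array that is itself in new space cannot
  // create old-to-new pointers, so the write barrier is skipped in that case.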
3134
3135 __ movq(object, elements);
3136 __ RecordWriteHelper(object, index_1, temp);
3137 __ RecordWriteHelper(elements, index_2, temp);
3138
3139 __ bind(&new_space);
3140 // We are done. Drop elements from the stack, and return undefined.
3141 __ addq(rsp, Immediate(3 * kPointerSize));
3142 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3143 __ jmp(&done);
3144
3145 __ bind(&slow_case);
3146 __ CallRuntime(Runtime::kSwapElements, 3);
3147
3148 __ bind(&done);
3149 context()->Plug(rax);
3150 }
3151
3152
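// Generates code for the %_GetFromCache intrinsic. The cache id must be a
// smi literal; the generated code loads the corresponding
// JSFunctionResultCache from the global context and probes only the entry
// at the cache's current finger. A miss falls through to
// Runtime::kGetFromCache, which performs the full lookup and updates the
// cache. As an orientation-only sketch, the inline probe amounts to:
//
//   cache = global_context.jsfunction_result_caches[cache_id];
//   if (cache[finger] == key) return cache[finger + 1];
//   return %GetFromCache(cache, key);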
3153 void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
3154 ASSERT_EQ(2, args->length());
3155
3156 ASSERT_NE(NULL, args->at(0)->AsLiteral());
3157 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
3158
3159 Handle<FixedArray> jsfunction_result_caches(
3160 isolate()->global_context()->jsfunction_result_caches());
3161 if (jsfunction_result_caches->length() <= cache_id) {
3162 __ Abort("Attempt to use undefined cache.");
3163 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3164 context()->Plug(rax);
3165 return;
3166 }
3167
3168 VisitForAccumulatorValue(args->at(1));
3169
3170 Register key = rax;
3171 Register cache = rbx;
3172 Register tmp = rcx;
3173 __ movq(cache, ContextOperand(rsi, Context::GLOBAL_INDEX));
3174 __ movq(cache,
3175 FieldOperand(cache, GlobalObject::kGlobalContextOffset));
3176 __ movq(cache,
3177 ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3178 __ movq(cache,
3179 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3180
3181 NearLabel done, not_found;
3182 // The load below puts the cache's finger offset into tmp, as a smi.
3183 ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3184 __ movq(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
3185 SmiIndex index =
3186 __ SmiToIndex(kScratchRegister, tmp, kPointerSizeLog2);
3187 __ cmpq(key, FieldOperand(cache,
3188 index.reg,
3189 index.scale,
3190 FixedArray::kHeaderSize));
3191 __ j(not_equal, &not_found);
3192 __ movq(rax, FieldOperand(cache,
3193 index.reg,
3194 index.scale,
3195 FixedArray::kHeaderSize + kPointerSize));
3196 __ jmp(&done);
3197
3198 __ bind(&not_found);
3199 // Call runtime to perform the lookup.
3200 __ push(cache);
3201 __ push(key);
3202 __ CallRuntime(Runtime::kGetFromCache, 2);
3203
3204 __ bind(&done);
3205 context()->Plug(rax);
3206 }
3207
3208
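// Generates code for the %_IsRegExpEquivalent intrinsic. Two values are
// treated as equivalent if they are the same object, or if both are
// JSRegExp objects that share the same map and the same data array (which
// in practice means the same pattern and flags). Smis and other heap
// objects yield false. The result is materialized directly as the
// true/false value rather than via a control-flow split.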
3209 void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
3210 ASSERT_EQ(2, args->length());
3211
3212 Register right = rax;
3213 Register left = rbx;
3214 Register tmp = rcx;
3215
3216 VisitForStackValue(args->at(0));
3217 VisitForAccumulatorValue(args->at(1));
3218 __ pop(left);
3219
3220 NearLabel done, fail, ok;
3221 __ cmpq(left, right);
3222 __ j(equal, &ok);
3223 // Fail if either is a non-HeapObject.
3224 Condition either_smi = masm()->CheckEitherSmi(left, right, tmp);
3225 __ j(either_smi, &fail);
3226 __ j(zero, &fail);
3227 __ movq(tmp, FieldOperand(left, HeapObject::kMapOffset));
3228 __ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset),
3229 Immediate(JS_REGEXP_TYPE));
3230 __ j(not_equal, &fail);
3231 __ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset));
3232 __ j(not_equal, &fail);
3233 __ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset));
3234 __ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset));
3235 __ j(equal, &ok);
3236 __ bind(&fail);
3237 __ Move(rax, isolate()->factory()->false_value());
3238 __ jmp(&done);
3239 __ bind(&ok);
3240 __ Move(rax, isolate()->factory()->true_value());
3241 __ bind(&done);
3242
3243 context()->Plug(rax);
3244 }
3245
3246
3247 void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) {
3248 ASSERT(args->length() == 1);
3249
3250 VisitForAccumulatorValue(args->at(0));
3251
3252 Label materialize_true, materialize_false;
3253 Label* if_true = NULL;
3254 Label* if_false = NULL;
3255 Label* fall_through = NULL;
3256 context()->PrepareTest(&materialize_true, &materialize_false,
3257 &if_true, &if_false, &fall_through);
3258
3259 __ testl(FieldOperand(rax, String::kHashFieldOffset),
3260 Immediate(String::kContainsCachedArrayIndexMask));
3261 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
3262 __ j(zero, if_true);
3263 __ jmp(if_false);
3264
3265 context()->Plug(if_true, if_false);
3266 }
3267
3268
3269 void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) {
3270 ASSERT(args->length() == 1);
3271 VisitForAccumulatorValue(args->at(0));
3272
3273 if (FLAG_debug_code) {
3274 __ AbortIfNotString(rax);
3275 }
3276
3277 __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
3278 ASSERT(String::kHashShift >= kSmiTagSize);
3279 __ IndexFromHash(rax, rax);
3280
3281 context()->Plug(rax);
3282 }
3283
3284
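// Generates the fast path for joining an array of sequential ASCII strings
// (Array.prototype.join). The emitted code validates the array and the
// separator, accumulates the total result length while checking that every
// element is a flat ASCII string, allocates the result string once, and
// then copies characters using one of three specialized loops: empty
// separator, one-character separator, or longer separator. Any bailout
// leaves undefined in rax so the caller can fall back to the generic join.
// Three words of temporary state live on the stack during the copy: the
// separator, the result string, and the array length.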
3285 void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
3286 Label bailout, return_result, done, one_char_separator, long_separator,
3287 non_trivial_array, not_size_one_array, loop,
3288 loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3289 ASSERT(args->length() == 2);
3290 // We will leave the separator on the stack until the end of the function.
3291 VisitForStackValue(args->at(1));
3292 // Load the array into rax.
3293 VisitForAccumulatorValue(args->at(0));
3294 // All aliases of the same register have disjoint lifetimes.
3295 Register array = rax;
3296 Register elements = no_reg; // Will be rax.
3297
3298 Register index = rdx;
3299
3300 Register string_length = rcx;
3301
3302 Register string = rsi;
3303
3304 Register scratch = rbx;
3305
3306 Register array_length = rdi;
3307 Register result_pos = no_reg; // Will be rdi.
3308
3309 Operand separator_operand = Operand(rsp, 2 * kPointerSize);
3310 Operand result_operand = Operand(rsp, 1 * kPointerSize);
3311 Operand array_length_operand = Operand(rsp, 0 * kPointerSize);
3312 // Separator operand is already pushed. Make room for the two
3313 // other stack fields, and clear the direction flag in anticipation
3314 // of calling CopyBytes.
3315 __ subq(rsp, Immediate(2 * kPointerSize));
3316 __ cld();
3317 // Check that the array is a JSArray
3318 __ JumpIfSmi(array, &bailout);
3319 __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3320 __ j(not_equal, &bailout);
3321
3322 // Check that the array has fast elements.
3323 __ testb(FieldOperand(scratch, Map::kBitField2Offset),
3324 Immediate(1 << Map::kHasFastElements));
3325 __ j(zero, &bailout);
3326
3327 // Array has fast elements, so its length must be a smi.
3328 // If the array has length zero, return the empty string.
3329 __ movq(array_length, FieldOperand(array, JSArray::kLengthOffset));
3330 __ SmiCompare(array_length, Smi::FromInt(0));
3331 __ j(not_zero, &non_trivial_array);
3332 __ LoadRoot(rax, Heap::kEmptyStringRootIndex);
3333 __ jmp(&return_result);
3334
3335 // Save the array length on the stack.
3336 __ bind(&non_trivial_array);
3337 __ SmiToInteger32(array_length, array_length);
3338 __ movl(array_length_operand, array_length);
3339
3340 // Save the FixedArray containing array's elements.
3341 // End of array's live range.
3342 elements = array;
3343 __ movq(elements, FieldOperand(array, JSArray::kElementsOffset));
3344 array = no_reg;
3345
3346
3347 // Check that all array elements are sequential ASCII strings, and
3348 // accumulate the sum of their lengths, as a smi-encoded value.
3349 __ Set(index, 0);
3350 __ Set(string_length, 0);
3351 // Loop condition: while (index < array_length).
3352 // Live loop registers: index(int32), array_length(int32), string(String*),
3353 // scratch, string_length(int32), elements(FixedArray*).
3354 if (FLAG_debug_code) {
3355 __ cmpq(index, array_length);
3356 __ Assert(below, "No empty arrays here in EmitFastAsciiArrayJoin");
3357 }
3358 __ bind(&loop);
3359 __ movq(string, FieldOperand(elements,
3360 index,
3361 times_pointer_size,
3362 FixedArray::kHeaderSize));
3363 __ JumpIfSmi(string, &bailout);
3364 __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
3365 __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3366 __ andb(scratch, Immediate(
3367 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3368 __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
3369 __ j(not_equal, &bailout);
3370 __ AddSmiField(string_length,
3371 FieldOperand(string, SeqAsciiString::kLengthOffset));
3372 __ j(overflow, &bailout);
3373 __ incl(index);
3374 __ cmpl(index, array_length);
3375 __ j(less, &loop);
3376
3377 // Live registers:
3378 // string_length: Sum of string lengths.
3379 // elements: FixedArray of strings.
3380 // index: Array length.
3381 // array_length: Array length.
3382
3383 // If array_length is 1, return elements[0], a string.
3384 __ cmpl(array_length, Immediate(1));
3385 __ j(not_equal, &not_size_one_array);
3386 __ movq(rax, FieldOperand(elements, FixedArray::kHeaderSize));
3387 __ jmp(&return_result);
3388
3389 __ bind(&not_size_one_array);
3390
3391 // End of array_length live range.
3392 result_pos = array_length;
3393 array_length = no_reg;
3394
3395 // Live registers:
3396 // string_length: Sum of string lengths.
3397 // elements: FixedArray of strings.
3398 // index: Array length.
3399
3400 // Check that the separator is a sequential ASCII string.
3401 __ movq(string, separator_operand);
3402 __ JumpIfSmi(string, &bailout);
3403 __ movq(scratch, FieldOperand(string, HeapObject::kMapOffset));
3404 __ movzxbl(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3405 __ andb(scratch, Immediate(
3406 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3407 __ cmpb(scratch, Immediate(kStringTag | kAsciiStringTag | kSeqStringTag));
3408 __ j(not_equal, &bailout);
3409
3410 // Live registers:
3411 // string_length: Sum of string lengths.
3412 // elements: FixedArray of strings.
3413 // index: Array length.
3414 // string: Separator string.
3415
3416 // Add (separator length times (array_length - 1)) to string_length.
3417 __ SmiToInteger32(scratch,
3418 FieldOperand(string, SeqAsciiString::kLengthOffset));
3419 __ decl(index);
3420 __ imull(scratch, index);
3421 __ j(overflow, &bailout);
3422 __ addl(string_length, scratch);
3423 __ j(overflow, &bailout);
3424
3425 // Live registers and stack values:
3426 // string_length: Total length of result string.
3427 // elements: FixedArray of strings.
3428 __ AllocateAsciiString(result_pos, string_length, scratch,
3429 index, string, &bailout);
3430 __ movq(result_operand, result_pos);
3431 __ lea(result_pos, FieldOperand(result_pos, SeqAsciiString::kHeaderSize));
3432
3433 __ movq(string, separator_operand);
3434 __ SmiCompare(FieldOperand(string, SeqAsciiString::kLengthOffset),
3435 Smi::FromInt(1));
3436 __ j(equal, &one_char_separator);
3437 __ j(greater, &long_separator);
3438
3439
3440 // Empty separator case:
3441 __ Set(index, 0);
3442 __ movl(scratch, array_length_operand);
3443 __ jmp(&loop_1_condition);
3444 // Loop condition: while (index < array_length).
3445 __ bind(&loop_1);
3446 // Each iteration of the loop concatenates one string to the result.
3447 // Live values in registers:
3448 // index: which element of the elements array we are adding to the result.
3449 // result_pos: the position to which we are currently copying characters.
3450 // elements: the FixedArray of strings we are joining.
3451 // scratch: array length.
3452
3453 // Get string = array[index].
3454 __ movq(string, FieldOperand(elements, index,
3455 times_pointer_size,
3456 FixedArray::kHeaderSize));
3457 __ SmiToInteger32(string_length,
3458 FieldOperand(string, String::kLengthOffset));
3459 __ lea(string,
3460 FieldOperand(string, SeqAsciiString::kHeaderSize));
3461 __ CopyBytes(result_pos, string, string_length);
3462 __ incl(index);
3463 __ bind(&loop_1_condition);
3464 __ cmpl(index, scratch);
3465 __ j(less, &loop_1); // Loop while (index < array_length).
3466 __ jmp(&done);
3467
3468 // Generic bailout code used from several places.
3469 __ bind(&bailout);
3470 __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
3471 __ jmp(&return_result);
3472
3473
3474 // One-character separator case
3475 __ bind(&one_char_separator);
3476 // Get the separator ascii character value.
3477 // Register "string" holds the separator.
3478 __ movzxbl(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
3479 __ Set(index, 0);
3480 // Jump into the loop after the code that copies the separator, so the first
3481 // element is not preceded by a separator
3482 __ jmp(&loop_2_entry);
3483 // Loop condition: while (index < length).
3484 __ bind(&loop_2);
3485 // Each iteration of the loop concatenates one string to the result.
3486 // Live values in registers:
3487 // elements: The FixedArray of strings we are joining.
3488 // index: which element of the elements array we are adding to the result.
3489 // result_pos: the position to which we are currently copying characters.
3490 // scratch: Separator character.
3491
3492 // Copy the separator character to the result.
3493 __ movb(Operand(result_pos, 0), scratch);
3494 __ incq(result_pos);
3495
3496 __ bind(&loop_2_entry);
3497 // Get string = array[index].
3498 __ movq(string, FieldOperand(elements, index,
3499 times_pointer_size,
3500 FixedArray::kHeaderSize));
3501 __ SmiToInteger32(string_length,
3502 FieldOperand(string, String::kLengthOffset));
3503 __ lea(string,
3504 FieldOperand(string, SeqAsciiString::kHeaderSize));
3505 __ CopyBytes(result_pos, string, string_length);
3506 __ incl(index);
3507 __ cmpl(index, array_length_operand);
3508 __ j(less, &loop_2); // End while (index < length).
3509 __ jmp(&done);
3510
3511
3512 // Long separator case (separator is more than one character).
3513 __ bind(&long_separator);
3514
3515 // Make elements point to end of elements array, and index
3516 // count from -array_length to zero, so we don't need to maintain
3517 // a loop limit.
3518 __ movl(index, array_length_operand);
3519 __ lea(elements, FieldOperand(elements, index, times_pointer_size,
3520 FixedArray::kHeaderSize));
3521 __ neg(index);
3522
3523 // Replace separator string with pointer to its first character, and
3524 // make scratch be its length.
3525 __ movq(string, separator_operand);
3526 __ SmiToInteger32(scratch,
3527 FieldOperand(string, String::kLengthOffset));
3528 __ lea(string,
3529 FieldOperand(string, SeqAsciiString::kHeaderSize));
3530 __ movq(separator_operand, string);
3531
3532 // Jump into the loop after the code that copies the separator, so the first
3533 // element is not preceded by a separator
3534 __ jmp(&loop_3_entry);
3535 // Loop condition: while (index < length).
3536 __ bind(&loop_3);
3537 // Each iteration of the loop concatenates one string to the result.
3538 // Live values in registers:
3539 // index: which element of the elements array we are adding to the result.
3540 // result_pos: the position to which we are currently copying characters.
3541 // scratch: Separator length.
3542 // separator_operand (rsp[0x10]): Address of first char of separator.
3543
3544 // Copy the separator to the result.
3545 __ movq(string, separator_operand);
3546 __ movl(string_length, scratch);
3547 __ CopyBytes(result_pos, string, string_length, 2);
3548
3549 __ bind(&loop_3_entry);
3550 // Get string = array[index].
3551 __ movq(string, Operand(elements, index, times_pointer_size, 0));
3552 __ SmiToInteger32(string_length,
3553 FieldOperand(string, String::kLengthOffset));
3554 __ lea(string,
3555 FieldOperand(string, SeqAsciiString::kHeaderSize));
3556 __ CopyBytes(result_pos, string, string_length);
3557 __ incq(index);
3558 __ j(not_equal, &loop_3); // Loop while (index < 0).
3559
3560 __ bind(&done);
3561 __ movq(rax, result_operand);
3562
3563 __ bind(&return_result);
3564 // Drop temp values from the stack, and restore context register.
3565 __ addq(rsp, Immediate(3 * kPointerSize));
3566 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3567 context()->Plug(rax);
3568 }
3569
3570
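// Runtime calls come in three flavors. Names starting with '_' are inline
// runtime functions dispatched to the Emit* generators above; JS runtime
// functions are invoked through a call IC on the builtins object; all
// remaining names are C++ runtime functions reached via CallRuntime.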
3571 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3572 Handle<String> name = expr->name();
3573 if (name->length() > 0 && name->Get(0) == '_') {
3574 Comment cmnt(masm_, "[ InlineRuntimeCall");
3575 EmitInlineRuntimeCall(expr);
3576 return;
3577 }
3578
3579 Comment cmnt(masm_, "[ CallRuntime");
3580 ZoneList<Expression*>* args = expr->arguments();
3581
3582 if (expr->is_jsruntime()) {
3583 // Prepare for calling JS runtime function.
3584 __ movq(rax, GlobalObjectOperand());
3585 __ push(FieldOperand(rax, GlobalObject::kBuiltinsOffset));
3586 }
3587
3588 // Push the arguments ("left-to-right").
3589 int arg_count = args->length();
3590 for (int i = 0; i < arg_count; i++) {
3591 VisitForStackValue(args->at(i));
3592 }
3593
3594 if (expr->is_jsruntime()) {
3595 // Call the JS runtime function using a call IC.
3596 __ Move(rcx, expr->name());
3597 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
3598 Handle<Code> ic =
3599 ISOLATE->stub_cache()->ComputeCallInitialize(arg_count, in_loop);
3600 EmitCallIC(ic, RelocInfo::CODE_TARGET);
3601 // Restore context register.
3602 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));
3603 } else {
3604 __ CallRuntime(expr->function(), arg_count);
3605 }
3606 context()->Plug(rax);
3607 }
3608
3609
3610 void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
3611 switch (expr->op()) {
3612 case Token::DELETE: {
3613 Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
3614 Property* prop = expr->expression()->AsProperty();
3615 Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
3616
3617 if (prop != NULL) {
3618 if (prop->is_synthetic()) {
3619 // Result of deleting parameters is false, even when they rewrite
3620 // to accesses on the arguments object.
3621 context()->Plug(false);
3622 } else {
3623 VisitForStackValue(prop->obj());
3624 VisitForStackValue(prop->key());
3625 __ Push(Smi::FromInt(strict_mode_flag()));
3626 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3627 context()->Plug(rax);
3628 }
3629 } else if (var != NULL) {
3630 // Deleting an unqualified identifier is disallowed in strict mode,
3631 // but "delete this" is allowed.
3632 ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
3633 if (var->is_global()) {
3634 __ push(GlobalObjectOperand());
3635 __ Push(var->name());
3636 __ Push(Smi::FromInt(kNonStrictMode));
3637 __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
3638 context()->Plug(rax);
3639 } else if (var->AsSlot() != NULL &&
3640 var->AsSlot()->type() != Slot::LOOKUP) {
3641 // Result of deleting non-global, non-dynamic variables is false.
3642 // The subexpression does not have side effects.
3643 context()->Plug(false);
3644 } else {
3645 // Non-global variable. Call the runtime to try to delete from the
3646 // context where the variable was introduced.
3647 __ push(context_register());
3648 __ Push(var->name());
3649 __ CallRuntime(Runtime::kDeleteContextSlot, 2);
3650 context()->Plug(rax);
3651 }
3652 } else {
3653 // Result of deleting non-property, non-variable reference is true.
3654 // The subexpression may have side effects.
3655 VisitForEffect(expr->expression());
3656 context()->Plug(true);
3657 }
3658 break;
3659 }
3660
3661 case Token::VOID: {
3662 Comment cmnt(masm_, "[ UnaryOperation (VOID)");
3663 VisitForEffect(expr->expression());
3664 context()->Plug(Heap::kUndefinedValueRootIndex);
3665 break;
3666 }
3667
3668 case Token::NOT: {
3669 Comment cmnt(masm_, "[ UnaryOperation (NOT)");
3670 if (context()->IsEffect()) {
3671 // Unary NOT has no side effects so it's only necessary to visit the
3672 // subexpression. Match the optimizing compiler by not branching.
3673 VisitForEffect(expr->expression());
3674 } else {
3675 Label materialize_true, materialize_false;
3676 Label* if_true = NULL;
3677 Label* if_false = NULL;
3678 Label* fall_through = NULL;
3679 // Notice that the labels are swapped.
3680 context()->PrepareTest(&materialize_true, &materialize_false,
3681 &if_false, &if_true, &fall_through);
3682 if (context()->IsTest()) ForwardBailoutToChild(expr);
3683 VisitForControl(expr->expression(), if_true, if_false, fall_through);
3684 context()->Plug(if_false, if_true); // Labels swapped.
3685 }
3686 break;
3687 }
3688
3689 case Token::TYPEOF: {
3690 Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
3691 { StackValueContext context(this);
3692 VisitForTypeofValue(expr->expression());
3693 }
3694 __ CallRuntime(Runtime::kTypeof, 1);
3695 context()->Plug(rax);
3696 break;
3697 }
3698
3699 case Token::ADD: {
3700 Comment cmt(masm_, "[ UnaryOperation (ADD)");
3701 VisitForAccumulatorValue(expr->expression());
3702 Label no_conversion;
3703 Condition is_smi = masm_->CheckSmi(result_register());
3704 __ j(is_smi, &no_conversion);
3705 ToNumberStub convert_stub;
3706 __ CallStub(&convert_stub);
3707 __ bind(&no_conversion);
3708 context()->Plug(result_register());
3709 break;
3710 }
3711
3712 case Token::SUB: {
3713 Comment cmt(masm_, "[ UnaryOperation (SUB)");
3714 bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
3715 UnaryOverwriteMode overwrite =
3716 can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
3717 GenericUnaryOpStub stub(Token::SUB, overwrite, NO_UNARY_FLAGS);
3718 // GenericUnaryOpStub expects the argument to be in the
3719 // accumulator register rax.
3720 VisitForAccumulatorValue(expr->expression());
3721 __ CallStub(&stub);
3722 context()->Plug(rax);
3723 break;
3724 }
3725
3726 case Token::BIT_NOT: {
3727 Comment cmt(masm_, "[ UnaryOperation (BIT_NOT)");
3728 // The generic unary operation stub expects the argument to be
3729 // in the accumulator register rax.
3730 VisitForAccumulatorValue(expr->expression());
3731 Label done;
3732 bool inline_smi_case = ShouldInlineSmiCase(expr->op());
3733 if (inline_smi_case) {
3734 Label call_stub;
3735 __ JumpIfNotSmi(rax, &call_stub);
3736 __ SmiNot(rax, rax);
3737 __ jmp(&done);
3738 __ bind(&call_stub);
3739 }
3740 bool overwrite = expr->expression()->ResultOverwriteAllowed();
3741 UnaryOverwriteMode mode =
3742 overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
3743 UnaryOpFlags flags = inline_smi_case
3744 ? NO_UNARY_SMI_CODE_IN_STUB
3745 : NO_UNARY_FLAGS;
3746 GenericUnaryOpStub stub(Token::BIT_NOT, mode, flags);
3747 __ CallStub(&stub);
3748 __ bind(&done);
3749 context()->Plug(rax);
3750 break;
3751 }
3752
3753 default:
3754 UNREACHABLE();
3755 }
3756 }
3757
3758
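// Count operations (++ and --) are compiled as a ToNumber conversion of
// the operand followed by an add or subtract of 1. The smi case is inlined
// when profitable, with a JumpPatchSite recording the smi check so the
// type-feedback machinery can patch it later; on overflow the inline
// operation is undone and the TypeRecordingBinaryOpStub is called instead.
// For postfix expressions in a value context, the converted original value
// is stashed on the stack beneath any receiver/key so it can be plugged as
// the expression's result after the store. As an illustrative sketch, the
// value handling for a postfix `a.x++` is roughly:
//
//   old = ToNumber(a.x);   // saved under the receiver on the stack
//   a.x = old + 1;         // store IC
//   result = old;          // PlugTOS in non-effect contexts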
3759 void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
3760 Comment cmnt(masm_, "[ CountOperation");
3761 SetSourcePosition(expr->position());
3762
3763 // Invalid left-hand-sides are rewritten to have a 'throw
3764 // ReferenceError' as the left-hand side.
3765 if (!expr->expression()->IsValidLeftHandSide()) {
3766 VisitForEffect(expr->expression());
3767 return;
3768 }
3769
3770 // Expression can only be a property, a global or a (parameter or local)
3771 // slot. Variables that rewrite to .arguments are treated as KEYED_PROPERTY.
3772 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
3773 LhsKind assign_type = VARIABLE;
3774 Property* prop = expr->expression()->AsProperty();
3775 // In case of a property we use the uninitialized expression context
3776 // of the key to detect a named property.
3777 if (prop != NULL) {
3778 assign_type =
3779 (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
3780 }
3781
3782 // Evaluate expression and get value.
3783 if (assign_type == VARIABLE) {
3784 ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
3785 AccumulatorValueContext context(this);
3786 EmitVariableLoad(expr->expression()->AsVariableProxy()->var());
3787 } else {
3788 // Reserve space for result of postfix operation.
3789 if (expr->is_postfix() && !context()->IsEffect()) {
3790 __ Push(Smi::FromInt(0));
3791 }
3792 if (assign_type == NAMED_PROPERTY) {
3793 VisitForAccumulatorValue(prop->obj());
3794 __ push(rax); // Copy of receiver, needed for later store.
3795 EmitNamedPropertyLoad(prop);
3796 } else {
3797 if (prop->is_arguments_access()) {
3798 VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
3799 MemOperand slot_operand =
3800 EmitSlotSearch(obj_proxy->var()->AsSlot(), rcx);
3801 __ push(slot_operand);
3802 __ Move(rax, prop->key()->AsLiteral()->handle());
3803 } else {
3804 VisitForStackValue(prop->obj());
3805 VisitForAccumulatorValue(prop->key());
3806 }
3807 __ movq(rdx, Operand(rsp, 0)); // Leave receiver on stack
3808 __ push(rax); // Copy of key, needed for later store.
3809 EmitKeyedPropertyLoad(prop);
3810 }
3811 }
3812
3813 // We need a second deoptimization point after loading the value
3814 // in case evaluating the property load may have a side effect.
3815 if (assign_type == VARIABLE) {
3816 PrepareForBailout(expr->expression(), TOS_REG);
3817 } else {
3818 PrepareForBailoutForId(expr->CountId(), TOS_REG);
3819 }
3820
3821 // Call ToNumber only if operand is not a smi.
3822 NearLabel no_conversion;
3823 Condition is_smi;
3824 is_smi = masm_->CheckSmi(rax);
3825 __ j(is_smi, &no_conversion);
3826 ToNumberStub convert_stub;
3827 __ CallStub(&convert_stub);
3828 __ bind(&no_conversion);
3829
3830 // Save result for postfix expressions.
3831 if (expr->is_postfix()) {
3832 if (!context()->IsEffect()) {
3833 // Save the result on the stack. If we have a named or keyed property
3834 // we store the result under the receiver that is currently on top
3835 // of the stack.
3836 switch (assign_type) {
3837 case VARIABLE:
3838 __ push(rax);
3839 break;
3840 case NAMED_PROPERTY:
3841 __ movq(Operand(rsp, kPointerSize), rax);
3842 break;
3843 case KEYED_PROPERTY:
3844 __ movq(Operand(rsp, 2 * kPointerSize), rax);
3845 break;
3846 }
3847 }
3848 }
3849
3850 // Inline smi case if we are in a loop.
3851 NearLabel stub_call, done;
3852 JumpPatchSite patch_site(masm_);
3853
3854 if (ShouldInlineSmiCase(expr->op())) {
3855 if (expr->op() == Token::INC) {
3856 __ SmiAddConstant(rax, rax, Smi::FromInt(1));
3857 } else {
3858 __ SmiSubConstant(rax, rax, Smi::FromInt(1));
3859 }
3860 __ j(overflow, &stub_call);
3861 // We could eliminate this smi check if we split the code at
3862 // the first smi check before calling ToNumber.
3863 patch_site.EmitJumpIfSmi(rax, &done);
3864
3865 __ bind(&stub_call);
3866 // Call stub. Undo operation first.
3867 if (expr->op() == Token::INC) {
3868 __ SmiSubConstant(rax, rax, Smi::FromInt(1));
3869 } else {
3870 __ SmiAddConstant(rax, rax, Smi::FromInt(1));
3871 }
3872 }
3873
3874 // Record position before stub call.
3875 SetSourcePosition(expr->position());
3876
3877 // Call stub for +1/-1.
3878 TypeRecordingBinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
3879 if (expr->op() == Token::INC) {
3880 __ Move(rdx, Smi::FromInt(1));
3881 } else {
3882 __ movq(rdx, rax);
3883 __ Move(rax, Smi::FromInt(1));
3884 }
3885 EmitCallIC(stub.GetCode(), &patch_site);
3886 __ bind(&done);
3887
3888 // Store the value returned in rax.
3889 switch (assign_type) {
3890 case VARIABLE:
3891 if (expr->is_postfix()) {
3892 // Perform the assignment as if via '='.
3893 { EffectContext context(this);
3894 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3895 Token::ASSIGN);
3896 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3897 context.Plug(rax);
3898 }
3899 // For all contexts except kEffect, the result is on
3900 // top of the stack.
3901 if (!context()->IsEffect()) {
3902 context()->PlugTOS();
3903 }
3904 } else {
3905 // Perform the assignment as if via '='.
3906 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3907 Token::ASSIGN);
3908 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3909 context()->Plug(rax);
3910 }
3911 break;
3912 case NAMED_PROPERTY: {
3913 __ Move(rcx, prop->key()->AsLiteral()->handle());
3914 __ pop(rdx);
3915 Handle<Code> ic = is_strict_mode()
3916 ? isolate()->builtins()->StoreIC_Initialize_Strict()
3917 : isolate()->builtins()->StoreIC_Initialize();
3918 EmitCallIC(ic, RelocInfo::CODE_TARGET);
3919 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3920 if (expr->is_postfix()) {
3921 if (!context()->IsEffect()) {
3922 context()->PlugTOS();
3923 }
3924 } else {
3925 context()->Plug(rax);
3926 }
3927 break;
3928 }
3929 case KEYED_PROPERTY: {
3930 __ pop(rcx);
3931 __ pop(rdx);
3932 Handle<Code> ic = is_strict_mode()
3933 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
3934 : isolate()->builtins()->KeyedStoreIC_Initialize();
3935 EmitCallIC(ic, RelocInfo::CODE_TARGET);
3936 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
3937 if (expr->is_postfix()) {
3938 if (!context()->IsEffect()) {
3939 context()->PlugTOS();
3940 }
3941 } else {
3942 context()->Plug(rax);
3943 }
3944 break;
3945 }
3946 }
3947 }
3948
3949
3950 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
3951 VariableProxy* proxy = expr->AsVariableProxy();
3952 ASSERT(!context()->IsEffect());
3953 ASSERT(!context()->IsTest());
3954
3955 if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) {
3956 Comment cmnt(masm_, "Global variable");
3957 __ Move(rcx, proxy->name());
3958 __ movq(rax, GlobalObjectOperand());
3959 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
3960 // Use a regular load, not a contextual load, to avoid a reference
3961 // error.
3962 EmitCallIC(ic, RelocInfo::CODE_TARGET);
3963 PrepareForBailout(expr, TOS_REG);
3964 context()->Plug(rax);
3965 } else if (proxy != NULL &&
3966 proxy->var()->AsSlot() != NULL &&
3967 proxy->var()->AsSlot()->type() == Slot::LOOKUP) {
3968 Label done, slow;
3969
3970 // Generate code for loading from variables potentially shadowed
3971 // by eval-introduced variables.
3972 Slot* slot = proxy->var()->AsSlot();
3973 EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);
3974
3975 __ bind(&slow);
3976 __ push(rsi);
3977 __ Push(proxy->name());
3978 __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
3979 PrepareForBailout(expr, TOS_REG);
3980 __ bind(&done);
3981
3982 context()->Plug(rax);
3983 } else {
3984 // This expression cannot throw a reference error at the top level.
3985 context()->HandleExpression(expr);
3986 }
3987 }
3988
3989
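// Tries to compile comparisons of the form `typeof <expr> == <string
// literal>` (or ===) without materializing the typeof result. The operand
// is visited for its typeof value, and the known literal selects a direct
// map or instance-type check: "number" accepts smis and heap numbers,
// "string" non-undetectable strings, "undefined" the undefined value or
// undetectable objects, and so on. An unrecognized literal simply jumps to
// the false label. Returns false if the pattern does not match, in which
// case the caller emits a generic comparison.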
3990 bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
3991 Expression* left,
3992 Expression* right,
3993 Label* if_true,
3994 Label* if_false,
3995 Label* fall_through) {
3996 if (op != Token::EQ && op != Token::EQ_STRICT) return false;
3997
3998 // Check for the pattern: typeof <expression> == <string literal>.
3999 Literal* right_literal = right->AsLiteral();
4000 if (right_literal == NULL) return false;
4001 Handle<Object> right_literal_value = right_literal->handle();
4002 if (!right_literal_value->IsString()) return false;
4003 UnaryOperation* left_unary = left->AsUnaryOperation();
4004 if (left_unary == NULL || left_unary->op() != Token::TYPEOF) return false;
4005 Handle<String> check = Handle<String>::cast(right_literal_value);
4006
4007 { AccumulatorValueContext context(this);
4008 VisitForTypeofValue(left_unary->expression());
4009 }
4010 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4011
4012 if (check->Equals(isolate()->heap()->number_symbol())) {
4013 __ JumpIfSmi(rax, if_true);
4014 __ movq(rax, FieldOperand(rax, HeapObject::kMapOffset));
4015 __ CompareRoot(rax, Heap::kHeapNumberMapRootIndex);
4016 Split(equal, if_true, if_false, fall_through);
4017 } else if (check->Equals(isolate()->heap()->string_symbol())) {
4018 __ JumpIfSmi(rax, if_false);
4019 // Check for undetectable objects => false.
4020 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdx);
4021 __ j(above_equal, if_false);
4022 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4023 Immediate(1 << Map::kIsUndetectable));
4024 Split(zero, if_true, if_false, fall_through);
4025 } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
4026 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4027 __ j(equal, if_true);
4028 __ CompareRoot(rax, Heap::kFalseValueRootIndex);
4029 Split(equal, if_true, if_false, fall_through);
4030 } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
4031 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
4032 __ j(equal, if_true);
4033 __ JumpIfSmi(rax, if_false);
4034 // Check for undetectable objects => true.
4035 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
4036 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4037 Immediate(1 << Map::kIsUndetectable));
4038 Split(not_zero, if_true, if_false, fall_through);
4039 } else if (check->Equals(isolate()->heap()->function_symbol())) {
4040 __ JumpIfSmi(rax, if_false);
4041 __ CmpObjectType(rax, FIRST_FUNCTION_CLASS_TYPE, rdx);
4042 Split(above_equal, if_true, if_false, fall_through);
4043 } else if (check->Equals(isolate()->heap()->object_symbol())) {
4044 __ JumpIfSmi(rax, if_false);
4045 __ CompareRoot(rax, Heap::kNullValueRootIndex);
4046 __ j(equal, if_true);
4047 __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rdx);
4048 __ j(below, if_false);
4049 __ CmpInstanceType(rdx, FIRST_FUNCTION_CLASS_TYPE);
4050 __ j(above_equal, if_false);
4051 // Check for undetectable objects => false.
4052 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4053 Immediate(1 << Map::kIsUndetectable));
4054 Split(zero, if_true, if_false, fall_through);
4055 } else {
4056 if (if_false != fall_through) __ jmp(if_false);
4057 }
4058
4059 return true;
4060 }
4061
4062
4063 void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
4064 Comment cmnt(masm_, "[ CompareOperation");
4065 SetSourcePosition(expr->position());
4066
4067 // Always perform the comparison for its control flow. Pack the result
4068 // into the expression's context after the comparison is performed.
4069 Label materialize_true, materialize_false;
4070 Label* if_true = NULL;
4071 Label* if_false = NULL;
4072 Label* fall_through = NULL;
4073 context()->PrepareTest(&materialize_true, &materialize_false,
4074 &if_true, &if_false, &fall_through);
4075
4076 // First we try a fast inlined version of the compare when one of
4077 // the operands is a literal.
4078 Token::Value op = expr->op();
4079 Expression* left = expr->left();
4080 Expression* right = expr->right();
4081 if (TryLiteralCompare(op, left, right, if_true, if_false, fall_through)) {
4082 context()->Plug(if_true, if_false);
4083 return;
4084 }
4085
4086 VisitForStackValue(expr->left());
4087 switch (op) {
4088 case Token::IN:
4089 VisitForStackValue(expr->right());
4090 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
4091 PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
4092 __ CompareRoot(rax, Heap::kTrueValueRootIndex);
4093 Split(equal, if_true, if_false, fall_through);
4094 break;
4095
4096 case Token::INSTANCEOF: {
4097 VisitForStackValue(expr->right());
4098 InstanceofStub stub(InstanceofStub::kNoFlags);
4099 __ CallStub(&stub);
4100 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4101 __ testq(rax, rax);
4102 // The stub returns 0 for true.
4103 Split(zero, if_true, if_false, fall_through);
4104 break;
4105 }
4106
4107 default: {
4108 VisitForAccumulatorValue(expr->right());
4109 Condition cc = no_condition;
4110 bool strict = false;
4111 switch (op) {
4112 case Token::EQ_STRICT:
4113 strict = true;
4114 // Fall through.
4115 case Token::EQ:
4116 cc = equal;
4117 __ pop(rdx);
4118 break;
4119 case Token::LT:
4120 cc = less;
4121 __ pop(rdx);
4122 break;
4123 case Token::GT:
4124 // Reverse left and right sides to obtain ECMA-262 conversion order.
4125 cc = less;
4126 __ movq(rdx, result_register());
4127 __ pop(rax);
4128 break;
4129 case Token::LTE:
4130 // Reverse left and right sides to obtain ECMA-262 conversion order.
4131 cc = greater_equal;
4132 __ movq(rdx, result_register());
4133 __ pop(rax);
4134 break;
4135 case Token::GTE:
4136 cc = greater_equal;
4137 __ pop(rdx);
4138 break;
4139 case Token::IN:
4140 case Token::INSTANCEOF:
4141 default:
4142 UNREACHABLE();
4143 }
4144
4145 bool inline_smi_code = ShouldInlineSmiCase(op);
4146 JumpPatchSite patch_site(masm_);
4147 if (inline_smi_code) {
4148 NearLabel slow_case;
4149 __ movq(rcx, rdx);
4150 __ or_(rcx, rax);
4151 patch_site.EmitJumpIfNotSmi(rcx, &slow_case);
4152 __ cmpq(rdx, rax);
4153 Split(cc, if_true, if_false, NULL);
4154 __ bind(&slow_case);
4155 }
4156
4157 // Record position and call the compare IC.
4158 SetSourcePosition(expr->position());
4159 Handle<Code> ic = CompareIC::GetUninitialized(op);
4160 EmitCallIC(ic, &patch_site);
4161
4162 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4163 __ testq(rax, rax);
4164 Split(cc, if_true, if_false, fall_through);
4165 }
4166 }
4167
4168 // Convert the result of the comparison into one expected for this
4169 // expression's context.
4170 context()->Plug(if_true, if_false);
4171 }
4172
4173
4174 void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) {
4175 Comment cmnt(masm_, "[ CompareToNull");
4176 Label materialize_true, materialize_false;
4177 Label* if_true = NULL;
4178 Label* if_false = NULL;
4179 Label* fall_through = NULL;
4180 context()->PrepareTest(&materialize_true, &materialize_false,
4181 &if_true, &if_false, &fall_through);
4182
4183 VisitForAccumulatorValue(expr->expression());
4184 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4185 __ CompareRoot(rax, Heap::kNullValueRootIndex);
4186 if (expr->is_strict()) {
4187 Split(equal, if_true, if_false, fall_through);
4188 } else {
4189 __ j(equal, if_true);
4190 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
4191 __ j(equal, if_true);
4192 Condition is_smi = masm_->CheckSmi(rax);
4193 __ j(is_smi, if_false);
4194 // It can be an undetectable object.
4195 __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset));
4196 __ testb(FieldOperand(rdx, Map::kBitFieldOffset),
4197 Immediate(1 << Map::kIsUndetectable));
4198 Split(not_zero, if_true, if_false, fall_through);
4199 }
4200 context()->Plug(if_true, if_false);
4201 }
4202
4203
4204 void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
4205 __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
4206 context()->Plug(rax);
4207 }
4208
4209
4210 Register FullCodeGenerator::result_register() {
4211 return rax;
4212 }
4213
4214
4215 Register FullCodeGenerator::context_register() {
4216 return rsi;
4217 }
4218
4219
4220 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
4221 ASSERT(mode == RelocInfo::CODE_TARGET ||
4222 mode == RelocInfo::CODE_TARGET_CONTEXT);
4223 Counters* counters = isolate()->counters();
4224 switch (ic->kind()) {
4225 case Code::LOAD_IC:
4226 __ IncrementCounter(counters->named_load_full(), 1);
4227 break;
4228 case Code::KEYED_LOAD_IC:
4229 __ IncrementCounter(counters->keyed_load_full(), 1);
4230 break;
4231 case Code::STORE_IC:
4232 __ IncrementCounter(counters->named_store_full(), 1);
4233 break;
4234 case Code::KEYED_STORE_IC:
4235 __ IncrementCounter(counters->keyed_store_full(), 1);
4236 default:
4237 break;
4238 }
4239
4240 __ call(ic, mode);
4241
4242 // Crankshaft doesn't need patching of inlined loads and stores.
4243 // When compiling the snapshot we need to produce code that works
4244 // with and without Crankshaft.
4245 if (V8::UseCrankshaft() && !Serializer::enabled()) {
4246 return;
4247 }
4248
4249 // If we're calling a (keyed) load or store stub, we have to mark
4250 // the call as containing no inlined code so we will not attempt to
4251 // patch it.
4252 switch (ic->kind()) {
4253 case Code::LOAD_IC:
4254 case Code::KEYED_LOAD_IC:
4255 case Code::STORE_IC:
4256 case Code::KEYED_STORE_IC:
4257 __ nop(); // Signals no inlined code.
4258 break;
4259 default:
4260 // Do nothing.
4261 break;
4262 }
4263 }
4264
4265
4266 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
4267 Counters* counters = isolate()->counters();
4268 switch (ic->kind()) {
4269 case Code::LOAD_IC:
4270 __ IncrementCounter(counters->named_load_full(), 1);
4271 break;
4272 case Code::KEYED_LOAD_IC:
4273 __ IncrementCounter(counters->keyed_load_full(), 1);
4274 break;
4275 case Code::STORE_IC:
4276 __ IncrementCounter(counters->named_store_full(), 1);
4277 break;
4278 case Code::KEYED_STORE_IC:
4279 __ IncrementCounter(counters->keyed_store_full(), 1);
4280 default:
4281 break;
4282 }
4283
4284 __ call(ic, RelocInfo::CODE_TARGET);
4285 if (patch_site != NULL && patch_site->is_bound()) {
4286 patch_site->EmitPatchInfo();
4287 } else {
4288 __ nop(); // Signals no inlined code.
4289 }
4290 }
4291
4292
4293 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4294 ASSERT(IsAligned(frame_offset, kPointerSize));
4295 __ movq(Operand(rbp, frame_offset), value);
4296 }
4297
4298
4299 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4300 __ movq(dst, ContextOperand(rsi, context_index));
4301 }
4302
4303
4304 // ----------------------------------------------------------------------------
4305 // Non-local control flow support.
4306
4307
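// On entry to a finally block, the return address on top of the stack is
// "cooked": it is rewritten as a smi-encoded offset from the start of the
// code object, so the stack slot holds a GC-safe smi rather than a raw
// pointer into code while the finally block runs. ExitFinallyBlock undoes
// the transformation ("uncooks") before returning.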
4308 void FullCodeGenerator::EnterFinallyBlock() {
4309 ASSERT(!result_register().is(rdx));
4310 ASSERT(!result_register().is(rcx));
4311 // Cook return address on top of stack (smi encoded Code* delta)
4312 __ movq(rdx, Operand(rsp, 0));
4313 __ Move(rcx, masm_->CodeObject());
4314 __ subq(rdx, rcx);
4315 __ Integer32ToSmi(rdx, rdx);
4316 __ movq(Operand(rsp, 0), rdx);
4317 // Store result register while executing finally block.
4318 __ push(result_register());
4319 }
4320
4321
4322 void FullCodeGenerator::ExitFinallyBlock() {
4323 ASSERT(!result_register().is(rdx));
4324 ASSERT(!result_register().is(rcx));
4325 // Restore result register from stack.
4326 __ pop(result_register());
4327 // Uncook return address.
4328 __ movq(rdx, Operand(rsp, 0));
4329 __ SmiToInteger32(rdx, rdx);
4330 __ Move(rcx, masm_->CodeObject());
4331 __ addq(rdx, rcx);
4332 __ movq(Operand(rsp, 0), rdx);
4333 // And return.
4334 __ ret(0);
4335 }
4336
4337
4338 #undef __
4339
4340
4341 } } // namespace v8::internal
4342
4343 #endif // V8_TARGET_ARCH_X64
4344