// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm_)

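// A jump patch site marks an inlined smi check. The emitted jump tests
// the carry flag, which the preceding 'test' instruction always clears,
// so one of the two jump flavors is statically always taken and the
// other never is. The IC machinery can later patch jc/jnc into jz/jnz,
// turning the site into a real smi check (see EmitJump below).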
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg, NearLabel* target) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg, NearLabel* target) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
    ASSERT(is_int8(delta_to_patch_site));
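    // Emit a test instruction whose 8-bit immediate records the distance
    // back to the patch site; the patching code reads this marker to
    // locate the jump to rewrite. The test itself has no useful effect.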
    __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
    info_emitted_ = true;
#endif
  }

  bool is_bound() const { return patch_site_.is_bound(); }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, NearLabel* target) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    ASSERT(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target);
  }

  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o edi: the JS function object being called (i.e., ourselves)
//   o esi: our context
//   o ebp: our caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-ia32.h for its layout.
void FullCodeGenerator::Generate(CompilationInfo* info) {
  ASSERT(info_ == NULL);
  info_ = info;
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS Function.
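  // The fixed frame is now complete: the return address sits above the
  // saved ebp, with the context at ebp - 4 and the function at ebp - 8
  // (see JavaScriptFrameConstants in frames-ia32.h).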

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = scope()->num_stack_slots();
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      for (int i = 0; i < locals_count; i++) {
        __ push(eax);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
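  // num_heap_slots() includes the fixed header slots of a context, so
  // subtracting Context::MIN_CONTEXT_SLOTS leaves only the user-allocated
  // slots.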
  int heap_slots = scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate local context");
    // Argument to NewContext is the function, which is still in edi.
    __ push(edi);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewContext, 1);
    }
    function_in_register = false;
    // Context is returned in both eax and esi. It replaces the context
    // passed to us. It's saved in the stack and kept live in esi.
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);

    // Copy parameters into context if necessary.
    int num_parameters = scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Slot* slot = scope()->parameter(i)->AsSlot();
      if (slot != NULL && slot->type() == Slot::CONTEXT) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(slot->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers all involved
        // registers, so we have to use a third register to avoid
        // clobbering esi.
        __ mov(ecx, esi);
        __ RecordWrite(ecx, context_offset, eax, ebx);
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
      __ push(edi);
    } else {
      __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // Receiver is just before the parameters on the caller's stack.
    int offset = scope()->num_parameters() * kPointerSize;
    __ lea(edx,
           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
    __ push(edx);
    __ SafePush(Immediate(Smi::FromInt(scope()->num_parameters())));
    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub stub(
        is_strict_mode() ? ArgumentsAccessStub::NEW_STRICT
                         : ArgumentsAccessStub::NEW_NON_STRICT);
    __ CallStub(&stub);

    Variable* arguments_shadow = scope()->arguments_shadow();
    if (arguments_shadow != NULL) {
      __ mov(ecx, eax);  // Duplicate result.
      Move(arguments_shadow->AsSlot(), ecx, ebx, edx);
    }
    Move(arguments->AsSlot(), eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        EmitDeclaration(scope()->function(), Variable::CONST, NULL);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
      NearLabel ok;
      ExternalReference stack_limit =
          ExternalReference::address_of_stack_limit(isolate());
      __ cmp(esp, Operand::StaticVariable(stack_limit));
      __ j(above_equal, &ok, taken);
      StackCheckStub stub;
      __ CallStub(&stub);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ Set(eax, Immediate(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
  Comment cmnt(masm_, "[ Stack check");
  NearLabel ok;
  ExternalReference stack_limit =
      ExternalReference::address_of_stack_limit(isolate());
  __ cmp(esp, Operand::StaticVariable(stack_limit));
  __ j(above_equal, &ok, taken);
  StackCheckStub stub;
  __ CallStub(&stub);
  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordStackCheck(stmt->OsrEntryId());

  // Loop stack checks can be patched to perform on-stack replacement. In
  // order to decide whether or not to perform OSR we embed the loop depth
  // in a test instruction after the call so we can extract it from the OSR
  // builtin.
  ASSERT(loop_depth() > 0);
  __ test(eax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}

void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    // Common return label.
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    SetSourcePosition(function()->end_position() - 1);
    __ RecordJSReturn();
    // Do not use the leave instruction here because it is too short to
    // patch with the code required by the debugger.
    __ mov(esp, ebp);
    __ pop(ebp);

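    // Drop the arguments plus the receiver (hence the + 1). ecx is a
    // scratch register that Ret may use to hold the return address while
    // adjusting the stack (an assumption about MacroAssembler::Ret).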
    int arguments_bytes = (scope()->num_parameters() + 1) * kPointerSize;
    __ Ret(arguments_bytes, ecx);
#ifdef ENABLE_DEBUGGER_SUPPORT
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceLength <=
           masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
  }
}


void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Slot* slot) const {
  MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register());
  __ mov(result_register(), slot_operand);
}


void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
  MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register());
  // Memory operands can be pushed directly.
  __ push(slot_operand);
}


void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
  // For simplicity we always test the accumulator register.
  codegen()->Move(result_register(), slot);
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
  codegen()->DoTest(true_label_, false_label_, fall_through_);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeSet(result_register(), Immediate(lit));
  } else {
    __ Set(result_register(), Immediate(lit));
  }
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafePush(Immediate(lit));
  } else {
    __ push(Immediate(lit));
  }
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(true_label_, false_label_, fall_through_);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ mov(Operand(esp, 0), reg);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
  codegen()->DoTest(true_label_, false_label_, fall_through_);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  NearLabel done;
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  NearLabel done;
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ mov(result_register(), value);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ push(Immediate(value));
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(TOS_REG,
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}


void FullCodeGenerator::DoTest(Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  // Emit the inlined tests assumed by the stub.
  __ cmp(result_register(), isolate()->factory()->undefined_value());
  __ j(equal, if_false);
  __ cmp(result_register(), isolate()->factory()->true_value());
  __ j(equal, if_true);
  __ cmp(result_register(), isolate()->factory()->false_value());
  __ j(equal, if_false);
  STATIC_ASSERT(kSmiTag == 0);
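  // With a zero smi tag, the smi 0 is the only all-zero value: treat it
  // as false, and treat every other smi as true.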
  __ test(result_register(), Operand(result_register()));
  __ j(zero, if_false);
  __ test(result_register(), Immediate(kSmiTagMask));
  __ j(zero, if_true);

  // Call the ToBoolean stub for all other cases.
  ToBooleanStub stub;
  __ push(result_register());
  __ CallStub(&stub);
  __ test(eax, Operand(eax));

  // The stub returns nonzero for true.
  Split(not_zero, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}


MemOperand FullCodeGenerator::EmitSlotSearch(Slot* slot, Register scratch) {
  switch (slot->type()) {
    case Slot::PARAMETER:
    case Slot::LOCAL:
      return Operand(ebp, SlotOffset(slot));
    case Slot::CONTEXT: {
      int context_chain_length =
          scope()->ContextChainLength(slot->var()->scope());
      __ LoadContext(scratch, context_chain_length);
      return ContextOperand(scratch, slot->index());
    }
    case Slot::LOOKUP:
      UNREACHABLE();
  }
  UNREACHABLE();
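  // Dead code below; the return exists only to silence the compiler.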
  return Operand(eax, 0);
}


void FullCodeGenerator::Move(Register destination, Slot* source) {
  MemOperand location = EmitSlotSearch(source, destination);
  __ mov(destination, location);
}


void FullCodeGenerator::Move(Slot* dst,
                             Register src,
                             Register scratch1,
                             Register scratch2) {
  ASSERT(dst->type() != Slot::LOOKUP);  // Not yet implemented.
  ASSERT(!scratch1.is(src) && !scratch2.is(src));
  MemOperand location = EmitSlotSearch(dst, scratch1);
  __ mov(location, src);
  // Emit the write barrier code if the location is in the heap.
  if (dst->type() == Slot::CONTEXT) {
    int offset = Context::SlotOffset(dst->index());
    ASSERT(!scratch1.is(esi) && !src.is(esi) && !scratch2.is(esi));
    __ RecordWrite(scratch1, offset, src, scratch2);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(State state,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  NearLabel skip;
  if (should_normalize) __ jmp(&skip);

  ForwardBailoutStack* current = forward_bailout_stack_;
  while (current != NULL) {
    PrepareForBailout(current->expr(), state);
    current = current->parent();
  }

  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDeclaration(Variable* variable,
                                        Variable::Mode mode,
                                        FunctionLiteral* function) {
  Comment cmnt(masm_, "[ Declaration");
  ASSERT(variable != NULL);  // Must have been resolved.
  Slot* slot = variable->AsSlot();
  Property* prop = variable->AsProperty();

  if (slot != NULL) {
    switch (slot->type()) {
      case Slot::PARAMETER:
      case Slot::LOCAL:
        if (mode == Variable::CONST) {
          __ mov(Operand(ebp, SlotOffset(slot)),
                 Immediate(isolate()->factory()->the_hole_value()));
        } else if (function != NULL) {
          VisitForAccumulatorValue(function);
          __ mov(Operand(ebp, SlotOffset(slot)), result_register());
        }
        break;

      case Slot::CONTEXT:
        // We bypass the general EmitSlotSearch because we know more about
        // this specific context.

        // The variable in the decl always resides in the current function
        // context.
        ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
        if (FLAG_debug_code) {
          // Check that we're not inside a 'with'.
          __ mov(ebx, ContextOperand(esi, Context::FCONTEXT_INDEX));
          __ cmp(ebx, Operand(esi));
          __ Check(equal, "Unexpected declaration in current context.");
        }
        if (mode == Variable::CONST) {
          __ mov(ContextOperand(esi, slot->index()),
                 Immediate(isolate()->factory()->the_hole_value()));
          // No write barrier since the hole value is in old space.
        } else if (function != NULL) {
          VisitForAccumulatorValue(function);
          __ mov(ContextOperand(esi, slot->index()), result_register());
          int offset = Context::SlotOffset(slot->index());
          __ mov(ebx, esi);
          __ RecordWrite(ebx, offset, result_register(), ecx);
        }
        break;

      case Slot::LOOKUP: {
        __ push(esi);
        __ push(Immediate(variable->name()));
        // Declaration nodes are always introduced in one of two modes.
        ASSERT(mode == Variable::VAR || mode == Variable::CONST);
        PropertyAttributes attr = (mode == Variable::VAR) ? NONE : READ_ONLY;
        __ push(Immediate(Smi::FromInt(attr)));
        // Push initial value, if any.
        // Note: For variables we must not push an initial value (such as
        // 'undefined') because we may have a (legal) redeclaration and we
        // must not destroy the current value.
        if (mode == Variable::CONST) {
          __ push(Immediate(isolate()->factory()->the_hole_value()));
        } else if (function != NULL) {
          VisitForStackValue(function);
        } else {
          __ push(Immediate(Smi::FromInt(0)));  // No initial value!
        }
        __ CallRuntime(Runtime::kDeclareContextSlot, 4);
        break;
      }
    }

  } else if (prop != NULL) {
    if (function != NULL || mode == Variable::CONST) {
      // We are declaring a function or constant that rewrites to a
      // property. Use (keyed) IC to set the initial value. We cannot
      // visit the rewrite because it's shared and we risk recording
      // duplicate AST IDs for bailouts from optimized code.
      ASSERT(prop->obj()->AsVariableProxy() != NULL);
      { AccumulatorValueContext for_object(this);
        EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
      }

      if (function != NULL) {
        __ push(eax);
        VisitForAccumulatorValue(function);
        __ pop(edx);
      } else {
        __ mov(edx, eax);
        __ mov(eax, isolate()->factory()->the_hole_value());
      }
      ASSERT(prop->key()->AsLiteral() != NULL &&
             prop->key()->AsLiteral()->handle()->IsSmi());
      __ SafeSet(ecx, Immediate(prop->key()->AsLiteral()->handle()));

      Handle<Code> ic = is_strict_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
          : isolate()->builtins()->KeyedStoreIC_Initialize();
      EmitCallIC(ic, RelocInfo::CODE_TARGET);
    }
  }
}


void FullCodeGenerator::VisitDeclaration(Declaration* decl) {
  EmitDeclaration(decl->proxy()->var(), decl->mode(), decl->fun());
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ push(esi);  // The context is the first argument.
  __ push(Immediate(pairs));
  __ push(Immediate(Smi::FromInt(is_eval() ? 1 : 0)));
  __ push(Immediate(Smi::FromInt(strict_mode_flag())));
  __ CallRuntime(Runtime::kDeclareGlobals, 4);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      NearLabel slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, Operand(eax));
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case);

      __ cmp(edx, Operand(eax));
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
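    // This EmitCallIC overload also emits the patch-info marker for
    // patch_site after the call (an assumption about the overload), so
    // the compare IC can later patch the inlined smi check above.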
    EmitCallIC(ic, &patch_site);
    __ test(eax, Operand(eax));
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_target());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_target());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. Both SpiderMonkey and JSC
  // ignore null and undefined in contrast to the specification; see
  // ECMA-262 section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);

  // Convert the object to a JS object.
  NearLabel convert, done_convert;
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &convert);
  __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ecx);
  __ j(above_equal, &done_convert);
  __ bind(&convert);
  __ push(eax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(eax);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  Label next, call_runtime;
  __ mov(ecx, eax);
  __ bind(&next);

  // Check that there are no elements. Register ecx contains the
  // current JS object we've reached through the prototype chain.
  __ cmp(FieldOperand(ecx, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  __ j(not_equal, &call_runtime);

  // Check that instance descriptors are not empty so that we can
  // check for an enum cache. Leave the map in ebx for the subsequent
  // prototype load.
  __ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOffset));
  __ cmp(edx, isolate()->factory()->empty_descriptor_array());
  __ j(equal, &call_runtime);

  // Check that there is an enum cache in the non-empty instance
  // descriptors (edx). This is the case if the next enumeration
  // index field does not contain a smi.
  __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &call_runtime);

  // For all objects but the receiver, check that the cache is empty.
  NearLabel check_prototype;
  __ cmp(ecx, Operand(eax));
  __ j(equal, &check_prototype);
  __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
  __ cmp(edx, isolate()->factory()->empty_fixed_array());
  __ j(not_equal, &call_runtime);

  // Load the prototype from the map and loop if non-null.
  __ bind(&check_prototype);
  __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  __ cmp(ecx, isolate()->factory()->null_value());
  __ j(not_equal, &next);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  NearLabel use_cache;
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  NearLabel fixed_array;
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);

  // We got a map in register eax. Get the enumeration cache from it.
  __ bind(&use_cache);
  __ mov(ecx, FieldOperand(eax, Map::kInstanceDescriptorsOffset));
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset));
  __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(edx);  // Enumeration cache.
  __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset));
  __ push(eax);  // Enumeration cache length (as smi).
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
  __ jmp(&loop);

  // We got a fixed array in register eax. Iterate through that.
  __ bind(&fixed_array);
  __ push(Immediate(Smi::FromInt(0)));  // Map (0) - force slow check.
  __ push(eax);
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.

  // Generate code for doing the condition check.
  __ bind(&loop);
  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_target());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
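  // The index in eax is a smi (value << 1), so the times_2 scale factor
  // yields a times_4 (kPointerSize) element offset.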
  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a zero map in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we have to filter the key.
  NearLabel update_each;
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ j(equal, &update_each);

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ test(eax, Operand(eax));
  __ j(equal, loop_statement.continue_target());
  __ mov(ebx, Operand(eax));

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->AssignmentId());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_target());
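  // Adding the smi 1 (the tagged value 2) increments the untagged index
  // by one.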
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitStackCheck(stmt);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_target());
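  // Five slots: enumerable object, map (or smi zero in the slow case),
  // enum cache or fixed array, its length, and the current index.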
  __ add(Operand(esp), Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
    __ push(Immediate(info));
    __ CallStub(&stub);
  } else {
    __ push(esi);
    __ push(Immediate(info));
    __ push(Immediate(pretenure
                      ? isolate()->factory()->true_value()
                      : isolate()->factory()->false_value()));
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(eax);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr->var());
}


void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
    Slot* slot,
    TypeofState typeof_state,
    Label* slow) {
  Register context = esi;
  Register temp = edx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        __ j(not_equal, slow);
      }
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::CLOSURE_INDEX));
      __ mov(temp, FieldOperand(temp, JSFunction::kContextOffset));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain. There is no frame effect so it is
    // safe to use raw labels here.
    NearLabel next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at global context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->global_context_map()));
    __ j(equal, &fast);
    // Check that extension is NULL.
    __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::CLOSURE_INDEX));
    __ mov(temp, FieldOperand(temp, JSFunction::kContextOffset));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  __ mov(eax, GlobalObjectOperand());
  __ mov(ecx, slot->var()->name());
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
  EmitCallIC(ic, mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
    Slot* slot,
    Label* slow) {
  ASSERT(slot->type() == Slot::CONTEXT);
  Register context = esi;
  Register temp = ebx;

  for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        __ j(not_equal, slow);
      }
      __ mov(temp, ContextOperand(context, Context::CLOSURE_INDEX));
      __ mov(temp, FieldOperand(temp, JSFunction::kContextOffset));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
  }
  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, slot->index());
}


void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
    Slot* slot,
    TypeofState typeof_state,
    Label* slow,
    Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (slot->var()->mode() == Variable::DYNAMIC_GLOBAL) {
    EmitLoadGlobalSlotCheckExtensions(slot, typeof_state, slow);
    __ jmp(done);
  } else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
    Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
    Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
    if (potential_slot != NULL) {
      // Generate fast case for locals that rewrite to slots.
      __ mov(eax,
             ContextSlotOperandCheckExtensions(potential_slot, slow));
      if (potential_slot->var()->mode() == Variable::CONST) {
        __ cmp(eax, isolate()->factory()->the_hole_value());
        __ j(not_equal, done);
        __ mov(eax, isolate()->factory()->undefined_value());
      }
      __ jmp(done);
    } else if (rewrite != NULL) {
      // Generate fast case for calls of an argument function.
      Property* property = rewrite->AsProperty();
      if (property != NULL) {
        VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
        Literal* key_literal = property->key()->AsLiteral();
        if (obj_proxy != NULL &&
            key_literal != NULL &&
            obj_proxy->IsArguments() &&
            key_literal->handle()->IsSmi()) {
          // Load arguments object if there are no eval-introduced
          // variables. Then load the argument from the arguments
          // object using keyed load.
          __ mov(edx,
                 ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
                                                   slow));
          __ SafeSet(eax, Immediate(key_literal->handle()));
          Handle<Code> ic =
              isolate()->builtins()->KeyedLoadIC_Initialize();
          EmitCallIC(ic, RelocInfo::CODE_TARGET);
          __ jmp(done);
        }
      }
    }
  }
}


void FullCodeGenerator::EmitVariableLoad(Variable* var) {
  // Four cases: non-this global variables, lookup slots, all other
  // types of slots, and parameters that rewrite to explicit property
  // accesses on the arguments object.
  Slot* slot = var->AsSlot();
  Property* property = var->AsProperty();

  if (var->is_global() && !var->is_this()) {
    Comment cmnt(masm_, "Global variable");
    // Use inline caching. Variable name is passed in ecx and the global
    // object on the stack.
    __ mov(eax, GlobalObjectOperand());
    __ mov(ecx, var->name());
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
    context()->Plug(eax);

  } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLoadFromSlotFastCase(slot, NOT_INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    Comment cmnt(masm_, "Lookup slot");
    __ push(esi);  // Context.
    __ push(Immediate(var->name()));
    __ CallRuntime(Runtime::kLoadContextSlot, 2);
    __ bind(&done);

    context()->Plug(eax);

  } else if (slot != NULL) {
    Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
                            ? "Context slot"
                            : "Stack slot");
    if (var->mode() == Variable::CONST) {
      // Constants may be the hole value if they have not been initialized.
      // Unhole them.
      NearLabel done;
      MemOperand slot_operand = EmitSlotSearch(slot, eax);
      __ mov(eax, slot_operand);
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, &done);
      __ mov(eax, isolate()->factory()->undefined_value());
      __ bind(&done);
      context()->Plug(eax);
    } else {
      context()->Plug(slot);
    }

  } else {
    Comment cmnt(masm_, "Rewritten parameter");
    ASSERT_NOT_NULL(property);
    // Rewritten parameter accesses are of the form "slot[literal]".

    // Assert that the object is in a slot.
    Variable* object_var = property->obj()->AsVariableProxy()->AsVariable();
    ASSERT_NOT_NULL(object_var);
    Slot* object_slot = object_var->AsSlot();
    ASSERT_NOT_NULL(object_slot);

    // Load the object.
    MemOperand object_loc = EmitSlotSearch(object_slot, eax);
    __ mov(edx, object_loc);

    // Assert that the key is a smi.
    Literal* key_literal = property->key()->AsLiteral();
    ASSERT_NOT_NULL(key_literal);
    ASSERT(key_literal->handle()->IsSmi());

    // Load the key.
    __ SafeSet(eax, Immediate(key_literal->handle()));

    // Do a keyed property load.
    Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
    EmitCallIC(ic, RelocInfo::CODE_TARGET);

    // Drop key and object left on the stack by IC.
    context()->Plug(eax);
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  NearLabel materialized;
  // Registers will be used as follows:
  // edi = JS function.
  // ecx = literals array.
  // ebx = regexp literal.
  // eax = regexp literal clone.
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ mov(ebx, FieldOperand(ecx, literal_offset));
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(not_equal, &materialized);

  // Create the regexp literal using a runtime function.
  // The result will be in eax.
  __ push(ecx);
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->pattern()));
  __ push(Immediate(expr->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(ebx, eax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ push(ebx);
  __ push(Immediate(Smi::FromInt(size)));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(ebx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ mov(edx, FieldOperand(ebx, i));
    __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
    __ mov(FieldOperand(eax, i), edx);
    __ mov(FieldOperand(eax, i + kPointerSize), ecx);
  }
  if ((size % (2 * kPointerSize)) != 0) {
    __ mov(edx, FieldOperand(ebx, size - kPointerSize));
    __ mov(FieldOperand(eax, size - kPointerSize), edx);
  }
  context()->Plug(eax);
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->constant_properties()));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ push(Immediate(Smi::FromInt(flags)));
  if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  }

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore();

  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(eax);  // Save result on the stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->handle()->IsSymbol()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(ecx, Immediate(key->handle()));
            __ mov(edx, Operand(esp, 0));
            Handle<Code> ic = is_strict_mode()
                ? isolate()->builtins()->StoreIC_Initialize_Strict()
                : isolate()->builtins()->StoreIC_Initialize();
            EmitCallIC(ic, RelocInfo::CODE_TARGET);
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Fall through.
      case ObjectLiteral::Property::PROTOTYPE:
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ push(Immediate(Smi::FromInt(NONE)));  // PropertyAttributes
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::SETTER:
      case ObjectLiteral::Property::GETTER:
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        __ push(Immediate(property->kind() == ObjectLiteral::Property::SETTER ?
                          Smi::FromInt(1) :
                          Smi::FromInt(0)));
        VisitForStackValue(value);
        __ CallRuntime(Runtime::kDefineAccessor, 4);
        break;
      default: UNREACHABLE();
    }
  }

  if (expr->has_function()) {
    ASSERT(result_saved);
    __ push(Operand(esp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();

  __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->constant_elements()));
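  // Constant elements backed by the copy-on-write array map can be
  // shared with the clone, so use the COW variant of the stub.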
1453 if (expr->constant_elements()->map() ==
1454 isolate()->heap()->fixed_cow_array_map()) {
1455 ASSERT(expr->depth() == 1);
1456 FastCloneShallowArrayStub stub(
1457 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
1458 __ CallStub(&stub);
1459 __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
1460 } else if (expr->depth() > 1) {
1461 __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1462 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1463 __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1464 } else {
1465 FastCloneShallowArrayStub stub(
1466 FastCloneShallowArrayStub::CLONE_ELEMENTS, length);
1467 __ CallStub(&stub);
1468 }
1469
1470 bool result_saved = false; // Is the result saved to the stack?
1471
1472 // Emit code to evaluate all the non-constant subexpressions and to store
1473 // them into the newly cloned array.
1474 for (int i = 0; i < length; i++) {
1475 Expression* subexpr = subexprs->at(i);
1476 // If the subexpression is a literal or a simple materialized literal it
1477 // is already set in the cloned array.
1478 if (subexpr->AsLiteral() != NULL ||
1479 CompileTimeValue::IsCompileTimeValue(subexpr)) {
1480 continue;
1481 }
1482
1483 if (!result_saved) {
1484 __ push(eax);
1485 result_saved = true;
1486 }
1487 VisitForAccumulatorValue(subexpr);
1488
1489 // Store the subexpression value in the array's elements.
1490 __ mov(ebx, Operand(esp, 0)); // Copy of array literal.
1491 __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
1492 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1493 __ mov(FieldOperand(ebx, offset), result_register());
1494
1495 // Update the write barrier for the array store.
1496 __ RecordWrite(ebx, offset, result_register(), ecx);
1497
1498 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1499 }
1500
1501 if (result_saved) {
1502 context()->PlugTOS();
1503 } else {
1504 context()->Plug(eax);
1505 }
1506 }
1507
1508
VisitAssignment(Assignment * expr)1509 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1510 Comment cmnt(masm_, "[ Assignment");
1511 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1512 // on the left-hand side.
1513 if (!expr->target()->IsValidLeftHandSide()) {
1514 VisitForEffect(expr->target());
1515 return;
1516 }
1517
1518 // Left-hand side can only be a property, a global or a (parameter or local)
1519 // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
1520 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1521 LhsKind assign_type = VARIABLE;
1522 Property* property = expr->target()->AsProperty();
1523 if (property != NULL) {
1524 assign_type = (property->key()->IsPropertyName())
1525 ? NAMED_PROPERTY
1526 : KEYED_PROPERTY;
1527 }
1528
1529 // Evaluate LHS expression.
1530 switch (assign_type) {
1531 case VARIABLE:
1532 // Nothing to do here.
1533 break;
1534 case NAMED_PROPERTY:
1535 if (expr->is_compound()) {
1536 // We need the receiver both on the stack and in the accumulator.
1537 VisitForAccumulatorValue(property->obj());
1538 __ push(result_register());
1539 } else {
1540 VisitForStackValue(property->obj());
1541 }
1542 break;
1543 case KEYED_PROPERTY: {
1544 if (expr->is_compound()) {
1545 if (property->is_arguments_access()) {
1546 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1547 MemOperand slot_operand =
1548 EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx);
1549 __ push(slot_operand);
1550 __ SafeSet(eax, Immediate(property->key()->AsLiteral()->handle()));
1551 } else {
1552 VisitForStackValue(property->obj());
1553 VisitForAccumulatorValue(property->key());
1554 }
1555 __ mov(edx, Operand(esp, 0));
1556 __ push(eax);
1557 } else {
1558 if (property->is_arguments_access()) {
1559 VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
1560 MemOperand slot_operand =
1561 EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx);
1562 __ push(slot_operand);
1563 __ SafePush(Immediate(property->key()->AsLiteral()->handle()));
1564 } else {
1565 VisitForStackValue(property->obj());
1566 VisitForStackValue(property->key());
1567 }
1568 }
1569 break;
1570 }
1571 }
1572
1573 // For compound assignments we need another deoptimization point after the
1574 // variable/property load.
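// For example, in 'o.p += v' the load of o.p has already executed when
// this point is reached, so the recorded bailout state keeps the loaded
// value live (TOS_REG) rather than repeating a potentially observable
// property load.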
1575 if (expr->is_compound()) {
1576 { AccumulatorValueContext context(this);
1577 switch (assign_type) {
1578 case VARIABLE:
1579 EmitVariableLoad(expr->target()->AsVariableProxy()->var());
1580 PrepareForBailout(expr->target(), TOS_REG);
1581 break;
1582 case NAMED_PROPERTY:
1583 EmitNamedPropertyLoad(property);
1584 PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1585 break;
1586 case KEYED_PROPERTY:
1587 EmitKeyedPropertyLoad(property);
1588 PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1589 break;
1590 }
1591 }
1592
1593 Token::Value op = expr->binary_op();
1594 __ push(eax); // Left operand goes on the stack.
1595 VisitForAccumulatorValue(expr->value());
1596
1597 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1598 ? OVERWRITE_RIGHT
1599 : NO_OVERWRITE;
1600 SetSourcePosition(expr->position() + 1);
1601 AccumulatorValueContext context(this);
1602 if (ShouldInlineSmiCase(op)) {
1603 EmitInlineSmiBinaryOp(expr,
1604 op,
1605 mode,
1606 expr->target(),
1607 expr->value());
1608 } else {
1609 EmitBinaryOp(op, mode);
1610 }
1611
1612 // Deoptimization point in case the binary operation may have side effects.
1613 PrepareForBailout(expr->binary_operation(), TOS_REG);
1614 } else {
1615 VisitForAccumulatorValue(expr->value());
1616 }
1617
1618 // Record source position before possible IC call.
1619 SetSourcePosition(expr->position());
1620
1621 // Store the value.
1622 switch (assign_type) {
1623 case VARIABLE:
1624 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1625 expr->op());
1626 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1627 context()->Plug(eax);
1628 break;
1629 case NAMED_PROPERTY:
1630 EmitNamedPropertyAssignment(expr);
1631 break;
1632 case KEYED_PROPERTY:
1633 EmitKeyedPropertyAssignment(expr);
1634 break;
1635 }
1636 }
1637
1638
1639 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1640 SetSourcePosition(prop->position());
1641 Literal* key = prop->key()->AsLiteral();
1642 ASSERT(!key->handle()->IsSmi());
1643 __ mov(ecx, Immediate(key->handle()));
1644 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1645 EmitCallIC(ic, RelocInfo::CODE_TARGET);
1646 }
1647
1648
1649 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1650 SetSourcePosition(prop->position());
1651 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1652 EmitCallIC(ic, RelocInfo::CODE_TARGET);
1653 }
1654
1655
1656 void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
1657 Token::Value op,
1658 OverwriteMode mode,
1659 Expression* left,
1660 Expression* right) {
1661 // Do combined smi check of the operands. Left operand is on the
1662 // stack. Right operand is in eax.
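// Smis have tag bit 0 (kSmiTag == 0), so or-ing the two operands and
// testing kSmiTagMask on the result checks both at once: the combined
// value has a clear tag bit only if both operands are smis.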
1663 NearLabel done, smi_case, stub_call;
1664 __ pop(edx);
1665 __ mov(ecx, eax);
1666 __ or_(eax, Operand(edx));
1667 JumpPatchSite patch_site(masm_);
1668 patch_site.EmitJumpIfSmi(eax, &smi_case);
1669
1670 __ bind(&stub_call);
1671 __ mov(eax, ecx);
1672 TypeRecordingBinaryOpStub stub(op, mode);
1673 EmitCallIC(stub.GetCode(), &patch_site);
1674 __ jmp(&done);
1675
1676 // Smi case.
1677 __ bind(&smi_case);
1678 __ mov(eax, edx); // Copy left operand in case of a stub call.
1679
1680 switch (op) {
1681 case Token::SAR:
1682 __ SmiUntag(eax);
1683 __ SmiUntag(ecx);
1684 __ sar_cl(eax); // No checks of result necessary
1685 __ SmiTag(eax);
1686 break;
1687 case Token::SHL: {
1688 Label result_ok;
1689 __ SmiUntag(eax);
1690 __ SmiUntag(ecx);
1691 __ shl_cl(eax);
1692 // Check that the *signed* result fits in a smi.
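// 0xc0000000 is -2^30; cmp computes eax - (-2^30) = eax + 2^30, whose
// sign bit is clear exactly when eax lies in the 31-bit smi range
// [-2^30, 2^30), so 'positive' below means the result can be retagged.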
1693 __ cmp(eax, 0xc0000000);
1694 __ j(positive, &result_ok);
1695 __ SmiTag(ecx);
1696 __ jmp(&stub_call);
1697 __ bind(&result_ok);
1698 __ SmiTag(eax);
1699 break;
1700 }
1701 case Token::SHR: {
1702 Label result_ok;
1703 __ SmiUntag(eax);
1704 __ SmiUntag(ecx);
1705 __ shr_cl(eax);
1706 __ test(eax, Immediate(0xc0000000));
1707 __ j(zero, &result_ok);
1708 __ SmiTag(ecx);
1709 __ jmp(&stub_call);
1710 __ bind(&result_ok);
1711 __ SmiTag(eax);
1712 break;
1713 }
1714 case Token::ADD:
1715 __ add(eax, Operand(ecx));
1716 __ j(overflow, &stub_call);
1717 break;
1718 case Token::SUB:
1719 __ sub(eax, Operand(ecx));
1720 __ j(overflow, &stub_call);
1721 break;
1722 case Token::MUL: {
1723 __ SmiUntag(eax);
1724 __ imul(eax, Operand(ecx));
1725 __ j(overflow, &stub_call);
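// A zero product still needs the stub if either operand was negative,
// because the result would then be -0, which a smi cannot represent.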
1726 __ test(eax, Operand(eax));
1727 __ j(not_zero, &done, taken);
1728 __ mov(ebx, edx);
1729 __ or_(ebx, Operand(ecx));
1730 __ j(negative, &stub_call);
1731 break;
1732 }
1733 case Token::BIT_OR:
1734 __ or_(eax, Operand(ecx));
1735 break;
1736 case Token::BIT_AND:
1737 __ and_(eax, Operand(ecx));
1738 break;
1739 case Token::BIT_XOR:
1740 __ xor_(eax, Operand(ecx));
1741 break;
1742 default:
1743 UNREACHABLE();
1744 }
1745
1746 __ bind(&done);
1747 context()->Plug(eax);
1748 }
1749
1750
1751 void FullCodeGenerator::EmitBinaryOp(Token::Value op,
1752 OverwriteMode mode) {
1753 __ pop(edx);
1754 TypeRecordingBinaryOpStub stub(op, mode);
1755 EmitCallIC(stub.GetCode(), NULL); // NULL signals no inlined smi code.
1756 context()->Plug(eax);
1757 }
1758
1759
1760 void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
1761 // Invalid left-hand sides are rewritten to have a 'throw
1762 // ReferenceError' on the left-hand side.
1763 if (!expr->IsValidLeftHandSide()) {
1764 VisitForEffect(expr);
1765 return;
1766 }
1767
1768 // Left-hand side can only be a property, a global or a (parameter or local)
1769 // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
1770 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1771 LhsKind assign_type = VARIABLE;
1772 Property* prop = expr->AsProperty();
1773 if (prop != NULL) {
1774 assign_type = (prop->key()->IsPropertyName())
1775 ? NAMED_PROPERTY
1776 : KEYED_PROPERTY;
1777 }
1778
1779 switch (assign_type) {
1780 case VARIABLE: {
1781 Variable* var = expr->AsVariableProxy()->var();
1782 EffectContext context(this);
1783 EmitVariableAssignment(var, Token::ASSIGN);
1784 break;
1785 }
1786 case NAMED_PROPERTY: {
1787 __ push(eax); // Preserve value.
1788 VisitForAccumulatorValue(prop->obj());
1789 __ mov(edx, eax);
1790 __ pop(eax); // Restore value.
1791 __ mov(ecx, prop->key()->AsLiteral()->handle());
1792 Handle<Code> ic = is_strict_mode()
1793 ? isolate()->builtins()->StoreIC_Initialize_Strict()
1794 : isolate()->builtins()->StoreIC_Initialize();
1795 EmitCallIC(ic, RelocInfo::CODE_TARGET);
1796 break;
1797 }
1798 case KEYED_PROPERTY: {
1799 __ push(eax); // Preserve value.
1800 if (prop->is_synthetic()) {
1801 ASSERT(prop->obj()->AsVariableProxy() != NULL);
1802 ASSERT(prop->key()->AsLiteral() != NULL);
1803 { AccumulatorValueContext for_object(this);
1804 EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
1805 }
1806 __ mov(edx, eax);
1807 __ SafeSet(ecx, Immediate(prop->key()->AsLiteral()->handle()));
1808 } else {
1809 VisitForStackValue(prop->obj());
1810 VisitForAccumulatorValue(prop->key());
1811 __ mov(ecx, eax);
1812 __ pop(edx);
1813 }
1814 __ pop(eax); // Restore value.
1815 Handle<Code> ic = is_strict_mode()
1816 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
1817 : isolate()->builtins()->KeyedStoreIC_Initialize();
1818 EmitCallIC(ic, RelocInfo::CODE_TARGET);
1819 break;
1820 }
1821 }
1822 PrepareForBailoutForId(bailout_ast_id, TOS_REG);
1823 context()->Plug(eax);
1824 }
1825
1826
1827 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1828 Token::Value op) {
1829 // Left-hand sides that rewrite to explicit property accesses do not reach
1830 // here.
1831 ASSERT(var != NULL);
1832 ASSERT(var->is_global() || var->AsSlot() != NULL);
1833
1834 if (var->is_global()) {
1835 ASSERT(!var->is_this());
1836 // Assignment to a global variable. Use inline caching for the
1837 // assignment. Right-hand-side value is passed in eax, variable name in
1838 // ecx, and the global object on the stack.
1839 __ mov(ecx, var->name());
1840 __ mov(edx, GlobalObjectOperand());
1841 Handle<Code> ic = is_strict_mode()
1842 ? isolate()->builtins()->StoreIC_Initialize_Strict()
1843 : isolate()->builtins()->StoreIC_Initialize();
1844 EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1845
1846 } else if (op == Token::INIT_CONST) {
1847 // Like var declarations, const declarations are hoisted to function
1848 // scope. However, unlike var initializers, const initializers are able
1849 // to drill a hole to that function context, even from inside a 'with'
1850 // context. We thus bypass the normal static scope lookup.
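// Uninitialized const slots hold the hole value, so the checks below
// store only while the slot still holds the hole: the first initializer
// to execute wins, and re-executions leave the value untouched.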
1851 Slot* slot = var->AsSlot();
1852 Label skip;
1853 switch (slot->type()) {
1854 case Slot::PARAMETER:
1855 // No const parameters.
1856 UNREACHABLE();
1857 break;
1858 case Slot::LOCAL:
1859 __ mov(edx, Operand(ebp, SlotOffset(slot)));
1860 __ cmp(edx, isolate()->factory()->the_hole_value());
1861 __ j(not_equal, &skip);
1862 __ mov(Operand(ebp, SlotOffset(slot)), eax);
1863 break;
1864 case Slot::CONTEXT: {
1865 __ mov(ecx, ContextOperand(esi, Context::FCONTEXT_INDEX));
1866 __ mov(edx, ContextOperand(ecx, slot->index()));
1867 __ cmp(edx, isolate()->factory()->the_hole_value());
1868 __ j(not_equal, &skip);
1869 __ mov(ContextOperand(ecx, slot->index()), eax);
1870 int offset = Context::SlotOffset(slot->index());
1871 __ mov(edx, eax); // Preserve the stored value in eax.
1872 __ RecordWrite(ecx, offset, edx, ebx);
1873 break;
1874 }
1875 case Slot::LOOKUP:
1876 __ push(eax);
1877 __ push(esi);
1878 __ push(Immediate(var->name()));
1879 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1880 break;
1881 }
1882 __ bind(&skip);
1883
1884 } else if (var->mode() != Variable::CONST) {
1885 // Perform the assignment for non-const variables. Const assignments
1886 // are simply skipped.
1887 Slot* slot = var->AsSlot();
1888 switch (slot->type()) {
1889 case Slot::PARAMETER:
1890 case Slot::LOCAL:
1891 // Perform the assignment.
1892 __ mov(Operand(ebp, SlotOffset(slot)), eax);
1893 break;
1894
1895 case Slot::CONTEXT: {
1896 MemOperand target = EmitSlotSearch(slot, ecx);
1897 // Perform the assignment and issue the write barrier.
1898 __ mov(target, eax);
1899 // The value of the assignment is in eax. RecordWrite clobbers its
1900 // register arguments.
1901 __ mov(edx, eax);
1902 int offset = Context::SlotOffset(slot->index());
1903 __ RecordWrite(ecx, offset, edx, ebx);
1904 break;
1905 }
1906
1907 case Slot::LOOKUP:
1908 // Call the runtime for the assignment.
1909 __ push(eax); // Value.
1910 __ push(esi); // Context.
1911 __ push(Immediate(var->name()));
1912 __ push(Immediate(Smi::FromInt(strict_mode_flag())));
1913 __ CallRuntime(Runtime::kStoreContextSlot, 4);
1914 break;
1915 }
1916 }
1917 }
1918
1919
1920 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
1921 // Assignment to a property, using a named store IC.
1922 Property* prop = expr->target()->AsProperty();
1923 ASSERT(prop != NULL);
1924 ASSERT(prop->key()->AsLiteral() != NULL);
1925
1926 // If the assignment starts a block of assignments to the same object,
1927 // change to slow case to avoid the quadratic behavior of repeatedly
1928 // adding fast properties.
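// For example (illustrative), a constructor body such as
//   this.a = 1; this.b = 2; ... this.z = 26;
// forms one initialization block: the object is switched to slow-mode
// properties once, filled in, and switched back to fast mode at the
// end, instead of growing the fast-property backing store per store.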
1929 if (expr->starts_initialization_block()) {
1930 __ push(result_register());
1931 __ push(Operand(esp, kPointerSize)); // Receiver is now under value.
1932 __ CallRuntime(Runtime::kToSlowProperties, 1);
1933 __ pop(result_register());
1934 }
1935
1936 // Record source code position before IC call.
1937 SetSourcePosition(expr->position());
1938 __ mov(ecx, prop->key()->AsLiteral()->handle());
1939 if (expr->ends_initialization_block()) {
1940 __ mov(edx, Operand(esp, 0));
1941 } else {
1942 __ pop(edx);
1943 }
1944 Handle<Code> ic = is_strict_mode()
1945 ? isolate()->builtins()->StoreIC_Initialize_Strict()
1946 : isolate()->builtins()->StoreIC_Initialize();
1947 EmitCallIC(ic, RelocInfo::CODE_TARGET);
1948
1949 // If the assignment ends an initialization block, revert to fast case.
1950 if (expr->ends_initialization_block()) {
1951 __ push(eax); // Result of assignment, saved even if not needed.
1952 __ push(Operand(esp, kPointerSize)); // Receiver is under value.
1953 __ CallRuntime(Runtime::kToFastProperties, 1);
1954 __ pop(eax);
1955 __ Drop(1);
1956 }
1957 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1958 context()->Plug(eax);
1959 }
1960
1961
1962 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
1963 // Assignment to a property, using a keyed store IC.
1964
1965 // If the assignment starts a block of assignments to the same object,
1966 // change to slow case to avoid the quadratic behavior of repeatedly
1967 // adding fast properties.
1968 if (expr->starts_initialization_block()) {
1969 __ push(result_register());
1970 // Receiver is now under the key and value.
1971 __ push(Operand(esp, 2 * kPointerSize));
1972 __ CallRuntime(Runtime::kToSlowProperties, 1);
1973 __ pop(result_register());
1974 }
1975
1976 __ pop(ecx);
1977 if (expr->ends_initialization_block()) {
1978 __ mov(edx, Operand(esp, 0)); // Leave receiver on the stack for later.
1979 } else {
1980 __ pop(edx);
1981 }
1982 // Record source code position before IC call.
1983 SetSourcePosition(expr->position());
1984 Handle<Code> ic = is_strict_mode()
1985 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
1986 : isolate()->builtins()->KeyedStoreIC_Initialize();
1987 EmitCallIC(ic, RelocInfo::CODE_TARGET);
1988
1989 // If the assignment ends an initialization block, revert to fast case.
1990 if (expr->ends_initialization_block()) {
1991 __ pop(edx);
1992 __ push(eax); // Result of assignment, saved even if not needed.
1993 __ push(edx);
1994 __ CallRuntime(Runtime::kToFastProperties, 1);
1995 __ pop(eax);
1996 }
1997
1998 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1999 context()->Plug(eax);
2000 }
2001
2002
2003 void FullCodeGenerator::VisitProperty(Property* expr) {
2004 Comment cmnt(masm_, "[ Property");
2005 Expression* key = expr->key();
2006
2007 if (key->IsPropertyName()) {
2008 VisitForAccumulatorValue(expr->obj());
2009 EmitNamedPropertyLoad(expr);
2010 context()->Plug(eax);
2011 } else {
2012 VisitForStackValue(expr->obj());
2013 VisitForAccumulatorValue(expr->key());
2014 __ pop(edx);
2015 EmitKeyedPropertyLoad(expr);
2016 context()->Plug(eax);
2017 }
2018 }
2019
2020
2021 void FullCodeGenerator::EmitCallWithIC(Call* expr,
2022 Handle<Object> name,
2023 RelocInfo::Mode mode) {
2024 // Code common for calls using the IC.
2025 ZoneList<Expression*>* args = expr->arguments();
2026 int arg_count = args->length();
2027 { PreservePositionScope scope(masm()->positions_recorder());
2028 for (int i = 0; i < arg_count; i++) {
2029 VisitForStackValue(args->at(i));
2030 }
2031 __ Set(ecx, Immediate(name));
2032 }
2033 // Record source position of the IC call.
2034 SetSourcePosition(expr->position());
2035 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2036 Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
2037 arg_count, in_loop);
2038 EmitCallIC(ic, mode);
2039 RecordJSReturnSite(expr);
2040 // Restore context register.
2041 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2042 context()->Plug(eax);
2043 }
2044
2045
2046 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2047 Expression* key,
2048 RelocInfo::Mode mode) {
2049 // Load the key.
2050 VisitForAccumulatorValue(key);
2051
2052 // Swap the name of the function and the receiver on the stack to follow
2053 // the calling convention for call ICs.
2054 __ pop(ecx);
2055 __ push(eax);
2056 __ push(ecx);
2057
2058 // Load the arguments.
2059 ZoneList<Expression*>* args = expr->arguments();
2060 int arg_count = args->length();
2061 { PreservePositionScope scope(masm()->positions_recorder());
2062 for (int i = 0; i < arg_count; i++) {
2063 VisitForStackValue(args->at(i));
2064 }
2065 }
2066 // Record source position of the IC call.
2067 SetSourcePosition(expr->position());
2068 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2069 Handle<Code> ic = isolate()->stub_cache()->ComputeKeyedCallInitialize(
2070 arg_count, in_loop);
2071 __ mov(ecx, Operand(esp, (arg_count + 1) * kPointerSize)); // Key.
2072 EmitCallIC(ic, mode);
2073 RecordJSReturnSite(expr);
2074 // Restore context register.
2075 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2076 context()->DropAndPlug(1, eax); // Drop the key still on the stack.
2077 }
2078
2079
2080 void FullCodeGenerator::EmitCallWithStub(Call* expr) {
2081 // Code common for calls using the call stub.
2082 ZoneList<Expression*>* args = expr->arguments();
2083 int arg_count = args->length();
2084 { PreservePositionScope scope(masm()->positions_recorder());
2085 for (int i = 0; i < arg_count; i++) {
2086 VisitForStackValue(args->at(i));
2087 }
2088 }
2089 // Record source position for debugger.
2090 SetSourcePosition(expr->position());
2091 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2092 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
2093 __ CallStub(&stub);
2094 RecordJSReturnSite(expr);
2095 // Restore context register.
2096 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2097 context()->DropAndPlug(1, eax);
2098 }
2099
2100
2101 void FullCodeGenerator::EmitResolvePossiblyDirectEval(ResolveEvalFlag flag,
2102 int arg_count) {
2103 // Push copy of the first argument or undefined if it doesn't exist.
2104 if (arg_count > 0) {
2105 __ push(Operand(esp, arg_count * kPointerSize));
2106 } else {
2107 __ push(Immediate(isolate()->factory()->undefined_value()));
2108 }
2109
2110 // Push the receiver of the enclosing function.
2111 __ push(Operand(ebp, (2 + scope()->num_parameters()) * kPointerSize));
2112
2113 // Push the strict mode flag.
2114 __ push(Immediate(Smi::FromInt(strict_mode_flag())));
2115
2116 __ CallRuntime(flag == SKIP_CONTEXT_LOOKUP
2117 ? Runtime::kResolvePossiblyDirectEvalNoLookup
2118 : Runtime::kResolvePossiblyDirectEval, 4);
2119 }
2120
2121
2122 void FullCodeGenerator::VisitCall(Call* expr) {
2123 #ifdef DEBUG
2124 // We want to verify that RecordJSReturnSite gets called on all paths
2125 // through this function. Avoid early returns.
2126 expr->return_is_recorded_ = false;
2127 #endif
2128
2129 Comment cmnt(masm_, "[ Call");
2130 Expression* fun = expr->expression();
2131 Variable* var = fun->AsVariableProxy()->AsVariable();
2132
2133 if (var != NULL && var->is_possibly_eval()) {
2134 // In a call to eval, we first call %ResolvePossiblyDirectEval to
2135 // resolve the function we need to call and the receiver of the
2136 // call. Then we call the resolved function using the given
2137 // arguments.
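// Illustrative JavaScript (not part of the original source):
//   eval('x + 1');         // potentially direct eval, takes this path
//   var e = eval; e('x');  // indirect eval, an ordinary function call
// Only a call through a variable named 'eval' can be a direct eval.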
2138 ZoneList<Expression*>* args = expr->arguments();
2139 int arg_count = args->length();
2140 { PreservePositionScope pos_scope(masm()->positions_recorder());
2141 VisitForStackValue(fun);
2142 // Reserved receiver slot.
2143 __ push(Immediate(isolate()->factory()->undefined_value()));
2144
2145 // Push the arguments.
2146 for (int i = 0; i < arg_count; i++) {
2147 VisitForStackValue(args->at(i));
2148 }
2149
2150 // If we know that eval can only be shadowed by eval-introduced
2151 // variables we attempt to load the global eval function directly
2152 // in generated code. If we succeed, there is no need to perform a
2153 // context lookup in the runtime system.
2154 Label done;
2155 if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
2156 Label slow;
2157 EmitLoadGlobalSlotCheckExtensions(var->AsSlot(),
2158 NOT_INSIDE_TYPEOF,
2159 &slow);
2160 // Push the function and resolve eval.
2161 __ push(eax);
2162 EmitResolvePossiblyDirectEval(SKIP_CONTEXT_LOOKUP, arg_count);
2163 __ jmp(&done);
2164 __ bind(&slow);
2165 }
2166
2167 // Push copy of the function (found below the arguments) and
2168 // resolve eval.
2169 __ push(Operand(esp, (arg_count + 1) * kPointerSize));
2170 EmitResolvePossiblyDirectEval(PERFORM_CONTEXT_LOOKUP, arg_count);
2171 if (done.is_linked()) {
2172 __ bind(&done);
2173 }
2174
2175 // The runtime call returns a pair of values in eax (function) and
2176 // edx (receiver). Touch up the stack with the right values.
2177 __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
2178 __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
2179 }
2180 // Record source position for debugger.
2181 SetSourcePosition(expr->position());
2182 InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
2183 CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
2184 __ CallStub(&stub);
2185 RecordJSReturnSite(expr);
2186 // Restore context register.
2187 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2188 context()->DropAndPlug(1, eax);
2189 } else if (var != NULL && !var->is_this() && var->is_global()) {
2190 // Push global object as receiver for the call IC.
2191 __ push(GlobalObjectOperand());
2192 EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
2193 } else if (var != NULL && var->AsSlot() != NULL &&
2194 var->AsSlot()->type() == Slot::LOOKUP) {
2195 // Call to a lookup slot (dynamically introduced variable).
2196 Label slow, done;
2197
2198 { PreservePositionScope scope(masm()->positions_recorder());
2199 // Generate code for loading from variables potentially shadowed
2200 // by eval-introduced variables.
2201 EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
2202 NOT_INSIDE_TYPEOF,
2203 &slow,
2204 &done);
2205 }
2206
2207 __ bind(&slow);
2208 // Call the runtime to find the function to call (returned in eax)
2209 // and the object holding it (returned in edx).
2210 __ push(context_register());
2211 __ push(Immediate(var->name()));
2212 __ CallRuntime(Runtime::kLoadContextSlot, 2);
2213 __ push(eax); // Function.
2214 __ push(edx); // Receiver.
2215
2216 // If fast case code has been generated, emit code to push the
2217 // function and receiver and have the slow path jump around this
2218 // code.
2219 if (done.is_linked()) {
2220 Label call;
2221 __ jmp(&call);
2222 __ bind(&done);
2223 // Push function.
2224 __ push(eax);
2225 // Push global receiver.
2226 __ mov(ebx, GlobalObjectOperand());
2227 __ push(FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
2228 __ bind(&call);
2229 }
2230
2231 EmitCallWithStub(expr);
2232 } else if (fun->AsProperty() != NULL) {
2233 // Call to an object property.
2234 Property* prop = fun->AsProperty();
2235 Literal* key = prop->key()->AsLiteral();
2236 if (key != NULL && key->handle()->IsSymbol()) {
2237 // Call to a named property, use call IC.
2238 { PreservePositionScope scope(masm()->positions_recorder());
2239 VisitForStackValue(prop->obj());
2240 }
2241 EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
2242 } else {
2243 // Call to a keyed property.
2244 // For a synthetic property use keyed load IC followed by function call,
2245 // for a regular property use EmitKeyedCallWithIC.
2246 if (prop->is_synthetic()) {
2247 // Do not visit the object and key subexpressions (they are shared
2248 // by all occurrences of the same rewritten parameter).
2249 ASSERT(prop->obj()->AsVariableProxy() != NULL);
2250 ASSERT(prop->obj()->AsVariableProxy()->var()->AsSlot() != NULL);
2251 Slot* slot = prop->obj()->AsVariableProxy()->var()->AsSlot();
2252 MemOperand operand = EmitSlotSearch(slot, edx);
2253 __ mov(edx, operand);
2254
2255 ASSERT(prop->key()->AsLiteral() != NULL);
2256 ASSERT(prop->key()->AsLiteral()->handle()->IsSmi());
2257 __ mov(eax, prop->key()->AsLiteral()->handle());
2258
2259 // Record source code position for IC call.
2260 SetSourcePosition(prop->position());
2261
2262 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2263 EmitCallIC(ic, RelocInfo::CODE_TARGET);
2264 // Push result (function).
2265 __ push(eax);
2266 // Push Global receiver.
2267 __ mov(ecx, GlobalObjectOperand());
2268 __ push(FieldOperand(ecx, GlobalObject::kGlobalReceiverOffset));
2269 EmitCallWithStub(expr);
2270 } else {
2271 { PreservePositionScope scope(masm()->positions_recorder());
2272 VisitForStackValue(prop->obj());
2273 }
2274 EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET);
2275 }
2276 }
2277 } else {
2278 { PreservePositionScope scope(masm()->positions_recorder());
2279 VisitForStackValue(fun);
2280 }
2281 // Load global receiver object.
2282 __ mov(ebx, GlobalObjectOperand());
2283 __ push(FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
2284 // Emit function call.
2285 EmitCallWithStub(expr);
2286 }
2287
2288 #ifdef DEBUG
2289 // RecordJSReturnSite should have been called.
2290 ASSERT(expr->return_is_recorded_);
2291 #endif
2292 }
2293
2294
2295 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2296 Comment cmnt(masm_, "[ CallNew");
2297 // According to ECMA-262, section 11.2.2, page 44, the function
2298 // expression in new calls must be evaluated before the
2299 // arguments.
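// For example, in 'new (f())(g())' the call f() that produces the
// constructor is evaluated before the argument expression g().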
2300
2301 // Push constructor on the stack. If it's not a function it's used as
2302 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2303 // ignored.
2304 VisitForStackValue(expr->expression());
2305
2306 // Push the arguments ("left-to-right") on the stack.
2307 ZoneList<Expression*>* args = expr->arguments();
2308 int arg_count = args->length();
2309 for (int i = 0; i < arg_count; i++) {
2310 VisitForStackValue(args->at(i));
2311 }
2312
2313 // Call the construct call builtin that handles allocation and
2314 // constructor invocation.
2315 SetSourcePosition(expr->position());
2316
2317 // Load function and argument count into edi and eax.
2318 __ SafeSet(eax, Immediate(arg_count));
2319 __ mov(edi, Operand(esp, arg_count * kPointerSize));
2320
2321 Handle<Code> construct_builtin =
2322 isolate()->builtins()->JSConstructCall();
2323 __ call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
2324 context()->Plug(eax);
2325 }
2326
2327
2328 void FullCodeGenerator::EmitIsSmi(ZoneList<Expression*>* args) {
2329 ASSERT(args->length() == 1);
2330
2331 VisitForAccumulatorValue(args->at(0));
2332
2333 Label materialize_true, materialize_false;
2334 Label* if_true = NULL;
2335 Label* if_false = NULL;
2336 Label* fall_through = NULL;
2337 context()->PrepareTest(&materialize_true, &materialize_false,
2338 &if_true, &if_false, &fall_through);
2339
2340 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2341 __ test(eax, Immediate(kSmiTagMask));
2342 Split(zero, if_true, if_false, fall_through);
2343
2344 context()->Plug(if_true, if_false);
2345 }
2346
2347
2348 void FullCodeGenerator::EmitIsNonNegativeSmi(ZoneList<Expression*>* args) {
2349 ASSERT(args->length() == 1);
2350
2351 VisitForAccumulatorValue(args->at(0));
2352
2353 Label materialize_true, materialize_false;
2354 Label* if_true = NULL;
2355 Label* if_false = NULL;
2356 Label* fall_through = NULL;
2357 context()->PrepareTest(&materialize_true, &materialize_false,
2358 &if_true, &if_false, &fall_through);
2359
2360 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2361 __ test(eax, Immediate(kSmiTagMask | 0x80000000));
2362 Split(zero, if_true, if_false, fall_through);
2363
2364 context()->Plug(if_true, if_false);
2365 }
2366
2367
2368 void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
2369 ASSERT(args->length() == 1);
2370
2371 VisitForAccumulatorValue(args->at(0));
2372
2373 Label materialize_true, materialize_false;
2374 Label* if_true = NULL;
2375 Label* if_false = NULL;
2376 Label* fall_through = NULL;
2377 context()->PrepareTest(&materialize_true, &materialize_false,
2378 &if_true, &if_false, &fall_through);
2379
2380 __ test(eax, Immediate(kSmiTagMask));
2381 __ j(zero, if_false);
2382 __ cmp(eax, isolate()->factory()->null_value());
2383 __ j(equal, if_true);
2384 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2385 // Undetectable objects behave like undefined when tested with typeof.
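// The classic undetectable object is document.all: it is a real object,
// yet typeof reports "undefined", so it must not be treated as an
// object here.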
2386 __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset));
2387 __ test(ecx, Immediate(1 << Map::kIsUndetectable));
2388 __ j(not_zero, if_false);
2389 __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
2390 __ cmp(ecx, FIRST_JS_OBJECT_TYPE);
2391 __ j(below, if_false);
2392 __ cmp(ecx, LAST_JS_OBJECT_TYPE);
2393 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2394 Split(below_equal, if_true, if_false, fall_through);
2395
2396 context()->Plug(if_true, if_false);
2397 }
2398
2399
2400 void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) {
2401 ASSERT(args->length() == 1);
2402
2403 VisitForAccumulatorValue(args->at(0));
2404
2405 Label materialize_true, materialize_false;
2406 Label* if_true = NULL;
2407 Label* if_false = NULL;
2408 Label* fall_through = NULL;
2409 context()->PrepareTest(&materialize_true, &materialize_false,
2410 &if_true, &if_false, &fall_through);
2411
2412 __ test(eax, Immediate(kSmiTagMask));
2413 __ j(equal, if_false);
2414 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, ebx);
2415 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2416 Split(above_equal, if_true, if_false, fall_through);
2417
2418 context()->Plug(if_true, if_false);
2419 }
2420
2421
2422 void FullCodeGenerator::EmitIsUndetectableObject(ZoneList<Expression*>* args) {
2423 ASSERT(args->length() == 1);
2424
2425 VisitForAccumulatorValue(args->at(0));
2426
2427 Label materialize_true, materialize_false;
2428 Label* if_true = NULL;
2429 Label* if_false = NULL;
2430 Label* fall_through = NULL;
2431 context()->PrepareTest(&materialize_true, &materialize_false,
2432 &if_true, &if_false, &fall_through);
2433
2434 __ test(eax, Immediate(kSmiTagMask));
2435 __ j(zero, if_false);
2436 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2437 __ movzx_b(ebx, FieldOperand(ebx, Map::kBitFieldOffset));
2438 __ test(ebx, Immediate(1 << Map::kIsUndetectable));
2439 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2440 Split(not_zero, if_true, if_false, fall_through);
2441
2442 context()->Plug(if_true, if_false);
2443 }
2444
2445
2446 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2447 ZoneList<Expression*>* args) {
2448 ASSERT(args->length() == 1);
2449
2450 VisitForAccumulatorValue(args->at(0));
2451
2452 Label materialize_true, materialize_false;
2453 Label* if_true = NULL;
2454 Label* if_false = NULL;
2455 Label* fall_through = NULL;
2456 context()->PrepareTest(&materialize_true, &materialize_false,
2457 &if_true, &if_false, &fall_through);
2458
2459 if (FLAG_debug_code) __ AbortIfSmi(eax);
2460
2461 // Check whether this map has already been checked to be safe for default
2462 // valueOf.
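// Informally: a string wrapper is safe for the default valueOf when
// neither the wrapper nor String.prototype overrides valueOf, so
// ToPrimitive on 'new String("x")' may simply unwrap the string. The
// result of the expensive check below is cached in a bit of the map.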
2463 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2464 __ test_b(FieldOperand(ebx, Map::kBitField2Offset),
2465 1 << Map::kStringWrapperSafeForDefaultValueOf);
2466 __ j(not_zero, if_true);
2467
2468 // Check for fast case object. Return false for slow case objects.
2469 __ mov(ecx, FieldOperand(eax, JSObject::kPropertiesOffset));
2470 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
2471 __ cmp(ecx, FACTORY->hash_table_map());
2472 __ j(equal, if_false);
2473
2474 // Look for valueOf symbol in the descriptor array, and indicate false if
2475 // found. The type is not checked, so if it is a transition it is a false
2476 // negative.
2477 __ mov(ebx, FieldOperand(ebx, Map::kInstanceDescriptorsOffset));
2478 __ mov(ecx, FieldOperand(ebx, FixedArray::kLengthOffset));
2479 // ebx: descriptor array
2480 // ecx: length of descriptor array
2481 // Calculate the end of the descriptor array.
2482 STATIC_ASSERT(kSmiTag == 0);
2483 STATIC_ASSERT(kSmiTagSize == 1);
2484 STATIC_ASSERT(kPointerSize == 4);
2485 __ lea(ecx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));
2486 // Calculate location of the first key name.
2487 __ add(Operand(ebx),
2488 Immediate(FixedArray::kHeaderSize +
2489 DescriptorArray::kFirstIndex * kPointerSize));
2490 // Loop through all the keys in the descriptor array. If one of these is
2491 // the symbol valueOf, the result is false.
2492 Label entry, loop;
2493 __ jmp(&entry);
2494 __ bind(&loop);
2495 __ mov(edx, FieldOperand(ebx, 0));
2496 __ cmp(edx, FACTORY->value_of_symbol());
2497 __ j(equal, if_false);
2498 __ add(Operand(ebx), Immediate(kPointerSize));
2499 __ bind(&entry);
2500 __ cmp(ebx, Operand(ecx));
2501 __ j(not_equal, &loop);
2502
2503 // Reload map as register ebx was used as temporary above.
2504 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2505
2506 // If a valueOf property is not found on the object, check that its
2507 // prototype is the unmodified String prototype. If not, the result is false.
2508 __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
2509 __ test(ecx, Immediate(kSmiTagMask));
2510 __ j(zero, if_false);
2511 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
2512 __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2513 __ mov(edx,
2514 FieldOperand(edx, GlobalObject::kGlobalContextOffset));
2515 __ cmp(ecx,
2516 ContextOperand(edx,
2517 Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2518 __ j(not_equal, if_false);
2519 // Set the bit in the map to indicate that it has been checked safe for
2520 // default valueOf and set true result.
2521 __ or_(FieldOperand(ebx, Map::kBitField2Offset),
2522 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2523 __ jmp(if_true);
2524
2525 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2526 context()->Plug(if_true, if_false);
2527 }
2528
2529
2530 void FullCodeGenerator::EmitIsFunction(ZoneList<Expression*>* args) {
2531 ASSERT(args->length() == 1);
2532
2533 VisitForAccumulatorValue(args->at(0));
2534
2535 Label materialize_true, materialize_false;
2536 Label* if_true = NULL;
2537 Label* if_false = NULL;
2538 Label* fall_through = NULL;
2539 context()->PrepareTest(&materialize_true, &materialize_false,
2540 &if_true, &if_false, &fall_through);
2541
2542 __ test(eax, Immediate(kSmiTagMask));
2543 __ j(zero, if_false);
2544 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
2545 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2546 Split(equal, if_true, if_false, fall_through);
2547
2548 context()->Plug(if_true, if_false);
2549 }
2550
2551
2552 void FullCodeGenerator::EmitIsArray(ZoneList<Expression*>* args) {
2553 ASSERT(args->length() == 1);
2554
2555 VisitForAccumulatorValue(args->at(0));
2556
2557 Label materialize_true, materialize_false;
2558 Label* if_true = NULL;
2559 Label* if_false = NULL;
2560 Label* fall_through = NULL;
2561 context()->PrepareTest(&materialize_true, &materialize_false,
2562 &if_true, &if_false, &fall_through);
2563
2564 __ test(eax, Immediate(kSmiTagMask));
2565 __ j(equal, if_false);
2566 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
2567 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2568 Split(equal, if_true, if_false, fall_through);
2569
2570 context()->Plug(if_true, if_false);
2571 }
2572
2573
2574 void FullCodeGenerator::EmitIsRegExp(ZoneList<Expression*>* args) {
2575 ASSERT(args->length() == 1);
2576
2577 VisitForAccumulatorValue(args->at(0));
2578
2579 Label materialize_true, materialize_false;
2580 Label* if_true = NULL;
2581 Label* if_false = NULL;
2582 Label* fall_through = NULL;
2583 context()->PrepareTest(&materialize_true, &materialize_false,
2584 &if_true, &if_false, &fall_through);
2585
2586 __ test(eax, Immediate(kSmiTagMask));
2587 __ j(equal, if_false);
2588 __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
2589 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2590 Split(equal, if_true, if_false, fall_through);
2591
2592 context()->Plug(if_true, if_false);
2593 }
2594
2595
2596
2597 void FullCodeGenerator::EmitIsConstructCall(ZoneList<Expression*>* args) {
2598 ASSERT(args->length() == 0);
2599
2600 Label materialize_true, materialize_false;
2601 Label* if_true = NULL;
2602 Label* if_false = NULL;
2603 Label* fall_through = NULL;
2604 context()->PrepareTest(&materialize_true, &materialize_false,
2605 &if_true, &if_false, &fall_through);
2606
2607 // Get the frame pointer for the calling frame.
2608 __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2609
2610 // Skip the arguments adaptor frame if it exists.
2611 Label check_frame_marker;
2612 __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
2613 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2614 __ j(not_equal, &check_frame_marker);
2615 __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));
2616
2617 // Check the marker in the calling frame.
2618 __ bind(&check_frame_marker);
2619 __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
2620 Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
2621 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2622 Split(equal, if_true, if_false, fall_through);
2623
2624 context()->Plug(if_true, if_false);
2625 }
2626
2627
2628 void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) {
2629 ASSERT(args->length() == 2);
2630
2631 // Load the two objects into registers and perform the comparison.
2632 VisitForStackValue(args->at(0));
2633 VisitForAccumulatorValue(args->at(1));
2634
2635 Label materialize_true, materialize_false;
2636 Label* if_true = NULL;
2637 Label* if_false = NULL;
2638 Label* fall_through = NULL;
2639 context()->PrepareTest(&materialize_true, &materialize_false,
2640 &if_true, &if_false, &fall_through);
2641
2642 __ pop(ebx);
2643 __ cmp(eax, Operand(ebx));
2644 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
2645 Split(equal, if_true, if_false, fall_through);
2646
2647 context()->Plug(if_true, if_false);
2648 }
2649
2650
2651 void FullCodeGenerator::EmitArguments(ZoneList<Expression*>* args) {
2652 ASSERT(args->length() == 1);
2653
2654 // ArgumentsAccessStub expects the key in edx and the formal
2655 // parameter count in eax.
2656 VisitForAccumulatorValue(args->at(0));
2657 __ mov(edx, eax);
2658 __ SafeSet(eax, Immediate(Smi::FromInt(scope()->num_parameters())));
2659 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2660 __ CallStub(&stub);
2661 context()->Plug(eax);
2662 }
2663
2664
2665 void FullCodeGenerator::EmitArgumentsLength(ZoneList<Expression*>* args) {
2666 ASSERT(args->length() == 0);
2667
2668 Label exit;
2669 // Get the number of formal parameters.
2670 __ SafeSet(eax, Immediate(Smi::FromInt(scope()->num_parameters())));
2671
2672 // Check if the calling frame is an arguments adaptor frame.
2673 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2674 __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
2675 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2676 __ j(not_equal, &exit);
2677
2678 // Arguments adaptor case: Read the arguments length from the
2679 // adaptor frame.
2680 __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2681
2682 __ bind(&exit);
2683 if (FLAG_debug_code) __ AbortIfNotSmi(eax);
2684 context()->Plug(eax);
2685 }
2686
2687
2688 void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
2689 ASSERT(args->length() == 1);
2690 Label done, null, function, non_function_constructor;
2691
2692 VisitForAccumulatorValue(args->at(0));
2693
2694 // If the object is a smi, we return null.
2695 __ test(eax, Immediate(kSmiTagMask));
2696 __ j(zero, &null);
2697
2698 // Check that the object is a JS object but take special care of JS
2699 // functions to make sure they have 'Function' as their class.
2700 __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, eax); // Map is now in eax.
2701 __ j(below, &null);
2702
2703 // As long as JS_FUNCTION_TYPE is the last instance type and it is
2704 // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
2705 // LAST_JS_OBJECT_TYPE.
2706 ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
2707 ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
2708 __ CmpInstanceType(eax, JS_FUNCTION_TYPE);
2709 __ j(equal, &function);
2710
2711 // Check if the constructor in the map is a function.
2712 __ mov(eax, FieldOperand(eax, Map::kConstructorOffset));
2713 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
2714 __ j(not_equal, &non_function_constructor);
2715
2716 // eax now contains the constructor function. Grab the
2717 // instance class name from there.
2718 __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
2719 __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
2720 __ jmp(&done);
2721
2722 // Functions have class 'Function'.
2723 __ bind(&function);
2724 __ mov(eax, isolate()->factory()->function_class_symbol());
2725 __ jmp(&done);
2726
2727 // Objects with a non-function constructor have class 'Object'.
2728 __ bind(&non_function_constructor);
2729 __ mov(eax, isolate()->factory()->Object_symbol());
2730 __ jmp(&done);
2731
2732 // Non-JS objects have class null.
2733 __ bind(&null);
2734 __ mov(eax, isolate()->factory()->null_value());
2735
2736 // All done.
2737 __ bind(&done);
2738
2739 context()->Plug(eax);
2740 }
2741
2742
2743 void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) {
2744 // Conditionally generate a log call.
2745 // Args:
2746 // 0 (literal string): The type of logging (corresponds to the flags).
2747 // This is used to determine whether or not to generate the log call.
2748 // 1 (string): Format string. Access the string at argument index 2
2749 // with '%2s' (see Logger::LogRuntime for all the formats).
2750 // 2 (array): Arguments to the format string.
2751 ASSERT_EQ(args->length(), 3);
2752 #ifdef ENABLE_LOGGING_AND_PROFILING
2753 if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
2754 VisitForStackValue(args->at(1));
2755 VisitForStackValue(args->at(2));
2756 __ CallRuntime(Runtime::kLog, 2);
2757 }
2758 #endif
2759 // Finally, we're expected to leave a value on the top of the stack.
2760 __ mov(eax, isolate()->factory()->undefined_value());
2761 context()->Plug(eax);
2762 }
2763
2764
2765 void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) {
2766 ASSERT(args->length() == 0);
2767
2768 Label slow_allocate_heapnumber;
2769 Label heapnumber_allocated;
2770
2771 __ AllocateHeapNumber(edi, ebx, ecx, &slow_allocate_heapnumber);
2772 __ jmp(&heapnumber_allocated);
2773
2774 __ bind(&slow_allocate_heapnumber);
2775 // Allocate a heap number.
2776 __ CallRuntime(Runtime::kNumberAlloc, 0);
2777 __ mov(edi, eax);
2778
2779 __ bind(&heapnumber_allocated);
2780
2781 __ PrepareCallCFunction(1, ebx);
2782 __ mov(Operand(esp, 0), Immediate(ExternalReference::isolate_address()));
2783 __ CallCFunction(ExternalReference::random_uint32_function(isolate()),
2784 1);
2785
2786 // Convert 32 random bits in eax to 0.(32 random bits) in a double
2787 // by computing:
2788 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20).
2789 // This is implemented on both SSE2 and FPU.
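// Worked example: for random bits r = 0x80000000 the constructed double
// is 2^20 + r * 2^-32 = 1048576.5, and subtracting 1.0 * 2^20 leaves
// 0.5 = r / 2^32, a value uniformly distributed in [0, 1).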
2790 if (CpuFeatures::IsSupported(SSE2)) {
2791 CpuFeatures::Scope fscope(SSE2);
2792 __ mov(ebx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
2793 __ movd(xmm1, Operand(ebx));
2794 __ movd(xmm0, Operand(eax));
2795 __ cvtss2sd(xmm1, xmm1);
2796 __ pxor(xmm0, xmm1);
2797 __ subsd(xmm0, xmm1);
2798 __ movdbl(FieldOperand(edi, HeapNumber::kValueOffset), xmm0);
2799 } else {
2800 // 0x4130000000000000 is 1.0 x 2^20 as a double.
2801 __ mov(FieldOperand(edi, HeapNumber::kExponentOffset),
2802 Immediate(0x41300000));
2803 __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), eax);
2804 __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
2805 __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), Immediate(0));
2806 __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
2807 __ fsubp(1);
2808 __ fstp_d(FieldOperand(edi, HeapNumber::kValueOffset));
2809 }
2810 __ mov(eax, edi);
2811 context()->Plug(eax);
2812 }
2813
2814
2815 void FullCodeGenerator::EmitSubString(ZoneList<Expression*>* args) {
2816 // Load the arguments on the stack and call the stub.
2817 SubStringStub stub;
2818 ASSERT(args->length() == 3);
2819 VisitForStackValue(args->at(0));
2820 VisitForStackValue(args->at(1));
2821 VisitForStackValue(args->at(2));
2822 __ CallStub(&stub);
2823 context()->Plug(eax);
2824 }
2825
2826
2827 void FullCodeGenerator::EmitRegExpExec(ZoneList<Expression*>* args) {
2828 // Load the arguments on the stack and call the stub.
2829 RegExpExecStub stub;
2830 ASSERT(args->length() == 4);
2831 VisitForStackValue(args->at(0));
2832 VisitForStackValue(args->at(1));
2833 VisitForStackValue(args->at(2));
2834 VisitForStackValue(args->at(3));
2835 __ CallStub(&stub);
2836 context()->Plug(eax);
2837 }
2838
2839
2840 void FullCodeGenerator::EmitValueOf(ZoneList<Expression*>* args) {
2841 ASSERT(args->length() == 1);
2842
2843 VisitForAccumulatorValue(args->at(0)); // Load the object.
2844
2845 NearLabel done;
2846 // If the object is a smi return the object.
2847 __ test(eax, Immediate(kSmiTagMask));
2848 __ j(zero, &done);
2849 // If the object is not a value type, return the object.
2850 __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
2851 __ j(not_equal, &done);
2852 __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));
2853
2854 __ bind(&done);
2855 context()->Plug(eax);
2856 }
2857
2858
2859 void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) {
2860 // Load the arguments on the stack and call the runtime function.
2861 ASSERT(args->length() == 2);
2862 VisitForStackValue(args->at(0));
2863 VisitForStackValue(args->at(1));
2864
2865 if (CpuFeatures::IsSupported(SSE2)) {
2866 MathPowStub stub;
2867 __ CallStub(&stub);
2868 } else {
2869 __ CallRuntime(Runtime::kMath_pow, 2);
2870 }
2871 context()->Plug(eax);
2872 }
2873
2874
2875 void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) {
2876 ASSERT(args->length() == 2);
2877
2878 VisitForStackValue(args->at(0)); // Load the object.
2879 VisitForAccumulatorValue(args->at(1)); // Load the value.
2880 __ pop(ebx); // eax = value. ebx = object.
2881
2882 NearLabel done;
2883 // If the object is a smi, return the value.
2884 __ test(ebx, Immediate(kSmiTagMask));
2885 __ j(zero, &done);
2886
2887 // If the object is not a value type, return the value.
2888 __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
2889 __ j(not_equal, &done);
2890
2891 // Store the value.
2892 __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);
2893 // Update the write barrier. Save the value as it will be
2894 // overwritten by the write barrier code and is needed afterward.
2895 __ mov(edx, eax);
2896 __ RecordWrite(ebx, JSValue::kValueOffset, edx, ecx);
2897
2898 __ bind(&done);
2899 context()->Plug(eax);
2900 }
2901
2902
2903 void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) {
2904 ASSERT_EQ(args->length(), 1);
2905
2906 // Load the argument on the stack and call the stub.
2907 VisitForStackValue(args->at(0));
2908
2909 NumberToStringStub stub;
2910 __ CallStub(&stub);
2911 context()->Plug(eax);
2912 }
2913
2914
2915 void FullCodeGenerator::EmitStringCharFromCode(ZoneList<Expression*>* args) {
2916 ASSERT(args->length() == 1);
2917
2918 VisitForAccumulatorValue(args->at(0));
2919
2920 Label done;
2921 StringCharFromCodeGenerator generator(eax, ebx);
2922 generator.GenerateFast(masm_);
2923 __ jmp(&done);
2924
2925 NopRuntimeCallHelper call_helper;
2926 generator.GenerateSlow(masm_, call_helper);
2927
2928 __ bind(&done);
2929 context()->Plug(ebx);
2930 }
2931
2932
2933 void FullCodeGenerator::EmitStringCharCodeAt(ZoneList<Expression*>* args) {
2934 ASSERT(args->length() == 2);
2935
2936 VisitForStackValue(args->at(0));
2937 VisitForAccumulatorValue(args->at(1));
2938
2939 Register object = ebx;
2940 Register index = eax;
2941 Register scratch = ecx;
2942 Register result = edx;
2943
2944 __ pop(object);
2945
2946 Label need_conversion;
2947 Label index_out_of_range;
2948 Label done;
2949 StringCharCodeAtGenerator generator(object,
2950 index,
2951 scratch,
2952 result,
2953 &need_conversion,
2954 &need_conversion,
2955 &index_out_of_range,
2956 STRING_INDEX_IS_NUMBER);
2957 generator.GenerateFast(masm_);
2958 __ jmp(&done);
2959
2960 __ bind(&index_out_of_range);
2961 // When the index is out of range, the spec requires us to return
2962 // NaN.
2963 __ Set(result, Immediate(isolate()->factory()->nan_value()));
2964 __ jmp(&done);
2965
2966 __ bind(&need_conversion);
2967 // Move the undefined value into the result register, which will
2968 // trigger conversion.
2969 __ Set(result, Immediate(isolate()->factory()->undefined_value()));
2970 __ jmp(&done);
2971
2972 NopRuntimeCallHelper call_helper;
2973 generator.GenerateSlow(masm_, call_helper);
2974
2975 __ bind(&done);
2976 context()->Plug(result);
2977 }
2978
2979
2980 void FullCodeGenerator::EmitStringCharAt(ZoneList<Expression*>* args) {
2981 ASSERT(args->length() == 2);
2982
2983 VisitForStackValue(args->at(0));
2984 VisitForAccumulatorValue(args->at(1));
2985
2986 Register object = ebx;
2987 Register index = eax;
2988 Register scratch1 = ecx;
2989 Register scratch2 = edx;
2990 Register result = eax;
2991
2992 __ pop(object);
2993
2994 Label need_conversion;
2995 Label index_out_of_range;
2996 Label done;
2997 StringCharAtGenerator generator(object,
2998 index,
2999 scratch1,
3000 scratch2,
3001 result,
3002 &need_conversion,
3003 &need_conversion,
3004 &index_out_of_range,
3005 STRING_INDEX_IS_NUMBER);
3006 generator.GenerateFast(masm_);
3007 __ jmp(&done);
3008
3009 __ bind(&index_out_of_range);
3010 // When the index is out of range, the spec requires us to return
3011 // the empty string.
3012 __ Set(result, Immediate(isolate()->factory()->empty_string()));
3013 __ jmp(&done);
3014
3015 __ bind(&need_conversion);
3016 // Move smi zero into the result register, which will trigger
3017 // conversion.
3018 __ Set(result, Immediate(Smi::FromInt(0)));
3019 __ jmp(&done);
3020
3021 NopRuntimeCallHelper call_helper;
3022 generator.GenerateSlow(masm_, call_helper);
3023
3024 __ bind(&done);
3025 context()->Plug(result);
3026 }
3027
3028
3029 void FullCodeGenerator::EmitStringAdd(ZoneList<Expression*>* args) {
3030 ASSERT_EQ(2, args->length());
3031
3032 VisitForStackValue(args->at(0));
3033 VisitForStackValue(args->at(1));
3034
3035 StringAddStub stub(NO_STRING_ADD_FLAGS);
3036 __ CallStub(&stub);
3037 context()->Plug(eax);
3038 }
3039
3040
3041 void FullCodeGenerator::EmitStringCompare(ZoneList<Expression*>* args) {
3042 ASSERT_EQ(2, args->length());
3043
3044 VisitForStackValue(args->at(0));
3045 VisitForStackValue(args->at(1));
3046
3047 StringCompareStub stub;
3048 __ CallStub(&stub);
3049 context()->Plug(eax);
3050 }
3051
3052
3053 void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
3054 // Load the argument on the stack and call the stub.
3055 TranscendentalCacheStub stub(TranscendentalCache::SIN,
3056 TranscendentalCacheStub::TAGGED);
3057 ASSERT(args->length() == 1);
3058 VisitForStackValue(args->at(0));
3059 __ CallStub(&stub);
3060 context()->Plug(eax);
3061 }
3062
3063
3064 void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
3065 // Load the argument on the stack and call the stub.
3066 TranscendentalCacheStub stub(TranscendentalCache::COS,
3067 TranscendentalCacheStub::TAGGED);
3068 ASSERT(args->length() == 1);
3069 VisitForStackValue(args->at(0));
3070 __ CallStub(&stub);
3071 context()->Plug(eax);
3072 }
3073
3074
3075 void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) {
3076 // Load the argument on the stack and call the stub.
3077 TranscendentalCacheStub stub(TranscendentalCache::LOG,
3078 TranscendentalCacheStub::TAGGED);
3079 ASSERT(args->length() == 1);
3080 VisitForStackValue(args->at(0));
3081 __ CallStub(&stub);
3082 context()->Plug(eax);
3083 }
3084
3085
3086 void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) {
3087 // Load the argument on the stack and call the runtime function.
3088 ASSERT(args->length() == 1);
3089 VisitForStackValue(args->at(0));
3090 __ CallRuntime(Runtime::kMath_sqrt, 1);
3091 context()->Plug(eax);
3092 }
3093
3094
3095 void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
3096 ASSERT(args->length() >= 2);
3097
3098 int arg_count = args->length() - 2; // For receiver and function.
3099 VisitForStackValue(args->at(0)); // Receiver.
3100 for (int i = 0; i < arg_count; i++) {
3101 VisitForStackValue(args->at(i + 1));
3102 }
3103 VisitForAccumulatorValue(args->at(arg_count + 1)); // Function.
3104
3105 // InvokeFunction requires function in edi. Move it in there.
3106 if (!result_register().is(edi)) __ mov(edi, result_register());
3107 ParameterCount count(arg_count);
3108 __ InvokeFunction(edi, count, CALL_FUNCTION);
3109 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3110 context()->Plug(eax);
3111 }
3112
3113
3114 void FullCodeGenerator::EmitRegExpConstructResult(ZoneList<Expression*>* args) {
3115 // Load the arguments on the stack and call the stub.
3116 RegExpConstructResultStub stub;
3117 ASSERT(args->length() == 3);
3118 VisitForStackValue(args->at(0));
3119 VisitForStackValue(args->at(1));
3120 VisitForStackValue(args->at(2));
3121 __ CallStub(&stub);
3122 context()->Plug(eax);
3123 }
3124
3125
3126 void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
3127 ASSERT(args->length() == 3);
3128 VisitForStackValue(args->at(0));
3129 VisitForStackValue(args->at(1));
3130 VisitForStackValue(args->at(2));
3131 Label done;
3132 Label slow_case;
3133 Register object = eax;
3134 Register index_1 = ebx;
3135 Register index_2 = ecx;
3136 Register elements = edi;
3137 Register temp = edx;
3138 __ mov(object, Operand(esp, 2 * kPointerSize));
3139 // Fetch the map and check if array is in fast case.
3140 // Check that object doesn't require security checks and
3141 // has no indexed interceptor.
3142 __ CmpObjectType(object, JS_ARRAY_TYPE, temp);
3143 __ j(not_equal, &slow_case);
3144 __ test_b(FieldOperand(temp, Map::kBitFieldOffset),
3145 KeyedLoadIC::kSlowCaseBitFieldMask);
3146 __ j(not_zero, &slow_case);
3147
3148 // Check the object's elements are in fast case and writable.
3149 __ mov(elements, FieldOperand(object, JSObject::kElementsOffset));
3150 __ cmp(FieldOperand(elements, HeapObject::kMapOffset),
3151 Immediate(isolate()->factory()->fixed_array_map()));
3152 __ j(not_equal, &slow_case);
3153
3154 // Check that both indices are smis.
3155 __ mov(index_1, Operand(esp, 1 * kPointerSize));
3156 __ mov(index_2, Operand(esp, 0));
3157 __ mov(temp, index_1);
3158 __ or_(temp, Operand(index_2));
3159 __ test(temp, Immediate(kSmiTagMask));
3160 __ j(not_zero, &slow_case);
3161
3162 // Check that both indices are valid.
3163 __ mov(temp, FieldOperand(object, JSArray::kLengthOffset));
3164 __ cmp(temp, Operand(index_1));
3165 __ j(below_equal, &slow_case);
3166 __ cmp(temp, Operand(index_2));
3167 __ j(below_equal, &slow_case);
3168
3169 // Bring addresses into index1 and index2.
3170 __ lea(index_1, CodeGenerator::FixedArrayElementOperand(elements, index_1));
3171 __ lea(index_2, CodeGenerator::FixedArrayElementOperand(elements, index_2));
3172
3173 // Swap elements. Use object and temp as scratch registers.
3174 __ mov(object, Operand(index_1, 0));
3175 __ mov(temp, Operand(index_2, 0));
3176 __ mov(Operand(index_2, 0), object);
3177 __ mov(Operand(index_1, 0), temp);
3178
3179 Label new_space;
3180 __ InNewSpace(elements, temp, equal, &new_space);
3181
3182 __ mov(object, elements);
3183 __ RecordWriteHelper(object, index_1, temp);
3184 __ RecordWriteHelper(elements, index_2, temp);
3185
3186 __ bind(&new_space);
3187 // We are done. Drop elements from the stack, and return undefined.
3188 __ add(Operand(esp), Immediate(3 * kPointerSize));
3189 __ mov(eax, isolate()->factory()->undefined_value());
3190 __ jmp(&done);
3191
3192 __ bind(&slow_case);
3193 __ CallRuntime(Runtime::kSwapElements, 3);
3194
3195 __ bind(&done);
3196 context()->Plug(eax);
3197 }
3198
3199
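// Probe a JSFunctionResultCache. The cache is a FixedArray holding
// key/value pairs; its finger field is the smi offset of the most
// recently used entry, so the key is compared at the finger and the
// value read from the following element. On a miss the lookup is
// delegated to Runtime::kGetFromCache.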
void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->global_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort("Attempt to use undefined cache.");
    __ mov(eax, isolate()->factory()->undefined_value());
    context()->Plug(eax);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = eax;
  Register cache = ebx;
  Register tmp = ecx;
  __ mov(cache, ContextOperand(esi, Context::GLOBAL_INDEX));
  __ mov(cache,
         FieldOperand(cache, GlobalObject::kGlobalContextOffset));
  __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ mov(cache,
         FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  // tmp now holds finger offset as a smi.
  ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
  __ cmp(key, CodeGenerator::FixedArrayElementOperand(cache, tmp));
  __ j(not_equal, &not_found);

  __ mov(eax, CodeGenerator::FixedArrayElementOperand(cache, tmp, 1));
  __ jmp(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ push(cache);
  __ push(key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(eax);
}


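// Two regexps are considered equivalent if they are the same object or if
// they are both JSRegExps that share a map and a data array (i.e. the same
// pattern and flags).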
void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
  ASSERT_EQ(2, args->length());

  Register right = eax;
  Register left = ebx;
  Register tmp = ecx;

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ pop(left);

  Label done, fail, ok;
  __ cmp(left, Operand(right));
  __ j(equal, &ok);
  // Fail if either is a non-HeapObject.
  __ mov(tmp, left);
  __ and_(Operand(tmp), right);
  __ test(Operand(tmp), Immediate(kSmiTagMask));
  __ j(zero, &fail);
  __ mov(tmp, FieldOperand(left, HeapObject::kMapOffset));
  __ CmpInstanceType(tmp, JS_REGEXP_TYPE);
  __ j(not_equal, &fail);
  __ cmp(tmp, FieldOperand(right, HeapObject::kMapOffset));
  __ j(not_equal, &fail);
  __ mov(tmp, FieldOperand(left, JSRegExp::kDataOffset));
  __ cmp(tmp, FieldOperand(right, JSRegExp::kDataOffset));
  __ j(equal, &ok);
  __ bind(&fail);
  __ mov(eax, Immediate(isolate()->factory()->false_value()));
  __ jmp(&done);
  __ bind(&ok);
  __ mov(eax, Immediate(isolate()->factory()->true_value()));
  __ bind(&done);

  context()->Plug(eax);
}


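// A string's hash field can cache the array index that its characters
// denote. The next two intrinsics test for, and extract, such a cached
// index.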
void FullCodeGenerator::EmitHasCachedArrayIndex(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  if (FLAG_debug_code) {
    __ AbortIfNotString(eax);
  }

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ test(FieldOperand(eax, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(ZoneList<Expression*>* args) {
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  if (FLAG_debug_code) {
    __ AbortIfNotString(eax);
  }

  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
  __ IndexFromHash(eax, eax);

  context()->Plug(eax);
}


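// Fast inline version of Array.prototype.join for arrays of flat,
// sequential ASCII strings joined with a flat ASCII separator. The total
// result length is accumulated (with overflow checks) before a single
// sequential ASCII string is allocated; three copy loops specialize on
// empty, one-character, and longer separators. Anything unexpected jumps
// to the bailout label, which stores undefined as the result so the
// JavaScript caller can fall back to the generic join.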
void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;

  ASSERT(args->length() == 2);
  // We will leave the separator on the stack until the end of the function.
  VisitForStackValue(args->at(1));
  // Load this to eax (= array).
  VisitForAccumulatorValue(args->at(0));
  // All aliases of the same register have disjoint lifetimes.
  Register array = eax;
  Register elements = no_reg;  // Will be eax.

  Register index = edx;

  Register string_length = ecx;

  Register string = esi;

  Register scratch = ebx;

  Register array_length = edi;
  Register result_pos = no_reg;  // Will be edi.

  // Separator operand is already pushed.
  Operand separator_operand = Operand(esp, 2 * kPointerSize);
  Operand result_operand = Operand(esp, 1 * kPointerSize);
  Operand array_length_operand = Operand(esp, 0);
  __ sub(Operand(esp), Immediate(2 * kPointerSize));
  __ cld();
  // Check that the array is a JSArray.
  __ test(array, Immediate(kSmiTagMask));
  __ j(zero, &bailout);
  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &bailout);

  // Check that the array has fast elements.
  __ test_b(FieldOperand(scratch, Map::kBitField2Offset),
            1 << Map::kHasFastElements);
  __ j(zero, &bailout);

  // If the array has length zero, return the empty string.
  __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length);
  __ j(not_zero, &non_trivial_array);
  __ mov(result_operand, isolate()->factory()->empty_string());
  __ jmp(&done);

  // Save the array length.
  __ bind(&non_trivial_array);
  __ mov(array_length_operand, array_length);

  // Save the FixedArray containing array's elements.
  // End of array's live range.
  elements = array;
  __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
  array = no_reg;


  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ Set(index, Immediate(0));
  __ Set(string_length, Immediate(0));
  // Loop condition: while (index < length).
  // Live loop registers: index, array_length, string,
  //                      scratch, string_length, elements.
  if (FLAG_debug_code) {
    __ cmp(index, Operand(array_length));
    __ Assert(less, "No empty arrays here in EmitFastAsciiArrayJoin");
  }
  __ bind(&loop);
  __ mov(string, FieldOperand(elements,
                              index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ test(string, Immediate(kSmiTagMask));
  __ j(zero, &bailout);
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
  __ j(not_equal, &bailout);
  __ add(string_length,
         FieldOperand(string, SeqAsciiString::kLengthOffset));
  __ j(overflow, &bailout);
  __ add(Operand(index), Immediate(1));
  __ cmp(index, Operand(array_length));
  __ j(less, &loop);

  // If array_length is 1, return elements[0], a string.
  __ cmp(array_length, 1);
  __ j(not_equal, &not_size_one_array);
  __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
  __ mov(result_operand, scratch);
  __ jmp(&done);

  __ bind(&not_size_one_array);

  // End of array_length live range.
  result_pos = array_length;
  array_length = no_reg;

  // Live registers:
  // string_length: Sum of string lengths, as a smi.
  // elements: FixedArray of strings.

  // Check that the separator is a flat ASCII string.
  __ mov(string, separator_operand);
  __ test(string, Immediate(kSmiTagMask));
  __ j(zero, &bailout);
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, ASCII_STRING_TYPE);
  __ j(not_equal, &bailout);

  // Add (separator length times array_length) - separator length
  // to string_length.
  __ mov(scratch, separator_operand);
  __ mov(scratch, FieldOperand(scratch, SeqAsciiString::kLengthOffset));
  __ sub(string_length, Operand(scratch));  // May be negative, temporarily.
  __ imul(scratch, array_length_operand);
  __ j(overflow, &bailout);
  __ add(string_length, Operand(scratch));
  __ j(overflow, &bailout);

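  // Untag the accumulated smi length; for a sequential ASCII string the
  // character count equals the byte count.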
  __ shr(string_length, 1);
  // Live registers and stack values:
  //   string_length
  //   elements
  __ AllocateAsciiString(result_pos, string_length, scratch,
                         index, string, &bailout);
  __ mov(result_operand, result_pos);
  __ lea(result_pos, FieldOperand(result_pos, SeqAsciiString::kHeaderSize));


  __ mov(string, separator_operand);
  __ cmp(FieldOperand(string, SeqAsciiString::kLengthOffset),
         Immediate(Smi::FromInt(1)));
  __ j(equal, &one_char_separator);
  __ j(greater, &long_separator);


  // Empty separator case.
  __ mov(index, Immediate(0));
  __ jmp(&loop_1_condition);
  // Loop condition: while (index < length).
  __ bind(&loop_1);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.
  //   elements: the FixedArray of strings we are joining.

  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(Operand(index), Immediate(1));
  __ bind(&loop_1_condition);
  __ cmp(index, array_length_operand);
  __ j(less, &loop_1);  // End while (index < length).
  __ jmp(&done);


  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace separator with its ASCII character value.
  __ mov_b(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ mov_b(separator_operand, scratch);

  __ Set(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&loop_2_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_2);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.

  // Copy the separator character to the result.
  __ mov_b(scratch, separator_operand);
  __ mov_b(Operand(result_pos, 0), scratch);
  __ inc(result_pos);

  __ bind(&loop_2_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(Operand(index), Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_2);  // End while (index < length).
  __ jmp(&done);


  // Long separator case (separator is more than one character).
  __ bind(&long_separator);

  __ Set(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&loop_3_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_3);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.

  // Copy the separator to the result.
  __ mov(string, separator_operand);
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);

  __ bind(&loop_3_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(Operand(index), Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_3);  // End while (index < length).
  __ jmp(&done);


  __ bind(&bailout);
  __ mov(result_operand, isolate()->factory()->undefined_value());
  __ bind(&done);
  __ mov(eax, result_operand);
  // Drop temp values from the stack, and restore context register.
  __ add(Operand(esp), Immediate(3 * kPointerSize));

  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  Handle<String> name = expr->name();
  if (name->length() > 0 && name->Get(0) == '_') {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Prepare for calling JS runtime function.
    __ mov(eax, GlobalObjectOperand());
    __ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  if (expr->is_jsruntime()) {
    // Call the JS runtime function via a call IC.
    __ Set(ecx, Immediate(expr->name()));
    InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
    Handle<Code> ic = isolate()->stub_cache()->ComputeCallInitialize(
        arg_count, in_loop);
    EmitCallIC(ic, RelocInfo::CODE_TARGET);
    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  } else {
    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
  }
  context()->Plug(eax);
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* prop = expr->expression()->AsProperty();
      Variable* var = expr->expression()->AsVariableProxy()->AsVariable();

      if (prop != NULL) {
        if (prop->is_synthetic()) {
          // Result of deleting parameters is false, even when they rewrite
          // to accesses on the arguments object.
          context()->Plug(false);
        } else {
          VisitForStackValue(prop->obj());
          VisitForStackValue(prop->key());
          __ push(Immediate(Smi::FromInt(strict_mode_flag())));
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(eax);
        }
      } else if (var != NULL) {
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
        if (var->is_global()) {
          __ push(GlobalObjectOperand());
          __ push(Immediate(var->name()));
          __ push(Immediate(Smi::FromInt(kNonStrictMode)));
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(eax);
        } else if (var->AsSlot() != NULL &&
                   var->AsSlot()->type() != Slot::LOOKUP) {
          // Result of deleting non-global, non-dynamic variables is false.
          // The subexpression does not have side effects.
          context()->Plug(false);
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ push(context_register());
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kDeleteContextSlot, 2);
          context()->Plug(eax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else {
        Label materialize_true, materialize_false;
        Label* if_true = NULL;
        Label* if_false = NULL;
        Label* fall_through = NULL;

        // Notice that the labels are swapped.
        context()->PrepareTest(&materialize_true, &materialize_false,
                               &if_false, &if_true, &fall_through);
        if (context()->IsTest()) ForwardBailoutToChild(expr);
        VisitForControl(expr->expression(), if_true, if_false, fall_through);
        context()->Plug(if_false, if_true);  // Labels swapped.
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(eax);
      break;
    }

    case Token::ADD: {
      Comment cmt(masm_, "[ UnaryOperation (ADD)");
      VisitForAccumulatorValue(expr->expression());
      Label no_conversion;
      __ test(result_register(), Immediate(kSmiTagMask));
      __ j(zero, &no_conversion);
      ToNumberStub convert_stub;
      __ CallStub(&convert_stub);
      __ bind(&no_conversion);
      context()->Plug(result_register());
      break;
    }

    case Token::SUB: {
      Comment cmt(masm_, "[ UnaryOperation (SUB)");
      bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
      UnaryOverwriteMode overwrite =
          can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
      GenericUnaryOpStub stub(Token::SUB, overwrite, NO_UNARY_FLAGS);
      // GenericUnaryOpStub expects the argument to be in the
      // accumulator register eax.
      VisitForAccumulatorValue(expr->expression());
      __ CallStub(&stub);
      context()->Plug(eax);
      break;
    }

    case Token::BIT_NOT: {
      Comment cmt(masm_, "[ UnaryOperation (BIT_NOT)");
      // The generic unary operation stub expects the argument to be
      // in the accumulator register eax.
      VisitForAccumulatorValue(expr->expression());
      Label done;
      bool inline_smi_case = ShouldInlineSmiCase(expr->op());
      if (inline_smi_case) {
        NearLabel call_stub;
        __ test(eax, Immediate(kSmiTagMask));
        __ j(not_zero, &call_stub);
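        // Inline the smi case: adding one (kSmiTagMask) first flips the zero
        // tag bit, so the complement below is again a correctly tagged smi
        // representing the bitwise-not of the original value.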
        __ lea(eax, Operand(eax, kSmiTagMask));
        __ not_(eax);
        __ jmp(&done);
        __ bind(&call_stub);
      }
      bool overwrite = expr->expression()->ResultOverwriteAllowed();
      UnaryOverwriteMode mode =
          overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
      UnaryOpFlags flags = inline_smi_case
          ? NO_UNARY_SMI_CODE_IN_STUB
          : NO_UNARY_FLAGS;
      GenericUnaryOpStub stub(Token::BIT_NOT, mode, flags);
      __ CallStub(&stub);
      __ bind(&done);
      context()->Plug(eax);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // as the left-hand side.
  if (!expr->expression()->IsValidLeftHandSide()) {
    VisitForEffect(expr->expression());
    return;
  }

  // Expression can only be a property, a global or a (parameter or local)
  // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy()->var());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ push(Immediate(Smi::FromInt(0)));
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the accumulator.
      VisitForAccumulatorValue(prop->obj());
      __ push(eax);
      EmitNamedPropertyLoad(prop);
    } else {
      if (prop->is_arguments_access()) {
        VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
        MemOperand slot_operand =
            EmitSlotSearch(obj_proxy->var()->AsSlot(), ecx);
        __ push(slot_operand);
        __ SafeSet(eax, Immediate(prop->key()->AsLiteral()->handle()));
      } else {
        VisitForStackValue(prop->obj());
        VisitForAccumulatorValue(prop->key());
      }
      __ mov(edx, Operand(esp, 0));
      __ push(eax);
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load has a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(expr->CountId(), TOS_REG);
  }

  // Call ToNumber only if operand is not a smi.
  NearLabel no_conversion;
  if (ShouldInlineSmiCase(expr->op())) {
    __ test(eax, Immediate(kSmiTagMask));
    __ j(zero, &no_conversion);
  }
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);
  __ bind(&no_conversion);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(eax);
          break;
        case NAMED_PROPERTY:
          __ mov(Operand(esp, kPointerSize), eax);
          break;
        case KEYED_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
      }
    }
  }

  // Inline smi case if we are in a loop.
  NearLabel stub_call, done;
  JumpPatchSite patch_site(masm_);

  if (ShouldInlineSmiCase(expr->op())) {
    if (expr->op() == Token::INC) {
      __ add(Operand(eax), Immediate(Smi::FromInt(1)));
    } else {
      __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
    }
    __ j(overflow, &stub_call);
    // We could eliminate this smi check if we split the code at
    // the first smi check before calling ToNumber.
    patch_site.EmitJumpIfSmi(eax, &done);

    __ bind(&stub_call);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
    } else {
      __ add(Operand(eax), Immediate(Smi::FromInt(1)));
    }
  }

  // Record position before stub call.
  SetSourcePosition(expr->position());

  // Call stub for +1/-1.
  __ mov(edx, eax);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  TypeRecordingBinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
  EmitCallIC(stub.GetCode(), &patch_site);
  __ bind(&done);

  // Store the value returned in eax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(eax);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(eax);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(ecx, prop->key()->AsLiteral()->handle());
      __ pop(edx);
      Handle<Code> ic = is_strict_mode()
          ? isolate()->builtins()->StoreIC_Initialize_Strict()
          : isolate()->builtins()->StoreIC_Initialize();
      EmitCallIC(ic, RelocInfo::CODE_TARGET);
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ pop(ecx);
      __ pop(edx);
      Handle<Code> ic = is_strict_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
          : isolate()->builtins()->KeyedStoreIC_Initialize();
      EmitCallIC(ic, RelocInfo::CODE_TARGET);
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        // Result is on the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
  }
}


void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  VariableProxy* proxy = expr->AsVariableProxy();
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());

  if (proxy != NULL && !proxy->var()->is_this() && proxy->var()->is_global()) {
    Comment cmnt(masm_, "Global variable");
    __ mov(eax, GlobalObjectOperand());
    __ mov(ecx, Immediate(proxy->name()));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    EmitCallIC(ic, RelocInfo::CODE_TARGET);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(eax);
  } else if (proxy != NULL &&
             proxy->var()->AsSlot() != NULL &&
             proxy->var()->AsSlot()->type() == Slot::LOOKUP) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    Slot* slot = proxy->var()->AsSlot();
    EmitDynamicLoadFromSlotFastCase(slot, INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ push(esi);
    __ push(Immediate(proxy->name()));
    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(eax);
  } else {
    // This expression cannot throw a reference error at the top level.
    context()->HandleExpression(expr);
  }
}


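// Try to emit a specialized comparison for the pattern
// typeof <expression> ==(=) <string literal>, branching directly on the
// map and instance type of the value instead of materializing the type
// name and calling the generic compare IC. Returns false if the
// expression does not match the pattern, leaving code generation to the
// caller.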
bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
                                          Expression* left,
                                          Expression* right,
                                          Label* if_true,
                                          Label* if_false,
                                          Label* fall_through) {
  if (op != Token::EQ && op != Token::EQ_STRICT) return false;

  // Check for the pattern: typeof <expression> == <string literal>.
  Literal* right_literal = right->AsLiteral();
  if (right_literal == NULL) return false;
  Handle<Object> right_literal_value = right_literal->handle();
  if (!right_literal_value->IsString()) return false;
  UnaryOperation* left_unary = left->AsUnaryOperation();
  if (left_unary == NULL || left_unary->op() != Token::TYPEOF) return false;
  Handle<String> check = Handle<String>::cast(right_literal_value);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(left_unary->expression());
  }
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_symbol())) {
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_symbol())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    __ j(above_equal, if_false);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
    __ cmp(eax, isolate()->factory()->undefined_value());
    __ j(equal, if_true);
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ test(ecx, Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_symbol())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FIRST_FUNCTION_CLASS_TYPE, edx);
    Split(above_equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_symbol())) {
    __ JumpIfSmi(eax, if_false);
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_true);
    __ CmpObjectType(eax, FIRST_JS_OBJECT_TYPE, edx);
    __ j(below, if_false);
    __ CmpInstanceType(edx, FIRST_FUNCTION_CLASS_TYPE);
    __ j(above_equal, if_false);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(zero, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }

  return true;
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  Token::Value op = expr->op();
  Expression* left = expr->left();
  Expression* right = expr->right();
  if (TryLiteralCompare(op, left, right, if_true, if_false, fall_through)) {
    context()->Plug(if_true, if_false);
    return;
  }

  VisitForStackValue(expr->left());
  switch (expr->op()) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
      __ test(eax, Operand(eax));
      // The stub returns 0 for true.
      Split(zero, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = no_condition;
      bool strict = false;
      switch (op) {
        case Token::EQ_STRICT:
          strict = true;
          // Fall through.
        case Token::EQ:
          cc = equal;
          __ pop(edx);
          break;
        case Token::LT:
          cc = less;
          __ pop(edx);
          break;
        case Token::GT:
          // Reverse left and right sides to obtain ECMA-262 conversion order.
          cc = less;
          __ mov(edx, result_register());
          __ pop(eax);
          break;
        case Token::LTE:
          // Reverse left and right sides to obtain ECMA-262 conversion order.
          cc = greater_equal;
          __ mov(edx, result_register());
          __ pop(eax);
          break;
        case Token::GTE:
          cc = greater_equal;
          __ pop(edx);
          break;
        case Token::IN:
        case Token::INSTANCEOF:
        default:
          UNREACHABLE();
      }

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        NearLabel slow_case;
        __ mov(ecx, Operand(edx));
        __ or_(ecx, Operand(eax));
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case);
        __ cmp(edx, Operand(eax));
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(op);
      EmitCallIC(ic, &patch_site);

      PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
      __ test(eax, Operand(eax));
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


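// Compare the expression against null. In the strict case only null
// itself compares equal; otherwise undefined and undetectable objects
// also compare equal to null, matching the semantics of ==.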
void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(expr->expression());
  PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);

  __ cmp(eax, isolate()->factory()->null_value());
  if (expr->is_strict()) {
    Split(equal, if_true, if_false, fall_through);
  } else {
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->undefined_value());
    __ j(equal, if_true);
    __ test(eax, Immediate(kSmiTagMask));
    __ j(zero, if_false);
    // It can be an undetectable object.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(edx, FieldOperand(edx, Map::kBitFieldOffset));
    __ test(edx, Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(eax);
}


Register FullCodeGenerator::result_register() {
  return eax;
}


Register FullCodeGenerator::context_register() {
  return esi;
}


void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
  ASSERT(mode == RelocInfo::CODE_TARGET ||
         mode == RelocInfo::CODE_TARGET_CONTEXT);
  switch (ic->kind()) {
    case Code::LOAD_IC:
      __ IncrementCounter(isolate()->counters()->named_load_full(), 1);
      break;
    case Code::KEYED_LOAD_IC:
      __ IncrementCounter(isolate()->counters()->keyed_load_full(), 1);
      break;
    case Code::STORE_IC:
      __ IncrementCounter(isolate()->counters()->named_store_full(), 1);
      break;
    case Code::KEYED_STORE_IC:
      __ IncrementCounter(isolate()->counters()->keyed_store_full(), 1);
    default:
      break;
  }

  __ call(ic, mode);

  // Crankshaft doesn't need patching of inlined loads and stores.
  // When compiling the snapshot we need to produce code that works
  // with and without Crankshaft.
  if (V8::UseCrankshaft() && !Serializer::enabled()) {
    return;
  }

  // If we're calling a (keyed) load or store stub, we have to mark
  // the call as containing no inlined code so we will not attempt to
  // patch it.
  switch (ic->kind()) {
    case Code::LOAD_IC:
    case Code::KEYED_LOAD_IC:
    case Code::STORE_IC:
    case Code::KEYED_STORE_IC:
      __ nop();  // Signals no inlined code.
      break;
    default:
      // Do nothing.
      break;
  }
}


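// Call an IC together with the patch-site info emitted by JumpPatchSite
// (a test instruction encoding the delta to the patchable jump), so the
// IC can later patch the inlined smi check. Without a bound patch site,
// a nop is emitted to signal that there is no inlined code to patch.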
void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
  Counters* counters = isolate()->counters();
  switch (ic->kind()) {
    case Code::LOAD_IC:
      __ IncrementCounter(counters->named_load_full(), 1);
      break;
    case Code::KEYED_LOAD_IC:
      __ IncrementCounter(counters->keyed_load_full(), 1);
      break;
    case Code::STORE_IC:
      __ IncrementCounter(counters->named_store_full(), 1);
      break;
    case Code::KEYED_STORE_IC:
      __ IncrementCounter(counters->keyed_store_full(), 1);
    default:
      break;
  }

  __ call(ic, RelocInfo::CODE_TARGET);
  if (patch_site != NULL && patch_site->is_bound()) {
    patch_site->EmitPatchInfo();
  } else {
    __ nop();  // Signals no inlined code.
  }
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

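// While inside a finally block the return address on the stack is kept
// "cooked": it is stored as a smi-encoded offset from the code object
// rather than as a raw address, so a GC that moves the code object
// cannot invalidate it.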
void FullCodeGenerator::EnterFinallyBlock() {
  // Cook the return address on top of the stack (smi-encoded Code* delta).
  ASSERT(!result_register().is(edx));
  __ mov(edx, Operand(esp, 0));
  __ sub(Operand(edx), Immediate(masm_->CodeObject()));
  ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
  ASSERT_EQ(0, kSmiTag);
  __ add(edx, Operand(edx));  // Convert to smi.
  __ mov(Operand(esp, 0), edx);
  // Store result register while executing finally block.
  __ push(result_register());
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(edx));
  // Restore result register from stack.
  __ pop(result_register());
  // Uncook the return address.
  __ mov(edx, Operand(esp, 0));
  __ sar(edx, 1);  // Convert smi to int.
  __ add(Operand(edx), Immediate(masm_->CodeObject()));
  __ mov(Operand(esp, 0), edx);
  // And return.
  __ ret(0);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32