// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)


class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      ASSERT(is_int8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    ASSERT(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
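
// A note on the patch protocol (a sketch, based on the comments above and on
// the IC patching done elsewhere in the tree): the jump emitted at the patch
// site follows "test reg, kSmiTagMask", so it initially branches on the smi
// tag bit via jc/jnc. Once the CompareIC has collected smi feedback, the
// patcher rewrites that jump to jz/jnz (for the short forms on ia32 this is
// a one-byte change: opcodes 0x72/0x73 become 0x74/0x75), so the branch then
// keys off the zero flag of the inlined comparison instead. The
// "test eax, <delta>" emitted by EmitPatchInfo() is never executed for its
// flags; it only encodes the 8-bit distance back to the patch site so the
// patcher can locate it.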


// TODO(jkummerow): Obsolete as soon as x64 is updated. Remove.
int FullCodeGenerator::self_optimization_header_size() {
  UNREACHABLE();
  return 13;
}


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o edi: the JS function object being called (i.e. ourselves)
//   o esi: our context
//   o ebp: our caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-ia32.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Strict mode functions and builtins need to replace the receiver
  // with undefined when called as functions (without an explicit
  // receiver object). ecx is zero for method calls and non-zero for
  // function calls.
  if (!info->is_classic_mode() || info->is_native()) {
    Label ok;
    __ test(ecx, ecx);
    __ j(zero, &ok, Label::kNear);
    // +1 for return address.
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));
    __ JumpIfSmi(ecx, &ok);
    __ CmpObjectType(ecx, JS_GLOBAL_PROXY_TYPE, ecx);
    __ j(not_equal, &ok, Label::kNear);
    __ mov(Operand(esp, receiver_offset),
           Immediate(isolate()->factory()->undefined_value()));
    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS Function.
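  // The frame now matches JavaScriptFrameConstants (a sketch; frames-ia32.h
  // is authoritative):
  //   ebp + 8 and up : receiver and arguments (kCallerSPOffset)
  //   ebp + 4        : return address
  //   ebp + 0        : caller's saved ebp
  //   ebp - 4        : context (kContextOffset)
  //   ebp - 8        : JS function (kFunctionOffset)
  // Stack-allocated locals are pushed below the function slot.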

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      for (int i = 0; i < locals_count; i++) {
        __ push(eax);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate local context");
    // Argument to NewContext is the function, which is still in edi.
    __ push(edi);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in both eax and esi. It replaces the context
    // passed to us. It's saved in the stack and kept live in esi.
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        __ RecordWriteContextSlot(esi,
                                  context_offset,
                                  eax,
                                  ebx,
                                  kDontSaveFPRegs);
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
      __ push(edi);
    } else {
      __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ lea(edx,
           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
    __ push(edx);
    __ SafePush(Immediate(Smi::FromInt(num_parameters)));
    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (!is_classic_mode()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
    }
    ArgumentsAccessStub stub(type);
    __ CallStub(&stub);

    SetVar(arguments, eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableProxy* proxy = scope()->function();
        ASSERT(proxy->var()->mode() == CONST ||
               proxy->var()->mode() == CONST_HARMONY);
        ASSERT(proxy->var()->location() != Variable::UNALLOCATED);
        EmitDeclaration(proxy, proxy->var()->mode(), NULL);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
      Label ok;
      ExternalReference stack_limit =
          ExternalReference::address_of_stack_limit(isolate());
      __ cmp(esp, Operand::StaticVariable(stack_limit));
      __ j(above_equal, &ok, Label::kNear);
      StackCheckStub stub;
      __ CallStub(&stub);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}

void FullCodeGenerator::ClearAccumulator() {
  __ Set(eax, Immediate(Smi::FromInt(0)));
}
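
// Resetting with smi zero rather than an arbitrary immediate keeps the
// accumulator a valid tagged value (Smi::FromInt(0) is the all-zero word;
// compare the ASSERT(Smi::FromInt(0) == 0) in VisitForInStatement below),
// so no stale heap pointer is left behind in eax.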


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
    // Self-optimization is a one-off thing: if it fails, don't try again.
    reset_value = Smi::kMaxValue;
  }
  if (isolate()->IsDebuggerActive()) {
    // Detect debug break requests as soon as possible.
    reset_value = 10;
  }
  __ mov(ebx, Immediate(profiling_counter_));
  __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
         Immediate(Smi::FromInt(reset_value)));
}


static const int kMaxBackEdgeWeight = 127;
static const int kBackEdgeDistanceDivisor = 100;


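// The back-edge weight below scales with the amount of code in the loop
// body, clamped to [1, kMaxBackEdgeWeight]. Illustrative arithmetic only: a
// 40-byte loop body gives Min(127, Max(1, 40 / 100)) = 1 per iteration, a
// 3500-byte body gives 35, and anything past roughly 12700 bytes saturates
// at 127, so large loops drain FLAG_interrupt_budget correspondingly faster.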
void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
                                       Label* back_edge_target) {
  Comment cmnt(masm_, "[ Stack check");
  Label ok;

  if (FLAG_count_based_interrupts) {
    int weight = 1;
    if (FLAG_weighted_back_edges) {
      ASSERT(back_edge_target->is_bound());
      int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kBackEdgeDistanceDivisor));
    }
    EmitProfilingCounterDecrement(weight);
    __ j(positive, &ok, Label::kNear);
    InterruptStub stub;
    __ CallStub(&stub);
  } else {
    // Count based interrupts happen often enough when they are enabled
    // that the additional stack checks are not necessary (they would
    // only check for interrupts).
    ExternalReference stack_limit =
        ExternalReference::address_of_stack_limit(isolate());
    __ cmp(esp, Operand::StaticVariable(stack_limit));
    __ j(above_equal, &ok, Label::kNear);
    StackCheckStub stub;
    __ CallStub(&stub);
  }

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordStackCheck(stmt->OsrEntryId());

  // Loop stack checks can be patched to perform on-stack replacement. In
  // order to decide whether or not to perform OSR we embed the loop depth
  // in a test instruction after the call so we can extract it from the OSR
  // builtin.
  ASSERT(loop_depth() > 0);
  __ test(eax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));

  if (FLAG_count_based_interrupts) {
    EmitProfilingCounterReset();
  }

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    // Common return label
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
      // Pretend that the exit is a backwards jump to the entry.
      int weight = 1;
      if (info_->ShouldSelfOptimize()) {
        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
      } else if (FLAG_weighted_back_edges) {
        int distance = masm_->pc_offset();
        weight = Min(kMaxBackEdgeWeight,
                     Max(1, distance / kBackEdgeDistanceDivisor));
      }
      EmitProfilingCounterDecrement(weight);
      Label ok;
      __ j(positive, &ok, Label::kNear);
      __ push(eax);
      if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
        __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
        __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
      } else {
        InterruptStub stub;
        __ CallStub(&stub);
      }
      __ pop(eax);
      EmitProfilingCounterReset();
      __ bind(&ok);
    }
#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    SetSourcePosition(function()->end_position() - 1);
    __ RecordJSReturn();
    // Do not use the leave instruction here because it is too short to
    // patch with the code required by the debugger.
    __ mov(esp, ebp);
    __ pop(ebp);

    int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
    __ Ret(arguments_bytes, ecx);
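    // Illustrative arithmetic: with two declared parameters this pops
    // (2 + 1) * 4 = 12 bytes (the receiver plus both arguments), leaving the
    // caller's stack balanced. ecx is passed purely as a scratch register
    // that Ret() may clobber while dropping the arguments.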
#ifdef ENABLE_DEBUGGER_SUPPORT
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceLength <=
           masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
  }
}


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  // Memory operands can be pushed directly.
  __ push(operand);
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeSet(result_register(), Immediate(lit));
  } else {
    __ Set(result_register(), Immediate(lit));
  }
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafePush(Immediate(lit));
  } else {
    __ push(Immediate(lit));
  }
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ mov(Operand(esp, 0), reg);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ mov(result_register(), value);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ push(Immediate(value));
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  ToBooleanStub stub(result_register());
  __ push(result_register());
  __ CallStub(&stub, condition->test_id());
  __ test(result_register(), result_register());
  // The stub returns nonzero for true.
  Split(not_zero, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}
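
// Split() emits the minimal branch sequence for a two-way split where one
// target may be the fall-through position. For Split(not_zero, t, f, ft):
//   ft == f:   jnz t           (false case falls through)
//   ft == t:   jz f            (true case falls through)
//   otherwise: jnz t; jmp f    (no fall-through at all)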


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(ebp, offset);
}
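
// Worked example (illustrative, assuming the frame layout sketched in
// Generate() above): in a function with two parameters, parameter index 0
// maps to -0 * 4 + (2 + 1) * 4 = ebp + 12, parameter index 1 to ebp + 8
// (just above the return address), and local index 0 to ebp + kLocal0Offset,
// the first slot below the saved context and function.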


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ mov(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    ASSERT(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
                                        VariableMode mode,
                                        FunctionLiteral* function) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  Variable* variable = proxy->var();
  bool binding_needs_init = (function == NULL) &&
      (mode == CONST || mode == CONST_HARMONY || mode == LET);
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      ++global_count_;
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (function != NULL) {
        Comment cmnt(masm_, "[ Declaration");
        VisitForAccumulatorValue(function);
        __ mov(StackOperand(variable), result_register());
      } else if (binding_needs_init) {
        Comment cmnt(masm_, "[ Declaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));
      }
      break;

    case Variable::CONTEXT:
      // The variable in the decl always resides in the current function
      // context.
      ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
      if (FLAG_debug_code) {
        // Check that we're not inside a with or catch context.
        __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
        __ cmp(ebx, isolate()->factory()->with_context_map());
        __ Check(not_equal, "Declaration in with context.");
        __ cmp(ebx, isolate()->factory()->catch_context_map());
        __ Check(not_equal, "Declaration in catch context.");
      }
      if (function != NULL) {
        Comment cmnt(masm_, "[ Declaration");
        VisitForAccumulatorValue(function);
        __ mov(ContextOperand(esi, variable->index()), result_register());
        // We know that we have written a function, which is not a smi.
        __ RecordWriteContextSlot(esi,
                                  Context::SlotOffset(variable->index()),
                                  result_register(),
                                  ecx,
                                  kDontSaveFPRegs,
                                  EMIT_REMEMBERED_SET,
                                  OMIT_SMI_CHECK);
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      } else if (binding_needs_init) {
        Comment cmnt(masm_, "[ Declaration");
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Declaration");
      __ push(esi);
      __ push(Immediate(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      ASSERT(mode == VAR ||
             mode == CONST ||
             mode == CONST_HARMONY ||
             mode == LET);
      PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
          ? READ_ONLY : NONE;
      __ push(Immediate(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (function != NULL) {
        VisitForStackValue(function);
      } else if (binding_needs_init) {
        __ push(Immediate(isolate()->factory()->the_hole_value()));
      } else {
        __ push(Immediate(Smi::FromInt(0)));  // Indicates no initial value.
      }
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ push(esi);  // The context is the first argument.
  __ push(Immediate(pairs));
  __ push(Immediate(Smi::FromInt(DeclareGlobalsFlags())));
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
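      // The or_ above merges the tag bits: the result is a smi (low tag bit
      // clear) only if both edx and eax are smis, so a single jump covers
      // both operands. Per the JumpPatchSite protocol, this jump is always
      // taken until the CompareIC has seen smi operands and patched it.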

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
    CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
    patch_site.EmitPatchInfo();
    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. Both SpiderMonkey and JSC
  // ignore null and undefined in contrast to the specification; see
  // ECMA-262 section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ bind(&convert);
  __ push(eax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(eax);

  // Check for proxies.
  Label call_runtime, use_cache, fixed_array;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
  __ j(below_equal, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);


  // We got a map in register eax. Get the enumeration cache from it.
  __ bind(&use_cache);
  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset));
  __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(edx);  // Enumeration cache.
  __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset));
  __ push(eax);  // Enumeration cache length (as smi).
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
  __ jmp(&loop);

  // We got a fixed array in register eax. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(
          Handle<Object>(
              Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
  RecordTypeFeedbackCell(stmt->PrepareId(), cell);
  __ LoadHeapObject(ebx, cell);
  __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
         Immediate(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));

  __ mov(ebx, Immediate(Smi::FromInt(1)));  // Smi indicates slow check
  __ mov(ecx, Operand(esp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
  __ j(above, &non_proxy);
  __ mov(ebx, Immediate(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ push(ebx);  // Smi
  __ push(eax);  // Array
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
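
  // Both paths above have now built the same five-slot loop frame (a sketch
  // matching the loads below):
  //   esp + 0  : current index (smi)
  //   esp + 4  : cache/array length (smi)
  //   esp + 8  : enum cache or fixed array of keys
  //   esp + 12 : expected map, or smi 0 (proxy) / smi 1 (slow check)
  //   esp + 16 : the enumerable object itself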

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  ASSERT(Smi::FromInt(0) == 0);
  __ test(edx, edx);
  __ j(zero, &update_each);

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ test(eax, eax);
  __ j(equal, loop_statement.continue_label());
  __ mov(ebx, eax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitStackCheck(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ add(esp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->language_mode());
    __ push(Immediate(info));
    __ CallStub(&stub);
  } else {
    __ push(esi);
    __ push(Immediate(info));
    __ push(Immediate(pretenure
                      ? isolate()->factory()->true_value()
                      : isolate()->factory()->false_value()));
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(eax);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register context = esi;
  Register temp = edx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        __ j(not_equal, slow);
      }
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain. There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at global context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->global_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is NULL.
    __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  __ mov(eax, GlobalObjectOperand());
  __ mov(ecx, var->name());
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
  CallIC(ic, mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        __ j(not_equal, slow);
      }
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
  }
  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == CONST ||
        local->mode() == CONST_HARMONY ||
        local->mode() == LET) {
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, done);
      if (local->mode() == CONST) {
        __ mov(eax, isolate()->factory()->undefined_value());
      } else {  // LET || CONST_HARMONY
        __ push(Immediate(var->name()));
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      // Use inline caching. Variable name is passed in ecx and the global
      // object in eax.
      __ mov(eax, GlobalObjectOperand());
      __ mov(ecx, var->name());
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
      context()->Plug(eax);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        ASSERT(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          Label done;
          GetVar(eax, var);
          __ cmp(eax, isolate()->factory()->the_hole_value());
          __ j(not_equal, &done, Label::kNear);
          if (var->mode() == LET || var->mode() == CONST_HARMONY) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ push(Immediate(var->name()));
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            ASSERT(var->mode() == CONST);
            __ mov(eax, isolate()->factory()->undefined_value());
          }
          __ bind(&done);
          context()->Plug(eax);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
      __ bind(&done);
      context()->Plug(eax);
      break;
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // edi = JS function.
  // ecx = literals array.
  // ebx = regexp literal.
  // eax = regexp literal clone.
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ mov(ebx, FieldOperand(ecx, literal_offset));
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function
  // Result will be in eax.
  __ push(ecx);
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->pattern()));
  __ push(Immediate(expr->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(ebx, eax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ push(ebx);
  __ push(Immediate(Smi::FromInt(size)));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(ebx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ mov(edx, FieldOperand(ebx, i));
    __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
    __ mov(FieldOperand(eax, i), edx);
    __ mov(FieldOperand(eax, i + kPointerSize), ecx);
  }
  if ((size % (2 * kPointerSize)) != 0) {
    __ mov(edx, FieldOperand(ebx, size - kPointerSize));
    __ mov(FieldOperand(eax, size - kPointerSize), edx);
  }
  context()->Plug(eax);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ push(Immediate(isolate()->factory()->null_value()));
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ push(Immediate(Smi::FromInt(flags)));
  int properties_count = constant_properties->length() / 2;
  if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else if (flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  } else {
    FastCloneShallowObjectStub stub(properties_count);
    __ CallStub(&stub);
  }
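
  // Sketch of the selection policy above: nested literals (depth > 1) always
  // take the full runtime path; shallow literals with slow elements or more
  // than kMaximumClonedProperties properties use the shallow-clone runtime
  // helper; everything else is cloned entirely in generated code by
  // FastCloneShallowObjectStub.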

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore();

  AccessorTable accessor_table(isolate()->zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(eax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->handle()->IsSymbol()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(ecx, Immediate(key->handle()));
            __ mov(edx, Operand(esp, 0));
            Handle<Code> ic = is_classic_mode()
                ? isolate()->builtins()->StoreIC_Initialize()
                : isolate()->builtins()->StoreIC_Initialize_Strict();
            CallIC(ic, RelocInfo::CODE_TARGET, key->id());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Fall through.
      case ObjectLiteral::Property::PROTOTYPE:
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ push(Immediate(Smi::FromInt(NONE)));  // PropertyAttributes
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ push(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ push(Immediate(Smi::FromInt(NONE)));
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
  }

  if (expr->has_function()) {
    ASSERT(result_saved);
    __ push(Operand(esp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
1549
1550
1551 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1552 Comment cmnt(masm_, "[ ArrayLiteral");
1553
1554 ZoneList<Expression*>* subexprs = expr->values();
1555 int length = subexprs->length();
1556 Handle<FixedArray> constant_elements = expr->constant_elements();
1557 ASSERT_EQ(2, constant_elements->length());
1558 ElementsKind constant_elements_kind =
1559 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1560 bool has_constant_fast_elements = constant_elements_kind == FAST_ELEMENTS;
1561 Handle<FixedArrayBase> constant_elements_values(
1562 FixedArrayBase::cast(constant_elements->get(1)));
1563
1564 __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1565 __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
1566 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1567 __ push(Immediate(constant_elements));
1568 Heap* heap = isolate()->heap();
1569 if (has_constant_fast_elements &&
1570 constant_elements_values->map() == heap->fixed_cow_array_map()) {
1571 // If the elements are already FAST_ELEMENTS, the boilerplate cannot
1572 // change, so it's possible to specialize the stub in advance.
1573 __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
1574 FastCloneShallowArrayStub stub(
1575 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
1576 length);
1577 __ CallStub(&stub);
1578 } else if (expr->depth() > 1) {
1579 __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
1580 } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1581 __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
1582 } else {
1583 ASSERT(constant_elements_kind == FAST_ELEMENTS ||
1584 constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
1585 FLAG_smi_only_arrays);
1586 // If the elements are already FAST_ELEMENTS, the boilerplate cannot
1587 // change, so it's possible to specialize the stub in advance.
1588 FastCloneShallowArrayStub::Mode mode = has_constant_fast_elements
1589 ? FastCloneShallowArrayStub::CLONE_ELEMENTS
1590 : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
1591 FastCloneShallowArrayStub stub(mode, length);
1592 __ CallStub(&stub);
1593 }
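  // E.g. [[1], [2]] has depth > 1 and falls back to %CreateArrayLiteral,
  // while a flat literal whose constant boilerplate elements are already
  // copy-on-write takes the specialized COW stub above.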
1594
1595 bool result_saved = false; // Is the result saved to the stack?
1596
1597 // Emit code to evaluate all the non-constant subexpressions and to store
1598 // them into the newly cloned array.
1599 for (int i = 0; i < length; i++) {
1600 Expression* subexpr = subexprs->at(i);
1601 // If the subexpression is a literal or a simple materialized literal it
1602 // is already set in the cloned array.
1603 if (subexpr->AsLiteral() != NULL ||
1604 CompileTimeValue::IsCompileTimeValue(subexpr)) {
1605 continue;
1606 }
1607
1608 if (!result_saved) {
1609 __ push(eax);
1610 result_saved = true;
1611 }
1612 VisitForAccumulatorValue(subexpr);
1613
1614 if (constant_elements_kind == FAST_ELEMENTS) {
1615       // Fast-case array literal with ElementsKind of FAST_ELEMENTS: the
1616       // elements cannot transition, so there is no need to call the runtime stub.
1617 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1618 __ mov(ebx, Operand(esp, 0)); // Copy of array literal.
1619 __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
1620 // Store the subexpression value in the array's elements.
1621 __ mov(FieldOperand(ebx, offset), result_register());
1622 // Update the write barrier for the array store.
1623 __ RecordWriteField(ebx, offset, result_register(), ecx,
1624 kDontSaveFPRegs,
1625 EMIT_REMEMBERED_SET,
1626 INLINE_SMI_CHECK);
1627 } else {
1628 // Store the subexpression value in the array's elements.
1629 __ mov(ebx, Operand(esp, 0)); // Copy of array literal.
1630 __ mov(edi, FieldOperand(ebx, JSObject::kMapOffset));
1631 __ mov(ecx, Immediate(Smi::FromInt(i)));
1632 __ mov(edx, Immediate(Smi::FromInt(expr->literal_index())));
1633 StoreArrayLiteralElementStub stub;
1634 __ CallStub(&stub);
1635 }
1636
1637 PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
1638 }
1639
1640 if (result_saved) {
1641 context()->PlugTOS();
1642 } else {
1643 context()->Plug(eax);
1644 }
1645 }
1646
1647
1648 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1649 Comment cmnt(masm_, "[ Assignment");
1650 // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
1651 // on the left-hand side.
1652 if (!expr->target()->IsValidLeftHandSide()) {
1653 VisitForEffect(expr->target());
1654 return;
1655 }
1656
1657 // Left-hand side can only be a property, a global or a (parameter or local)
1658 // slot.
1659 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1660 LhsKind assign_type = VARIABLE;
1661 Property* property = expr->target()->AsProperty();
1662 if (property != NULL) {
1663 assign_type = (property->key()->IsPropertyName())
1664 ? NAMED_PROPERTY
1665 : KEYED_PROPERTY;
1666 }
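  // E.g. 'x = v' is VARIABLE, 'o.f = v' is NAMED_PROPERTY, and
  // 'o[k] = v' is KEYED_PROPERTY.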
1667
1668 // Evaluate LHS expression.
1669 switch (assign_type) {
1670 case VARIABLE:
1671 // Nothing to do here.
1672 break;
1673 case NAMED_PROPERTY:
1674 if (expr->is_compound()) {
1675 // We need the receiver both on the stack and in the accumulator.
1676 VisitForAccumulatorValue(property->obj());
1677 __ push(result_register());
1678 } else {
1679 VisitForStackValue(property->obj());
1680 }
1681 break;
1682 case KEYED_PROPERTY: {
1683 if (expr->is_compound()) {
1684 VisitForStackValue(property->obj());
1685 VisitForAccumulatorValue(property->key());
1686 __ mov(edx, Operand(esp, 0));
1687 __ push(eax);
1688 } else {
1689 VisitForStackValue(property->obj());
1690 VisitForStackValue(property->key());
1691 }
1692 break;
1693 }
1694 }
1695
1696 // For compound assignments we need another deoptimization point after the
1697 // variable/property load.
1698 if (expr->is_compound()) {
1699 AccumulatorValueContext result_context(this);
1700 { AccumulatorValueContext left_operand_context(this);
1701 switch (assign_type) {
1702 case VARIABLE:
1703 EmitVariableLoad(expr->target()->AsVariableProxy());
1704 PrepareForBailout(expr->target(), TOS_REG);
1705 break;
1706 case NAMED_PROPERTY:
1707 EmitNamedPropertyLoad(property);
1708 PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1709 break;
1710 case KEYED_PROPERTY:
1711 EmitKeyedPropertyLoad(property);
1712 PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
1713 break;
1714 }
1715 }
1716
1717 Token::Value op = expr->binary_op();
1718 __ push(eax); // Left operand goes on the stack.
1719 VisitForAccumulatorValue(expr->value());
1720
1721 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1722 ? OVERWRITE_RIGHT
1723 : NO_OVERWRITE;
1724 SetSourcePosition(expr->position() + 1);
1725 if (ShouldInlineSmiCase(op)) {
1726 EmitInlineSmiBinaryOp(expr->binary_operation(),
1727 op,
1728 mode,
1729 expr->target(),
1730 expr->value());
1731 } else {
1732 EmitBinaryOp(expr->binary_operation(), op, mode);
1733 }
1734
1735 // Deoptimization point in case the binary operation may have side effects.
1736 PrepareForBailout(expr->binary_operation(), TOS_REG);
1737 } else {
1738 VisitForAccumulatorValue(expr->value());
1739 }
1740
1741 // Record source position before possible IC call.
1742 SetSourcePosition(expr->position());
1743
1744 // Store the value.
1745 switch (assign_type) {
1746 case VARIABLE:
1747 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1748 expr->op());
1749 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1750 context()->Plug(eax);
1751 break;
1752 case NAMED_PROPERTY:
1753 EmitNamedPropertyAssignment(expr);
1754 break;
1755 case KEYED_PROPERTY:
1756 EmitKeyedPropertyAssignment(expr);
1757 break;
1758 }
1759 }
1760
1761
1762 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
1763 SetSourcePosition(prop->position());
1764 Literal* key = prop->key()->AsLiteral();
1765 ASSERT(!key->handle()->IsSmi());
1766 __ mov(ecx, Immediate(key->handle()));
1767 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
1768 CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
1769 }
1770
1771
1772 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
1773 SetSourcePosition(prop->position());
1774 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
1775 CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
1776 }
1777
1778
1779 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1780 Token::Value op,
1781 OverwriteMode mode,
1782 Expression* left,
1783 Expression* right) {
1784 // Do combined smi check of the operands. Left operand is on the
1785 // stack. Right operand is in eax.
1786 Label smi_case, done, stub_call;
1787 __ pop(edx);
1788 __ mov(ecx, eax);
1789 __ or_(eax, edx);
1790 JumpPatchSite patch_site(masm_);
1791 patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
1792
1793 __ bind(&stub_call);
1794 __ mov(eax, ecx);
1795 BinaryOpStub stub(op, mode);
1796 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1797 patch_site.EmitPatchInfo();
1798 __ jmp(&done, Label::kNear);
1799
1800 // Smi case.
1801 __ bind(&smi_case);
1802 __ mov(eax, edx); // Copy left operand in case of a stub call.
1803
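  // With smis tagged as (value << 1) on ia32, ADD, SUB and the bitwise ops
  // work directly on tagged operands, e.g. (a << 1) + (b << 1) == (a + b) << 1,
  // so only an overflow check is needed; the shift and MUL cases untag first.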
1804 switch (op) {
1805 case Token::SAR:
1806 __ SmiUntag(eax);
1807 __ SmiUntag(ecx);
1808       __ sar_cl(eax);  // No checks of the result are necessary.
1809 __ SmiTag(eax);
1810 break;
1811 case Token::SHL: {
1812 Label result_ok;
1813 __ SmiUntag(eax);
1814 __ SmiUntag(ecx);
1815 __ shl_cl(eax);
1816 // Check that the *signed* result fits in a smi.
1817 __ cmp(eax, 0xc0000000);
1818 __ j(positive, &result_ok);
1819 __ SmiTag(ecx);
1820 __ jmp(&stub_call);
1821 __ bind(&result_ok);
1822 __ SmiTag(eax);
1823 break;
1824 }
1825 case Token::SHR: {
1826 Label result_ok;
1827 __ SmiUntag(eax);
1828 __ SmiUntag(ecx);
1829 __ shr_cl(eax);
1830 __ test(eax, Immediate(0xc0000000));
1831 __ j(zero, &result_ok);
1832 __ SmiTag(ecx);
1833 __ jmp(&stub_call);
1834 __ bind(&result_ok);
1835 __ SmiTag(eax);
1836 break;
1837 }
1838 case Token::ADD:
1839 __ add(eax, ecx);
1840 __ j(overflow, &stub_call);
1841 break;
1842 case Token::SUB:
1843 __ sub(eax, ecx);
1844 __ j(overflow, &stub_call);
1845 break;
1846 case Token::MUL: {
1847 __ SmiUntag(eax);
1848 __ imul(eax, ecx);
1849 __ j(overflow, &stub_call);
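      // The product is zero: if either operand is negative the true result
      // is -0, which cannot be represented as a smi, so take the stub call.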
1850 __ test(eax, eax);
1851 __ j(not_zero, &done, Label::kNear);
1852 __ mov(ebx, edx);
1853 __ or_(ebx, ecx);
1854 __ j(negative, &stub_call);
1855 break;
1856 }
1857 case Token::BIT_OR:
1858 __ or_(eax, ecx);
1859 break;
1860 case Token::BIT_AND:
1861 __ and_(eax, ecx);
1862 break;
1863 case Token::BIT_XOR:
1864 __ xor_(eax, ecx);
1865 break;
1866 default:
1867 UNREACHABLE();
1868 }
1869
1870 __ bind(&done);
1871 context()->Plug(eax);
1872 }
1873
1874
1875 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
1876 Token::Value op,
1877 OverwriteMode mode) {
1878 __ pop(edx);
1879 BinaryOpStub stub(op, mode);
1880 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
1881 CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1882 patch_site.EmitPatchInfo();
1883 context()->Plug(eax);
1884 }
1885
1886
1887 void FullCodeGenerator::EmitAssignment(Expression* expr) {
1888 // Invalid left-hand sides are rewritten to have a 'throw
1889 // ReferenceError' on the left-hand side.
1890 if (!expr->IsValidLeftHandSide()) {
1891 VisitForEffect(expr);
1892 return;
1893 }
1894
1895 // Left-hand side can only be a property, a global or a (parameter or local)
1896 // slot.
1897 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1898 LhsKind assign_type = VARIABLE;
1899 Property* prop = expr->AsProperty();
1900 if (prop != NULL) {
1901 assign_type = (prop->key()->IsPropertyName())
1902 ? NAMED_PROPERTY
1903 : KEYED_PROPERTY;
1904 }
1905
1906 switch (assign_type) {
1907 case VARIABLE: {
1908 Variable* var = expr->AsVariableProxy()->var();
1909 EffectContext context(this);
1910 EmitVariableAssignment(var, Token::ASSIGN);
1911 break;
1912 }
1913 case NAMED_PROPERTY: {
1914 __ push(eax); // Preserve value.
1915 VisitForAccumulatorValue(prop->obj());
1916 __ mov(edx, eax);
1917 __ pop(eax); // Restore value.
1918 __ mov(ecx, prop->key()->AsLiteral()->handle());
1919 Handle<Code> ic = is_classic_mode()
1920 ? isolate()->builtins()->StoreIC_Initialize()
1921 : isolate()->builtins()->StoreIC_Initialize_Strict();
1922 CallIC(ic);
1923 break;
1924 }
1925 case KEYED_PROPERTY: {
1926 __ push(eax); // Preserve value.
1927 VisitForStackValue(prop->obj());
1928 VisitForAccumulatorValue(prop->key());
1929 __ mov(ecx, eax);
1930 __ pop(edx);
1931 __ pop(eax); // Restore value.
1932 Handle<Code> ic = is_classic_mode()
1933 ? isolate()->builtins()->KeyedStoreIC_Initialize()
1934 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
1935 CallIC(ic);
1936 break;
1937 }
1938 }
1939 context()->Plug(eax);
1940 }
1941
1942
1943 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1944 Token::Value op) {
1945 if (var->IsUnallocated()) {
1946 // Global var, const, or let.
1947 __ mov(ecx, var->name());
1948 __ mov(edx, GlobalObjectOperand());
1949 Handle<Code> ic = is_classic_mode()
1950 ? isolate()->builtins()->StoreIC_Initialize()
1951 : isolate()->builtins()->StoreIC_Initialize_Strict();
1952 CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
1953
1954 } else if (op == Token::INIT_CONST) {
1955 // Const initializers need a write barrier.
1956 ASSERT(!var->IsParameter()); // No const parameters.
1957 if (var->IsStackLocal()) {
1958 Label skip;
1959 __ mov(edx, StackOperand(var));
1960 __ cmp(edx, isolate()->factory()->the_hole_value());
1961 __ j(not_equal, &skip);
1962 __ mov(StackOperand(var), eax);
1963 __ bind(&skip);
1964 } else {
1965 ASSERT(var->IsContextSlot() || var->IsLookupSlot());
1966 // Like var declarations, const declarations are hoisted to function
1967 // scope. However, unlike var initializers, const initializers are
1968 // able to drill a hole to that function context, even from inside a
1969 // 'with' context. We thus bypass the normal static scope lookup for
1970 // var->IsContextSlot().
1971 __ push(eax);
1972 __ push(esi);
1973 __ push(Immediate(var->name()));
1974 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1975 }
1976
1977 } else if (var->mode() == LET && op != Token::INIT_LET) {
1978 // Non-initializing assignment to let variable needs a write barrier.
1979 if (var->IsLookupSlot()) {
1980 __ push(eax); // Value.
1981 __ push(esi); // Context.
1982 __ push(Immediate(var->name()));
1983 __ push(Immediate(Smi::FromInt(language_mode())));
1984 __ CallRuntime(Runtime::kStoreContextSlot, 4);
1985 } else {
1986 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
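      // An uninitialized let binding still holds the hole; assigning to it
      // throws, e.g. { x = 1; let x; } raises a ReferenceError.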
1987 Label assign;
1988 MemOperand location = VarOperand(var, ecx);
1989 __ mov(edx, location);
1990 __ cmp(edx, isolate()->factory()->the_hole_value());
1991 __ j(not_equal, &assign, Label::kNear);
1992 __ push(Immediate(var->name()));
1993 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1994 __ bind(&assign);
1995 __ mov(location, eax);
1996 if (var->IsContextSlot()) {
1997 __ mov(edx, eax);
1998 int offset = Context::SlotOffset(var->index());
1999 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
2000 }
2001 }
2002
2003 } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
2004 // Assignment to var or initializing assignment to let/const
2005 // in harmony mode.
2006 if (var->IsStackAllocated() || var->IsContextSlot()) {
2007 MemOperand location = VarOperand(var, ecx);
2008 if (FLAG_debug_code && op == Token::INIT_LET) {
2009 // Check for an uninitialized let binding.
2010 __ mov(edx, location);
2011 __ cmp(edx, isolate()->factory()->the_hole_value());
2012 __ Check(equal, "Let binding re-initialization.");
2013 }
2014 // Perform the assignment.
2015 __ mov(location, eax);
2016 if (var->IsContextSlot()) {
2017 __ mov(edx, eax);
2018 int offset = Context::SlotOffset(var->index());
2019 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
2020 }
2021 } else {
2022 ASSERT(var->IsLookupSlot());
2023 __ push(eax); // Value.
2024 __ push(esi); // Context.
2025 __ push(Immediate(var->name()));
2026 __ push(Immediate(Smi::FromInt(language_mode())));
2027 __ CallRuntime(Runtime::kStoreContextSlot, 4);
2028 }
2029 }
2030 // Non-initializing assignments to consts are ignored.
2031 }
2032
2033
2034 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2035 // Assignment to a property, using a named store IC.
2036 Property* prop = expr->target()->AsProperty();
2037 ASSERT(prop != NULL);
2038 ASSERT(prop->key()->AsLiteral() != NULL);
2039
2040 // If the assignment starts a block of assignments to the same object,
2041 // change to slow case to avoid the quadratic behavior of repeatedly
2042 // adding fast properties.
2043 if (expr->starts_initialization_block()) {
2044 __ push(result_register());
2045 __ push(Operand(esp, kPointerSize)); // Receiver is now under value.
2046 __ CallRuntime(Runtime::kToSlowProperties, 1);
2047 __ pop(result_register());
2048 }
2049
2050 // Record source code position before IC call.
2051 SetSourcePosition(expr->position());
2052 __ mov(ecx, prop->key()->AsLiteral()->handle());
2053 if (expr->ends_initialization_block()) {
2054 __ mov(edx, Operand(esp, 0));
2055 } else {
2056 __ pop(edx);
2057 }
2058 Handle<Code> ic = is_classic_mode()
2059 ? isolate()->builtins()->StoreIC_Initialize()
2060 : isolate()->builtins()->StoreIC_Initialize_Strict();
2061 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2062
2063 // If the assignment ends an initialization block, revert to fast case.
2064 if (expr->ends_initialization_block()) {
2065 __ push(eax); // Result of assignment, saved even if not needed.
2066 __ push(Operand(esp, kPointerSize)); // Receiver is under value.
2067 __ CallRuntime(Runtime::kToFastProperties, 1);
2068 __ pop(eax);
2069 __ Drop(1);
2070 }
2071 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2072 context()->Plug(eax);
2073 }
2074
2075
2076 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2077 // Assignment to a property, using a keyed store IC.
2078
2079 // If the assignment starts a block of assignments to the same object,
2080 // change to slow case to avoid the quadratic behavior of repeatedly
2081 // adding fast properties.
2082 if (expr->starts_initialization_block()) {
2083 __ push(result_register());
2084 // Receiver is now under the key and value.
2085 __ push(Operand(esp, 2 * kPointerSize));
2086 __ CallRuntime(Runtime::kToSlowProperties, 1);
2087 __ pop(result_register());
2088 }
2089
2090 __ pop(ecx);
2091 if (expr->ends_initialization_block()) {
2092 __ mov(edx, Operand(esp, 0)); // Leave receiver on the stack for later.
2093 } else {
2094 __ pop(edx);
2095 }
2096 // Record source code position before IC call.
2097 SetSourcePosition(expr->position());
2098 Handle<Code> ic = is_classic_mode()
2099 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2100 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2101 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2102
2103 // If the assignment ends an initialization block, revert to fast case.
2104 if (expr->ends_initialization_block()) {
2105 __ pop(edx);
2106 __ push(eax); // Result of assignment, saved even if not needed.
2107 __ push(edx);
2108 __ CallRuntime(Runtime::kToFastProperties, 1);
2109 __ pop(eax);
2110 }
2111
2112 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2113 context()->Plug(eax);
2114 }
2115
2116
2117 void FullCodeGenerator::VisitProperty(Property* expr) {
2118 Comment cmnt(masm_, "[ Property");
2119 Expression* key = expr->key();
2120
2121 if (key->IsPropertyName()) {
2122 VisitForAccumulatorValue(expr->obj());
2123 EmitNamedPropertyLoad(expr);
2124 context()->Plug(eax);
2125 } else {
2126 VisitForStackValue(expr->obj());
2127 VisitForAccumulatorValue(expr->key());
2128 __ pop(edx);
2129 EmitKeyedPropertyLoad(expr);
2130 context()->Plug(eax);
2131 }
2132 }
2133
2134
2135 void FullCodeGenerator::CallIC(Handle<Code> code,
2136 RelocInfo::Mode rmode,
2137 unsigned ast_id) {
2138 ic_total_count_++;
2139 __ call(code, rmode, ast_id);
2140 }
2141
2142
2143
2144
2145 void FullCodeGenerator::EmitCallWithIC(Call* expr,
2146 Handle<Object> name,
2147 RelocInfo::Mode mode) {
2148 // Code common for calls using the IC.
2149 ZoneList<Expression*>* args = expr->arguments();
2150 int arg_count = args->length();
2151 { PreservePositionScope scope(masm()->positions_recorder());
2152 for (int i = 0; i < arg_count; i++) {
2153 VisitForStackValue(args->at(i));
2154 }
2155 __ Set(ecx, Immediate(name));
2156 }
2157 // Record source position of the IC call.
2158 SetSourcePosition(expr->position());
2159 Handle<Code> ic =
2160 isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
2161 CallIC(ic, mode, expr->id());
2162 RecordJSReturnSite(expr);
2163 // Restore context register.
2164 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2165 context()->Plug(eax);
2166 }
2167
2168
2169 void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
2170 Expression* key) {
2171 // Load the key.
2172 VisitForAccumulatorValue(key);
2173
2174 // Swap the name of the function and the receiver on the stack to follow
2175 // the calling convention for call ICs.
2176 __ pop(ecx);
2177 __ push(eax);
2178 __ push(ecx);
2179
2180 // Load the arguments.
2181 ZoneList<Expression*>* args = expr->arguments();
2182 int arg_count = args->length();
2183 { PreservePositionScope scope(masm()->positions_recorder());
2184 for (int i = 0; i < arg_count; i++) {
2185 VisitForStackValue(args->at(i));
2186 }
2187 }
2188 // Record source position of the IC call.
2189 SetSourcePosition(expr->position());
2190 Handle<Code> ic =
2191 isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
2192 __ mov(ecx, Operand(esp, (arg_count + 1) * kPointerSize)); // Key.
2193 CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
2194 RecordJSReturnSite(expr);
2195 // Restore context register.
2196 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2197 context()->DropAndPlug(1, eax); // Drop the key still on the stack.
2198 }
2199
2200
2201 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
2202 // Code common for calls using the call stub.
2203 ZoneList<Expression*>* args = expr->arguments();
2204 int arg_count = args->length();
2205 { PreservePositionScope scope(masm()->positions_recorder());
2206 for (int i = 0; i < arg_count; i++) {
2207 VisitForStackValue(args->at(i));
2208 }
2209 }
2210 // Record source position for debugger.
2211 SetSourcePosition(expr->position());
2212
2213 // Record call targets in unoptimized code, but not in the snapshot.
2214 if (!Serializer::enabled()) {
2215 flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
2216 Handle<Object> uninitialized =
2217 TypeFeedbackCells::UninitializedSentinel(isolate());
2218 Handle<JSGlobalPropertyCell> cell =
2219 isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2220 RecordTypeFeedbackCell(expr->id(), cell);
2221 __ mov(ebx, cell);
2222 }
2223
2224 CallFunctionStub stub(arg_count, flags);
2225 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2226 __ CallStub(&stub, expr->id());
2227
2228 RecordJSReturnSite(expr);
2229 // Restore context register.
2230 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2231 context()->DropAndPlug(1, eax);
2232 }
2233
2234
2235 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2236 // Push copy of the first argument or undefined if it doesn't exist.
2237 if (arg_count > 0) {
2238 __ push(Operand(esp, arg_count * kPointerSize));
2239 } else {
2240 __ push(Immediate(isolate()->factory()->undefined_value()));
2241 }
2242
2243 // Push the receiver of the enclosing function.
2244 __ push(Operand(ebp, (2 + info_->scope()->num_parameters()) * kPointerSize));
2245 // Push the language mode.
2246 __ push(Immediate(Smi::FromInt(language_mode())));
2247
2248   // Push the start position of the scope the call resides in.
2249 __ push(Immediate(Smi::FromInt(scope()->start_position())));
2250
2251 // Do the runtime call.
2252 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
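  // Together with the copy of the callee pushed by the caller, these pushes
  // form the five arguments %ResolvePossiblyDirectEval expects.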
2253 }
2254
2255
2256 void FullCodeGenerator::VisitCall(Call* expr) {
2257 #ifdef DEBUG
2258 // We want to verify that RecordJSReturnSite gets called on all paths
2259 // through this function. Avoid early returns.
2260 expr->return_is_recorded_ = false;
2261 #endif
2262
2263 Comment cmnt(masm_, "[ Call");
2264 Expression* callee = expr->expression();
2265 VariableProxy* proxy = callee->AsVariableProxy();
2266 Property* property = callee->AsProperty();
2267
2268 if (proxy != NULL && proxy->var()->is_possibly_eval()) {
2269 // In a call to eval, we first call %ResolvePossiblyDirectEval to
2270 // resolve the function we need to call and the receiver of the call.
2271 // Then we call the resolved function using the given arguments.
2272 ZoneList<Expression*>* args = expr->arguments();
2273 int arg_count = args->length();
2274 { PreservePositionScope pos_scope(masm()->positions_recorder());
2275 VisitForStackValue(callee);
2276 // Reserved receiver slot.
2277 __ push(Immediate(isolate()->factory()->undefined_value()));
2278 // Push the arguments.
2279 for (int i = 0; i < arg_count; i++) {
2280 VisitForStackValue(args->at(i));
2281 }
2282
2283 // Push a copy of the function (found below the arguments) and
2284 // resolve eval.
2285 __ push(Operand(esp, (arg_count + 1) * kPointerSize));
2286 EmitResolvePossiblyDirectEval(arg_count);
2287
2288 // The runtime call returns a pair of values in eax (function) and
2289 // edx (receiver). Touch up the stack with the right values.
2290 __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
2291 __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
2292 }
2293 // Record source position for debugger.
2294 SetSourcePosition(expr->position());
2295 CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
2296 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2297 __ CallStub(&stub);
2298 RecordJSReturnSite(expr);
2299 // Restore context register.
2300 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2301 context()->DropAndPlug(1, eax);
2302
2303 } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
2304 // Push global object as receiver for the call IC.
2305 __ push(GlobalObjectOperand());
2306 EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
2307
2308 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
2309 // Call to a lookup slot (dynamically introduced variable).
2310 Label slow, done;
2311 { PreservePositionScope scope(masm()->positions_recorder());
2312 // Generate code for loading from variables potentially shadowed by
2313 // eval-introduced variables.
2314 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2315 }
2316 __ bind(&slow);
2317 // Call the runtime to find the function to call (returned in eax) and
2318 // the object holding it (returned in edx).
2319 __ push(context_register());
2320 __ push(Immediate(proxy->name()));
2321 __ CallRuntime(Runtime::kLoadContextSlot, 2);
2322 __ push(eax); // Function.
2323 __ push(edx); // Receiver.
2324
2325 // If fast case code has been generated, emit code to push the function
2326 // and receiver and have the slow path jump around this code.
2327 if (done.is_linked()) {
2328 Label call;
2329 __ jmp(&call, Label::kNear);
2330 __ bind(&done);
2331 // Push function.
2332 __ push(eax);
2333 // The receiver is implicitly the global receiver. Indicate this by
2334 // passing the hole to the call function stub.
2335 __ push(Immediate(isolate()->factory()->the_hole_value()));
2336 __ bind(&call);
2337 }
2338
2339 // The receiver is either the global receiver or an object found by
2340 // LoadContextSlot. That object could be the hole if the receiver is
2341 // implicitly the global object.
2342 EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
2343
2344 } else if (property != NULL) {
2345 { PreservePositionScope scope(masm()->positions_recorder());
2346 VisitForStackValue(property->obj());
2347 }
2348 if (property->key()->IsPropertyName()) {
2349 EmitCallWithIC(expr,
2350 property->key()->AsLiteral()->handle(),
2351 RelocInfo::CODE_TARGET);
2352 } else {
2353 EmitKeyedCallWithIC(expr, property->key());
2354 }
2355
2356 } else {
2357 // Call to an arbitrary expression not handled specially above.
2358 { PreservePositionScope scope(masm()->positions_recorder());
2359 VisitForStackValue(callee);
2360 }
2361 // Load global receiver object.
2362 __ mov(ebx, GlobalObjectOperand());
2363 __ push(FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
2364 // Emit function call.
2365 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2366 }
2367
2368 #ifdef DEBUG
2369 // RecordJSReturnSite should have been called.
2370 ASSERT(expr->return_is_recorded_);
2371 #endif
2372 }
2373
2374
2375 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2376 Comment cmnt(masm_, "[ CallNew");
2377 // According to ECMA-262, section 11.2.2, page 44, the function
2378 // expression in new calls must be evaluated before the
2379 // arguments.
2380
2381 // Push constructor on the stack. If it's not a function it's used as
2382 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2383 // ignored.
2384 VisitForStackValue(expr->expression());
2385
2386 // Push the arguments ("left-to-right") on the stack.
2387 ZoneList<Expression*>* args = expr->arguments();
2388 int arg_count = args->length();
2389 for (int i = 0; i < arg_count; i++) {
2390 VisitForStackValue(args->at(i));
2391 }
2392
2393 // Call the construct call builtin that handles allocation and
2394 // constructor invocation.
2395 SetSourcePosition(expr->position());
2396
2397 // Load function and argument count into edi and eax.
2398 __ SafeSet(eax, Immediate(arg_count));
2399 __ mov(edi, Operand(esp, arg_count * kPointerSize));
2400
2401 // Record call targets in unoptimized code, but not in the snapshot.
2402 CallFunctionFlags flags;
2403 if (!Serializer::enabled()) {
2404 flags = RECORD_CALL_TARGET;
2405 Handle<Object> uninitialized =
2406 TypeFeedbackCells::UninitializedSentinel(isolate());
2407 Handle<JSGlobalPropertyCell> cell =
2408 isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2409 RecordTypeFeedbackCell(expr->id(), cell);
2410 __ mov(ebx, cell);
2411 } else {
2412 flags = NO_CALL_FUNCTION_FLAGS;
2413 }
2414
2415 CallConstructStub stub(flags);
2416 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2417 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2418 context()->Plug(eax);
2419 }
2420
2421
2422 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2423 ZoneList<Expression*>* args = expr->arguments();
2424 ASSERT(args->length() == 1);
2425
2426 VisitForAccumulatorValue(args->at(0));
2427
2428 Label materialize_true, materialize_false;
2429 Label* if_true = NULL;
2430 Label* if_false = NULL;
2431 Label* fall_through = NULL;
2432 context()->PrepareTest(&materialize_true, &materialize_false,
2433 &if_true, &if_false, &fall_through);
2434
2435 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2436 __ test(eax, Immediate(kSmiTagMask));
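  // A smi has the tag bit (bit 0) clear, so the test sets ZF exactly for smis.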
2437 Split(zero, if_true, if_false, fall_through);
2438
2439 context()->Plug(if_true, if_false);
2440 }
2441
2442
2443 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2444 ZoneList<Expression*>* args = expr->arguments();
2445 ASSERT(args->length() == 1);
2446
2447 VisitForAccumulatorValue(args->at(0));
2448
2449 Label materialize_true, materialize_false;
2450 Label* if_true = NULL;
2451 Label* if_false = NULL;
2452 Label* fall_through = NULL;
2453 context()->PrepareTest(&materialize_true, &materialize_false,
2454 &if_true, &if_false, &fall_through);
2455
2456 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2457 __ test(eax, Immediate(kSmiTagMask | 0x80000000));
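  // Test the smi tag bit and the sign bit together: a zero result means the
  // value is both a smi and non-negative.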
2458 Split(zero, if_true, if_false, fall_through);
2459
2460 context()->Plug(if_true, if_false);
2461 }
2462
2463
2464 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2465 ZoneList<Expression*>* args = expr->arguments();
2466 ASSERT(args->length() == 1);
2467
2468 VisitForAccumulatorValue(args->at(0));
2469
2470 Label materialize_true, materialize_false;
2471 Label* if_true = NULL;
2472 Label* if_false = NULL;
2473 Label* fall_through = NULL;
2474 context()->PrepareTest(&materialize_true, &materialize_false,
2475 &if_true, &if_false, &fall_through);
2476
2477 __ JumpIfSmi(eax, if_false);
2478 __ cmp(eax, isolate()->factory()->null_value());
2479 __ j(equal, if_true);
2480 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2481 // Undetectable objects behave like undefined when tested with typeof.
2482 __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset));
2483 __ test(ecx, Immediate(1 << Map::kIsUndetectable));
2484 __ j(not_zero, if_false);
2485 __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
2486 __ cmp(ecx, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
2487 __ j(below, if_false);
2488 __ cmp(ecx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
2489 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2490 Split(below_equal, if_true, if_false, fall_through);
2491
2492 context()->Plug(if_true, if_false);
2493 }
2494
2495
2496 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2497 ZoneList<Expression*>* args = expr->arguments();
2498 ASSERT(args->length() == 1);
2499
2500 VisitForAccumulatorValue(args->at(0));
2501
2502 Label materialize_true, materialize_false;
2503 Label* if_true = NULL;
2504 Label* if_false = NULL;
2505 Label* fall_through = NULL;
2506 context()->PrepareTest(&materialize_true, &materialize_false,
2507 &if_true, &if_false, &fall_through);
2508
2509 __ JumpIfSmi(eax, if_false);
2510 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ebx);
2511 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2512 Split(above_equal, if_true, if_false, fall_through);
2513
2514 context()->Plug(if_true, if_false);
2515 }
2516
2517
2518 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2519 ZoneList<Expression*>* args = expr->arguments();
2520 ASSERT(args->length() == 1);
2521
2522 VisitForAccumulatorValue(args->at(0));
2523
2524 Label materialize_true, materialize_false;
2525 Label* if_true = NULL;
2526 Label* if_false = NULL;
2527 Label* fall_through = NULL;
2528 context()->PrepareTest(&materialize_true, &materialize_false,
2529 &if_true, &if_false, &fall_through);
2530
2531 __ JumpIfSmi(eax, if_false);
2532 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2533 __ movzx_b(ebx, FieldOperand(ebx, Map::kBitFieldOffset));
2534 __ test(ebx, Immediate(1 << Map::kIsUndetectable));
2535 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2536 Split(not_zero, if_true, if_false, fall_through);
2537
2538 context()->Plug(if_true, if_false);
2539 }
2540
2541
2542 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2543 CallRuntime* expr) {
2544 ZoneList<Expression*>* args = expr->arguments();
2545 ASSERT(args->length() == 1);
2546
2547 VisitForAccumulatorValue(args->at(0));
2548
2549 Label materialize_true, materialize_false;
2550 Label* if_true = NULL;
2551 Label* if_false = NULL;
2552 Label* fall_through = NULL;
2553 context()->PrepareTest(&materialize_true, &materialize_false,
2554 &if_true, &if_false, &fall_through);
2555
2556 if (FLAG_debug_code) __ AbortIfSmi(eax);
2557
2558 // Check whether this map has already been checked to be safe for default
2559 // valueOf.
2560 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2561 __ test_b(FieldOperand(ebx, Map::kBitField2Offset),
2562 1 << Map::kStringWrapperSafeForDefaultValueOf);
2563 __ j(not_zero, if_true);
2564
2565 // Check for fast case object. Return false for slow case objects.
2566 __ mov(ecx, FieldOperand(eax, JSObject::kPropertiesOffset));
2567 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
2568 __ cmp(ecx, FACTORY->hash_table_map());
2569 __ j(equal, if_false);
2570
2571   // Look for the valueOf symbol in the descriptor array and indicate false
2572   // if it is found. The type is not checked, so a transition with that name
2573   // is a false negative.
2574 __ LoadInstanceDescriptors(ebx, ebx);
2575 __ mov(ecx, FieldOperand(ebx, FixedArray::kLengthOffset));
2576 // ebx: descriptor array
2577 // ecx: length of descriptor array
2578 // Calculate the end of the descriptor array.
2579 STATIC_ASSERT(kSmiTag == 0);
2580 STATIC_ASSERT(kSmiTagSize == 1);
2581 STATIC_ASSERT(kPointerSize == 4);
2582 __ lea(ecx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));
2583 // Calculate location of the first key name.
2584 __ add(ebx,
2585 Immediate(FixedArray::kHeaderSize +
2586 DescriptorArray::kFirstIndex * kPointerSize));
2587   // Loop through all the keys in the descriptor array. If one of them is the
2588   // valueOf symbol, the result is false.
2589 Label entry, loop;
2590 __ jmp(&entry);
2591 __ bind(&loop);
2592 __ mov(edx, FieldOperand(ebx, 0));
2593 __ cmp(edx, FACTORY->value_of_symbol());
2594 __ j(equal, if_false);
2595 __ add(ebx, Immediate(kPointerSize));
2596 __ bind(&entry);
2597 __ cmp(ebx, ecx);
2598 __ j(not_equal, &loop);
2599
2600 // Reload map as register ebx was used as temporary above.
2601 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2602
2603   // If a valueOf property is not found on the object, check that its
2604   // prototype is the unmodified String prototype. If not, the result is false.
2605 __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
2606 __ JumpIfSmi(ecx, if_false);
2607 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
2608 __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
2609 __ mov(edx,
2610 FieldOperand(edx, GlobalObject::kGlobalContextOffset));
2611 __ cmp(ecx,
2612 ContextOperand(edx,
2613 Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2614 __ j(not_equal, if_false);
2615   // Set the bit in the map to indicate that the map has been checked as safe
2616   // for default valueOf, and jump to the true result.
2617 __ or_(FieldOperand(ebx, Map::kBitField2Offset),
2618 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2619 __ jmp(if_true);
2620
2621 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2622 context()->Plug(if_true, if_false);
2623 }
2624
2625
2626 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2627 ZoneList<Expression*>* args = expr->arguments();
2628 ASSERT(args->length() == 1);
2629
2630 VisitForAccumulatorValue(args->at(0));
2631
2632 Label materialize_true, materialize_false;
2633 Label* if_true = NULL;
2634 Label* if_false = NULL;
2635 Label* fall_through = NULL;
2636 context()->PrepareTest(&materialize_true, &materialize_false,
2637 &if_true, &if_false, &fall_through);
2638
2639 __ JumpIfSmi(eax, if_false);
2640 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
2641 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2642 Split(equal, if_true, if_false, fall_through);
2643
2644 context()->Plug(if_true, if_false);
2645 }
2646
2647
2648 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2649 ZoneList<Expression*>* args = expr->arguments();
2650 ASSERT(args->length() == 1);
2651
2652 VisitForAccumulatorValue(args->at(0));
2653
2654 Label materialize_true, materialize_false;
2655 Label* if_true = NULL;
2656 Label* if_false = NULL;
2657 Label* fall_through = NULL;
2658 context()->PrepareTest(&materialize_true, &materialize_false,
2659 &if_true, &if_false, &fall_through);
2660
2661 __ JumpIfSmi(eax, if_false);
2662 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
2663 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2664 Split(equal, if_true, if_false, fall_through);
2665
2666 context()->Plug(if_true, if_false);
2667 }
2668
2669
2670 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2671 ZoneList<Expression*>* args = expr->arguments();
2672 ASSERT(args->length() == 1);
2673
2674 VisitForAccumulatorValue(args->at(0));
2675
2676 Label materialize_true, materialize_false;
2677 Label* if_true = NULL;
2678 Label* if_false = NULL;
2679 Label* fall_through = NULL;
2680 context()->PrepareTest(&materialize_true, &materialize_false,
2681 &if_true, &if_false, &fall_through);
2682
2683 __ JumpIfSmi(eax, if_false);
2684 __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
2685 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2686 Split(equal, if_true, if_false, fall_through);
2687
2688 context()->Plug(if_true, if_false);
2689 }
2690
2691
2692
2693 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
2694 ASSERT(expr->arguments()->length() == 0);
2695
2696 Label materialize_true, materialize_false;
2697 Label* if_true = NULL;
2698 Label* if_false = NULL;
2699 Label* fall_through = NULL;
2700 context()->PrepareTest(&materialize_true, &materialize_false,
2701 &if_true, &if_false, &fall_through);
2702
2703 // Get the frame pointer for the calling frame.
2704 __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2705
2706 // Skip the arguments adaptor frame if it exists.
2707 Label check_frame_marker;
2708 __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
2709 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2710 __ j(not_equal, &check_frame_marker);
2711 __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));
2712
2713 // Check the marker in the calling frame.
2714 __ bind(&check_frame_marker);
2715 __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
2716 Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
2717 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2718 Split(equal, if_true, if_false, fall_through);
2719
2720 context()->Plug(if_true, if_false);
2721 }
2722
2723
2724 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
2725 ZoneList<Expression*>* args = expr->arguments();
2726 ASSERT(args->length() == 2);
2727
2728 // Load the two objects into registers and perform the comparison.
2729 VisitForStackValue(args->at(0));
2730 VisitForAccumulatorValue(args->at(1));
2731
2732 Label materialize_true, materialize_false;
2733 Label* if_true = NULL;
2734 Label* if_false = NULL;
2735 Label* fall_through = NULL;
2736 context()->PrepareTest(&materialize_true, &materialize_false,
2737 &if_true, &if_false, &fall_through);
2738
2739 __ pop(ebx);
2740 __ cmp(eax, ebx);
2741 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2742 Split(equal, if_true, if_false, fall_through);
2743
2744 context()->Plug(if_true, if_false);
2745 }
2746
2747
2748 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
2749 ZoneList<Expression*>* args = expr->arguments();
2750 ASSERT(args->length() == 1);
2751
2752 // ArgumentsAccessStub expects the key in edx and the formal
2753 // parameter count in eax.
2754 VisitForAccumulatorValue(args->at(0));
2755 __ mov(edx, eax);
2756 __ SafeSet(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
2757 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2758 __ CallStub(&stub);
2759 context()->Plug(eax);
2760 }
2761
2762
2763 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
2764 ASSERT(expr->arguments()->length() == 0);
2765
2766 Label exit;
2767 // Get the number of formal parameters.
2768 __ SafeSet(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
2769
2770 // Check if the calling frame is an arguments adaptor frame.
2771 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2772 __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
2773 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2774 __ j(not_equal, &exit);
2775
2776 // Arguments adaptor case: Read the arguments length from the
2777 // adaptor frame.
2778 __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2779
2780 __ bind(&exit);
2781 if (FLAG_debug_code) __ AbortIfNotSmi(eax);
2782 context()->Plug(eax);
2783 }
2784
2785
2786 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2787 ZoneList<Expression*>* args = expr->arguments();
2788 ASSERT(args->length() == 1);
2789 Label done, null, function, non_function_constructor;
2790
2791 VisitForAccumulatorValue(args->at(0));
2792
2793 // If the object is a smi, we return null.
2794 __ JumpIfSmi(eax, &null);
2795
2796 // Check that the object is a JS object but take special care of JS
2797 // functions to make sure they have 'Function' as their class.
2798 // Assume that there are only two callable types, and one of them is at
2799 // either end of the type range for JS object types. Saves extra comparisons.
2800 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
2801 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
2802 // Map is now in eax.
2803 __ j(below, &null);
2804 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2805 FIRST_SPEC_OBJECT_TYPE + 1);
2806 __ j(equal, &function);
2807
2808 __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
2809 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2810 LAST_SPEC_OBJECT_TYPE - 1);
2811 __ j(equal, &function);
2812 // Assume that there is no larger type.
2813 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
2814
2815 // Check if the constructor in the map is a JS function.
2816 __ mov(eax, FieldOperand(eax, Map::kConstructorOffset));
2817 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
2818 __ j(not_equal, &non_function_constructor);
2819
2820 // eax now contains the constructor function. Grab the
2821 // instance class name from there.
2822 __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
2823 __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
2824 __ jmp(&done);
2825
2826 // Functions have class 'Function'.
2827 __ bind(&function);
2828 __ mov(eax, isolate()->factory()->function_class_symbol());
2829 __ jmp(&done);
2830
2831 // Objects with a non-function constructor have class 'Object'.
2832 __ bind(&non_function_constructor);
2833 __ mov(eax, isolate()->factory()->Object_symbol());
2834 __ jmp(&done);
2835
2836 // Non-JS objects have class null.
2837 __ bind(&null);
2838 __ mov(eax, isolate()->factory()->null_value());
2839
2840 // All done.
2841 __ bind(&done);
2842
2843 context()->Plug(eax);
2844 }
2845
2846
2847 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
2848 // Conditionally generate a log call.
2849 // Args:
2850 // 0 (literal string): The type of logging (corresponds to the flags).
2851 // This is used to determine whether or not to generate the log call.
2852 // 1 (string): Format string. Access the string at argument index 2
2853 // with '%2s' (see Logger::LogRuntime for all the formats).
2854 // 2 (array): Arguments to the format string.
2855 ZoneList<Expression*>* args = expr->arguments();
2856 ASSERT_EQ(args->length(), 3);
2857 if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
2858 VisitForStackValue(args->at(1));
2859 VisitForStackValue(args->at(2));
2860 __ CallRuntime(Runtime::kLog, 2);
2861 }
2862 // Finally, we're expected to leave a value on the top of the stack.
2863 __ mov(eax, isolate()->factory()->undefined_value());
2864 context()->Plug(eax);
2865 }
2866
2867
2868 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
2869 ASSERT(expr->arguments()->length() == 0);
2870
2871 Label slow_allocate_heapnumber;
2872 Label heapnumber_allocated;
2873
2874 __ AllocateHeapNumber(edi, ebx, ecx, &slow_allocate_heapnumber);
2875 __ jmp(&heapnumber_allocated);
2876
2877 __ bind(&slow_allocate_heapnumber);
2878 // Allocate a heap number.
2879 __ CallRuntime(Runtime::kNumberAlloc, 0);
2880 __ mov(edi, eax);
2881
2882 __ bind(&heapnumber_allocated);
2883
2884 __ PrepareCallCFunction(1, ebx);
2885 __ mov(eax, ContextOperand(context_register(), Context::GLOBAL_INDEX));
2886 __ mov(eax, FieldOperand(eax, GlobalObject::kGlobalContextOffset));
2887 __ mov(Operand(esp, 0), eax);
2888 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
2889
2890   // Convert 32 random bits in eax to 0.(32 random bits) in a double
2891   // by computing:
2892   //   (1.(20 0s)(32 random bits) x 2^20) - (1.0 x 2^20).
2893   // This is implemented on both SSE2 and FPU.
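  // Worked example: with eax == 0x80000000 the assembled double is
  // 2^20 + 0.5, and subtracting 1.0 x 2^20 leaves exactly 0.5.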
2894 if (CpuFeatures::IsSupported(SSE2)) {
2895 CpuFeatures::Scope fscope(SSE2);
2896 __ mov(ebx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
2897 __ movd(xmm1, ebx);
2898 __ movd(xmm0, eax);
2899 __ cvtss2sd(xmm1, xmm1);
2900 __ xorps(xmm0, xmm1);
2901 __ subsd(xmm0, xmm1);
2902 __ movdbl(FieldOperand(edi, HeapNumber::kValueOffset), xmm0);
2903 } else {
2904 // 0x4130000000000000 is 1.0 x 2^20 as a double.
2905 __ mov(FieldOperand(edi, HeapNumber::kExponentOffset),
2906 Immediate(0x41300000));
2907 __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), eax);
2908 __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
2909 __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), Immediate(0));
2910 __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
2911 __ fsubp(1);
2912 __ fstp_d(FieldOperand(edi, HeapNumber::kValueOffset));
2913 }
2914 __ mov(eax, edi);
2915 context()->Plug(eax);
2916 }
2917
2918
2919 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
2920 // Load the arguments on the stack and call the stub.
2921 SubStringStub stub;
2922 ZoneList<Expression*>* args = expr->arguments();
2923 ASSERT(args->length() == 3);
2924 VisitForStackValue(args->at(0));
2925 VisitForStackValue(args->at(1));
2926 VisitForStackValue(args->at(2));
2927 __ CallStub(&stub);
2928 context()->Plug(eax);
2929 }
2930
2931
2932 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
2933 // Load the arguments on the stack and call the stub.
2934 RegExpExecStub stub;
2935 ZoneList<Expression*>* args = expr->arguments();
2936 ASSERT(args->length() == 4);
2937 VisitForStackValue(args->at(0));
2938 VisitForStackValue(args->at(1));
2939 VisitForStackValue(args->at(2));
2940 VisitForStackValue(args->at(3));
2941 __ CallStub(&stub);
2942 context()->Plug(eax);
2943 }
2944
2945
2946 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
2947 ZoneList<Expression*>* args = expr->arguments();
2948 ASSERT(args->length() == 1);
2949
2950 VisitForAccumulatorValue(args->at(0)); // Load the object.
2951
2952 Label done;
2953 // If the object is a smi return the object.
2954 __ JumpIfSmi(eax, &done, Label::kNear);
2955 // If the object is not a value type, return the object.
2956 __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
2957 __ j(not_equal, &done, Label::kNear);
2958 __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));
2959
2960 __ bind(&done);
2961 context()->Plug(eax);
2962 }
2963
2964
2965 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
2966 ZoneList<Expression*>* args = expr->arguments();
2967 ASSERT(args->length() == 2);
2968 ASSERT_NE(NULL, args->at(1)->AsLiteral());
2969 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));
2970
2971 VisitForAccumulatorValue(args->at(0)); // Load the object.
2972
2973 Label runtime, done;
2974 Register object = eax;
2975 Register result = eax;
2976 Register scratch = ecx;
2977
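  // Field 0 is the date's time value and is always read directly. Other
  // fields below kFirstUncachedField are read from the object only while the
  // date cache stamp matches; otherwise the C function recomputes them.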
2978 #ifdef DEBUG
2979 __ AbortIfSmi(object);
2980 __ CmpObjectType(object, JS_DATE_TYPE, scratch);
2981 __ Assert(equal, "Trying to get date field from non-date.");
2982 #endif
2983
2984 if (index->value() == 0) {
2985 __ mov(result, FieldOperand(object, JSDate::kValueOffset));
2986 } else {
2987 if (index->value() < JSDate::kFirstUncachedField) {
2988 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
2989 __ mov(scratch, Operand::StaticVariable(stamp));
2990 __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
2991 __ j(not_equal, &runtime, Label::kNear);
2992 __ mov(result, FieldOperand(object, JSDate::kValueOffset +
2993 kPointerSize * index->value()));
2994 __ jmp(&done);
2995 }
2996 __ bind(&runtime);
2997 __ PrepareCallCFunction(2, scratch);
2998 __ mov(Operand(esp, 0), object);
2999 __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
3000 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3001 __ bind(&done);
3002 }
3003 context()->Plug(result);
3004 }
3005
3006
3007 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3008 // Load the arguments on the stack and call the runtime function.
3009 ZoneList<Expression*>* args = expr->arguments();
3010 ASSERT(args->length() == 2);
3011 VisitForStackValue(args->at(0));
3012 VisitForStackValue(args->at(1));
3013
3014 if (CpuFeatures::IsSupported(SSE2)) {
3015 MathPowStub stub(MathPowStub::ON_STACK);
3016 __ CallStub(&stub);
3017 } else {
3018 __ CallRuntime(Runtime::kMath_pow, 2);
3019 }
3020 context()->Plug(eax);
3021 }
3022
3023
3024 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3025 ZoneList<Expression*>* args = expr->arguments();
3026 ASSERT(args->length() == 2);
3027
3028 VisitForStackValue(args->at(0)); // Load the object.
3029 VisitForAccumulatorValue(args->at(1)); // Load the value.
3030 __ pop(ebx); // eax = value. ebx = object.
3031
3032 Label done;
3033 // If the object is a smi, return the value.
3034 __ JumpIfSmi(ebx, &done, Label::kNear);
3035
3036 // If the object is not a value type, return the value.
3037 __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
3038 __ j(not_equal, &done, Label::kNear);
3039
3040 // Store the value.
3041 __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);
3042
3043 // Update the write barrier. Save the value as it will be
3044 // overwritten by the write barrier code and is needed afterward.
3045 __ mov(edx, eax);
3046 __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);
3047
3048 __ bind(&done);
3049 context()->Plug(eax);
3050 }
3051
3052
3053 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3054 ZoneList<Expression*>* args = expr->arguments();
3055 ASSERT_EQ(args->length(), 1);
3056
3057 // Load the argument on the stack and call the stub.
3058 VisitForStackValue(args->at(0));
3059
3060 NumberToStringStub stub;
3061 __ CallStub(&stub);
3062 context()->Plug(eax);
3063 }
3064
3065
3066 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3067 ZoneList<Expression*>* args = expr->arguments();
3068 ASSERT(args->length() == 1);
3069
3070 VisitForAccumulatorValue(args->at(0));
3071
3072 Label done;
3073 StringCharFromCodeGenerator generator(eax, ebx);
3074 generator.GenerateFast(masm_);
3075 __ jmp(&done);
3076
3077 NopRuntimeCallHelper call_helper;
3078 generator.GenerateSlow(masm_, call_helper);
3079
3080 __ bind(&done);
3081 context()->Plug(ebx);
3082 }
3083
3084
3085 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3086 ZoneList<Expression*>* args = expr->arguments();
3087 ASSERT(args->length() == 2);
3088
3089 VisitForStackValue(args->at(0));
3090 VisitForAccumulatorValue(args->at(1));
3091
3092 Register object = ebx;
3093 Register index = eax;
3094 Register result = edx;
3095
3096 __ pop(object);
3097
3098 Label need_conversion;
3099 Label index_out_of_range;
3100 Label done;
3101 StringCharCodeAtGenerator generator(object,
3102 index,
3103 result,
3104 &need_conversion,
3105 &need_conversion,
3106 &index_out_of_range,
3107 STRING_INDEX_IS_NUMBER);
3108 generator.GenerateFast(masm_);
3109 __ jmp(&done);
3110
3111 __ bind(&index_out_of_range);
3112 // When the index is out of range, the spec requires us to return
3113 // NaN.
3114 __ Set(result, Immediate(isolate()->factory()->nan_value()));
3115 __ jmp(&done);
3116
3117 __ bind(&need_conversion);
3118 // Move the undefined value into the result register, which will
3119 // trigger conversion.
3120 __ Set(result, Immediate(isolate()->factory()->undefined_value()));
3121 __ jmp(&done);
3122
3123 NopRuntimeCallHelper call_helper;
3124 generator.GenerateSlow(masm_, call_helper);
3125
3126 __ bind(&done);
3127 context()->Plug(result);
3128 }
3129
3130
EmitStringCharAt(CallRuntime * expr)3131 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3132 ZoneList<Expression*>* args = expr->arguments();
3133 ASSERT(args->length() == 2);
3134
3135 VisitForStackValue(args->at(0));
3136 VisitForAccumulatorValue(args->at(1));
3137
3138 Register object = ebx;
3139 Register index = eax;
3140 Register scratch = edx;
3141 Register result = eax;
3142
3143 __ pop(object);
3144
3145 Label need_conversion;
3146 Label index_out_of_range;
3147 Label done;
3148 StringCharAtGenerator generator(object,
3149 index,
3150 scratch,
3151 result,
3152 &need_conversion,
3153 &need_conversion,
3154 &index_out_of_range,
3155 STRING_INDEX_IS_NUMBER);
3156 generator.GenerateFast(masm_);
3157 __ jmp(&done);
3158
3159 __ bind(&index_out_of_range);
3160 // When the index is out of range, the spec requires us to return
3161 // the empty string.
3162 __ Set(result, Immediate(isolate()->factory()->empty_string()));
3163 __ jmp(&done);
3164
3165 __ bind(&need_conversion);
3166 // Move smi zero into the result register, which will trigger
3167 // conversion.
3168 __ Set(result, Immediate(Smi::FromInt(0)));
3169 __ jmp(&done);
3170
3171 NopRuntimeCallHelper call_helper;
3172 generator.GenerateSlow(masm_, call_helper);
3173
3174 __ bind(&done);
3175 context()->Plug(result);
3176 }
3177
3178
EmitStringAdd(CallRuntime * expr)3179 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3180 ZoneList<Expression*>* args = expr->arguments();
3181 ASSERT_EQ(2, args->length());
3182
3183 VisitForStackValue(args->at(0));
3184 VisitForStackValue(args->at(1));
3185
3186 StringAddStub stub(NO_STRING_ADD_FLAGS);
3187 __ CallStub(&stub);
3188 context()->Plug(eax);
3189 }
3190
3191
EmitStringCompare(CallRuntime * expr)3192 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3193 ZoneList<Expression*>* args = expr->arguments();
3194 ASSERT_EQ(2, args->length());
3195
3196 VisitForStackValue(args->at(0));
3197 VisitForStackValue(args->at(1));
3198
3199 StringCompareStub stub;
3200 __ CallStub(&stub);
3201 context()->Plug(eax);
3202 }
3203
3204
EmitMathSin(CallRuntime * expr)3205 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
3206 // Load the argument on the stack and call the stub.
3207 TranscendentalCacheStub stub(TranscendentalCache::SIN,
3208 TranscendentalCacheStub::TAGGED);
3209 ZoneList<Expression*>* args = expr->arguments();
3210 ASSERT(args->length() == 1);
3211 VisitForStackValue(args->at(0));
3212 __ CallStub(&stub);
3213 context()->Plug(eax);
3214 }
3215
3216
EmitMathCos(CallRuntime * expr)3217 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
3218 // Load the argument on the stack and call the stub.
3219 TranscendentalCacheStub stub(TranscendentalCache::COS,
3220 TranscendentalCacheStub::TAGGED);
3221 ZoneList<Expression*>* args = expr->arguments();
3222 ASSERT(args->length() == 1);
3223 VisitForStackValue(args->at(0));
3224 __ CallStub(&stub);
3225 context()->Plug(eax);
3226 }
3227
3228
EmitMathTan(CallRuntime * expr)3229 void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
3230 // Load the argument on the stack and call the stub.
3231 TranscendentalCacheStub stub(TranscendentalCache::TAN,
3232 TranscendentalCacheStub::TAGGED);
3233 ZoneList<Expression*>* args = expr->arguments();
3234 ASSERT(args->length() == 1);
3235 VisitForStackValue(args->at(0));
3236 __ CallStub(&stub);
3237 context()->Plug(eax);
3238 }
3239
3240
EmitMathLog(CallRuntime * expr)3241 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3242 // Load the argument on the stack and call the stub.
3243 TranscendentalCacheStub stub(TranscendentalCache::LOG,
3244 TranscendentalCacheStub::TAGGED);
3245 ZoneList<Expression*>* args = expr->arguments();
3246 ASSERT(args->length() == 1);
3247 VisitForStackValue(args->at(0));
3248 __ CallStub(&stub);
3249 context()->Plug(eax);
3250 }
3251
3252
EmitMathSqrt(CallRuntime * expr)3253 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3254 // Load the argument on the stack and call the runtime function.
3255 ZoneList<Expression*>* args = expr->arguments();
3256 ASSERT(args->length() == 1);
3257 VisitForStackValue(args->at(0));
3258 __ CallRuntime(Runtime::kMath_sqrt, 1);
3259 context()->Plug(eax);
3260 }
3261
3262
EmitCallFunction(CallRuntime * expr)3263 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3264 ZoneList<Expression*>* args = expr->arguments();
3265 ASSERT(args->length() >= 2);
3266
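  // The arguments arrive as %_CallFunction(receiver, arg1, ..., argN,
  // function): the receiver and arguments are pushed below, while the
  // function itself is evaluated last into the accumulator.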
  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; ++i) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  // Check for proxy.
  Label proxy, done;
  __ CmpObjectType(eax, JS_FUNCTION_PROXY_TYPE, ebx);
  __ j(equal, &proxy);

  // InvokeFunction requires the function in edi. Move it in there.
  __ mov(edi, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(edi, count, CALL_FUNCTION,
                    NullCallWrapper(), CALL_AS_METHOD);
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&proxy);
  __ push(eax);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(eax);
}


void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpConstructResultStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->global_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort("Attempt to use undefined cache.");
    __ mov(eax, isolate()->factory()->undefined_value());
    context()->Plug(eax);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = eax;
  Register cache = ebx;
  Register tmp = ecx;
  __ mov(cache, ContextOperand(esi, Context::GLOBAL_INDEX));
  __ mov(cache,
         FieldOperand(cache, GlobalObject::kGlobalContextOffset));
  __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ mov(cache,
         FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

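  // A JSFunctionResultCache stores (key, value) pairs in a FixedArray and
  // keeps a "finger" pointing at the most recently found key; only the
  // finger entry is checked inline, everything else goes to the runtime.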
  Label done, not_found;
  // tmp now holds finger offset as a smi.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
  __ cmp(key, CodeGenerator::FixedArrayElementOperand(cache, tmp));
  __ j(not_equal, &not_found);

  __ mov(eax, CodeGenerator::FixedArrayElementOperand(cache, tmp, 1));
  __ jmp(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ push(cache);
  __ push(key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  Register right = eax;
  Register left = ebx;
  Register tmp = ecx;

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ pop(left);

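  // Two regexps are considered equivalent when they are the same object, or
  // when both are JSRegExps with the same map and the same compiled data.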
  Label done, fail, ok;
  __ cmp(left, right);
  __ j(equal, &ok);
  // Fail if either is a non-HeapObject.
  __ mov(tmp, left);
  __ and_(tmp, right);
  __ JumpIfSmi(tmp, &fail);
  __ mov(tmp, FieldOperand(left, HeapObject::kMapOffset));
  __ CmpInstanceType(tmp, JS_REGEXP_TYPE);
  __ j(not_equal, &fail);
  __ cmp(tmp, FieldOperand(right, HeapObject::kMapOffset));
  __ j(not_equal, &fail);
  __ mov(tmp, FieldOperand(left, JSRegExp::kDataOffset));
  __ cmp(tmp, FieldOperand(right, JSRegExp::kDataOffset));
  __ j(equal, &ok);
  __ bind(&fail);
  __ mov(eax, Immediate(isolate()->factory()->false_value()));
  __ jmp(&done);
  __ bind(&ok);
  __ mov(eax, Immediate(isolate()->factory()->true_value()));
  __ bind(&done);

  context()->Plug(eax);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  if (FLAG_debug_code) {
    __ AbortIfNotString(eax);
  }

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

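  // When a string is a valid array index, its hash field caches that index
  // and the mask bits tested below are clear, so a zero test result means a
  // cached index is present.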
  __ test(FieldOperand(eax, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  if (FLAG_debug_code) {
    __ AbortIfNotString(eax);
  }

  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
  __ IndexFromHash(eax, eax);

  context()->Plug(eax);
}


void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;

  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  // We will leave the separator on the stack until the end of the function.
  VisitForStackValue(args->at(1));
  // Load this to eax (= array).
  VisitForAccumulatorValue(args->at(0));
  // All aliases of the same register have disjoint lifetimes.
  Register array = eax;
  Register elements = no_reg;  // Will be eax.

  Register index = edx;

  Register string_length = ecx;

  Register string = esi;

  Register scratch = ebx;

  Register array_length = edi;
  Register result_pos = no_reg;  // Will be edi.

  // Separator operand is already pushed.
  Operand separator_operand = Operand(esp, 2 * kPointerSize);
  Operand result_operand = Operand(esp, 1 * kPointerSize);
  Operand array_length_operand = Operand(esp, 0);
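  // Reserve two stack slots below the separator, one for the result string
  // and one for the untagged array length, matching the operands above.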
  __ sub(esp, Immediate(2 * kPointerSize));
  __ cld();
  // Check that the array is a JSArray.
  __ JumpIfSmi(array, &bailout);
  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(scratch, &bailout);

  // If the array has length zero, return the empty string.
  __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length);
  __ j(not_zero, &non_trivial_array);
  __ mov(result_operand, isolate()->factory()->empty_string());
  __ jmp(&done);

  // Save the array length.
  __ bind(&non_trivial_array);
  __ mov(array_length_operand, array_length);

  // Save the FixedArray containing array's elements.
  // End of array's live range.
  elements = array;
  __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
  array = no_reg;


  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ Set(index, Immediate(0));
  __ Set(string_length, Immediate(0));
  // Loop condition: while (index < length).
  // Live loop registers: index, array_length, string,
  // scratch, string_length, elements.
  if (FLAG_debug_code) {
    __ cmp(index, array_length);
    __ Assert(less, "No empty arrays here in EmitFastAsciiArrayJoin");
  }
  __ bind(&loop);
  __ mov(string, FieldOperand(elements,
                              index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ JumpIfSmi(string, &bailout);
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
  __ j(not_equal, &bailout);
  __ add(string_length,
         FieldOperand(string, SeqAsciiString::kLengthOffset));
  __ j(overflow, &bailout);
  __ add(index, Immediate(1));
  __ cmp(index, array_length);
  __ j(less, &loop);

  // If array_length is 1, return elements[0], a string.
  __ cmp(array_length, 1);
  __ j(not_equal, &not_size_one_array);
  __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
  __ mov(result_operand, scratch);
  __ jmp(&done);

  __ bind(&not_size_one_array);

  // End of array_length live range.
  result_pos = array_length;
  array_length = no_reg;

  // Live registers:
  // string_length: Sum of string lengths, as a smi.
  // elements: FixedArray of strings.

  // Check that the separator is a flat ASCII string.
  __ mov(string, separator_operand);
  __ JumpIfSmi(string, &bailout);
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, ASCII_STRING_TYPE);
  __ j(not_equal, &bailout);

  // Add (separator length times array_length) - separator length
  // to string_length.
  __ mov(scratch, separator_operand);
  __ mov(scratch, FieldOperand(scratch, SeqAsciiString::kLengthOffset));
  __ sub(string_length, scratch);  // May be negative, temporarily.
  __ imul(scratch, array_length_operand);
  __ j(overflow, &bailout);
  __ add(string_length, scratch);
  __ j(overflow, &bailout);

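  // string_length is a smi (kSmiTag == 0, kSmiTagSize == 1, as asserted
  // above), so shifting right by one untags it into the character count.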
  __ shr(string_length, 1);
  // Live registers and stack values:
  // string_length
  // elements
  __ AllocateAsciiString(result_pos, string_length, scratch,
                         index, string, &bailout);
  __ mov(result_operand, result_pos);
  __ lea(result_pos, FieldOperand(result_pos, SeqAsciiString::kHeaderSize));


  __ mov(string, separator_operand);
  __ cmp(FieldOperand(string, SeqAsciiString::kLengthOffset),
         Immediate(Smi::FromInt(1)));
  __ j(equal, &one_char_separator);
  __ j(greater, &long_separator);


  // Empty separator case.
  __ mov(index, Immediate(0));
  __ jmp(&loop_1_condition);
  // Loop condition: while (index < length).
  __ bind(&loop_1);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  // index: which element of the elements array we are adding to the result.
  // result_pos: the position to which we are currently copying characters.
  // elements: the FixedArray of strings we are joining.

  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));
  __ bind(&loop_1_condition);
  __ cmp(index, array_length_operand);
  __ j(less, &loop_1);  // End while (index < length).
  __ jmp(&done);



  // One-character separator case.
  __ bind(&one_char_separator);
  // Replace separator with its ASCII character value.
  __ mov_b(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ mov_b(separator_operand, scratch);

  __ Set(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&loop_2_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_2);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  // index: which element of the elements array we are adding to the result.
  // result_pos: the position to which we are currently copying characters.

  // Copy the separator character to the result.
  __ mov_b(scratch, separator_operand);
  __ mov_b(Operand(result_pos, 0), scratch);
  __ inc(result_pos);

  __ bind(&loop_2_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_2);  // End while (index < length).
  __ jmp(&done);


  // Long separator case (separator is more than one character).
  __ bind(&long_separator);

  __ Set(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator.
  __ jmp(&loop_3_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_3);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  // index: which element of the elements array we are adding to the result.
  // result_pos: the position to which we are currently copying characters.

  // Copy the separator to the result.
  __ mov(string, separator_operand);
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);

  __ bind(&loop_3_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_3);  // End while (index < length).
  __ jmp(&done);


  __ bind(&bailout);
  __ mov(result_operand, isolate()->factory()->undefined_value());
  __ bind(&done);
  __ mov(eax, result_operand);
  // Drop temp values from the stack, and restore context register.
  __ add(esp, Immediate(3 * kPointerSize));

  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  Handle<String> name = expr->name();
  if (name->length() > 0 && name->Get(0) == '_') {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }
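  // Names starting with '_' (e.g. %_StringCharCodeAt) were dispatched above
  // to the Emit* functions in this file; everything else goes through the
  // C++ runtime or a JS builtin below.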

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Prepare for calling JS runtime function.
    __ mov(eax, GlobalObjectOperand());
    __ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  if (expr->is_jsruntime()) {
    // Call the JS runtime function via a call IC.
    __ Set(ecx, Immediate(expr->name()));
    RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
    Handle<Code> ic =
        isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
    CallIC(ic, mode, expr->id());
    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  } else {
    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
  }
  context()->Plug(eax);
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
            ? kNonStrictMode : kStrictMode;
        __ push(Immediate(Smi::FromInt(strict_mode_flag)));
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
        if (var->IsUnallocated()) {
          __ push(GlobalObjectOperand());
          __ push(Immediate(var->name()));
          __ push(Immediate(Smi::FromInt(kNonStrictMode)));
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(eax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false. 'this' is
          // not really a variable, though we implement it as one. The
          // subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ push(context_register());
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kDeleteContextSlot, 2);
          context()->Plug(eax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ Push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ Push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(eax);
      break;
    }

    case Token::ADD: {
      Comment cmt(masm_, "[ UnaryOperation (ADD)");
      VisitForAccumulatorValue(expr->expression());
      Label no_conversion;
      __ JumpIfSmi(result_register(), &no_conversion);
      ToNumberStub convert_stub;
      __ CallStub(&convert_stub);
      __ bind(&no_conversion);
      context()->Plug(result_register());
      break;
    }

    case Token::SUB:
      EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
      break;

    case Token::BIT_NOT:
      EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
      break;

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
                                           const char* comment) {
  Comment cmt(masm_, comment);
  bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
  UnaryOverwriteMode overwrite =
      can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
  UnaryOpStub stub(expr->op(), overwrite);
  // UnaryOpStub expects the argument to be in the
  // accumulator register eax.
  VisitForAccumulatorValue(expr->expression());
  SetSourcePosition(expr->position());
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
  context()->Plug(eax);
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // as the left-hand side.
  if (!expr->expression()->IsValidLeftHandSide()) {
    VisitForEffect(expr->expression());
    return;
  }

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ push(Immediate(Smi::FromInt(0)));
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the accumulator.
      VisitForAccumulatorValue(prop->obj());
      __ push(eax);
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ mov(edx, Operand(esp, 0));
      __ push(eax);
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(expr->CountId(), TOS_REG);
  }

  // Call ToNumber only if operand is not a smi.
  Label no_conversion;
  if (ShouldInlineSmiCase(expr->op())) {
    __ JumpIfSmi(eax, &no_conversion, Label::kNear);
  }
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);
  __ bind(&no_conversion);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
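      // For example, for a keyed postfix a[i]++ the stack now holds, from
      // the top: key, receiver, and the reserved slot for the old value.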
      switch (assign_type) {
        case VARIABLE:
          __ push(eax);
          break;
        case NAMED_PROPERTY:
          __ mov(Operand(esp, kPointerSize), eax);
          break;
        case KEYED_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
      }
    }
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);

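  // Smis are tagged integers (value << 1 on ia32), so adding the tagged
  // constant Smi::FromInt(1) increments the untagged value by one, and the
  // overflow flag still signals when the result leaves the smi range.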
  if (ShouldInlineSmiCase(expr->op())) {
    if (expr->op() == Token::INC) {
      __ add(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    }
    __ j(overflow, &stub_call, Label::kNear);
    // We could eliminate this smi check if we split the code at
    // the first smi check before calling ToNumber.
    patch_site.EmitJumpIfSmi(eax, &done, Label::kNear);

    __ bind(&stub_call);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ add(eax, Immediate(Smi::FromInt(1)));
    }
  }

  // Record position before stub call.
  SetSourcePosition(expr->position());

  // Call stub for +1/-1.
  __ mov(edx, eax);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in eax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(eax);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(eax);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(ecx, prop->key()->AsLiteral()->handle());
      __ pop(edx);
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ pop(ecx);
      __ pop(edx);
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        // Result is on the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
  }
}


void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  VariableProxy* proxy = expr->AsVariableProxy();
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());

  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ mov(eax, GlobalObjectOperand());
    __ mov(ecx, Immediate(proxy->name()));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallIC(ic);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(eax);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ push(esi);
    __ push(Immediate(proxy->name()));
    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(eax);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_symbol())) {
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_symbol())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    __ j(above_equal, if_false);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_symbol())) {
    __ cmp(eax, isolate()->factory()->null_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
    __ cmp(eax, isolate()->factory()->undefined_value());
    __ j(equal, if_true);
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ test(ecx, Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_symbol())) {
    __ JumpIfSmi(eax, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
    __ j(equal, if_true);
    __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_symbol())) {
    __ JumpIfSmi(eax, if_false);
    if (!FLAG_harmony_typeof) {
      __ cmp(eax, isolate()->factory()->null_value());
      __ j(equal, if_true);
    }
    __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
    __ j(below, if_false);
    __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ j(above, if_false);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(zero, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      // The stub returns 0 for true.
      Split(zero, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = no_condition;
      switch (op) {
        case Token::EQ_STRICT:
        case Token::EQ:
          cc = equal;
          break;
        case Token::LT:
          cc = less;
          break;
        case Token::GT:
          cc = greater;
          break;
        case Token::LTE:
          cc = less_equal;
          break;
        case Token::GTE:
          cc = greater_equal;
          break;
        case Token::IN:
        case Token::INSTANCEOF:
        default:
          UNREACHABLE();
      }
      __ pop(edx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
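      // The patch site records the inlined smi-check jump so that the
      // compare IC can later patch it once type feedback is available.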
      if (inline_smi_code) {
        Label slow_case;
        __ mov(ecx, edx);
        __ or_(ecx, eax);
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(op);
      CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

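  // For non-strict equality, null and undefined compare equal to each other
  // and to undetectable objects, which the code below checks explicitly.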
  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Handle<Object> nil_value = nil == kNullValue ?
      isolate()->factory()->null_value() :
      isolate()->factory()->undefined_value();
  __ cmp(eax, nil_value);
  if (expr->op() == Token::EQ_STRICT) {
    Split(equal, if_true, if_false, fall_through);
  } else {
    Handle<Object> other_nil_value = nil == kNullValue ?
        isolate()->factory()->undefined_value() :
        isolate()->factory()->null_value();
    __ j(equal, if_true);
    __ cmp(eax, other_nil_value);
    __ j(equal, if_true);
    __ JumpIfSmi(eax, if_false);
    // It can be an undetectable object.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(edx, FieldOperand(edx, Map::kBitFieldOffset));
    __ test(edx, Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(eax);
}


Register FullCodeGenerator::result_register() {
  return eax;
}


Register FullCodeGenerator::context_register() {
  return esi;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope()) {
    // Contexts nested in the global context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ push(Immediate(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    __ push(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  // Cook return address on top of stack (smi encoded Code* delta).
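  // Storing the return address as a smi-tagged offset from the code object
  // keeps the stack free of raw code pointers, so the GC can safely move the
  // code object while the finally block is on the stack.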
  ASSERT(!result_register().is(edx));
  __ pop(edx);
  __ sub(edx, Immediate(masm_->CodeObject()));
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  __ SmiTag(edx);
  __ push(edx);
  // Store result register while executing finally block.
  __ push(result_register());
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(edx));
  __ pop(result_register());
  // Uncook return address.
  __ pop(edx);
  __ SmiUntag(edx);
  __ add(edx, Immediate(masm_->CodeObject()));
  __ jmp(edx);
}


#undef __

#define __ ACCESS_MASM(masm())

FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ mov(esi, Operand(esp, StackHandlerConstants::kContextOffset));
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  }
  __ PopTryHandler();
  __ call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32