1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "v8.h"
29
30 #if defined(V8_TARGET_ARCH_IA32)
31
32 #include "codegen.h"
33 #include "deoptimizer.h"
34 #include "full-codegen.h"
35
36 namespace v8 {
37 namespace internal {
38
39
40 #define __ ACCESS_MASM(masm)
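// ("__ foo(...)" is shorthand that effectively expands to "masm->foo(...)"
// via ACCESS_MASM.)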
41
42
43 void Builtins::Generate_Adaptor(MacroAssembler* masm,
44 CFunctionId id,
45 BuiltinExtraArguments extra_args) {
46 // ----------- S t a t e -------------
47 // -- eax : number of arguments excluding receiver
48 // -- edi : called function (only guaranteed when
49 // extra_args requires it)
50 // -- esi : context
51 // -- esp[0] : return address
52 // -- esp[4] : last argument
53 // -- ...
54 // -- esp[4 * argc] : first argument (argc == eax)
55 // -- esp[4 * (argc + 1)] : receiver
56 // -----------------------------------
57
58 // Insert extra arguments.
59 int num_extra_args = 0;
60 if (extra_args == NEEDS_CALLED_FUNCTION) {
61 num_extra_args = 1;
62 Register scratch = ebx;
63 __ pop(scratch); // Save return address.
64 __ push(edi);
65 __ push(scratch); // Restore return address.
66 } else {
67 ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
68 }
69
70 // JumpToExternalReference expects eax to contain the number of arguments
71 // including the receiver and the extra arguments.
72 __ add(eax, Immediate(num_extra_args + 1));
73 __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
74 }
75
76
77 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
78 bool is_api_function,
79 bool count_constructions) {
80 // ----------- S t a t e -------------
81 // -- eax: number of arguments
82 // -- edi: constructor function
83 // -----------------------------------
84
85 // Should never count constructions for api objects.
86 ASSERT(!is_api_function || !count_constructions);
87
88 // Enter a construct frame.
89 {
90 FrameScope scope(masm, StackFrame::CONSTRUCT);
91
92 // Store a smi-tagged arguments count on the stack.
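// (A smi on ia32 is the integer shifted left by kSmiTagSize == 1 with tag
// bit 0, so the tagged count is never mistaken for a heap pointer by the GC.)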
93 __ SmiTag(eax);
94 __ push(eax);
95
96 // Push the function to invoke on the stack.
97 __ push(edi);
98
99 // Try to allocate the object without transitioning into C code. If any of
100 // the preconditions is not met, the code bails out to the runtime call.
101 Label rt_call, allocated;
102 if (FLAG_inline_new) {
103 Label undo_allocation;
104 #ifdef ENABLE_DEBUGGER_SUPPORT
105 ExternalReference debug_step_in_fp =
106 ExternalReference::debug_step_in_fp_address(masm->isolate());
107 __ cmp(Operand::StaticVariable(debug_step_in_fp), Immediate(0));
108 __ j(not_equal, &rt_call);
109 #endif
110
111 // Verified that the constructor is a JSFunction.
112 // Load the initial map and verify that it is in fact a map.
113 // edi: constructor
114 __ mov(eax, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
115 // The smi check below catches both a NULL value and a smi.
116 __ JumpIfSmi(eax, &rt_call);
117 // edi: constructor
118 // eax: initial map (if proven valid below)
119 __ CmpObjectType(eax, MAP_TYPE, ebx);
120 __ j(not_equal, &rt_call);
121
122 // Check that the constructor is not constructing a JSFunction (see
123 // comments in Runtime_NewObject in runtime.cc), in which case the
124 // initial map's instance type would be JS_FUNCTION_TYPE.
125 // edi: constructor
126 // eax: initial map
127 __ CmpInstanceType(eax, JS_FUNCTION_TYPE);
128 __ j(equal, &rt_call);
129
130 if (count_constructions) {
131 Label allocate;
132 // Decrease generous allocation count.
133 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
134 __ dec_b(FieldOperand(ecx,
135 SharedFunctionInfo::kConstructionCountOffset));
136 __ j(not_zero, &allocate);
137
138 __ push(eax);
139 __ push(edi);
140
141 __ push(edi); // constructor
142 // The call will replace the stub, so the countdown is only done once.
143 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
144
145 __ pop(edi);
146 __ pop(eax);
147
148 __ bind(&allocate);
149 }
150
151 // Now allocate the JSObject on the heap.
152 // edi: constructor
153 // eax: initial map
154 __ movzx_b(edi, FieldOperand(eax, Map::kInstanceSizeOffset));
155 __ shl(edi, kPointerSizeLog2);
156 __ AllocateInNewSpace(
157 edi, ebx, edi, no_reg, &rt_call, NO_ALLOCATION_FLAGS);
158 // Allocated the JSObject, now initialize the fields.
159 // eax: initial map
160 // ebx: JSObject
161 // edi: start of next object
162 __ mov(Operand(ebx, JSObject::kMapOffset), eax);
163 Factory* factory = masm->isolate()->factory();
164 __ mov(ecx, factory->empty_fixed_array());
165 __ mov(Operand(ebx, JSObject::kPropertiesOffset), ecx);
166 __ mov(Operand(ebx, JSObject::kElementsOffset), ecx);
167 // Set extra fields in the newly allocated object.
168 // eax: initial map
169 // ebx: JSObject
170 // edi: start of next object
171 __ lea(ecx, Operand(ebx, JSObject::kHeaderSize));
172 __ mov(edx, factory->undefined_value());
173 if (count_constructions) {
174 __ movzx_b(esi,
175 FieldOperand(eax, Map::kPreAllocatedPropertyFieldsOffset));
176 __ lea(esi,
177 Operand(ebx, esi, times_pointer_size, JSObject::kHeaderSize));
178 // esi: offset of first field after pre-allocated fields
179 if (FLAG_debug_code) {
180 __ cmp(esi, edi);
181 __ Assert(less_equal,
182 "Unexpected number of pre-allocated property fields.");
183 }
184 __ InitializeFieldsWithFiller(ecx, esi, edx);
185 __ mov(edx, factory->one_pointer_filler_map());
186 }
187 __ InitializeFieldsWithFiller(ecx, edi, edx);
188
189 // Add the object tag to make the JSObject real, so that we can continue
190 // and jump into the continuation code at any time from now on. Any
191 // failures need to undo the allocation, so that the heap is in a
192 // consistent state and verifiable.
193 // eax: initial map
194 // ebx: JSObject
195 // edi: start of next object
196 __ or_(ebx, Immediate(kHeapObjectTag));
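// (kHeapObjectTag is 1, so or'ing it in turns the untagged allocation
// address in ebx into a tagged heap-object pointer.)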
197
198 // Check if a non-empty properties array is needed.
199 // Allocate and initialize a FixedArray if it is.
200 // eax: initial map
201 // ebx: JSObject
202 // edi: start of next object
203 // Calculate the total number of properties described by the map.
204 __ movzx_b(edx, FieldOperand(eax, Map::kUnusedPropertyFieldsOffset));
205 __ movzx_b(ecx,
206 FieldOperand(eax, Map::kPreAllocatedPropertyFieldsOffset));
207 __ add(edx, ecx);
208 // Calculate unused properties past the end of the in-object properties.
209 __ movzx_b(ecx, FieldOperand(eax, Map::kInObjectPropertiesOffset));
210 __ sub(edx, ecx);
211 // Done if no extra properties are to be allocated.
212 __ j(zero, &allocated);
213 __ Assert(positive, "Property allocation count failed.");
214
215 // Scale the number of elements by pointer size and add the header for
216 // FixedArrays to the start of the next object calculation from above.
217 // ebx: JSObject
218 // edi: start of next object (will be start of FixedArray)
219 // edx: number of elements in properties array
220 __ AllocateInNewSpace(FixedArray::kHeaderSize,
221 times_pointer_size,
222 edx,
223 edi,
224 ecx,
225 no_reg,
226 &undo_allocation,
227 RESULT_CONTAINS_TOP);
228
229 // Initialize the FixedArray.
230 // ebx: JSObject
231 // edi: FixedArray
232 // edx: number of elements
233 // ecx: start of next object
234 __ mov(eax, factory->fixed_array_map());
235 __ mov(Operand(edi, FixedArray::kMapOffset), eax); // setup the map
236 __ SmiTag(edx);
237 __ mov(Operand(edi, FixedArray::kLengthOffset), edx); // and length
238
239 // Initialize the fields to undefined.
240 // ebx: JSObject
241 // edi: FixedArray
242 // ecx: start of next object
243 { Label loop, entry;
244 __ mov(edx, factory->undefined_value());
245 __ lea(eax, Operand(edi, FixedArray::kHeaderSize));
246 __ jmp(&entry);
247 __ bind(&loop);
248 __ mov(Operand(eax, 0), edx);
249 __ add(eax, Immediate(kPointerSize));
250 __ bind(&entry);
251 __ cmp(eax, ecx);
252 __ j(below, &loop);
253 }
254
255 // Store the initialized FixedArray into the properties field of
256 // the JSObject
257 // ebx: JSObject
258 // edi: FixedArray
259 __ or_(edi, Immediate(kHeapObjectTag)); // add the heap tag
260 __ mov(FieldOperand(ebx, JSObject::kPropertiesOffset), edi);
261
262
263 // Continue with JSObject being successfully allocated
264 // ebx: JSObject
265 __ jmp(&allocated);
266
267 // Undo the setting of the new top so that the heap is verifiable. For
268 // example, the map's unused properties potentially do not match the
269 // allocated object's unused properties.
270 // ebx: JSObject (previous new top)
271 __ bind(&undo_allocation);
272 __ UndoAllocationInNewSpace(ebx);
273 }
274
275 // Allocate the new receiver object using the runtime call.
276 __ bind(&rt_call);
277 // Must restore edi (constructor) before calling runtime.
278 __ mov(edi, Operand(esp, 0));
279 // edi: function (constructor)
280 __ push(edi);
281 __ CallRuntime(Runtime::kNewObject, 1);
282 __ mov(ebx, eax); // store result in ebx
283
284 // New object allocated.
285 // ebx: newly allocated object
286 __ bind(&allocated);
287 // Retrieve the function from the stack.
288 __ pop(edi);
289
290 // Retrieve smi-tagged arguments count from the stack.
291 __ mov(eax, Operand(esp, 0));
292 __ SmiUntag(eax);
293
294 // Push the allocated receiver to the stack. We need two copies
295 // because we may have to return the original one and the calling
296 // conventions dictate that the called function pops the receiver.
297 __ push(ebx);
298 __ push(ebx);
299
300 // Set up pointer to last argument.
301 __ lea(ebx, Operand(ebp, StandardFrameConstants::kCallerSPOffset));
302
303 // Copy arguments and receiver to the expression stack.
304 Label loop, entry;
305 __ mov(ecx, eax);
306 __ jmp(&entry);
307 __ bind(&loop);
308 __ push(Operand(ebx, ecx, times_4, 0));
309 __ bind(&entry);
310 __ dec(ecx);
311 __ j(greater_equal, &loop);
312
313 // Call the function.
314 if (is_api_function) {
315 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
316 Handle<Code> code =
317 masm->isolate()->builtins()->HandleApiCallConstruct();
318 ParameterCount expected(0);
319 __ InvokeCode(code, expected, expected, RelocInfo::CODE_TARGET,
320 CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
321 } else {
322 ParameterCount actual(eax);
323 __ InvokeFunction(edi, actual, CALL_FUNCTION,
324 NullCallWrapper(), CALL_AS_METHOD);
325 }
326
327 // Store offset of return address for deoptimizer.
328 if (!is_api_function && !count_constructions) {
329 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
330 }
331
332 // Restore context from the frame.
333 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
334
335 // If the result is an object (in the ECMA sense), we should get rid
336 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
337 // on page 74.
338 Label use_receiver, exit;
339
340 // If the result is a smi, it is *not* an object in the ECMA sense.
341 __ JumpIfSmi(eax, &use_receiver);
342
343 // If the type of the result (stored in its map) is less than
344 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
345 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
346 __ j(above_equal, &exit);
347
348 // Throw away the result of the constructor invocation and use the
349 // on-stack receiver as the result.
350 __ bind(&use_receiver);
351 __ mov(eax, Operand(esp, 0));
352
353 // Restore the arguments count and leave the construct frame.
354 __ bind(&exit);
355 __ mov(ebx, Operand(esp, kPointerSize)); // Get arguments count.
356
357 // Leave construct frame.
358 }
359
360 // Remove caller arguments from the stack and return.
361 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
362 __ pop(ecx);
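// ebx holds the smi-tagged argument count (argc * 2), so scaling it by
// times_2 below drops argc * kPointerSize bytes plus one slot for the
// receiver.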
363 __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
364 __ push(ecx);
365 __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
366 __ ret(0);
367 }
368
369
370 void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
371 Generate_JSConstructStubHelper(masm, false, true);
372 }
373
374
375 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
376 Generate_JSConstructStubHelper(masm, false, false);
377 }
378
379
380 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
381 Generate_JSConstructStubHelper(masm, true, false);
382 }
383
384
385 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
386 bool is_construct) {
387 // Clear the context before we push it when entering the internal frame.
388 __ Set(esi, Immediate(0));
389
390 {
391 FrameScope scope(masm, StackFrame::INTERNAL);
392
393 // Load the previous frame pointer (ebx) to access C arguments
394 __ mov(ebx, Operand(ebp, 0));
395
396 // Get the function from the frame and setup the context.
397 __ mov(ecx, Operand(ebx, EntryFrameConstants::kFunctionArgOffset));
398 __ mov(esi, FieldOperand(ecx, JSFunction::kContextOffset));
399
400 // Push the function and the receiver onto the stack.
401 __ push(ecx);
402 __ push(Operand(ebx, EntryFrameConstants::kReceiverArgOffset));
403
404 // Load the number of arguments and setup pointer to the arguments.
405 __ mov(eax, Operand(ebx, EntryFrameConstants::kArgcOffset));
406 __ mov(ebx, Operand(ebx, EntryFrameConstants::kArgvOffset));
407
408 // Copy arguments to the stack in a loop.
409 Label loop, entry;
410 __ Set(ecx, Immediate(0));
411 __ jmp(&entry);
412 __ bind(&loop);
413 __ mov(edx, Operand(ebx, ecx, times_4, 0)); // push parameter from argv
414 __ push(Operand(edx, 0)); // dereference handle
415 __ inc(ecx);
416 __ bind(&entry);
417 __ cmp(ecx, eax);
418 __ j(not_equal, &loop);
419
420 // Get the function from the stack and call it.
421 // kPointerSize for the receiver.
422 __ mov(edi, Operand(esp, eax, times_4, kPointerSize));
423
424 // Invoke the code.
425 if (is_construct) {
426 CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
427 __ CallStub(&stub);
428 } else {
429 ParameterCount actual(eax);
430 __ InvokeFunction(edi, actual, CALL_FUNCTION,
431 NullCallWrapper(), CALL_AS_METHOD);
432 }
433
434 // Exit the internal frame. Notice that this also removes the empty
435 // context and the function left on the stack by the code
436 // invocation.
437 }
438 __ ret(kPointerSize); // Remove receiver.
439 }
440
441
442 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
443 Generate_JSEntryTrampolineHelper(masm, false);
444 }
445
446
447 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
448 Generate_JSEntryTrampolineHelper(masm, true);
449 }
450
451
452 void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
453 {
454 FrameScope scope(masm, StackFrame::INTERNAL);
455
456 // Push a copy of the function.
457 __ push(edi);
458 // Push call kind information.
459 __ push(ecx);
460
461 __ push(edi); // Function is also the parameter to the runtime call.
462 __ CallRuntime(Runtime::kLazyCompile, 1);
463
464 // Restore call kind information.
465 __ pop(ecx);
466 // Restore receiver.
467 __ pop(edi);
468
469 // Tear down internal frame.
470 }
471
472 // Do a tail-call of the compiled function.
473 __ lea(eax, FieldOperand(eax, Code::kHeaderSize));
474 __ jmp(eax);
475 }
476
477
478 void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
479 {
480 FrameScope scope(masm, StackFrame::INTERNAL);
481
482 // Push a copy of the function onto the stack.
483 __ push(edi);
484 // Push call kind information.
485 __ push(ecx);
486
487 __ push(edi); // Function is also the parameter to the runtime call.
488 __ CallRuntime(Runtime::kLazyRecompile, 1);
489
490 // Restore call kind information.
491 __ pop(ecx);
492 // Restore receiver.
493 __ pop(edi);
494
495 // Tear down internal frame.
496 }
497
498 // Do a tail-call of the compiled function.
499 __ lea(eax, FieldOperand(eax, Code::kHeaderSize));
500 __ jmp(eax);
501 }
502
503
504 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
505 Deoptimizer::BailoutType type) {
506 {
507 FrameScope scope(masm, StackFrame::INTERNAL);
508
509 // Pass deoptimization type to the runtime system.
510 __ push(Immediate(Smi::FromInt(static_cast<int>(type))));
511 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
512
513 // Tear down internal frame.
514 }
515
516 // Get the full codegen state from the stack and untag it.
517 __ mov(ecx, Operand(esp, 1 * kPointerSize));
518 __ SmiUntag(ecx);
519
520 // Switch on the state.
521 Label not_no_registers, not_tos_eax;
522 __ cmp(ecx, FullCodeGenerator::NO_REGISTERS);
523 __ j(not_equal, &not_no_registers, Label::kNear);
524 __ ret(1 * kPointerSize); // Remove state.
525
526 __ bind(&not_no_registers);
527 __ mov(eax, Operand(esp, 2 * kPointerSize));
528 __ cmp(ecx, FullCodeGenerator::TOS_REG);
529 __ j(not_equal, &not_tos_eax, Label::kNear);
530 __ ret(2 * kPointerSize); // Remove state, eax.
531
532 __ bind(&not_tos_eax);
533 __ Abort("no cases left");
534 }
535
536
537 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
538 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
539 }
540
541
542 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
543 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
544 }
545
546
547 void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
548 // TODO(kasperl): Do we need to save/restore the XMM registers too?
549
550 // For now, we are relying on the fact that Runtime::NotifyOSR
551 // doesn't do any garbage collection which allows us to save/restore
552 // the registers without worrying about which of them contain
553 // pointers. This seems a bit fragile.
554 __ pushad();
555 {
556 FrameScope scope(masm, StackFrame::INTERNAL);
557 __ CallRuntime(Runtime::kNotifyOSR, 0);
558 }
559 __ popad();
560 __ ret(0);
561 }
562
563
564 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
565 Factory* factory = masm->isolate()->factory();
566
567 // 1. Make sure we have at least one argument.
568 { Label done;
569 __ test(eax, eax);
570 __ j(not_zero, &done);
571 __ pop(ebx);
572 __ push(Immediate(factory->undefined_value()));
573 __ push(ebx);
574 __ inc(eax);
575 __ bind(&done);
576 }
577
578 // 2. Get the function to call (passed as receiver) from the stack, check
579 // if it is a function.
580 Label slow, non_function;
581 // 1 ~ return address.
582 __ mov(edi, Operand(esp, eax, times_4, 1 * kPointerSize));
583 __ JumpIfSmi(edi, &non_function);
584 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
585 __ j(not_equal, &slow);
586
587
588 // 3a. Patch the first argument if necessary when calling a function.
589 Label shift_arguments;
590 __ Set(edx, Immediate(0)); // indicate regular JS_FUNCTION
591 { Label convert_to_object, use_global_receiver, patch_receiver;
592 // Change context eagerly in case we need the global receiver.
593 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
594
595 // Do not transform the receiver for strict mode functions.
596 __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
597 __ test_b(FieldOperand(ebx, SharedFunctionInfo::kStrictModeByteOffset),
598 1 << SharedFunctionInfo::kStrictModeBitWithinByte);
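// (kStrictModeByteOffset addresses the single byte of the compiler hints
// that contains the strict-mode bit, which is why a one-byte test suffices.)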
599 __ j(not_equal, &shift_arguments);
600
601 // Do not transform the receiver for natives (shared already in ebx).
602 __ test_b(FieldOperand(ebx, SharedFunctionInfo::kNativeByteOffset),
603 1 << SharedFunctionInfo::kNativeBitWithinByte);
604 __ j(not_equal, &shift_arguments);
605
606 // Compute the receiver in non-strict mode.
607 __ mov(ebx, Operand(esp, eax, times_4, 0)); // First argument.
608
609 // Call ToObject on the receiver if it is not an object, or use the
610 // global object if it is null or undefined.
611 __ JumpIfSmi(ebx, &convert_to_object);
612 __ cmp(ebx, factory->null_value());
613 __ j(equal, &use_global_receiver);
614 __ cmp(ebx, factory->undefined_value());
615 __ j(equal, &use_global_receiver);
616 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
617 __ CmpObjectType(ebx, FIRST_SPEC_OBJECT_TYPE, ecx);
618 __ j(above_equal, &shift_arguments);
619
620 __ bind(&convert_to_object);
621
622 { // In order to preserve argument count.
623 FrameScope scope(masm, StackFrame::INTERNAL);
624 __ SmiTag(eax);
625 __ push(eax);
626
627 __ push(ebx);
628 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
629 __ mov(ebx, eax);
630 __ Set(edx, Immediate(0)); // restore
631
632 __ pop(eax);
633 __ SmiUntag(eax);
634 }
635
636 // Restore the function to edi.
637 __ mov(edi, Operand(esp, eax, times_4, 1 * kPointerSize));
638 __ jmp(&patch_receiver);
639
640 // Use the global receiver object from the called function as the
641 // receiver.
642 __ bind(&use_global_receiver);
643 const int kGlobalIndex =
644 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
645 __ mov(ebx, FieldOperand(esi, kGlobalIndex));
646 __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalContextOffset));
647 __ mov(ebx, FieldOperand(ebx, kGlobalIndex));
648 __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
649
650 __ bind(&patch_receiver);
651 __ mov(Operand(esp, eax, times_4, 0), ebx);
652
653 __ jmp(&shift_arguments);
654 }
655
656 // 3b. Check for function proxy.
657 __ bind(&slow);
658 __ Set(edx, Immediate(1)); // indicate function proxy
659 __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
660 __ j(equal, &shift_arguments);
661 __ bind(&non_function);
662 __ Set(edx, Immediate(2)); // indicate non-function
663
664 // 3c. Patch the first argument when calling a non-function. The
665 // CALL_NON_FUNCTION builtin expects the non-function callee as
666 // receiver, so overwrite the first argument which will ultimately
667 // become the receiver.
668 __ mov(Operand(esp, eax, times_4, 0), edi);
669
670 // 4. Shift arguments and return address one slot down on the stack
671 // (overwriting the original receiver). Adjust argument count to make
672 // the original first argument the new receiver.
673 __ bind(&shift_arguments);
674 { Label loop;
675 __ mov(ecx, eax);
676 __ bind(&loop);
677 __ mov(ebx, Operand(esp, ecx, times_4, 0));
678 __ mov(Operand(esp, ecx, times_4, kPointerSize), ebx);
679 __ dec(ecx);
680 __ j(not_sign, &loop); // While non-negative (to copy return address).
681 __ pop(ebx); // Discard copy of return address.
682 __ dec(eax); // One fewer argument (first argument is new receiver).
683 }
684
685 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
686 // or a function proxy via CALL_FUNCTION_PROXY.
687 { Label function, non_proxy;
688 __ test(edx, edx);
689 __ j(zero, &function);
690 __ Set(ebx, Immediate(0));
691 __ cmp(edx, Immediate(1));
692 __ j(not_equal, &non_proxy);
693
694 __ pop(edx); // return address
695 __ push(edi); // re-add proxy object as additional argument
696 __ push(edx);
697 __ inc(eax);
698 __ SetCallKind(ecx, CALL_AS_FUNCTION);
699 __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
700 __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
701 RelocInfo::CODE_TARGET);
702
703 __ bind(&non_proxy);
704 __ SetCallKind(ecx, CALL_AS_METHOD);
705 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
706 __ jmp(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
707 RelocInfo::CODE_TARGET);
708 __ bind(&function);
709 }
710
711 // 5b. Get the code to call from the function and check that the number of
712 // expected arguments matches what we're providing. If so, jump
713 // (tail-call) to the code in register edx without checking arguments.
714 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
715 __ mov(ebx,
716 FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
717 __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
718 __ SmiUntag(ebx);
719 __ SetCallKind(ecx, CALL_AS_METHOD);
720 __ cmp(eax, ebx);
721 __ j(not_equal,
722 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline());
723
724 ParameterCount expected(0);
725 __ InvokeCode(edx, expected, expected, JUMP_FUNCTION, NullCallWrapper(),
726 CALL_AS_METHOD);
727 }
728
729
730 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
731 static const int kArgumentsOffset = 2 * kPointerSize;
732 static const int kReceiverOffset = 3 * kPointerSize;
733 static const int kFunctionOffset = 4 * kPointerSize;
734 {
735 FrameScope frame_scope(masm, StackFrame::INTERNAL);
736
737 __ push(Operand(ebp, kFunctionOffset)); // push this
738 __ push(Operand(ebp, kArgumentsOffset)); // push arguments
739 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
740
741 // Check the stack for overflow. We are not trying to catch
742 // interruptions (e.g. debug break and preemption) here, so the "real stack
743 // limit" is checked.
744 Label okay;
745 ExternalReference real_stack_limit =
746 ExternalReference::address_of_real_stack_limit(masm->isolate());
747 __ mov(edi, Operand::StaticVariable(real_stack_limit));
748 // Make ecx the space we have left. The stack might already be overflowed
749 // here which will cause ecx to become negative.
750 __ mov(ecx, esp);
751 __ sub(ecx, edi);
752 // Make edx the space we need for the array when it is unrolled onto the
753 // stack.
754 __ mov(edx, eax);
755 __ shl(edx, kPointerSizeLog2 - kSmiTagSize);
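// (eax holds the argument count as a smi, i.e. already multiplied by 2, so
// shifting by kPointerSizeLog2 - kSmiTagSize == 1 yields count * kPointerSize
// bytes.)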
756 // Check if the arguments will overflow the stack.
757 __ cmp(ecx, edx);
758 __ j(greater, &okay); // Signed comparison.
759
760 // Out of stack space.
761 __ push(Operand(ebp, 4 * kPointerSize)); // push this
762 __ push(eax);
763 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
764 __ bind(&okay);
765 // End of stack check.
766
767 // Push current index and limit.
768 const int kLimitOffset =
769 StandardFrameConstants::kExpressionsOffset - 1 * kPointerSize;
770 const int kIndexOffset = kLimitOffset - 1 * kPointerSize;
771 __ push(eax); // limit
772 __ push(Immediate(0)); // index
773
774 // Get the receiver.
775 __ mov(ebx, Operand(ebp, kReceiverOffset));
776
777 // Check that the function is a JS function (otherwise it must be a proxy).
778 Label push_receiver;
779 __ mov(edi, Operand(ebp, kFunctionOffset));
780 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
781 __ j(not_equal, &push_receiver);
782
783 // Change context eagerly to get the right global object if necessary.
784 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
785
786 // Compute the receiver.
787 // Do not transform the receiver for strict mode functions.
788 Label call_to_object, use_global_receiver;
789 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
790 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset),
791 1 << SharedFunctionInfo::kStrictModeBitWithinByte);
792 __ j(not_equal, &push_receiver);
793
794 Factory* factory = masm->isolate()->factory();
795
796 // Do not transform the receiver for natives (shared already in ecx).
797 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
798 1 << SharedFunctionInfo::kNativeBitWithinByte);
799 __ j(not_equal, &push_receiver);
800
801 // Compute the receiver in non-strict mode.
802 // Call ToObject on the receiver if it is not an object, or use the
803 // global object if it is null or undefined.
804 __ JumpIfSmi(ebx, &call_to_object);
805 __ cmp(ebx, factory->null_value());
806 __ j(equal, &use_global_receiver);
807 __ cmp(ebx, factory->undefined_value());
808 __ j(equal, &use_global_receiver);
809 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
810 __ CmpObjectType(ebx, FIRST_SPEC_OBJECT_TYPE, ecx);
811 __ j(above_equal, &push_receiver);
812
813 __ bind(&call_to_object);
814 __ push(ebx);
815 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
816 __ mov(ebx, eax);
817 __ jmp(&push_receiver);
818
819 // Use the current global receiver object as the receiver.
820 __ bind(&use_global_receiver);
821 const int kGlobalOffset =
822 Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
823 __ mov(ebx, FieldOperand(esi, kGlobalOffset));
824 __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalContextOffset));
825 __ mov(ebx, FieldOperand(ebx, kGlobalOffset));
826 __ mov(ebx, FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
827
828 // Push the receiver.
829 __ bind(&push_receiver);
830 __ push(ebx);
831
832 // Copy all arguments from the array to the stack.
833 Label entry, loop;
834 __ mov(eax, Operand(ebp, kIndexOffset));
835 __ jmp(&entry);
836 __ bind(&loop);
837 __ mov(edx, Operand(ebp, kArgumentsOffset)); // load arguments
838
839 // Use inline caching to speed up access to arguments.
840 Handle<Code> ic = masm->isolate()->builtins()->KeyedLoadIC_Initialize();
841 __ call(ic, RelocInfo::CODE_TARGET);
842 // It is important that we do not have a test instruction after the
843 // call. A test instruction after the call is used to indicate that
844 // we have generated an inline version of the keyed load. In this
845 // case, we know that we are not generating a test instruction next.
846
847 // Push the nth argument.
848 __ push(eax);
849
850 // Update the index on the stack and in register eax.
851 __ mov(eax, Operand(ebp, kIndexOffset));
852 __ add(eax, Immediate(1 << kSmiTagSize));
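// (Adding 1 << kSmiTagSize, i.e. 2, increments the smi-tagged index by one.)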
853 __ mov(Operand(ebp, kIndexOffset), eax);
854
855 __ bind(&entry);
856 __ cmp(eax, Operand(ebp, kLimitOffset));
857 __ j(not_equal, &loop);
858
859 // Invoke the function.
860 Label call_proxy;
861 ParameterCount actual(eax);
862 __ SmiUntag(eax);
863 __ mov(edi, Operand(ebp, kFunctionOffset));
864 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
865 __ j(not_equal, &call_proxy);
866 __ InvokeFunction(edi, actual, CALL_FUNCTION,
867 NullCallWrapper(), CALL_AS_METHOD);
868
869 frame_scope.GenerateLeaveFrame();
870 __ ret(3 * kPointerSize); // remove this, receiver, and arguments
871
872 // Invoke the function proxy.
873 __ bind(&call_proxy);
874 __ push(edi); // add function proxy as last argument
875 __ inc(eax);
876 __ Set(ebx, Immediate(0));
877 __ SetCallKind(ecx, CALL_AS_METHOD);
878 __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
879 __ call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
880 RelocInfo::CODE_TARGET);
881
882 // Leave internal frame.
883 }
884 __ ret(3 * kPointerSize); // remove this, receiver, and arguments
885 }
886
887
888 // Allocate an empty JSArray. The allocated array is put into the result
889 // register. If the parameter initial_capacity is larger than zero an elements
890 // backing store is allocated with this size and filled with the hole values.
891 // Otherwise the elements backing store is set to the empty FixedArray.
892 static void AllocateEmptyJSArray(MacroAssembler* masm,
893 Register array_function,
894 Register result,
895 Register scratch1,
896 Register scratch2,
897 Register scratch3,
898 Label* gc_required) {
899 const int initial_capacity = JSArray::kPreallocatedArrayElements;
900 STATIC_ASSERT(initial_capacity >= 0);
901
902 __ LoadInitialArrayMap(array_function, scratch2, scratch1);
903
904 // Allocate the JSArray object together with space for a fixed array with the
905 // requested elements.
906 int size = JSArray::kSize;
907 if (initial_capacity > 0) {
908 size += FixedArray::SizeFor(initial_capacity);
909 }
910 __ AllocateInNewSpace(size,
911 result,
912 scratch2,
913 scratch3,
914 gc_required,
915 TAG_OBJECT);
916
917 // Allocated the JSArray. Now initialize the fields except for the elements
918 // array.
919 // result: JSObject
920 // scratch1: initial map
921 // scratch2: start of next object
922 __ mov(FieldOperand(result, JSObject::kMapOffset), scratch1);
923 Factory* factory = masm->isolate()->factory();
924 __ mov(FieldOperand(result, JSArray::kPropertiesOffset),
925 factory->empty_fixed_array());
926 // Field JSArray::kElementsOffset is initialized later.
927 __ mov(FieldOperand(result, JSArray::kLengthOffset), Immediate(0));
928
929 // If no storage is requested for the elements array just set the empty
930 // fixed array.
931 if (initial_capacity == 0) {
932 __ mov(FieldOperand(result, JSArray::kElementsOffset),
933 factory->empty_fixed_array());
934 return;
935 }
936
937 // Calculate the location of the elements array and set elements array member
938 // of the JSArray.
939 // result: JSObject
940 // scratch2: start of next object
941 __ lea(scratch1, Operand(result, JSArray::kSize));
942 __ mov(FieldOperand(result, JSArray::kElementsOffset), scratch1);
943
944 // Initialize the FixedArray and fill it with holes. FixedArray length is
945 // stored as a smi.
946 // result: JSObject
947 // scratch1: elements array
948 // scratch2: start of next object
949 __ mov(FieldOperand(scratch1, FixedArray::kMapOffset),
950 factory->fixed_array_map());
951 __ mov(FieldOperand(scratch1, FixedArray::kLengthOffset),
952 Immediate(Smi::FromInt(initial_capacity)));
953
954 // Fill the FixedArray with the hole value. Inline the code if short.
955 // Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
956 static const int kLoopUnfoldLimit = 4;
957 if (initial_capacity <= kLoopUnfoldLimit) {
958 // Use a scratch register here to have only one reloc info when unfolding
959 // the loop.
960 __ mov(scratch3, factory->the_hole_value());
961 for (int i = 0; i < initial_capacity; i++) {
962 __ mov(FieldOperand(scratch1,
963 FixedArray::kHeaderSize + i * kPointerSize),
964 scratch3);
965 }
966 } else {
967 Label loop, entry;
968 __ mov(scratch2, Immediate(initial_capacity));
969 __ jmp(&entry);
970 __ bind(&loop);
971 __ mov(FieldOperand(scratch1,
972 scratch2,
973 times_pointer_size,
974 FixedArray::kHeaderSize),
975 factory->the_hole_value());
976 __ bind(&entry);
977 __ dec(scratch2);
978 __ j(not_sign, &loop);
979 }
980 }
981
982
983 // Allocate a JSArray with the number of elements stored in a register. The
984 // register array_function holds the built-in Array function and the register
985 // array_size holds the size of the array as a smi. The allocated array is put
986 // into the result register and beginning and end of the FixedArray elements
987 // storage is put into registers elements_array and elements_array_end (see
988 // below for when that is not the case). If the parameter fill_with_hole is
989 // true, the allocated elements backing store is filled with the hole value;
990 // otherwise it is left uninitialized. When the backing store is filled the
991 // register elements_array is scratched.
992 static void AllocateJSArray(MacroAssembler* masm,
993 Register array_function, // Array function.
994 Register array_size, // As a smi, cannot be 0.
995 Register result,
996 Register elements_array,
997 Register elements_array_end,
998 Register scratch,
999 bool fill_with_hole,
1000 Label* gc_required) {
1001 ASSERT(scratch.is(edi)); // rep stos destination
1002 ASSERT(!fill_with_hole || array_size.is(ecx)); // rep stos count
1003 ASSERT(!fill_with_hole || !result.is(eax)); // result is never eax
1004
1005 __ LoadInitialArrayMap(array_function, scratch, elements_array);
1006
1007 // Allocate the JSArray object together with space for a FixedArray with the
1008 // requested elements.
1009 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
1010 __ AllocateInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
1011 times_half_pointer_size, // array_size is a smi.
1012 array_size,
1013 result,
1014 elements_array_end,
1015 scratch,
1016 gc_required,
1017 TAG_OBJECT);
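// (array_size is smi-tagged, i.e. already multiplied by 2, so scaling it by
// times_half_pointer_size above yields array_size * kPointerSize bytes of
// element storage on top of the fixed-size headers.)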
1018
1019 // Allocated the JSArray. Now initialize the fields except for the elements
1020 // array.
1021 // result: JSObject
1022 // elements_array: initial map
1023 // elements_array_end: start of next object
1024 // array_size: size of array (smi)
1025 __ mov(FieldOperand(result, JSObject::kMapOffset), elements_array);
1026 Factory* factory = masm->isolate()->factory();
1027 __ mov(elements_array, factory->empty_fixed_array());
1028 __ mov(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
1029 // Field JSArray::kElementsOffset is initialized later.
1030 __ mov(FieldOperand(result, JSArray::kLengthOffset), array_size);
1031
1032 // Calculate the location of the elements array and set elements array member
1033 // of the JSArray.
1034 // result: JSObject
1035 // elements_array_end: start of next object
1036 // array_size: size of array (smi)
1037 __ lea(elements_array, Operand(result, JSArray::kSize));
1038 __ mov(FieldOperand(result, JSArray::kElementsOffset), elements_array);
1039
1040 // Initialize the fixed array. FixedArray length is stored as a smi.
1041 // result: JSObject
1042 // elements_array: elements array
1043 // elements_array_end: start of next object
1044 // array_size: size of array (smi)
1045 __ mov(FieldOperand(elements_array, FixedArray::kMapOffset),
1046 factory->fixed_array_map());
1047 // For non-empty JSArrays the length of the FixedArray and the JSArray is the
1048 // same.
1049 __ mov(FieldOperand(elements_array, FixedArray::kLengthOffset), array_size);
1050
1051 // Fill the allocated FixedArray with the hole value if requested.
1052 // result: JSObject
1053 // elements_array: elements array
1054 if (fill_with_hole) {
1055 __ SmiUntag(array_size);
1056 __ lea(edi, Operand(elements_array,
1057 FixedArray::kHeaderSize - kHeapObjectTag));
1058 __ mov(eax, factory->the_hole_value());
1059 __ cld();
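// rep stos stores eax at [edi], advances edi by kPointerSize and decrements
// ecx until it reaches zero, which is why the assertions at the top of this
// function pin scratch to edi and array_size to ecx.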
1060 // Do not use rep stos when filling less than kRepStosThreshold
1061 // words.
1062 const int kRepStosThreshold = 16;
1063 Label loop, entry, done;
1064 __ cmp(ecx, kRepStosThreshold);
1065 __ j(below, &loop); // Note: ecx > 0.
1066 __ rep_stos();
1067 __ jmp(&done);
1068 __ bind(&loop);
1069 __ stos();
1070 __ bind(&entry);
1071 __ cmp(edi, elements_array_end);
1072 __ j(below, &loop);
1073 __ bind(&done);
1074 }
1075 }
1076
1077
1078 // Create a new array for the built-in Array function. This function allocates
1079 // the JSArray object and the FixedArray elements array and initializes these.
1080 // If the Array cannot be constructed in native code the runtime is called. This
1081 // function assumes the following state:
1082 // edi: constructor (built-in Array function)
1083 // eax: argc
1084 // esp[0]: return address
1085 // esp[4]: last argument
1086 // This function is used for both construct and normal calls of Array. Whether
1087 // it is a construct call or not is indicated by the construct_call parameter.
1088 // The only difference between handling a construct call and a normal call is
1089 // that for a construct call the constructor function in edi needs to be
1090 // preserved for entering the generic code. In both cases argc in eax needs to
1091 // be preserved.
1092 static void ArrayNativeCode(MacroAssembler* masm,
1093 bool construct_call,
1094 Label* call_generic_code) {
1095 Label argc_one_or_more, argc_two_or_more, prepare_generic_code_call,
1096 empty_array, not_empty_array, finish, cant_transition_map, not_double;
1097
1098 // Push the constructor and argc. No need to tag argc as a smi, as there will
1099 // be no garbage collection with this on the stack.
1100 int push_count = 0;
1101 if (construct_call) {
1102 push_count++;
1103 __ push(edi);
1104 }
1105 push_count++;
1106 __ push(eax);
1107
1108 // Check for array construction with zero arguments.
1109 __ test(eax, eax);
1110 __ j(not_zero, &argc_one_or_more);
1111
1112 __ bind(&empty_array);
1113 // Handle construction of an empty array.
1114 AllocateEmptyJSArray(masm,
1115 edi,
1116 eax,
1117 ebx,
1118 ecx,
1119 edi,
1120 &prepare_generic_code_call);
1121 __ IncrementCounter(masm->isolate()->counters()->array_function_native(), 1);
1122 __ pop(ebx);
1123 if (construct_call) {
1124 __ pop(edi);
1125 }
1126 __ ret(kPointerSize);
1127
1128 // Check for one argument. Bail out if argument is not smi or if it is
1129 // negative.
1130 __ bind(&argc_one_or_more);
1131 __ cmp(eax, 1);
1132 __ j(not_equal, &argc_two_or_more);
1133 STATIC_ASSERT(kSmiTag == 0);
1134 __ mov(ecx, Operand(esp, (push_count + 1) * kPointerSize));
1135 __ test(ecx, ecx);
1136 __ j(not_zero, &not_empty_array);
1137
1138 // The single argument passed is zero, so we jump to the code above used to
1139 // handle the case of no arguments passed. To adapt the stack for that we move
1140 // the return address and the pushed constructor (if pushed) one stack slot up
1141 // thereby removing the passed argument. Argc is also on the stack - at the
1142 // bottom - and it needs to be changed from 1 to 0 to have the call into the
1143 // runtime system work in case a GC is required.
1144 for (int i = push_count; i > 0; i--) {
1145 __ mov(eax, Operand(esp, i * kPointerSize));
1146 __ mov(Operand(esp, (i + 1) * kPointerSize), eax);
1147 }
1148 __ Drop(2); // Drop two stack slots.
1149 __ push(Immediate(0)); // Treat this as a call with argc of zero.
1150 __ jmp(&empty_array);
1151
1152 __ bind(&not_empty_array);
1153 __ test(ecx, Immediate(kIntptrSignBit | kSmiTagMask));
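// (A single test covers both bail-out conditions: kSmiTagMask catches a
// non-smi argument and kIntptrSignBit catches a negative length.)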
1154 __ j(not_zero, &prepare_generic_code_call);
1155
1156 // Handle construction of an empty array of a certain size. Get the size from
1157 // the stack and bail out if size is too large to actually allocate an elements
1158 // array.
1159 __ cmp(ecx, JSObject::kInitialMaxFastElementArray << kSmiTagSize);
1160 __ j(greater_equal, &prepare_generic_code_call);
1161
1162 // edx: array_size (smi)
1163 // edi: constructor
1164 // esp[0]: argc (cannot be 0 here)
1165 // esp[4]: constructor (only if construct_call)
1166 // esp[8]: return address
1167 // esp[C]: argument
1168 AllocateJSArray(masm,
1169 edi,
1170 ecx,
1171 ebx,
1172 eax,
1173 edx,
1174 edi,
1175 true,
1176 &prepare_generic_code_call);
1177 Counters* counters = masm->isolate()->counters();
1178 __ IncrementCounter(counters->array_function_native(), 1);
1179 __ mov(eax, ebx);
1180 __ pop(ebx);
1181 if (construct_call) {
1182 __ pop(edi);
1183 }
1184 __ ret(2 * kPointerSize);
1185
1186 // Handle construction of an array from a list of arguments.
1187 __ bind(&argc_two_or_more);
1188 STATIC_ASSERT(kSmiTag == 0);
1189 __ SmiTag(eax); // Convert argc to a smi.
1190 // eax: array_size (smi)
1191 // edi: constructor
1192 // esp[0] : argc
1193 // esp[4]: constructor (only if construct_call)
1194 // esp[8] : return address
1195 // esp[C] : last argument
1196 AllocateJSArray(masm,
1197 edi,
1198 eax,
1199 ebx,
1200 ecx,
1201 edx,
1202 edi,
1203 false,
1204 &prepare_generic_code_call);
1205 __ IncrementCounter(counters->array_function_native(), 1);
1206 __ push(ebx);
1207 __ mov(ebx, Operand(esp, kPointerSize));
1208 // ebx: argc
1209 // edx: elements_array_end (untagged)
1210 // esp[0]: JSArray
1211 // esp[4]: argc
1212 // esp[8]: constructor (only if construct_call)
1213 // esp[12]: return address
1214 // esp[16]: last argument
1215
1216 // Location of the last argument
1217 int last_arg_offset = (construct_call ? 4 : 3) * kPointerSize;
1218 __ lea(edi, Operand(esp, last_arg_offset));
1219
1220 // Location of the first array element (parameter fill_with_hole to
1221 // AllocateJSArray is false, so the FixedArray is returned in ecx).
1222 __ lea(edx, Operand(ecx, FixedArray::kHeaderSize - kHeapObjectTag));
1223
1224 Label has_non_smi_element;
1225
1226 // ebx: argc
1227 // edx: location of the first array element
1228 // edi: location of the last argument
1229 // esp[0]: JSArray
1230 // esp[4]: argc
1231 // esp[8]: constructor (only if construct_call)
1232 // esp[12]: return address
1233 // esp[16]: last argument
1234 Label loop, entry;
1235 __ mov(ecx, ebx);
1236 __ jmp(&entry);
1237 __ bind(&loop);
1238 __ mov(eax, Operand(edi, ecx, times_pointer_size, 0));
1239 if (FLAG_smi_only_arrays) {
1240 __ JumpIfNotSmi(eax, &has_non_smi_element);
1241 }
1242 __ mov(Operand(edx, 0), eax);
1243 __ add(edx, Immediate(kPointerSize));
1244 __ bind(&entry);
1245 __ dec(ecx);
1246 __ j(greater_equal, &loop);
1247
1248 // Remove caller arguments from the stack and return.
1249 // ebx: argc
1250 // esp[0]: JSArray
1251 // esp[4]: argc
1252 // esp[8]: constructor (only if construct_call)
1253 // esp[12]: return address
1254 // esp[16]: last argument
1255 __ bind(&finish);
1256 __ mov(ecx, Operand(esp, last_arg_offset - kPointerSize));
1257 __ pop(eax);
1258 __ pop(ebx);
1259 __ lea(esp, Operand(esp, ebx, times_pointer_size,
1260 last_arg_offset - kPointerSize));
1261 __ jmp(ecx);
1262
1263 __ bind(&has_non_smi_element);
1264 // Double values are handled by the runtime.
1265 __ CheckMap(eax,
1266 masm->isolate()->factory()->heap_number_map(),
1267 &not_double,
1268 DONT_DO_SMI_CHECK);
1269 __ bind(&cant_transition_map);
1270 // Throw away the array that's only been partially constructed.
1271 __ pop(eax);
1272 __ UndoAllocationInNewSpace(eax);
1273 __ jmp(&prepare_generic_code_call);
1274
1275 __ bind(&not_double);
1276 // Transition FAST_SMI_ONLY_ELEMENTS to FAST_ELEMENTS.
1277 __ mov(ebx, Operand(esp, 0));
1278 __ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
1279 __ LoadTransitionedArrayMapConditional(
1280 FAST_SMI_ONLY_ELEMENTS,
1281 FAST_ELEMENTS,
1282 edi,
1283 eax,
1284 &cant_transition_map);
1285 __ mov(FieldOperand(ebx, HeapObject::kMapOffset), edi);
1286 __ RecordWriteField(ebx, HeapObject::kMapOffset, edi, eax,
1287 kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
1288
1289 // Prepare to re-enter the loop
1290 __ lea(edi, Operand(esp, last_arg_offset));
1291
1292 // Finish the array initialization loop.
1293 Label loop2;
1294 __ bind(&loop2);
1295 __ mov(eax, Operand(edi, ecx, times_pointer_size, 0));
1296 __ mov(Operand(edx, 0), eax);
1297 __ add(edx, Immediate(kPointerSize));
1298 __ dec(ecx);
1299 __ j(greater_equal, &loop2);
1300 __ jmp(&finish);
1301
1302 // Restore argc and constructor before running the generic code.
1303 __ bind(&prepare_generic_code_call);
1304 __ pop(eax);
1305 if (construct_call) {
1306 __ pop(edi);
1307 }
1308 __ jmp(call_generic_code);
1309 }
1310
1311
1312 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
1313 // ----------- S t a t e -------------
1314 // -- eax : argc
1315 // -- esp[0] : return address
1316 // -- esp[4] : last argument
1317 // -----------------------------------
1318 Label generic_array_code;
1319
1320 // Get the InternalArray function.
1321 __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, edi);
1322
1323 if (FLAG_debug_code) {
1324 // Initial map for the builtin InternalArray function should be a map.
1325 __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
1326 // The smi check below catches both a NULL value and a smi.
1327 __ test(ebx, Immediate(kSmiTagMask));
1328 __ Assert(not_zero, "Unexpected initial map for InternalArray function");
1329 __ CmpObjectType(ebx, MAP_TYPE, ecx);
1330 __ Assert(equal, "Unexpected initial map for InternalArray function");
1331 }
1332
1333 // Run the native code for the InternalArray function called as a normal
1334 // function.
1335 ArrayNativeCode(masm, false, &generic_array_code);
1336
1337 // Jump to the generic internal array code in case the specialized code cannot
1338 // handle the construction.
1339 __ bind(&generic_array_code);
1340 Handle<Code> array_code =
1341 masm->isolate()->builtins()->InternalArrayCodeGeneric();
1342 __ jmp(array_code, RelocInfo::CODE_TARGET);
1343 }
1344
1345
1346 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
1347 // ----------- S t a t e -------------
1348 // -- eax : argc
1349 // -- esp[0] : return address
1350 // -- esp[4] : last argument
1351 // -----------------------------------
1352 Label generic_array_code;
1353
1354 // Get the Array function.
1355 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, edi);
1356
1357 if (FLAG_debug_code) {
1358 // Initial map for the builtin Array function should be a map.
1359 __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
1360 // The smi check below catches both a NULL value and a smi.
1361 __ test(ebx, Immediate(kSmiTagMask));
1362 __ Assert(not_zero, "Unexpected initial map for Array function");
1363 __ CmpObjectType(ebx, MAP_TYPE, ecx);
1364 __ Assert(equal, "Unexpected initial map for Array function");
1365 }
1366
1367 // Run the native code for the Array function called as a normal function.
1368 ArrayNativeCode(masm, false, &generic_array_code);
1369
1370 // Jump to the generic array code in case the specialized code cannot handle
1371 // the construction.
1372 __ bind(&generic_array_code);
1373 Handle<Code> array_code =
1374 masm->isolate()->builtins()->ArrayCodeGeneric();
1375 __ jmp(array_code, RelocInfo::CODE_TARGET);
1376 }
1377
1378
1379 void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
1380 // ----------- S t a t e -------------
1381 // -- eax : argc
1382 // -- edi : constructor
1383 // -- esp[0] : return address
1384 // -- esp[4] : last argument
1385 // -----------------------------------
1386 Label generic_constructor;
1387
1388 if (FLAG_debug_code) {
1389 // The array construct code is only set for the global and natives
1390 // builtin Array functions which always have maps.
1391
1392 // Initial map for the builtin Array function should be a map.
1393 __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
1394 // The smi check below catches both a NULL value and a smi.
1395 __ test(ebx, Immediate(kSmiTagMask));
1396 __ Assert(not_zero, "Unexpected initial map for Array function");
1397 __ CmpObjectType(ebx, MAP_TYPE, ecx);
1398 __ Assert(equal, "Unexpected initial map for Array function");
1399 }
1400
1401 // Run the native code for the Array function called as constructor.
1402 ArrayNativeCode(masm, true, &generic_constructor);
1403
1404 // Jump to the generic construct code in case the specialized code cannot
1405 // handle the construction.
1406 __ bind(&generic_constructor);
1407 Handle<Code> generic_construct_stub =
1408 masm->isolate()->builtins()->JSConstructStubGeneric();
1409 __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);
1410 }
1411
1412
1413 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
1414 // ----------- S t a t e -------------
1415 // -- eax : number of arguments
1416 // -- edi : constructor function
1417 // -- esp[0] : return address
1418 // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1419 // -- esp[(argc + 1) * 4] : receiver
1420 // -----------------------------------
1421 Counters* counters = masm->isolate()->counters();
1422 __ IncrementCounter(counters->string_ctor_calls(), 1);
1423
1424 if (FLAG_debug_code) {
1425 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, ecx);
1426 __ cmp(edi, ecx);
1427 __ Assert(equal, "Unexpected String function");
1428 }
1429
1430 // Load the first argument into eax and get rid of the rest
1431 // (including the receiver).
1432 Label no_arguments;
1433 __ test(eax, eax);
1434 __ j(zero, &no_arguments);
1435 __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
1436 __ pop(ecx);
1437 __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
1438 __ push(ecx);
1439 __ mov(eax, ebx);
1440
1441 // Lookup the argument in the number to string cache.
1442 Label not_cached, argument_is_string;
1443 NumberToStringStub::GenerateLookupNumberStringCache(
1444 masm,
1445 eax, // Input.
1446 ebx, // Result.
1447 ecx, // Scratch 1.
1448 edx, // Scratch 2.
1449 false, // Input is known to be smi?
1450 &not_cached);
1451 __ IncrementCounter(counters->string_ctor_cached_number(), 1);
1452 __ bind(&argument_is_string);
1453 // ----------- S t a t e -------------
1454 // -- ebx : argument converted to string
1455 // -- edi : constructor function
1456 // -- esp[0] : return address
1457 // -----------------------------------
1458
1459 // Allocate a JSValue and put the tagged pointer into eax.
1460 Label gc_required;
1461 __ AllocateInNewSpace(JSValue::kSize,
1462 eax, // Result.
1463 ecx, // New allocation top (we ignore it).
1464 no_reg,
1465 &gc_required,
1466 TAG_OBJECT);
1467
1468 // Set the map.
1469 __ LoadGlobalFunctionInitialMap(edi, ecx);
1470 if (FLAG_debug_code) {
1471 __ cmpb(FieldOperand(ecx, Map::kInstanceSizeOffset),
1472 JSValue::kSize >> kPointerSizeLog2);
1473 __ Assert(equal, "Unexpected string wrapper instance size");
1474 __ cmpb(FieldOperand(ecx, Map::kUnusedPropertyFieldsOffset), 0);
1475 __ Assert(equal, "Unexpected unused properties of string wrapper");
1476 }
1477 __ mov(FieldOperand(eax, HeapObject::kMapOffset), ecx);
1478
1479 // Set properties and elements.
1480 Factory* factory = masm->isolate()->factory();
1481 __ Set(ecx, Immediate(factory->empty_fixed_array()));
1482 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
1483 __ mov(FieldOperand(eax, JSObject::kElementsOffset), ecx);
1484
1485 // Set the value.
1486 __ mov(FieldOperand(eax, JSValue::kValueOffset), ebx);
1487
1488 // Ensure the object is fully initialized.
1489 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
1490
1491 // We're done. Return.
1492 __ ret(0);
1493
1494 // The argument was not found in the number to string cache. Check
1495 // if it's a string already before calling the conversion builtin.
1496 Label convert_argument;
1497 __ bind(&not_cached);
1498 STATIC_ASSERT(kSmiTag == 0);
1499 __ JumpIfSmi(eax, &convert_argument);
1500 Condition is_string = masm->IsObjectStringType(eax, ebx, ecx);
1501 __ j(NegateCondition(is_string), &convert_argument);
1502 __ mov(ebx, eax);
1503 __ IncrementCounter(counters->string_ctor_string_value(), 1);
1504 __ jmp(&argument_is_string);
1505
1506 // Invoke the conversion builtin and put the result into ebx.
1507 __ bind(&convert_argument);
1508 __ IncrementCounter(counters->string_ctor_conversions(), 1);
1509 {
1510 FrameScope scope(masm, StackFrame::INTERNAL);
1511 __ push(edi); // Preserve the function.
1512 __ push(eax);
1513 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
1514 __ pop(edi);
1515 }
1516 __ mov(ebx, eax);
1517 __ jmp(&argument_is_string);
1518
1519 // Load the empty string into ebx, remove the receiver from the
1520 // stack, and jump back to the case where the argument is a string.
1521 __ bind(&no_arguments);
1522 __ Set(ebx, Immediate(factory->empty_string()));
1523 __ pop(ecx);
1524 __ lea(esp, Operand(esp, kPointerSize));
1525 __ push(ecx);
1526 __ jmp(&argument_is_string);
1527
1528 // At this point the argument is already a string. Call runtime to
1529 // create a string wrapper.
1530 __ bind(&gc_required);
1531 __ IncrementCounter(counters->string_ctor_gc_required(), 1);
1532 {
1533 FrameScope scope(masm, StackFrame::INTERNAL);
1534 __ push(ebx);
1535 __ CallRuntime(Runtime::kNewStringWrapper, 1);
1536 }
1537 __ ret(0);
1538 }
1539
1540
1541 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1542 __ push(ebp);
1543 __ mov(ebp, esp);
1544
1545 // Store the arguments adaptor context sentinel.
1546 __ push(Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1547
1548 // Push the function on the stack.
1549 __ push(edi);
1550
1551 // Preserve the number of arguments on the stack. Must preserve eax,
1552 // ebx and ecx because these registers are used when copying the
1553 // arguments and the receiver.
1554 STATIC_ASSERT(kSmiTagSize == 1);
1555 __ lea(edi, Operand(eax, eax, times_1, kSmiTag));
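// (The lea computes eax + eax, i.e. it smi-tags the count without touching
// eax, ebx or ecx.)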
1556 __ push(edi);
1557 }
1558
1559
1560 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1561 // Retrieve the number of arguments from the stack.
1562 __ mov(ebx, Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1563
1564 // Leave the frame.
1565 __ leave();
1566
1567 // Remove caller arguments from the stack.
1568 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
1569 __ pop(ecx);
1570 __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
1571 __ push(ecx);
1572 }
1573
1574
1575 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1576 // ----------- S t a t e -------------
1577 // -- eax : actual number of arguments
1578 // -- ebx : expected number of arguments
1579 // -- ecx : call kind information
1580 // -- edx : code entry to call
1581 // -----------------------------------
1582
1583 Label invoke, dont_adapt_arguments;
1584 __ IncrementCounter(masm->isolate()->counters()->arguments_adaptors(), 1);
1585
1586 Label enough, too_few;
1587 __ cmp(eax, ebx);
1588 __ j(less, &too_few);
1589 __ cmp(ebx, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
1590 __ j(equal, &dont_adapt_arguments);
1591
1592 { // Enough parameters: Actual >= expected.
1593 __ bind(&enough);
1594 EnterArgumentsAdaptorFrame(masm);
1595
1596 // Copy receiver and all expected arguments.
1597 const int offset = StandardFrameConstants::kCallerSPOffset;
1598 __ lea(eax, Operand(ebp, eax, times_4, offset));
1599 __ mov(edi, -1); // account for receiver
1600
1601 Label copy;
1602 __ bind(&copy);
1603 __ inc(edi);
1604 __ push(Operand(eax, 0));
1605 __ sub(eax, Immediate(kPointerSize));
1606 __ cmp(edi, ebx);
1607 __ j(less, &copy);
1608 __ jmp(&invoke);
1609 }
1610
1611 { // Too few parameters: Actual < expected.
1612 __ bind(&too_few);
1613 EnterArgumentsAdaptorFrame(masm);
1614
1615 // Copy receiver and all actual arguments.
1616 const int offset = StandardFrameConstants::kCallerSPOffset;
1617 __ lea(edi, Operand(ebp, eax, times_4, offset));
1618 // ebx = expected - actual.
1619 __ sub(ebx, eax);
1620 // eax = -actual - 1
1621 __ neg(eax);
1622 __ sub(eax, Immediate(1));
1623
1624 Label copy;
1625 __ bind(&copy);
1626 __ inc(eax);
1627 __ push(Operand(edi, 0));
1628 __ sub(edi, Immediate(kPointerSize));
1629 __ test(eax, eax);
1630 __ j(not_zero, &copy);
1631
1632 // Fill remaining expected arguments with undefined values.
1633 Label fill;
1634 __ bind(&fill);
1635 __ inc(eax);
1636 __ push(Immediate(masm->isolate()->factory()->undefined_value()));
1637 __ cmp(eax, ebx);
1638 __ j(less, &fill);
1639 }
1640
1641 // Call the entry point.
1642 __ bind(&invoke);
1643 // Restore function pointer.
1644 __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1645 __ call(edx);
1646
1647 // Store offset of return address for deoptimizer.
1648 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1649
1650 // Leave frame and return.
1651 LeaveArgumentsAdaptorFrame(masm);
1652 __ ret(0);
1653
1654 // -------------------------------------------
1655 // Dont adapt arguments.
1656 // -------------------------------------------
1657 __ bind(&dont_adapt_arguments);
1658 __ jmp(edx);
1659 }
1660
1661
1662 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1663 CpuFeatures::TryForceFeatureScope scope(SSE2);
1664 if (!CpuFeatures::IsSupported(SSE2) && FLAG_debug_code) {
1665 __ Abort("Unreachable code: Cannot optimize without SSE2 support.");
1666 return;
1667 }
1668
1669 // Get the loop depth of the stack guard check. This is recorded in
1670 // a test(eax, depth) instruction right after the call.
1671 Label stack_check;
1672 __ mov(ebx, Operand(esp, 0)); // return address
1673 if (FLAG_debug_code) {
1674 __ cmpb(Operand(ebx, 0), Assembler::kTestAlByte);
1675 __ Assert(equal, "test eax instruction not found after loop stack check");
1676 }
1677 __ movzx_b(ebx, Operand(ebx, 1)); // depth
1678
1679 // Get the loop nesting level at which we allow OSR from the
1680 // unoptimized code and check if we want to do OSR yet. If not we
1681 // should perform a stack guard check so we can get interrupts while
1682 // waiting for on-stack replacement.
1683 __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1684 __ mov(ecx, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
1685 __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kCodeOffset));
1686 __ cmpb(ebx, FieldOperand(ecx, Code::kAllowOSRAtLoopNestingLevelOffset));
1687 __ j(greater, &stack_check);
1688
1689 // Pass the function to optimize as the argument to the on-stack
1690 // replacement runtime function.
1691 {
1692 FrameScope scope(masm, StackFrame::INTERNAL);
1693 __ push(eax);
1694 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1695 }
1696
1697 // If the result was -1 it means that we couldn't optimize the
1698 // function. Just return and continue in the unoptimized version.
1699 Label skip;
1700 __ cmp(eax, Immediate(Smi::FromInt(-1)));
1701 __ j(not_equal, &skip, Label::kNear);
1702 __ ret(0);
1703
1704 // Insert a stack guard check so that if we decide not to perform
1705 // on-stack replacement right away, the function calling this stub can
1706 // still be interrupted.
1707 __ bind(&stack_check);
1708 Label ok;
1709 ExternalReference stack_limit =
1710 ExternalReference::address_of_stack_limit(masm->isolate());
1711 __ cmp(esp, Operand::StaticVariable(stack_limit));
1712 __ j(above_equal, &ok, Label::kNear);
1713 StackCheckStub stub;
1714 __ TailCallStub(&stub);
1715 if (FLAG_debug_code) {
1716 __ Abort("Unreachable code: returned from tail call.");
1717 }
1718 __ bind(&ok);
1719 __ ret(0);
1720
1721 __ bind(&skip);
1722 // Untag the AST id and push it on the stack.
1723 __ SmiUntag(eax);
1724 __ push(eax);
1725
1726 // Generate the code for doing the frame-to-frame translation using
1727 // the deoptimizer infrastructure.
1728 Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
1729 generator.Generate();
1730 }
1731
1732
1733 #undef __
1734 }
1735 } // namespace v8::internal
1736
1737 #endif // V8_TARGET_ARCH_IA32
1738