1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #if V8_TARGET_ARCH_X87
6
7 #include "src/code-factory.h"
8 #include "src/codegen.h"
9 #include "src/deoptimizer.h"
10 #include "src/full-codegen/full-codegen.h"
11 #include "src/x87/frames-x87.h"
12
13 namespace v8 {
14 namespace internal {
15
16 #define __ ACCESS_MASM(masm)
17
// Builds the trampoline that adapts a JS-convention call into a call to the
// C++ builtin at |address|: it inserts the Smi-tagged argument count, the
// target and new.target between the return address and the receiver before
// jumping to the external reference.
void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  // ----------- S t a t e -------------
  //  -- eax                : number of arguments excluding receiver
  //  -- edi                : target
  //  -- edx                : new.target
  //  -- esp[0]             : return address
  //  -- esp[4]             : last argument
  //  -- ...
  //  -- esp[4 * argc]      : first argument
  //  -- esp[4 * (argc +1)] : receiver
  // -----------------------------------
  __ AssertFunction(edi);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // JumpToExternalReference expects eax to contain the number of arguments
  // including the receiver and the extra arguments.
  const int num_extra_args = 3;
  __ add(eax, Immediate(num_extra_args + 1));

  // Insert extra arguments.
  __ PopReturnAddressTo(ecx);
  // Push the argument count as a Smi while keeping eax itself untagged for
  // JumpToExternalReference below.
  __ SmiTag(eax);
  __ Push(eax);
  __ SmiUntag(eax);
  __ Push(edi);
  __ Push(edx);
  __ PushReturnAddressFrom(ecx);

  __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
                             exit_frame_type == BUILTIN_EXIT);
}
55
// Calls the runtime function |function_id| (which takes the target function
// as its single argument and must return a Code object) and then tail-calls
// into the returned code. eax/edx/edi are saved around the runtime call so
// the callee observes them unchanged.
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- eax : argument count (preserved for callee)
  //  -- edx : new target (preserved for callee)
  //  -- edi : target function (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    __ SmiTag(eax);
    __ push(eax);
    // Push a copy of the target function and the new target.
    __ push(edi);
    __ push(edx);
    // Function is also the parameter to the runtime call.
    __ push(edi);

    __ CallRuntime(function_id, 1);
    // Stash the returned Code object in ebx so eax/edx/edi can be restored.
    __ mov(ebx, eax);

    // Restore target function and new target.
    __ pop(edx);
    __ pop(edi);
    __ pop(eax);
    __ SmiUntag(eax);
  }

  // Skip the Code object header to reach the first instruction.
  __ lea(ebx, FieldOperand(ebx, Code::kHeaderSize));
  __ jmp(ebx);
}
87
// Tail-calls the code currently attached to the SharedFunctionInfo of the
// function in edi. Clobbers ebx.
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kCodeOffset));
  // Skip the Code object header to reach the first instruction.
  __ lea(ebx, FieldOperand(ebx, Code::kHeaderSize));
  __ jmp(ebx);
}
94
// Entry used while a function sits in the optimization queue: tries to
// install already-finished optimized code, otherwise falls back to the
// shared (unoptimized) code. Both paths tail-call and do not return here.
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  ExternalReference stack_limit =
      ExternalReference::address_of_stack_limit(masm->isolate());
  __ cmp(esp, Operand::StaticVariable(stack_limit));
  __ j(above_equal, &ok, Label::kNear);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
112
113 namespace {
114
// Shared body for the JSConstructStub builtins.
//   is_api_function          - suppresses recording of the construct-stub
//                              deopt pc offset (see below).
//   create_implicit_receiver - allocate a receiver object and substitute it
//                              for non-object constructor results.
//   check_derived_construct  - throw if the (derived-constructor) result is
//                              a Smi (ES6 9.2.2 step 13+).
void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
                                    bool create_implicit_receiver,
                                    bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- eax: number of arguments
  //  -- esi: context
  //  -- edi: constructor function
  //  -- edx: new target
  // -----------------------------------

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ SmiTag(eax);
    __ push(esi);
    __ push(eax);

    if (create_implicit_receiver) {
      // Allocate the new receiver object.
      __ Push(edi);
      __ Push(edx);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mov(ebx, eax);
      __ Pop(edx);
      __ Pop(edi);

      // ----------- S t a t e -------------
      //  -- edi: constructor function
      //  -- ebx: newly allocated object
      //  -- edx: new target
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ mov(eax, Operand(esp, 0));
    }

    __ SmiUntag(eax);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ push(ebx);
      __ push(ebx);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ lea(ebx, Operand(ebp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // Counts down from eax (argument count) to 0, so the arguments end up
    // in caller order on the expression stack.
    Label loop, entry;
    __ mov(ecx, eax);
    __ jmp(&entry);
    __ bind(&loop);
    __ push(Operand(ebx, ecx, times_4, 0));
    __ bind(&entry);
    __ dec(ecx);
    __ j(greater_equal, &loop);

    // Call the function.
    ParameterCount actual(eax);
    __ InvokeFunction(edi, edx, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      __ JumpIfSmi(eax, &use_receiver, Label::kNear);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
      __ j(above_equal, &exit, Label::kNear);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ mov(eax, Operand(esp, 0));

      // Restore the arguments count and leave the construct frame. The
      // arguments count is stored below the receiver.
      __ bind(&exit);
      __ mov(ebx, Operand(esp, 1 * kPointerSize));
    } else {
      __ mov(ebx, Operand(esp, 0));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(eax, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  // Remove caller arguments from the stack and return.
  // ebx holds the Smi-tagged argument count; times_2 applied to a Smi is a
  // times_4 (kPointerSize) scaling of the untagged count.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ pop(ecx);
  __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize));  // 1 ~ receiver
  __ push(ecx);
  if (create_implicit_receiver) {
    __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
  }
  __ ret(0);
}
245
246 } // namespace
247
Generate_JSConstructStubGeneric(MacroAssembler * masm)248 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
249 Generate_JSConstructStubHelper(masm, false, true, false);
250 }
251
Generate_JSConstructStubApi(MacroAssembler * masm)252 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
253 Generate_JSConstructStubHelper(masm, true, false, false);
254 }
255
Generate_JSBuiltinsConstructStub(MacroAssembler * masm)256 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
257 Generate_JSConstructStubHelper(masm, false, false, false);
258 }
259
Generate_JSBuiltinsConstructStubForDerived(MacroAssembler * masm)260 void Builtins::Generate_JSBuiltinsConstructStubForDerived(
261 MacroAssembler* masm) {
262 Generate_JSConstructStubHelper(masm, false, false, true);
263 }
264
// Entered when [[Construct]] is applied to a non-constructable target;
// throws a TypeError via the runtime. The runtime call does not return.
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ push(edi);  // The target is the runtime call's argument.
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
270
271 enum IsTagged { kEaxIsSmiTagged, kEaxIsUntaggedInt };
272
// Throws Runtime::kThrowStackOverflow unless there is room on the stack for
// the eax items about to be pushed.
// Clobbers ecx, edx, edi; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm,
                                        IsTagged eax_is_tagged) {
  // eax : the number of items to be pushed to the stack
  //
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  ExternalReference real_stack_limit =
      ExternalReference::address_of_real_stack_limit(masm->isolate());
  __ mov(edi, Operand::StaticVariable(real_stack_limit));
  // Make ecx the space we have left. The stack might already be overflowed
  // here which will cause ecx to become negative.
  __ mov(ecx, esp);
  __ sub(ecx, edi);
  // Make edx the space we need for the array when it is unrolled onto the
  // stack.
  __ mov(edx, eax);
  // A Smi-tagged count is already shifted left by kSmiTagSize, so shift by
  // that much less to get count * kPointerSize.
  int smi_tag = eax_is_tagged == kEaxIsSmiTagged ? kSmiTagSize : 0;
  __ shl(edx, kPointerSizeLog2 - smi_tag);
  // Check if the arguments will overflow the stack.
  __ cmp(ecx, edx);
  __ j(greater, &okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
303
// Shared body of the JS entry trampolines: builds an internal frame, copies
// the C-side argv (an array of handles) onto the stack, and invokes either
// the Call or the Construct builtin depending on |is_construct|.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ mov(esi, Operand::StaticVariable(context_address));

    // Load the previous frame pointer (ebx) to access C arguments
    __ mov(ebx, Operand(ebp, 0));

    // Push the function and the receiver onto the stack.
    __ push(Operand(ebx, EntryFrameConstants::kFunctionArgOffset));
    __ push(Operand(ebx, EntryFrameConstants::kReceiverArgOffset));

    // Load the number of arguments and setup pointer to the arguments.
    __ mov(eax, Operand(ebx, EntryFrameConstants::kArgcOffset));
    __ mov(ebx, Operand(ebx, EntryFrameConstants::kArgvOffset));

    // Check if we have enough stack space to push all arguments.
    // Expects argument count in eax. Clobbers ecx, edx, edi.
    Generate_CheckStackOverflow(masm, kEaxIsUntaggedInt);

    // Copy arguments to the stack in a loop.
    Label loop, entry;
    __ Move(ecx, Immediate(0));
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    __ mov(edx, Operand(ebx, ecx, times_4, 0));  // push parameter from argv
    __ push(Operand(edx, 0));                    // dereference handle
    __ inc(ecx);
    __ bind(&entry);
    __ cmp(ecx, eax);
    __ j(not_equal, &loop);

    // Load the previous frame pointer (ebx) to access C arguments
    // (ebx was clobbered above while walking argv).
    __ mov(ebx, Operand(ebp, 0));

    // Get the new.target and function from the frame.
    __ mov(edx, Operand(ebx, EntryFrameConstants::kNewTargetArgOffset));
    __ mov(edi, Operand(ebx, EntryFrameConstants::kFunctionArgOffset));

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code
    // invocation.
  }
  __ ret(kPointerSize);  // Remove receiver.
}
362
Generate_JSEntryTrampoline(MacroAssembler * masm)363 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
364 Generate_JSEntryTrampolineHelper(masm, false);
365 }
366
Generate_JSConstructEntryTrampoline(MacroAssembler * masm)367 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
368 Generate_JSEntryTrampolineHelper(masm, true);
369 }
370
371 // static
Generate_ResumeGeneratorTrampoline(MacroAssembler * masm)372 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
373 // ----------- S t a t e -------------
374 // -- eax : the value to pass to the generator
375 // -- ebx : the JSGeneratorObject to resume
376 // -- edx : the resume mode (tagged)
377 // -- esp[0] : return address
378 // -----------------------------------
379 __ AssertGeneratorObject(ebx);
380
381 // Store input value into generator object.
382 __ mov(FieldOperand(ebx, JSGeneratorObject::kInputOrDebugPosOffset), eax);
383 __ RecordWriteField(ebx, JSGeneratorObject::kInputOrDebugPosOffset, eax, ecx,
384 kDontSaveFPRegs);
385
386 // Store resume mode into generator object.
387 __ mov(FieldOperand(ebx, JSGeneratorObject::kResumeModeOffset), edx);
388
389 // Load suspended function and context.
390 __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
391 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
392
393 // Flood function if we are stepping.
394 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
395 Label stepping_prepared;
396 ExternalReference last_step_action =
397 ExternalReference::debug_last_step_action_address(masm->isolate());
398 STATIC_ASSERT(StepFrame > StepIn);
399 __ cmpb(Operand::StaticVariable(last_step_action), Immediate(StepIn));
400 __ j(greater_equal, &prepare_step_in_if_stepping);
401
402 // Flood function if we need to continue stepping in the suspended generator.
403 ExternalReference debug_suspended_generator =
404 ExternalReference::debug_suspended_generator_address(masm->isolate());
405 __ cmp(ebx, Operand::StaticVariable(debug_suspended_generator));
406 __ j(equal, &prepare_step_in_suspended_generator);
407 __ bind(&stepping_prepared);
408
409 // Pop return address.
410 __ PopReturnAddressTo(eax);
411
412 // Push receiver.
413 __ Push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));
414
415 // ----------- S t a t e -------------
416 // -- eax : return address
417 // -- ebx : the JSGeneratorObject to resume
418 // -- edx : the resume mode (tagged)
419 // -- edi : generator function
420 // -- esi : generator context
421 // -- esp[0] : generator receiver
422 // -----------------------------------
423
424 // Push holes for arguments to generator function. Since the parser forced
425 // context allocation for any variables in generators, the actual argument
426 // values have already been copied into the context and these dummy values
427 // will never be used.
428 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
429 __ mov(ecx,
430 FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
431 {
432 Label done_loop, loop;
433 __ bind(&loop);
434 __ sub(ecx, Immediate(Smi::FromInt(1)));
435 __ j(carry, &done_loop, Label::kNear);
436 __ PushRoot(Heap::kTheHoleValueRootIndex);
437 __ jmp(&loop);
438 __ bind(&done_loop);
439 }
440
441 // Dispatch on the kind of generator object.
442 Label old_generator;
443 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
444 __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kFunctionDataOffset));
445 __ CmpObjectType(ecx, BYTECODE_ARRAY_TYPE, ecx);
446 __ j(not_equal, &old_generator);
447
448 // New-style (ignition/turbofan) generator object
449 {
450 __ PushReturnAddressFrom(eax);
451 __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
452 __ mov(eax,
453 FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
454 // We abuse new.target both to indicate that this is a resume call and to
455 // pass in the generator object. In ordinary calls, new.target is always
456 // undefined because generator functions are non-constructable.
457 __ mov(edx, ebx);
458 __ jmp(FieldOperand(edi, JSFunction::kCodeEntryOffset));
459 }
460
461 // Old-style (full-codegen) generator object
462 __ bind(&old_generator);
463 {
464 // Enter a new JavaScript frame, and initialize its slots as they were when
465 // the generator was suspended.
466 FrameScope scope(masm, StackFrame::MANUAL);
467 __ PushReturnAddressFrom(eax); // Return address.
468 __ Push(ebp); // Caller's frame pointer.
469 __ Move(ebp, esp);
470 __ Push(esi); // Callee's context.
471 __ Push(edi); // Callee's JS Function.
472
473 // Restore the operand stack.
474 __ mov(eax, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
475 {
476 Label done_loop, loop;
477 __ Move(ecx, Smi::kZero);
478 __ bind(&loop);
479 __ cmp(ecx, FieldOperand(eax, FixedArray::kLengthOffset));
480 __ j(equal, &done_loop, Label::kNear);
481 __ Push(FieldOperand(eax, ecx, times_half_pointer_size,
482 FixedArray::kHeaderSize));
483 __ add(ecx, Immediate(Smi::FromInt(1)));
484 __ jmp(&loop);
485 __ bind(&done_loop);
486 }
487
488 // Reset operand stack so we don't leak.
489 __ mov(FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset),
490 Immediate(masm->isolate()->factory()->empty_fixed_array()));
491
492 // Resume the generator function at the continuation.
493 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
494 __ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
495 __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
496 __ SmiUntag(ecx);
497 __ lea(edx, FieldOperand(edx, ecx, times_1, Code::kHeaderSize));
498 __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
499 Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
500 __ mov(eax, ebx); // Continuation expects generator object in eax.
501 __ jmp(edx);
502 }
503
504 __ bind(&prepare_step_in_if_stepping);
505 {
506 FrameScope scope(masm, StackFrame::INTERNAL);
507 __ Push(ebx);
508 __ Push(edx);
509 __ Push(edi);
510 __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
511 __ Pop(edx);
512 __ Pop(ebx);
513 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
514 }
515 __ jmp(&stepping_prepared);
516
517 __ bind(&prepare_step_in_suspended_generator);
518 {
519 FrameScope scope(masm, StackFrame::INTERNAL);
520 __ Push(ebx);
521 __ Push(edx);
522 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
523 __ Pop(edx);
524 __ Pop(ebx);
525 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
526 }
527 __ jmp(&stepping_prepared);
528 }
529
// Tears down the interpreter frame and drops the receiver plus arguments
// from the caller's stack. Clobbers |scratch1| and |scratch2|; the return
// value in eax is left untouched.
static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
                                  Register scratch2) {
  Register args_count = scratch1;
  Register return_pc = scratch2;

  // Get the arguments + receiver count (in bytes, from the bytecode array's
  // parameter size).
  __ mov(args_count,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ mov(args_count,
         FieldOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ leave();

  // Drop receiver + arguments: lift the return address, bump esp past the
  // arguments, then put the return address back.
  __ pop(return_pc);
  __ add(esp, args_count);
  __ push(return_pc);
}
549
550 // Generate code for entering a JS function with the interpreter.
551 // On entry to the function the receiver and arguments have been pushed on the
552 // stack left to right. The actual argument count matches the formal parameter
553 // count expected by the function.
554 //
555 // The live registers are:
556 // o edi: the JS function object being called
557 // o edx: the new target
558 // o esi: our context
559 // o ebp: the caller's frame pointer
560 // o esp: stack pointer (pointing to return address)
561 //
562 // The function builds an interpreter frame. See InterpreterFrameConstants in
563 // frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS function.
  __ push(edx);  // Callee's new target.

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  Label load_debug_bytecode_array, bytecode_array_loaded;
  __ cmp(FieldOperand(eax, SharedFunctionInfo::kDebugInfoOffset),
         Immediate(DebugInfo::uninitialized()));
  __ j(not_equal, &load_debug_bytecode_array);
  __ mov(kInterpreterBytecodeArrayRegister,
         FieldOperand(eax, SharedFunctionInfo::kFunctionDataOffset));
  __ bind(&bytecode_array_loaded);

  // Check whether we should continue to use the interpreter.
  Label switch_to_different_code_kind;
  __ Move(ecx, masm->CodeObject());  // Self-reference to this code.
  __ cmp(ecx, FieldOperand(eax, SharedFunctionInfo::kCodeOffset));
  __ j(not_equal, &switch_to_different_code_kind);

  // Increment invocation count for the function.
  __ EmitLoadTypeFeedbackVector(ecx);
  __ add(FieldOperand(ecx,
                      TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
                          TypeFeedbackVector::kHeaderSize),
         Immediate(Smi::FromInt(1)));

  // Check function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     eax);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Push bytecode array.
  __ push(kInterpreterBytecodeArrayRegister);
  // Push Smi tagged initial bytecode array offset.
  __ push(Immediate(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag)));

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ mov(ebx, FieldOperand(kInterpreterBytecodeArrayRegister,
                             BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ mov(ecx, esp);
    __ sub(ecx, ebx);
    ExternalReference stack_limit =
        ExternalReference::address_of_real_stack_limit(masm->isolate());
    __ cmp(ecx, Operand::StaticVariable(stack_limit));
    __ j(above_equal, &ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ mov(eax, Immediate(masm->isolate()->factory()->undefined_value()));
    __ jmp(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(eax);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ sub(ebx, Immediate(kPointerSize));
    __ j(greater_equal, &loop_header);
  }

  // Load accumulator, bytecode offset and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ mov(kInterpreterBytecodeOffsetRegister,
         Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ mov(kInterpreterDispatchTableRegister,
         Immediate(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function: load the first
  // bytecode, index into the dispatch table with it, and call the handler.
  __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ mov(ebx, Operand(kInterpreterDispatchTableRegister, ebx,
                      times_pointer_size, 0));
  __ call(ebx);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in eax.
  LeaveInterpreterFrame(masm, ebx, ecx);
  __ ret(0);

  // Load debug copy of the bytecode array.
  __ bind(&load_debug_bytecode_array);
  Register debug_info = kInterpreterBytecodeArrayRegister;
  __ mov(debug_info, FieldOperand(eax, SharedFunctionInfo::kDebugInfoOffset));
  __ mov(kInterpreterBytecodeArrayRegister,
         FieldOperand(debug_info, DebugInfo::kDebugBytecodeArrayIndex));
  __ jmp(&bytecode_array_loaded);

  // If the shared code is no longer this entry trampoline, then the underlying
  // function has been switched to a different kind of code and we heal the
  // closure by switching the code entry field over to the new code as well.
  __ bind(&switch_to_different_code_kind);
  __ pop(edx);  // Callee's new target.
  __ pop(edi);  // Callee's JS function.
  __ pop(esi);  // Callee's context.
  __ leave();   // Leave the frame so we can tail call.
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kCodeOffset));
  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
  __ mov(FieldOperand(edi, JSFunction::kCodeEntryOffset), ecx);
  __ RecordWriteCodeEntryField(edi, ecx, ebx);
  __ jmp(ecx);
}
688
// Jumps to |stack_overflow| if there is not enough room on the stack for
// |num_args| (plus one more slot when |include_receiver| is set)
// pointer-sized values. Clobbers |scratch1| and |scratch2|.
static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
                                        Register scratch1, Register scratch2,
                                        Label* stack_overflow,
                                        bool include_receiver = false) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  ExternalReference real_stack_limit =
      ExternalReference::address_of_real_stack_limit(masm->isolate());
  __ mov(scratch1, Operand::StaticVariable(real_stack_limit));
  // Make scratch2 the space we have left. The stack might already be overflowed
  // here which will cause scratch2 to become negative.
  __ mov(scratch2, esp);
  __ sub(scratch2, scratch1);
  // Make scratch1 the space we need for the array when it is unrolled onto the
  // stack.
  __ mov(scratch1, num_args);
  if (include_receiver) {
    __ add(scratch1, Immediate(1));
  }
  __ shl(scratch1, kPointerSizeLog2);
  // Check if the arguments will overflow the stack.
  __ cmp(scratch2, scratch1);
  __ j(less_equal, stack_overflow);  // Signed comparison.
}
714
// Pushes every slot from start_address down to (but excluding) array_limit
// onto the stack. Modifies start_address.
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register array_limit,
                                         Register start_address) {
  // ----------- S t a t e -------------
  //  -- start_address : Pointer to the last argument in the args array.
  //  -- array_limit : Pointer to one before the first argument in the
  //                   args array.
  // -----------------------------------
  Label loop_header, loop_check;
  __ jmp(&loop_check);
  __ bind(&loop_header);
  __ Push(Operand(start_address, 0));
  __ sub(start_address, Immediate(kPointerSize));
  __ bind(&loop_check);
  __ cmp(start_address, array_limit);
  __ j(greater, &loop_header, Label::kNear);
}
732
733 // static
// Pushes the interpreter-register arguments onto the machine stack and
// tail-calls the Call (or CallFunction) builtin.
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode,
    CallableType function_type) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- ebx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  //  -- edi : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;
  // Compute the expected number of arguments.
  __ mov(ecx, eax);
  __ add(ecx, Immediate(1));  // Add one for receiver.

  // Add a stack check before pushing the arguments. We need an extra register
  // to perform a stack check. So push it onto the stack temporarily. This
  // might cause stack overflow, but it will be detected by the check.
  __ Push(edi);
  Generate_StackOverflowCheck(masm, ecx, edx, edi, &stack_overflow);
  __ Pop(edi);

  // Pop return address to allow tail-call after pushing arguments.
  __ Pop(edx);

  // Find the address of the last argument: ecx = ebx - (argc + 1) * 4, the
  // loop limit (one slot below the last argument) for the push loop.
  __ shl(ecx, kPointerSizeLog2);
  __ neg(ecx);
  __ add(ecx, ebx);
  Generate_InterpreterPushArgs(masm, ecx, ebx);

  // Call the target.
  __ Push(edx);  // Re-push return address.

  if (function_type == CallableType::kJSFunction) {
    __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                                      tail_call_mode),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(function_type, CallableType::kAny);
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                              tail_call_mode),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    // Pop the temporary registers, so that return address is on top of stack.
    __ Pop(edi);

    __ TailCallRuntime(Runtime::kThrowStackOverflow);

    // This should be unreachable.
    __ int3();
  }
}
790
791 namespace {
792
793 // This function modified start_addr, and only reads the contents of num_args
794 // register. scratch1 and scratch2 are used as temporary registers. Their
795 // original values are restored after the use.
void Generate_InterpreterPushArgsAndReturnAddress(
    MacroAssembler* masm, Register num_args, Register start_addr,
    Register scratch1, Register scratch2, bool receiver_in_args,
    int num_slots_above_ret_addr, Label* stack_overflow) {
  // We have to move return address and the temporary registers above it
  // before we can copy arguments onto the stack. To achieve this:
  // Step 1: Increment the stack pointer by num_args + 1 (for receiver).
  // Step 2: Move the return address and values above it to the top of stack.
  // Step 3: Copy the arguments into the correct locations.
  //  current stack    =====>    required stack layout
  // |             |            | scratch1      | (2) <-- esp(1)
  // |             |            | ....          | (2)
  // |             |            | scratch-n     | (2)
  // |             |            | return addr   | (2)
  // |             |            | arg N         | (3)
  // | scratch1    | <-- esp    | ....          |
  // | ....        |            | arg 0         |
  // | scratch-n   |            | arg 0         |
  // | return addr |            | receiver slot |

  // Check for stack overflow before we increment the stack pointer.
  Generate_StackOverflowCheck(masm, num_args, scratch1, scratch2,
                              stack_overflow, true);

  // Step 1 - Update the stack pointer. scratch1 already contains the required
  // increment to the stack. i.e. num_args + 1 stack slots. This is computed in
  // the Generate_StackOverflowCheck.

#ifdef _MSC_VER
  // TODO(mythria): Move it to macro assembler.
  // In windows, we cannot increment the stack size by more than one page
  // (minimum page size is 4KB) without accessing at least one byte on the
  // page. Check this:
  // https://msdn.microsoft.com/en-us/library/aa227153(v=vs.60).aspx.
  const int page_size = 4 * 1024;
  Label check_offset, update_stack_pointer;
  __ bind(&check_offset);
  __ cmp(scratch1, page_size);
  __ j(less, &update_stack_pointer);
  __ sub(esp, Immediate(page_size));
  // Just to touch the page, before we increment further.
  __ mov(Operand(esp, 0), Immediate(0));
  __ sub(scratch1, Immediate(page_size));
  __ jmp(&check_offset);
  __ bind(&update_stack_pointer);
#endif

  __ sub(esp, scratch1);

  // Step 2 move return_address and slots above it to the correct locations.
  // Move from top to bottom, otherwise we may overwrite when num_args = 0 or 1,
  // basically when the source and destination overlap. We at least need one
  // extra slot for receiver, so no extra checks are required to avoid copy.
  for (int i = 0; i < num_slots_above_ret_addr + 1; i++) {
    __ mov(scratch1,
           Operand(esp, num_args, times_pointer_size, (i + 1) * kPointerSize));
    __ mov(Operand(esp, i * kPointerSize), scratch1);
  }

  // Step 3 copy arguments to correct locations.
  if (receiver_in_args) {
    // The receiver is part of the argument run, so num_args + 1 slots are
    // copied by the loop below.
    __ mov(scratch1, num_args);
    __ add(scratch1, Immediate(1));
  } else {
    // Slot meant for receiver contains return address. Reset it so that
    // we will not incorrectly interpret return address as an object.
    __ mov(Operand(esp, num_args, times_pointer_size,
                   (num_slots_above_ret_addr + 1) * kPointerSize),
           Immediate(0));
    __ mov(scratch1, num_args);
  }

  // Copy slots downwards from start_addr: scratch1 counts the remaining
  // slots, scratch2 carries the value being copied.
  Label loop_header, loop_check;
  __ jmp(&loop_check);
  __ bind(&loop_header);
  __ mov(scratch2, Operand(start_addr, 0));
  __ mov(Operand(esp, scratch1, times_pointer_size,
                 num_slots_above_ret_addr * kPointerSize),
         scratch2);
  __ sub(start_addr, Immediate(kPointerSize));
  __ sub(scratch1, Immediate(1));
  __ bind(&loop_check);
  __ cmp(scratch1, Immediate(0));
  __ j(greater, &loop_header, Label::kNear);
}
881
882 } // end anonymous namespace
883
884 // static
void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
    MacroAssembler* masm, CallableType construct_type) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the new target
  //  -- edi : the constructor
  //  -- ebx : allocation site feedback (if available or undefined)
  //  -- ecx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  // -----------------------------------
  Label stack_overflow;
  // We need two scratch registers. Push edi and edx onto stack.
  __ Push(edi);
  __ Push(edx);

  // Push arguments and move return address to the top of stack.
  // The eax register is readonly. The ecx register will be modified. The edx
  // and edi registers will be modified but restored to their original values.
  // receiver_in_args == false: the helper zeroes the receiver slot. Two slots
  // (the pushed edi and edx) currently sit above the return address.
  Generate_InterpreterPushArgsAndReturnAddress(masm, eax, ecx, edx, edi, false,
                                               2, &stack_overflow);

  // Restore edi and edx
  __ Pop(edx);
  __ Pop(edi);

  __ AssertUndefinedOrAllocationSite(ebx);
  if (construct_type == CallableType::kJSFunction) {
    // Tail call to the function-specific construct stub (still in the caller
    // context at this point).
    __ AssertFunction(edi);

    __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
    // Skip past the Code object header to the first instruction.
    __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
    __ jmp(ecx);
  } else {
    DCHECK_EQ(construct_type, CallableType::kAny);

    // Call the constructor with unmodified eax, edi, edx values.
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    // Pop the temporary registers, so that return address is on top of stack.
    __ Pop(edx);
    __ Pop(edi);

    __ TailCallRuntime(Runtime::kThrowStackOverflow);

    // This should be unreachable.
    __ int3();
  }
}
940
941 // static
void Builtins::Generate_InterpreterPushArgsAndConstructArray(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the target to call checked to be Array function.
  //  -- ebx : the allocation site feedback
  //  -- ecx : the address of the first argument to be pushed. Subsequent
  //           arguments should be consecutive above this, in the same order as
  //           they are to be pushed onto the stack.
  // -----------------------------------
  Label stack_overflow;
  // We need two scratch registers. Register edi is available, push edx onto
  // stack.
  __ Push(edx);

  // Push arguments and move return address to the top of stack.
  // The eax register is readonly. The ecx register will be modified. The edx
  // and edi registers will be modified but restored to their original values.
  // receiver_in_args == true: the receiver is part of the copied argument
  // run. One slot (the pushed edx) currently sits above the return address.
  Generate_InterpreterPushArgsAndReturnAddress(masm, eax, ecx, edx, edi, true,
                                               1, &stack_overflow);

  // Restore edx.
  __ Pop(edx);

  // Array constructor expects constructor in edi. It is same as edx here.
  __ Move(edi, edx);

  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);

  __ bind(&stack_overflow);
  {
    // Pop the temporary registers, so that return address is on top of stack.
    __ Pop(edx);

    __ TailCallRuntime(Runtime::kThrowStackOverflow);

    // This should be unreachable.
    __ int3();
  }
}
983
// Enters an already-set-up interpreter frame and dispatches to the bytecode
// at the frame's current offset.
static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
  __ LoadHeapObject(ebx,
                    masm->isolate()->builtins()->InterpreterEntryTrampoline());
  // Compute the absolute instruction address inside the trampoline and push
  // it as the return address for the bytecode handler.
  __ add(ebx, Immediate(interpreter_entry_return_pc_offset->value() +
                        Code::kHeaderSize - kHeapObjectTag));
  __ push(ebx);

  // Initialize the dispatch table register.
  __ mov(kInterpreterDispatchTableRegister,
         Immediate(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ mov(kInterpreterBytecodeArrayRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     ebx);
    __ Assert(equal, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode: load the one-byte bytecode, then jump
  // through the dispatch table entry for it.
  __ movzx_b(ebx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ mov(ebx, Operand(kInterpreterDispatchTableRegister, ebx,
                      times_pointer_size, 0));
  __ jmp(ebx);
}
1025
void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Advance the current bytecode offset stored within the given interpreter
  // stack frame. This simulates what all bytecode handlers do upon completion
  // of the underlying operation.
  __ mov(ebx, Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ mov(edx, Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // The accumulator must survive the runtime call.
    __ Push(kInterpreterAccumulatorRegister);
    __ Push(ebx);  // First argument is the bytecode array.
    __ Push(edx);  // Second argument is the bytecode offset.
    __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset);
    __ Move(edx, eax);  // Result is the new bytecode offset.
    __ Pop(kInterpreterAccumulatorRegister);
  }
  // Write the advanced offset back into the frame, then re-dispatch.
  __ mov(Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp), edx);

  Generate_InterpreterEnterBytecode(masm);
}
1046
// Dispatches directly to the current bytecode in the interpreter frame
// without advancing the offset.
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}
1050
// Tries to install cached optimized code (and literals) for the closure
// before falling back to the runtime lazy compiler.
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argument count (preserved for callee)
  //  -- edx : new target (preserved for callee)
  //  -- edi : target function (preserved for callee)
  // -----------------------------------
  // First lookup code, maybe we don't need to compile!
  Label gotta_call_runtime, gotta_call_runtime_no_stack;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register closure = edi;
  Register new_target = edx;
  Register argument_count = eax;

  // Spill the incoming registers so they can be reused as scratch while
  // remaining restorable for the callee.
  __ push(argument_count);
  __ push(new_target);
  __ push(closure);

  Register map = argument_count;
  Register index = ebx;
  __ mov(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ mov(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ mov(index, FieldOperand(map, FixedArray::kLengthOffset));
  // A code map shorter than 2 elements has no entries to search.
  __ cmp(index, Immediate(Smi::FromInt(2)));
  __ j(less, &gotta_call_runtime);

  // Find literals.
  // edx : native context
  // ebx : length / index
  // eax : optimized code map
  // stack[0] : new target
  // stack[4] : closure
  Register native_context = edx;
  __ mov(native_context, NativeContextOperand());

  __ bind(&loop_top);
  Register temp = edi;

  // Does the native context match?
  __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
                            SharedFunctionInfo::kOffsetToPreviousContext));
  __ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
  __ cmp(temp, native_context);
  __ j(not_equal, &loop_bottom);
  // OSR id set to none?
  __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
                            SharedFunctionInfo::kOffsetToPreviousOsrAstId));
  const int bailout_id = BailoutId::None().ToInt();
  __ cmp(temp, Immediate(Smi::FromInt(bailout_id)));
  __ j(not_equal, &loop_bottom);
  // Literals available?
  __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
                            SharedFunctionInfo::kOffsetToPreviousLiterals));
  __ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
  // A Smi here means the weak cell was cleared: no literals.
  __ JumpIfSmi(temp, &gotta_call_runtime);

  // Save the literals in the closure.
  __ mov(ecx, Operand(esp, 0));  // Reload the closure from the stack.
  __ mov(FieldOperand(ecx, JSFunction::kLiteralsOffset), temp);
  // index must survive the write barrier; save/restore it around the call.
  __ push(index);
  __ RecordWriteField(ecx, JSFunction::kLiteralsOffset, temp, index,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  __ pop(index);

  // Code available?
  Register entry = ecx;
  __ mov(entry, FieldOperand(map, index, times_half_pointer_size,
                             SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ mov(entry, FieldOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Found literals and code. Get them into the closure and return.
  __ pop(closure);
  // Store code entry in the closure.
  __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
  __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
  __ RecordWriteCodeEntryField(closure, entry, eax);

  // Link the closure into the optimized function list.
  // ecx : code entry
  // edx : native context
  // edi : closure
  __ mov(ebx,
         ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ mov(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), ebx);
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, ebx, eax,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ mov(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST),
         closure);
  // Save closure before the write barrier.
  __ mov(ebx, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, closure, eax,
                            kDontSaveFPRegs);
  __ mov(closure, ebx);
  __ pop(new_target);
  __ pop(argument_count);
  // Enter the cached optimized code.
  __ jmp(entry);

  __ bind(&loop_bottom);
  // Step backwards to the previous map entry (kEntryLength slots each).
  __ sub(index, Immediate(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
  __ cmp(index, Immediate(Smi::FromInt(1)));
  __ j(greater, &loop_top);

  // We found neither literals nor code.
  __ jmp(&gotta_call_runtime);

  __ bind(&try_shared);
  __ pop(closure);
  __ pop(new_target);
  __ pop(argument_count);
  __ mov(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  // Is the shared function marked for tier up?
  __ test_b(FieldOperand(entry, SharedFunctionInfo::kMarkedForTierUpByteOffset),
            Immediate(1 << SharedFunctionInfo::kMarkedForTierUpBitWithinByte));
  __ j(not_zero, &gotta_call_runtime_no_stack);
  // Is the full code valid?
  __ mov(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
  __ mov(ebx, FieldOperand(entry, Code::kFlagsOffset));
  __ and_(ebx, Code::KindField::kMask);
  __ shr(ebx, Code::KindField::kShift);
  // NOTE(review): BUILTIN code is rejected here — presumably the SFI still
  // holds the lazy-compile builtin itself; confirm against Code::Kind.
  __ cmp(ebx, Immediate(Code::BUILTIN));
  __ j(equal, &gotta_call_runtime_no_stack);
  // Yes, install the full code.
  __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
  __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
  __ RecordWriteCodeEntryField(closure, entry, ebx);
  __ jmp(entry);

  __ bind(&gotta_call_runtime);
  __ pop(closure);
  __ pop(new_target);
  __ pop(argument_count);
  __ bind(&gotta_call_runtime_no_stack);

  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
1190
// Compiles the function via the baseline runtime entry, then tail-calls the
// code that the runtime returns.
void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}
1194
// Triggers non-concurrent optimizing compilation via the runtime, then
// tail-calls the returned code.
void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}
1199
// Triggers concurrent optimizing compilation via the runtime, then tail-calls
// the returned code.
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}
1203
// Attempts to instantiate the function as an asm.js module via the runtime;
// on failure, falls back to regular lazy compilation.
void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argument count (preserved for callee)
  //  -- edx : new target (preserved for callee)
  //  -- edi : target function (preserved for callee)
  // -----------------------------------
  Label failed;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve argument count for later compare.
    __ mov(ecx, eax);
    // Push the number of arguments to the callee.
    __ SmiTag(eax);
    __ push(eax);
    // Push a copy of the target function and the new target.
    __ push(edi);
    __ push(edx);

    // The function.
    __ push(edi);
    // Copy arguments from caller (stdlib, foreign, heap).
    // For each possible count j in {0,1,2,3}: push the j caller-provided
    // arguments, then pad the remaining (3 - j) slots with undefined.
    Label args_done;
    for (int j = 0; j < 4; ++j) {
      Label over;
      if (j < 3) {
        __ cmp(ecx, Immediate(j));
        __ j(not_equal, &over, Label::kNear);
      }
      for (int i = j - 1; i >= 0; --i) {
        __ Push(Operand(
            ebp, StandardFrameConstants::kCallerSPOffset + i * kPointerSize));
      }
      for (int i = 0; i < 3 - j; ++i) {
        __ PushRoot(Heap::kUndefinedValueRootIndex);
      }
      if (j < 3) {
        __ jmp(&args_done, Label::kNear);
        __ bind(&over);
      }
    }
    __ bind(&args_done);

    // Call runtime, on success unwind frame, and parent frame.
    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
    // A smi 0 is returned on failure, an object on success.
    __ JumpIfSmi(eax, &failed, Label::kNear);

    // Success: drop the saved function/new-target copies and recover the
    // (smi-tagged) argument count.
    __ Drop(2);
    __ Pop(ecx);
    __ SmiUntag(ecx);
    scope.GenerateLeaveFrame();

    // Also remove the caller's arguments (ecx args plus the receiver) from
    // the stack before returning.
    __ PopReturnAddressTo(ebx);
    __ inc(ecx);
    __ lea(esp, Operand(esp, ecx, times_pointer_size, 0));
    __ PushReturnAddressFrom(ebx);
    __ ret(0);

    __ bind(&failed);
    // Restore target function and new target.
    __ pop(edx);
    __ pop(edi);
    __ pop(eax);
    __ SmiUntag(eax);
  }
  // On failure, tail call back to regular js.
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
1272
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Re-execute the code that was patched back to the young age when
  // the stub returns.
  // NOTE(review): the return address is rewound by 5 bytes — presumably the
  // length of the patched call instruction; confirm against the code-age
  // patching sequence.
  __ sub(Operand(esp, 0), Immediate(5));
  __ pushad();
  // After pushad, the saved return address is 8 slots up the stack.
  __ mov(eax, Operand(esp, 8 * kPointerSize));
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2, ebx);
    // Arguments: (return address, isolate).
    __ mov(Operand(esp, 1 * kPointerSize),
           Immediate(ExternalReference::isolate_address(masm->isolate())));
    __ mov(Operand(esp, 0), eax);
    __ CallCFunction(
        ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  }
  __ popad();
  __ ret(0);
}
1297
// For every code age C in CODE_AGE_LIST, defines the pair of builtins
// Generate_Make<C>CodeYoungAgainEvenMarking / ...OddMarking; each simply
// rejuvenates the code via GenerateMakeCodeYoungAgainCommon.
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1309
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.
  __ pushad();
  // After pushad, the saved return address is 8 slots up the stack.
  __ mov(eax, Operand(esp, 8 * kPointerSize));
  // Point back at the start of the patched call sequence.
  __ sub(eax, Immediate(Assembler::kCallInstructionLength));
  {  // NOLINT
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(2, ebx);
    // Arguments: (code address, isolate).
    __ mov(Operand(esp, 1 * kPointerSize),
           Immediate(ExternalReference::isolate_address(masm->isolate())));
    __ mov(Operand(esp, 0), eax);
    __ CallCFunction(
        ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
        2);
  }
  __ popad();

  // Perform prologue operations usually performed by the young code stub.
  __ pop(eax);   // Pop return address into scratch register.
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS Function.
  __ push(eax);  // Push return address after frame prologue.

  // Jump to point after the code-age stub.
  __ ret(0);
}
1341
// "Executed twice" is handled identically to making the code young again.
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}
1345
// "To be executed once" reuses the executed-once path.
void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}
1349
// Notifies the runtime of a stub failure, preserving all registers, then
// returns to the continuation left on the stack by the caller.
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ pushad();
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ popad();
    // Tear down internal frame.
  }

  __ pop(MemOperand(esp, 0));  // Ignore state offset
  __ ret(0);  // Return to IC Miss stub, continuation still on stack.
}
1368
// Stub-failure notification without saving FP registers.
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}
1372
// Stub-failure notification that also saves FP registers.
void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
1376
// Notifies the runtime that a deoptimization of the given type occurred, then
// returns according to the bailout state left on the stack.
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Pass deoptimization type to the runtime system.
    __ push(Immediate(Smi::FromInt(static_cast<int>(type))));
    __ CallRuntime(Runtime::kNotifyDeoptimized);

    // Tear down internal frame.
  }

  // Get the full codegen state from the stack and untag it.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ SmiUntag(ecx);

  // Switch on the state.
  Label not_no_registers, not_tos_eax;
  __ cmp(ecx, static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS));
  __ j(not_equal, &not_no_registers, Label::kNear);
  __ ret(1 * kPointerSize);  // Remove state.

  __ bind(&not_no_registers);
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), eax.code());
  // Reload the saved top-of-stack value into the accumulator register.
  __ mov(eax, Operand(esp, 2 * kPointerSize));
  __ cmp(ecx, static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER));
  __ j(not_equal, &not_tos_eax, Label::kNear);
  __ ret(2 * kPointerSize);  // Remove state, eax.

  __ bind(&not_tos_eax);
  // No other bailout states are expected here.
  __ Abort(kNoCasesLeft);
}
1409
// Eager deoptimization notification.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
1413
// Soft deoptimization notification.
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
1417
// Lazy deoptimization notification.
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
1421
1422 // static
// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax     : argc
  //  -- esp[0]  : return address
  //  -- esp[4]  : argArray
  //  -- esp[8]  : thisArg
  //  -- esp[12] : receiver
  // -----------------------------------

  // 1. Load receiver into edi, argArray into eax (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label no_arg_array, no_this_arg;
    // Default thisArg (edx) and argArray (ebx) to undefined, then overwrite
    // with whatever the caller actually passed.
    __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
    __ mov(ebx, edx);
    __ mov(edi, Operand(esp, eax, times_pointer_size, kPointerSize));
    __ test(eax, eax);
    __ j(zero, &no_this_arg, Label::kNear);
    {
      __ mov(edx, Operand(esp, eax, times_pointer_size, 0));
      __ cmp(eax, Immediate(1));
      __ j(equal, &no_arg_array, Label::kNear);
      __ mov(ebx, Operand(esp, eax, times_pointer_size, -kPointerSize));
      __ bind(&no_arg_array);
    }
    __ bind(&no_this_arg);
    // Drop all arguments including the receiver, then push thisArg as the
    // new receiver.
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
    __ Push(edx);
    __ PushReturnAddressFrom(ecx);
    __ Move(eax, ebx);
  }

  // ----------- S t a t e -------------
  //  -- eax    : argArray
  //  -- edi    : receiver
  //  -- esp[0] : return address
  //  -- esp[4] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(edi, &receiver_not_callable, Label::kNear);
  __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsCallable));
  __ j(zero, &receiver_not_callable, Label::kNear);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(eax, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
  __ JumpIfRoot(eax, Heap::kUndefinedValueRootIndex, &no_arguments,
                Label::kNear);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ Set(eax, 0);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    // Overwrite the thisArg slot with the receiver so the runtime can report
    // it in the error message.
    __ mov(Operand(esp, kPointerSize), edi);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1498
1499 // static
// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // Stack Layout (4-byte slots on ia32):
  //   esp[0]           : Return address
  //   esp[4]           : Argument n
  //   esp[8]           : Argument n-1
  //    ...
  //   esp[4 * n]       : Argument 1
  //   esp[4 * (n + 1)] : Receiver (callable to call)
  //
  // eax contains the number of arguments, n, not counting the receiver.
  //
  // 1. Make sure we have at least one argument.
  {
    Label done;
    __ test(eax, eax);
    __ j(not_zero, &done, Label::kNear);
    // No arguments at all: push undefined to serve as the single argument.
    __ PopReturnAddressTo(ebx);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(ebx);
    __ inc(eax);
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  __ mov(edi, Operand(esp, eax, times_pointer_size, kPointerSize));

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver). Adjust argument count to make
  //    the original first argument the new receiver.
  {
    Label loop;
    __ mov(ecx, eax);
    __ bind(&loop);
    __ mov(ebx, Operand(esp, ecx, times_pointer_size, 0));
    __ mov(Operand(esp, ecx, times_pointer_size, kPointerSize), ebx);
    __ dec(ecx);
    __ j(not_sign, &loop);  // While non-negative (to copy return address).
    __ pop(ebx);  // Discard copy of return address.
    __ dec(eax);  // One fewer argument (first argument is new receiver).
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
1544
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax     : argc
  //  -- esp[0]  : return address
  //  -- esp[4]  : argumentsList
  //  -- esp[8]  : thisArgument
  //  -- esp[12] : target
  //  -- esp[16] : receiver
  // -----------------------------------

  // 1. Load target into edi (if present), argumentsList into eax (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label done;
    // Default target (edi), thisArgument (edx) and argumentsList (ebx) to
    // undefined, then overwrite whichever arguments were actually passed.
    __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
    __ mov(edx, edi);
    __ mov(ebx, edi);
    __ cmp(eax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
    __ j(equal, &done, Label::kNear);
    __ mov(edx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
    __ cmp(eax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ mov(ebx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
    __ bind(&done);
    // Drop all arguments including the receiver, then push thisArgument as
    // the new receiver.
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
    __ Push(edx);
    __ PushReturnAddressFrom(ecx);
    __ Move(eax, ebx);
  }

  // ----------- S t a t e -------------
  //  -- eax    : argumentsList
  //  -- edi    : target
  //  -- esp[0] : return address
  //  -- esp[4] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(edi, &target_not_callable, Label::kNear);
  __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsCallable));
  __ j(zero, &target_not_callable, Label::kNear);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    // Overwrite the thisArgument slot with the target so the runtime can
    // report it in the error message.
    __ mov(Operand(esp, kPointerSize), edi);
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1606
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax     : argc
  //  -- esp[0]  : return address
  //  -- esp[4]  : new.target (optional)
  //  -- esp[8]  : argumentsList
  //  -- esp[12] : target
  //  -- esp[16] : receiver
  // -----------------------------------

  // 1. Load target into edi (if present), argumentsList into eax (if present),
  // new.target into edx (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label done;
    // Default target, argumentsList and new.target to undefined, then
    // overwrite them from the stack depending on how many arguments were
    // actually passed.
    __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
    __ mov(edx, edi);
    __ mov(ebx, edi);
    __ cmp(eax, Immediate(1));
    __ j(below, &done, Label::kNear);
    // argc >= 1: load target; new.target defaults to target.
    __ mov(edi, Operand(esp, eax, times_pointer_size, -0 * kPointerSize));
    __ mov(edx, edi);
    // Flags are still valid from the cmp above; equal means argc == 1.
    __ j(equal, &done, Label::kNear);
    // argc >= 2: load argumentsList.
    __ mov(ebx, Operand(esp, eax, times_pointer_size, -1 * kPointerSize));
    __ cmp(eax, Immediate(3));
    __ j(below, &done, Label::kNear);
    // argc >= 3: load the explicitly passed new.target.
    __ mov(edx, Operand(esp, eax, times_pointer_size, -2 * kPointerSize));
    __ bind(&done);
    // Drop all arguments including the receiver, then push undefined as the
    // receiver for the upcoming construction.
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ PushReturnAddressFrom(ecx);
    __ Move(eax, ebx);  // eax <- argumentsList.
  }

  // ----------- S t a t e -------------
  //  -- eax    : argumentsList
  //  -- edx    : new.target
  //  -- edi    : target
  //  -- esp[0] : return address
  //  -- esp[4] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(edi, &target_not_constructor, Label::kNear);
  __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsConstructor));
  __ j(zero, &target_not_constructor, Label::kNear);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(edx, &new_target_not_constructor, Label::kNear);
  __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsConstructor));
  __ j(zero, &new_target_not_constructor, Label::kNear);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    // Overwrite the receiver slot with the offending value so the runtime can
    // include it in the error message, then tail-call to throw.
    __ mov(Operand(esp, kPointerSize), edi);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ mov(Operand(esp, kPointerSize), edx);
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
1684
// Entry point for the InternalArray constructor when it is called as a
// normal function; delegates the actual work to InternalArrayConstructorStub.
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : argc
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the InternalArray function.
  __ LoadGlobalFunction(Context::INTERNAL_ARRAY_FUNCTION_INDEX, edi);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray function should be a map.
    __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // The smi-tag check below will both indicate a NULL and a Smi.
    __ test(ebx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForInternalArrayFunction);
    __ CmpObjectType(ebx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
1712
// Entry point for the Array constructor when it is called as a normal
// function; delegates the actual work to ArrayConstructorStub.
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : argc
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, edi);
  // edx is the new.target for the stub (same as the function here, since we
  // were called as a plain function).
  __ mov(edx, edi);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array function should be a map.
    __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // The smi-tag check below will both indicate a NULL and a Smi.
    __ test(ebx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ebx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  // ebx = undefined: no AllocationSite feedback for this call path.
  __ mov(ebx, masm->isolate()->factory()->undefined_value());
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
1741
1742 // static
// Math.max/Math.min over the actual arguments. Walks the arguments from last
// to first, keeping the running result tagged in edx and as a double in the
// x87 FPU stack top (st(0)).
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- eax                 : number of arguments
  //  -- edi                 : function
  //  -- esi                 : context
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------
  // For min we swap the accumulator whenever the parameter compares below it;
  // for max, whenever it compares above.
  Condition const cc = (kind == MathMaxMinKind::kMin) ? below : above;
  // The identity element: +Infinity for min, -Infinity for max.
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  // FPU stack slot whose sign decides the -0 vs. +0 tie-break below.
  const int reg_sel = (kind == MathMaxMinKind::kMin) ? 1 : 0;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in edx and the double value in stx_0.
  __ LoadRoot(edx, root_index);
  __ fld_d(FieldOperand(edx, HeapNumber::kValueOffset));
  __ Move(ecx, eax);  // ecx is the loop counter, counting down to zero.

  Label done_loop, loop;
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ test(ecx, ecx);
    __ j(zero, &done_loop);

    // Load the next parameter tagged value into ebx.
    __ mov(ebx, Operand(esp, ecx, times_pointer_size, 0));

    // Load the double value of the parameter into stx_1, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(ebx, &convert_smi);
    __ JumpIfRoot(FieldOperand(ebx, HeapObject::kMapOffset),
                  Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      // All live state (argc in eax, loop counter in ecx, tagged accumulator
      // in edx) is preserved across the call.
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(eax);
      __ SmiTag(ecx);
      __ EnterBuiltinFrame(esi, edi, eax);
      __ Push(ecx);
      __ Push(edx);
      __ mov(eax, ebx);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ mov(ebx, eax);
      __ Pop(edx);
      __ Pop(ecx);
      __ LeaveBuiltinFrame(esi, edi, eax);
      __ SmiUntag(ecx);
      __ SmiUntag(eax);
      {
        // Restore the double accumulator value (stX_0) from the saved tagged
        // value in edx, since the call may have clobbered the FPU stack.
        Label restore_smi, done_restore;
        __ JumpIfSmi(edx, &restore_smi, Label::kNear);
        __ fld_d(FieldOperand(edx, HeapNumber::kValueOffset));
        __ jmp(&done_restore, Label::kNear);
        __ bind(&restore_smi);
        // fild_s loads an integer from memory, so bounce the untagged smi
        // through the stack.
        __ SmiUntag(edx);
        __ push(edx);
        __ fild_s(Operand(esp, 0));
        __ pop(edx);
        __ SmiTag(edx);
        __ bind(&done_restore);
      }
    }
    // Re-check the converted value (it is now a smi or heap number).
    __ jmp(&convert);
    __ bind(&convert_number);
    // Load another value into stx_1
    __ fld_d(FieldOperand(ebx, HeapNumber::kValueOffset));
    __ fxch();
    __ jmp(&done_convert, Label::kNear);
    __ bind(&convert_smi);
    __ SmiUntag(ebx);
    __ push(ebx);
    __ fild_s(Operand(esp, 0));
    __ pop(ebx);
    __ fxch();
    __ SmiTag(ebx);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left hand
    // side (stx_0) and the next parameter value on the right hand side (stx_1).
    Label compare_equal, compare_nan, compare_swap, done_compare;

    // Duplicates the 2 float data for FCmp
    __ fld(1);
    __ fld(1);
    __ FCmp();
    __ j(parity_even, &compare_nan, Label::kNear);  // Unordered -> NaN.
    __ j(cc, &done_compare, Label::kNear);          // Accumulator wins.
    __ j(equal, &compare_equal, Label::kNear);      // Tie: -0 vs. +0 check.

    // Result is on the right hand side(stx_0).
    __ bind(&compare_swap);
    __ fxch();
    __ mov(edx, ebx);  // Swap the tagged value along with the double.
    __ jmp(&done_compare, Label::kNear);

    // At least one side is NaN, which means that the result will be NaN too.
    __ bind(&compare_nan);
    // Set the result on the right hand side (stx_0) to nan
    __ fstp(0);
    __ LoadRoot(edx, Heap::kNanValueRootIndex);
    __ fld_d(FieldOperand(edx, HeapNumber::kValueOffset));
    __ jmp(&done_compare, Label::kNear);

    // Left and right hand side are equal, check for -0 vs. +0.
    __ bind(&compare_equal);
    // Check the sign of the value in reg_sel
    __ fld(reg_sel);
    __ FXamSign();
    __ j(not_zero, &compare_swap);

    __ bind(&done_compare);
    // The right result is on the right hand side(stx_0)
    // and can remove the useless stx_1 now.
    __ fxch();
    __ fstp(0);
    __ dec(ecx);
    __ jmp(&loop);
  }

  __ bind(&done_loop);
  // Drop all arguments including the receiver and return the tagged result.
  __ PopReturnAddressTo(ecx);
  __ lea(esp, Operand(esp, eax, times_pointer_size, kPointerSize));
  __ PushReturnAddressFrom(ecx);
  __ mov(eax, edx);
  __ Ret();
}
1876
1877 // static
// Number(value) called as a function (not as a constructor): coerces the
// first argument to a number and returns it; returns +0 with no arguments.
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax                 : number of arguments
  //  -- edi                 : constructor function
  //  -- esi                 : context
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  // 1. Load the first argument into ebx.
  Label no_arguments;
  {
    __ test(eax, eax);
    __ j(zero, &no_arguments, Label::kNear);
    __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
  }

  // 2a. Convert the first argument to a number.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(eax);  // Preserve argc as a smi across the call.
    __ EnterBuiltinFrame(esi, edi, eax);
    __ mov(eax, ebx);
    __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(esi, edi, ebx);  // Argc popped to ebx.
    __ SmiUntag(ebx);
  }

  {
    // Drop all arguments including the receiver.
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, ebx, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(ecx);
    __ Ret();
  }

  // 2b. No arguments, return +0 (already in eax).
  __ bind(&no_arguments);
  __ ret(1 * kPointerSize);  // Pop only the receiver.
}
1919
1920 // static
// new Number(value): coerces the first argument to a number and wraps it in a
// freshly allocated JSValue.
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax                 : number of arguments
  //  -- edi                 : constructor function
  //  -- edx                 : new target
  //  -- esi                 : context
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // Store argc (smi-tagged) in ecx; it must survive the calls below.
  __ mov(ecx, eax);
  __ SmiTag(ecx);

  // 2. Load the first argument into ebx.
  {
    Label no_arguments, done;
    __ test(eax, eax);
    __ j(zero, &no_arguments, Label::kNear);
    __ mov(ebx, Operand(esp, eax, times_pointer_size, 0));
    __ jmp(&done, Label::kNear);
    __ bind(&no_arguments);
    __ Move(ebx, Smi::kZero);  // Default to +0 when called without arguments.
    __ bind(&done);
  }

  // 3. Make sure ebx is a number.
  {
    Label done_convert;
    __ JumpIfSmi(ebx, &done_convert);
    __ CompareRoot(FieldOperand(ebx, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(equal, &done_convert);
    {
      // Not a smi or heap number: call the ToNumber builtin, preserving the
      // new target (edx) across the call.
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterBuiltinFrame(esi, edi, ecx);
      __ Push(edx);
      __ Move(eax, ebx);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ Move(ebx, eax);
      __ Pop(edx);
      __ LeaveBuiltinFrame(esi, edi, ecx);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ. Subclassing goes through
  // the generic runtime path below.
  Label drop_frame_and_ret, done_alloc, new_object;
  __ cmp(edx, edi);
  __ j(not_equal, &new_object);

  // 5. Allocate a JSValue wrapper for the number. esi is used as a scratch
  // register here and restored on the failure path.
  __ AllocateJSValue(eax, edi, ebx, esi, &done_alloc);
  __ jmp(&drop_frame_and_ret);

  // Allocation failed: fall through to the runtime path.
  __ bind(&done_alloc);
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));  // Restore esi.

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ EnterBuiltinFrame(esi, edi, ecx);
    __ Push(ebx);  // the first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    // Store the saved number directly into the new object's value slot.
    __ Pop(FieldOperand(eax, JSValue::kValueOffset));
    __ LeaveBuiltinFrame(esi, edi, ecx);
  }

  __ bind(&drop_frame_and_ret);
  {
    // Drop all arguments including the receiver.
    __ PopReturnAddressTo(esi);
    __ SmiUntag(ecx);
    __ lea(esp, Operand(esp, ecx, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(esi);
    __ Ret();
  }
}
2005
2006 // static
// String(value) called as a function (not as a constructor): returns the
// first argument coerced to a string (symbols get a descriptive string);
// returns the empty string with no arguments.
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax                 : number of arguments
  //  -- edi                 : constructor function
  //  -- esi                 : context
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  // 1. Load the first argument into eax.
  Label no_arguments;
  {
    __ mov(ebx, eax);  // Store argc in ebx.
    __ test(eax, eax);
    __ j(zero, &no_arguments, Label::kNear);
    __ mov(eax, Operand(esp, eax, times_pointer_size, 0));
  }

  // 2a. At least one argument, return eax if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label drop_frame_and_ret, to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(eax, &to_string, Label::kNear);
    // Instance types below SYMBOL_TYPE are strings (equal => symbol,
    // above => needs ToString conversion).
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    __ j(above, &to_string, Label::kNear);
    __ j(equal, &symbol_descriptive_string, Label::kNear);
    __ jmp(&drop_frame_and_ret, Label::kNear);
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(eax, Heap::kempty_stringRootIndex);
    __ ret(1 * kPointerSize);
  }

  // 3a. Convert eax to a string.
  __ bind(&to_string);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(ebx);  // Preserve argc as a smi across the call.
    __ EnterBuiltinFrame(esi, edi, ebx);
    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(esi, edi, ebx);
    __ SmiUntag(ebx);
  }
  __ jmp(&drop_frame_and_ret, Label::kNear);

  // 3b. Convert symbol in eax to a string.
  __ bind(&symbol_descriptive_string);
  {
    // Drop all arguments, push the symbol as the sole runtime argument and
    // tail-call into the runtime.
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, ebx, times_pointer_size, kPointerSize));
    __ Push(eax);
    __ PushReturnAddressFrom(ecx);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }

  __ bind(&drop_frame_and_ret);
  {
    // Drop all arguments including the receiver.
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, ebx, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(ecx);
    __ Ret();
  }
}
2076
2077 // static
// new String(value): coerces the first argument to a string and wraps it in a
// freshly allocated JSValue.
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax                 : number of arguments
  //  -- edi                 : constructor function
  //  -- edx                 : new target
  //  -- esi                 : context
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  __ mov(ebx, eax);  // Store argc in ebx; it must survive the calls below.

  // 2. Load the first argument into eax.
  {
    Label no_arguments, done;
    __ test(ebx, ebx);
    __ j(zero, &no_arguments, Label::kNear);
    __ mov(eax, Operand(esp, ebx, times_pointer_size, 0));
    __ jmp(&done, Label::kNear);
    __ bind(&no_arguments);
    // Default to the empty string when called without arguments.
    __ LoadRoot(eax, Heap::kempty_stringRootIndex);
    __ bind(&done);
  }

  // 3. Make sure eax is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(eax, &convert, Label::kNear);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ecx);
    __ j(below, &done_convert);
    __ bind(&convert);
    {
      // Call the ToString builtin, preserving the new target (edx) across
      // the call.
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(ebx);
      __ EnterBuiltinFrame(esi, edi, ebx);
      __ Push(edx);
      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
      __ Pop(edx);
      __ LeaveBuiltinFrame(esi, edi, ebx);
      __ SmiUntag(ebx);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ. Subclassing goes through
  // the generic runtime path below.
  Label drop_frame_and_ret, done_alloc, new_object;
  __ cmp(edx, edi);
  __ j(not_equal, &new_object);

  // 5. Allocate a JSValue wrapper for the string.
  // AllocateJSValue can't handle src == dst register. Reuse esi and restore it
  // as needed after the call.
  __ mov(esi, eax);
  __ AllocateJSValue(eax, edi, esi, ecx, &done_alloc);
  __ jmp(&drop_frame_and_ret);

  // Allocation failed: fall through to the runtime path.
  __ bind(&done_alloc);
  {
    // Restore eax to the first argument and esi to the context.
    __ mov(eax, esi);
    __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  }

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(ebx);
    __ EnterBuiltinFrame(esi, edi, ebx);
    __ Push(eax);  // the first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    // Store the saved string directly into the new object's value slot.
    __ Pop(FieldOperand(eax, JSValue::kValueOffset));
    __ LeaveBuiltinFrame(esi, edi, ebx);
    __ SmiUntag(ebx);
  }

  __ bind(&drop_frame_and_ret);
  {
    // Drop all arguments including the receiver.
    __ PopReturnAddressTo(ecx);
    __ lea(esp, Operand(esp, ebx, times_pointer_size, kPointerSize));
    __ PushReturnAddressFrom(ecx);
    __ Ret();
  }
}
2168
// Builds an arguments adaptor frame on the stack:
//   [saved ebp | ARGUMENTS_ADAPTOR sentinel | function | smi(argc)].
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(ebp);
  __ mov(ebp, esp);

  // Store the arguments adaptor context sentinel.
  __ push(Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Push the function on the stack.
  __ push(edi);

  // Preserve the number of arguments on the stack. Must preserve eax,
  // ebx and ecx because these registers are used when copying the
  // arguments and the receiver.
  STATIC_ASSERT(kSmiTagSize == 1);
  // lea computes eax * 2 + kSmiTag, i.e. the smi-tagged argument count,
  // without clobbering eax or the flags.
  __ lea(edi, Operand(eax, eax, times_1, kSmiTag));
  __ push(edi);
}
2186
// Tears down an arguments adaptor frame and removes the caller-pushed
// arguments (including the receiver) from the stack.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack.
  __ mov(ebx, Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ leave();

  // Remove caller arguments from the stack. ebx holds a smi (value * 2), so
  // scaling by times_2 instead of times_4 folds the untagging into the lea.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ pop(ecx);
  __ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize));  // 1 ~ receiver
  __ push(ecx);
}
2200
2201 // static
// static
// Shared tail of Function.prototype.apply / Reflect.apply /
// Reflect.construct: flattens argumentsList onto the stack and dispatches to
// Call or Construct.
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : argumentsList
  //  -- edi    : target
  //  -- edx    : new.target (checked to be constructor or undefined)
  //  -- esp[0] : return address.
  //  -- esp[4] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(eax, &create_runtime);

    // Load the map of argumentsList into ecx.
    __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));

    // Load native context into ebx.
    __ mov(ebx, NativeContextOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ cmp(ecx, ContextOperand(ebx, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ j(equal, &create_arguments);
    __ cmp(ecx, ContextOperand(ebx, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ j(equal, &create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CmpInstanceType(ecx, JS_ARRAY_TYPE);
    __ j(equal, &create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(edi);
      __ Push(edx);
      __ Push(eax);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ Pop(edx);
      __ Pop(edi);
      __ mov(ebx, FieldOperand(eax, FixedArray::kLengthOffset));
      __ SmiUntag(ebx);
    }
    __ jmp(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    __ mov(ebx, FieldOperand(eax, JSArgumentsObject::kLengthOffset));
    __ mov(ecx, FieldOperand(eax, JSObject::kElementsOffset));
    // The fast path requires length == elements length (no deletions or
    // out-of-object growth); otherwise fall back to the runtime.
    __ cmp(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
    __ j(not_equal, &create_runtime);
    __ SmiUntag(ebx);
    __ mov(eax, ecx);
    __ jmp(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(ecx);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    // Only FAST_SMI_ELEMENTS and FAST_ELEMENTS are handled inline; holey
    // kinds (and anything slower) go to the runtime.
    __ cmp(ecx, Immediate(FAST_ELEMENTS));
    __ j(above, &create_runtime);
    __ cmp(ecx, Immediate(FAST_HOLEY_SMI_ELEMENTS));
    __ j(equal, &create_runtime);
    __ mov(ebx, FieldOperand(eax, JSArray::kLengthOffset));
    __ SmiUntag(ebx);
    __ mov(eax, FieldOperand(eax, JSArray::kElementsOffset));

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    ExternalReference real_stack_limit =
        ExternalReference::address_of_real_stack_limit(masm->isolate());
    __ mov(ecx, Operand::StaticVariable(real_stack_limit));
    // Make ecx the space we have left. The stack might already be overflowed
    // here which will cause ecx to become negative.
    __ neg(ecx);
    __ add(ecx, esp);
    __ sar(ecx, kPointerSizeLog2);
    // Check if the arguments will overflow the stack.
    __ cmp(ecx, ebx);
    __ j(greater, &done, Label::kNear);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- edi    : target
  //  -- eax    : args (a FixedArray built from argumentsList)
  //  -- ebx    : len (number of elements to push from args)
  //  -- edx    : new.target (checked to be constructor or undefined)
  //  -- esp[0] : return address.
  //  -- esp[4] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    // Registers are scarce here: spill new.target (edx) into the x87 FPU
    // stack (bit-pattern preserved via a 32-bit float load) so edx can hold
    // the return address during the push loop.
    __ push(edx);
    __ fld_s(MemOperand(esp, 0));
    __ lea(esp, Operand(esp, kFloatSize));

    __ PopReturnAddressTo(edx);
    __ Move(ecx, Immediate(0));
    Label done, loop;
    __ bind(&loop);
    __ cmp(ecx, ebx);
    __ j(equal, &done, Label::kNear);
    __ Push(
        FieldOperand(eax, ecx, times_pointer_size, FixedArray::kHeaderSize));
    __ inc(ecx);
    __ jmp(&loop);
    __ bind(&done);
    __ PushReturnAddressFrom(edx);

    // Restore new.target (edx) from the FPU stack.
    __ lea(esp, Operand(esp, -kFloatSize));
    __ fstp_s(MemOperand(esp, 0));
    __ pop(edx);

    __ Move(eax, ebx);  // eax <- number of pushed arguments.
  }

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    __ CompareRoot(edx, Heap::kUndefinedValueRootIndex);
    __ j(equal, masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}
2337
2338 namespace {
2339
2340 // Drops top JavaScript frame and an arguments adaptor frame below it (if
2341 // present) preserving all the arguments prepared for current call.
2342 // Does nothing if debugger is currently active.
2343 // ES6 14.6.3. PrepareForTailCall
2344 //
2345 // Stack structure for the function g() tail calling f():
2346 //
2347 // ------- Caller frame: -------
2348 // | ...
2349 // | g()'s arg M
2350 // | ...
2351 // | g()'s arg 1
2352 // | g()'s receiver arg
2353 // | g()'s caller pc
2354 // ------- g()'s frame: -------
2355 // | g()'s caller fp <- fp
2356 // | g()'s context
2357 // | function pointer: g
2358 // | -------------------------
2359 // | ...
2360 // | ...
2361 // | f()'s arg N
2362 // | ...
2363 // | f()'s arg 1
2364 // | f()'s receiver arg
2365 // | f()'s caller pc <- sp
2366 // ----------------------
2367 //
// See the stack diagram above: drops the top JavaScript frame (and an
// arguments adaptor frame below it, if present) so the tail-called function
// reuses the caller's frame slot. No-op unless ES2015 tail call elimination
// is enabled at runtime.
//
// args_reg holds the callee argument count; scratch1-3 are clobbered.
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is enabled.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ movzx_b(scratch1,
             Operand::StaticVariable(is_tail_call_elimination_enabled));
  __ cmp(scratch1, Immediate(0));
  __ j(equal, &done, Label::kNear);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ cmp(Operand(ebp, CommonFrameConstants::kContextOrFrameTypeOffset),
           Immediate(Smi::FromInt(StackFrame::STUB)));
    __ j(not_equal, &no_interpreter_frame, Label::kNear);
    __ mov(ebp, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ mov(scratch2, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &no_arguments_adaptor, Label::kNear);

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ mov(ebp, scratch2);
  __ mov(caller_args_count_reg,
         Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);
  __ jmp(&formal_parameter_count_loaded, Label::kNear);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count
  __ mov(scratch1, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(scratch1,
         FieldOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ mov(
      caller_args_count_reg,
      FieldOperand(scratch1, SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(caller_args_count_reg);

  __ bind(&formal_parameter_count_loaded);

  // Let the macro assembler shuffle the frame/arguments into place.
  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3, ReturnAddressState::kOnStack, 0);
  __ bind(&done);
}
2426 } // namespace
2427
2428 // static
// static
// ES6 section 9.2.1 [[Call]] for a known JSFunction: converts the receiver
// if required, optionally prepares a tail call, and invokes the function
// through its code entry (via the arguments adaptor if argc != formal count).
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(edi);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ test_b(FieldOperand(edx, SharedFunctionInfo::kFunctionKindByteOffset),
            Immediate(SharedFunctionInfo::kClassConstructorBitsWithinByte));
  __ j(not_zero, &class_constructor);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
                SharedFunctionInfo::kStrictModeByteOffset);
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ test_b(FieldOperand(edx, SharedFunctionInfo::kNativeByteOffset),
            Immediate((1 << SharedFunctionInfo::kNativeBitWithinByte) |
                      (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
  __ j(not_zero, &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- eax : the number of arguments (not including the receiver)
    //  -- edx : the shared function info.
    //  -- edi : the function to call (checked to be a JSFunction)
    //  -- esi : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(ecx);
    } else {
      Label convert_to_object, convert_receiver;
      // Load the receiver from the stack into ecx.
      __ mov(ecx, Operand(esp, eax, times_pointer_size, kPointerSize));
      __ JumpIfSmi(ecx, &convert_to_object, Label::kNear);
      // JSReceivers need no conversion.
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ebx);
      __ j(above_equal, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(ecx, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy, Label::kNear);
        __ JumpIfNotRoot(ecx, Heap::kNullValueRootIndex, &convert_to_object,
                         Label::kNear);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(ecx);
        }
        __ jmp(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(eax);
        __ Push(eax);
        __ Push(edi);
        __ mov(eax, ecx);
        __ Push(esi);
        __ Call(masm->isolate()->builtins()->ToObject(),
                RelocInfo::CODE_TARGET);
        __ Pop(esi);
        __ mov(ecx, eax);
        __ Pop(edi);
        __ Pop(eax);
        __ SmiUntag(eax);
      }
      // Reload the shared function info (edx was clobbered above).
      __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    // Write the converted receiver back into its stack slot.
    __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ecx);
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the shared function info.
  //  -- edi : the function to call (checked to be a JSFunction)
  //  -- esi : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, eax, ebx, ecx, edx);
    // Reload shared function info.
    __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  }

  // Invoke the function with the expected (formal) parameter count in ebx;
  // InvokeFunctionCode goes through the arguments adaptor if it differs
  // from the actual count in eax.
  __ mov(ebx,
         FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(ebx);
  ParameterCount actual(eax);
  ParameterCount expected(ebx);
  __ InvokeFunctionCode(edi, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());
  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ push(edi);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
2543
2544 namespace {
2545
// Pushes the [[BoundArguments]] of the JSBoundFunction in edi onto the stack
// below the existing call arguments, shifting the arguments and the return
// address down to make room. On exit, eax has been increased by the number of
// bound arguments. Does nothing when the bound-arguments array is empty.
void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : new.target (only in case of [[Construct]])
  //  -- edi : target (checked to be a JSBoundFunction)
  // -----------------------------------

  // Load [[BoundArguments]] into ecx and length of that into ebx.
  Label no_bound_arguments;
  __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
  __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
  __ SmiUntag(ebx);
  __ test(ebx, ebx);
  __ j(zero, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- eax : the number of arguments (not including the receiver)
    //  -- edx : new.target (only in case of [[Construct]])
    //  -- edi : target (checked to be a JSBoundFunction)
    //  -- ecx : the [[BoundArguments]] (implemented as FixedArray)
    //  -- ebx : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ lea(ecx, Operand(ebx, times_pointer_size, 0));
      __ sub(esp, ecx);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(esp, ecx, Heap::kRealStackLimitRootIndex);
      __ j(greater, &done, Label::kNear);  // Signed comparison.
      // Restore the stack pointer before calling into the runtime to throw.
      __ lea(esp, Operand(esp, ebx, times_pointer_size, 0));
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Adjust effective number of arguments to include return address.
    __ inc(eax);

    // Relocate arguments and return address down the stack. All general
    // purpose registers are live here, so the x87 FPU stack is used as
    // scratch for the 32-bit memory-to-memory moves.
    // NOTE(review): fld_s/fstp_s quiets signalling-NaN bit patterns; this
    // presumes no relocated word can alias a single-precision sNaN encoding
    // -- TODO confirm.
    {
      Label loop;
      __ Set(ecx, 0);
      __ lea(ebx, Operand(esp, ebx, times_pointer_size, 0));
      __ bind(&loop);
      __ fld_s(Operand(ebx, ecx, times_pointer_size, 0));
      __ fstp_s(Operand(esp, ecx, times_pointer_size, 0));
      __ inc(ecx);
      __ cmp(ecx, eax);
      __ j(less, &loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments). The array
    // is walked backwards (highest index first) so the elements end up in
    // call order on the downward-growing stack.
    {
      Label loop;
      __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
      __ mov(ebx, FieldOperand(ecx, FixedArray::kLengthOffset));
      __ SmiUntag(ebx);
      __ bind(&loop);
      __ dec(ebx);
      __ fld_s(
          FieldOperand(ecx, ebx, times_pointer_size, FixedArray::kHeaderSize));
      __ fstp_s(Operand(esp, eax, times_pointer_size, 0));
      // lea (rather than inc/add) keeps the EFLAGS set by the dec above,
      // which the loop branch below tests.
      __ lea(eax, Operand(eax, 1));
      __ j(greater, &loop);
    }

    // Adjust effective number of arguments (eax contains the number of
    // arguments from the call plus return address plus the number of
    // [[BoundArguments]]), so we need to subtract one for the return address.
    __ dec(eax);
  }
  __ bind(&no_bound_arguments);
}
2627
2628 } // namespace
2629
2630 // static
// static
// Calls a JSBoundFunction: patches the receiver slot with [[BoundThis]],
// pushes the [[BoundArguments]], then tail-calls the Call builtin on the
// [[BoundTargetFunction]].
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(edi);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, eax, ebx, ecx, edx);
  }

  // Patch the receiver to [[BoundThis]].
  __ mov(ebx, FieldOperand(edi, JSBoundFunction::kBoundThisOffset));
  __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), ebx);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin: load the builtin's
  // Code object and jump past its header to the first instruction.
  __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ecx, Operand::StaticVariable(ExternalReference(
                  Builtins::kCall_ReceiverIsAny, masm->isolate())));
  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
  __ jmp(ecx);
}
2657
2658 // static
// static
// The generic Call builtin: dispatches on the instance type of the target in
// edi to CallFunction (JSFunction), CallBoundFunction (JSBoundFunction), the
// runtime (JSProxy), or the call-as-function delegate for other callables;
// throws TypeError for non-callable targets.
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(edi, &non_callable);
  __ bind(&non_smi);
  // CmpObjectType leaves the target's map in ecx for the checks below.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(equal, masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
       RelocInfo::CODE_TARGET);
  __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
  __ j(equal, masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
       RelocInfo::CODE_TARGET);

  // Check if target has a [[Call]] internal method.
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsCallable));
  __ j(zero, &non_callable);

  __ CmpInstanceType(ecx, JS_PROXY_TYPE);
  __ j(not_equal, &non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, eax, ebx, ecx, edx);
  }

  // 1. Runtime fallback for Proxy [[Call]]: pass the proxy itself as an
  // extra argument below the return address.
  __ PopReturnAddressTo(ecx);
  __ Push(edi);
  __ PushReturnAddressFrom(ecx);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ add(eax, Immediate(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadGlobalFunction(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, edi);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(edi);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
2719
2720 // static
// static
// Constructs a JSFunction by tail-calling its function-specific construct
// stub (taken from the SharedFunctionInfo).
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the new target (checked to be a constructor)
  //  -- edi : the constructor to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(edi);

  // Calling convention for function specific ConstructStubs require
  // ebx to contain either an AllocationSite or undefined.
  __ LoadRoot(ebx, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point). Jump past the Code header to the first
  // instruction.
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
  __ jmp(ecx);
}
2740
2741 // static
// static
// Constructs a JSBoundFunction: pushes the [[BoundArguments]], fixes up
// new.target, then tail-calls the Construct builtin on the
// [[BoundTargetFunction]].
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the new target (checked to be a constructor)
  //  -- edi : the constructor to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(edi);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label done;
    __ cmp(edi, edx);
    __ j(not_equal, &done, Label::kNear);
    __ mov(edx, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&done);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin: load
  // its Code object and jump past the header to the first instruction.
  __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ecx, Operand::StaticVariable(
                  ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
  __ jmp(ecx);
}
2769
2770 // static
// static
// Constructs a JSProxy by tail-calling into the runtime, passing the proxy
// and new.target as extra stack arguments.
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the constructor to call (checked to be a JSProxy)
  //  -- edx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ PopReturnAddressTo(ecx);
  __ Push(edi);
  __ Push(edx);
  __ PushReturnAddressFrom(ecx);
  // Include the pushed new_target, constructor and the receiver.
  __ add(eax, Immediate(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}
2790
2791 // static
// static
// The generic Construct builtin: dispatches on the instance type of the
// target in edi to ConstructFunction, ConstructBoundFunction, ConstructProxy,
// or the call-as-constructor delegate; throws for non-constructable targets.
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- edi : the constructor to call (can be any Object)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(edi, &non_constructor, Label::kNear);

  // Dispatch based on instance type; ecx receives the target's map.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(equal, masm->isolate()->builtins()->ConstructFunction(),
       RelocInfo::CODE_TARGET);

  // Check if target has a [[Construct]] internal method.
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsConstructor));
  __ j(zero, &non_constructor, Label::kNear);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
  __ j(equal, masm->isolate()->builtins()->ConstructBoundFunction(),
       RelocInfo::CODE_TARGET);

  // Only dispatch to proxies after checking whether they are constructors.
  __ CmpInstanceType(ecx, JS_PROXY_TYPE);
  __ j(equal, masm->isolate()->builtins()->ConstructProxy(),
       RelocInfo::CODE_TARGET);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadGlobalFunction(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, edi);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
2841
2842 // static
// static
// Slow-path allocation in new space: tags the requested size as a Smi,
// pushes it as a stack argument, and tail-calls the runtime.
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- edx    : requested object size (untagged)
  //  -- esp[0] : return address
  // -----------------------------------
  __ SmiTag(edx);
  __ PopReturnAddressTo(ecx);
  __ Push(edx);
  __ PushReturnAddressFrom(ecx);
  // Clear the context register (esi) -- this builtin runs without a context.
  __ Move(esi, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
2855
2856 // static
// static
// Slow-path allocation in old space: like Generate_AllocateInNewSpace, but
// additionally passes the encoded target-space flag to the runtime.
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- edx    : requested object size (untagged)
  //  -- esp[0] : return address
  // -----------------------------------
  __ SmiTag(edx);
  __ PopReturnAddressTo(ecx);
  __ Push(edx);
  __ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ PushReturnAddressFrom(ecx);
  // Clear the context register (esi) -- this builtin runs without a context.
  __ Move(esi, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
2870
2871 // static
// static
// Aborts execution by tail-calling Runtime::kAbort with the message id
// (already a Smi in edx) as its single argument.
void Builtins::Generate_Abort(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- edx    : message_id as Smi
  //  -- esp[0] : return address
  // -----------------------------------
  __ PopReturnAddressTo(ecx);
  __ Push(edx);
  __ PushReturnAddressFrom(ecx);
  // Clear the context register (esi) -- this builtin runs without a context.
  __ Move(esi, Smi::kZero);
  __ TailCallRuntime(Runtime::kAbort);
}
2883
// Adapts the actual argument count (eax) to the expected count (ebx) before
// invoking the function in edi: builds an adaptor frame, copies the receiver
// and arguments into it, drops extras or pads with undefined, then calls the
// function's code entry. Skips adaptation entirely for the
// kDontAdaptArgumentsSentinel.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : actual number of arguments
  //  -- ebx : expected number of arguments
  //  -- edx : new target (passed through to callee)
  //  -- edi : function (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;
  __ IncrementCounter(masm->isolate()->counters()->arguments_adaptors(), 1);

  Label enough, too_few;
  __ cmp(eax, ebx);
  __ j(less, &too_few);
  __ cmp(ebx, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
  __ j(equal, &dont_adapt_arguments);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    // edi is used as a scratch register. It should be restored from the frame
    // when needed.
    Generate_StackOverflowCheck(masm, ebx, ecx, edi, &stack_overflow);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(edi, Operand(ebp, eax, times_4, offset));
    __ mov(eax, -1);  // account for receiver

    // Push expected+1 slots (receiver first), walking edi down from the
    // caller's last argument; eax counts 0..expected.
    Label copy;
    __ bind(&copy);
    __ inc(eax);
    __ push(Operand(edi, 0));
    __ sub(edi, Immediate(kPointerSize));
    __ cmp(eax, ebx);
    __ j(less, &copy);
    // eax now contains the expected number of arguments.
    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    // edi is used as a scratch register. It should be restored from the frame
    // when needed.
    Generate_StackOverflowCheck(masm, ebx, ecx, edi, &stack_overflow);

    // Remember expected arguments in ecx.
    __ mov(ecx, ebx);

    // Copy receiver and all actual arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(edi, Operand(ebp, eax, times_4, offset));
    // ebx = expected - actual.
    __ sub(ebx, eax);
    // eax = -actual - 1
    __ neg(eax);
    __ sub(eax, Immediate(1));

    // Push actual+1 slots (receiver plus all actual arguments); eax counts
    // up from -actual to 0.
    Label copy;
    __ bind(&copy);
    __ inc(eax);
    __ push(Operand(edi, 0));
    __ sub(edi, Immediate(kPointerSize));
    __ test(eax, eax);
    __ j(not_zero, &copy);

    // Fill remaining expected arguments with undefined values. eax counts up
    // to ebx (= expected - actual).
    Label fill;
    __ bind(&fill);
    __ inc(eax);
    __ push(Immediate(masm->isolate()->factory()->undefined_value()));
    __ cmp(eax, ebx);
    __ j(less, &fill);

    // Restore expected arguments.
    __ mov(eax, ecx);
  }

  // Call the entry point.
  __ bind(&invoke);
  // Restore function pointer.
  __ mov(edi, Operand(ebp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  // eax : expected number of arguments
  // edx : new target (passed through to callee)
  // edi : function (passed through to callee)
  __ mov(ecx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
  __ call(ecx);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Leave frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ ret(0);

  // -------------------------------------------
  // Dont adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ mov(ecx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
  __ jmp(ecx);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();
  }
}
2994
// Checks that the receiver is compatible with the signature of the given
// FunctionTemplateInfo: walks the receiver's (hidden) prototype chain and,
// for each constructor, walks its chain of parent function templates looking
// for a signature match. Falls through on success; jumps to
// receiver_check_failed otherwise. Clobbers receiver, scratch0 and scratch1.
static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                    Register function_template_info,
                                    Register scratch0, Register scratch1,
                                    Label* receiver_check_failed) {
  // If there is no signature, return the holder.
  __ CompareRoot(FieldOperand(function_template_info,
                              FunctionTemplateInfo::kSignatureOffset),
                 Heap::kUndefinedValueRootIndex);
  Label receiver_check_passed;
  __ j(equal, &receiver_check_passed, Label::kNear);

  // Walk the prototype chain.
  __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any.
  __ GetMapConstructor(scratch0, scratch0, scratch1);
  __ CmpInstanceType(scratch1, JS_FUNCTION_TYPE);
  Label next_prototype;
  __ j(not_equal, &next_prototype, Label::kNear);

  // Get the constructor's signature.
  __ mov(scratch0,
         FieldOperand(scratch0, JSFunction::kSharedFunctionInfoOffset));
  __ mov(scratch0,
         FieldOperand(scratch0, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ cmp(scratch0, FieldOperand(function_template_info,
                                FunctionTemplateInfo::kSignatureOffset));
  __ j(equal, &receiver_check_passed, Label::kNear);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(scratch0, &next_prototype, Label::kNear);
  __ CmpObjectType(scratch0, FUNCTION_TEMPLATE_INFO_TYPE, scratch1);
  __ j(not_equal, &next_prototype, Label::kNear);

  // Otherwise load the parent function template and iterate.
  __ mov(scratch0,
         FieldOperand(scratch0, FunctionTemplateInfo::kParentTemplateOffset));
  __ jmp(&function_template_loop, Label::kNear);

  // Load the next prototype. Only hidden prototypes are followed; a plain
  // prototype ends the walk with a failure.
  __ bind(&next_prototype);
  __ mov(receiver, FieldOperand(receiver, HeapObject::kMapOffset));
  __ test(FieldOperand(receiver, Map::kBitField3Offset),
          Immediate(Map::HasHiddenPrototype::kMask));
  __ j(zero, receiver_check_failed);

  __ mov(receiver, FieldOperand(receiver, Map::kPrototypeOffset));
  __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));
  // Iterate.
  __ jmp(&prototype_loop_start, Label::kNear);

  __ bind(&receiver_check_passed);
}
3057
// Validates the receiver against the callee's FunctionTemplateInfo signature
// and, on success, tail-calls the fast API handler; on failure, pops the
// arguments and throws an Illegal Invocation exception.
void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax                : number of arguments (not including the receiver)
  //  -- edi                : callee
  //  -- esi                : context
  //  -- esp[0]             : return address
  //  -- esp[4]             : last argument
  //  -- ...
  //  -- esp[eax * 4]       : first argument
  //  -- esp[(eax + 1) * 4] : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check. eax is needed as a scratch register,
  // so the argument count is saved across the check.
  Label receiver_check_failed;
  __ mov(ecx, Operand(esp, eax, times_pointer_size, kPCOnStackSize));
  __ Push(eax);
  CompatibleReceiverCheck(masm, ecx, ebx, edx, eax, &receiver_check_failed);
  __ Pop(eax);
  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ mov(edx, FieldOperand(ebx, FunctionTemplateInfo::kCallCodeOffset));
  __ mov(edx, FieldOperand(edx, CallHandlerInfo::kFastHandlerOffset));
  __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(edx);

  // Compatible receiver check failed: pop return address, arguments and
  // receiver and throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  __ Pop(eax);
  __ PopReturnAddressTo(ebx);
  // Drop the arguments plus the receiver: eax * 4 + 4 bytes.
  __ lea(eax, Operand(eax, times_pointer_size, 1 * kPointerSize));
  __ add(esp, eax);
  __ PushReturnAddressFrom(ebx);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
  }
}
3100
// Shared implementation of on-stack replacement: compiles the current frame's
// function for OSR and, if code is produced, redirects the return address to
// the OSR entry point of the new code. has_handler_frame indicates that a
// handler frame sits between here and the JavaScript frame (bytecode OSR).
static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
                                              bool has_handler_frame) {
  // Lookup the function in the JavaScript frame.
  if (has_handler_frame) {
    __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
    __ mov(eax, Operand(eax, JavaScriptFrameConstants::kFunctionOffset));
  } else {
    __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }

  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(eax);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  Label skip;
  // If the code object is null, just return to the caller.
  __ cmp(eax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Drop any potential handler frame that might be sitting on top of the
  // actual JavaScript frame. This is the case when OSR is triggered from
  // bytecode.
  if (has_handler_frame) {
    __ leave();
  }

  // Load deoptimization data from the code object.
  __ mov(ebx, Operand(eax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ mov(ebx, Operand(ebx, FixedArray::OffsetOfElementAt(
                               DeoptimizationInputData::kOsrPcOffsetIndex) -
                               kHeapObjectTag));
  __ SmiUntag(ebx);

  // Compute the target address = code_obj + header_size + osr_offset
  __ lea(eax, Operand(eax, ebx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ mov(Operand(esp, 0), eax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}
3150
// OSR entry when triggered directly from an optimized/full-codegen frame
// (no handler frame on top).
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, false);
}
3154
// OSR entry when triggered from bytecode; a handler frame sits on top of the
// JavaScript frame and must be dropped before the replacement.
void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, true);
}
3158
3159 #undef __
3160 } // namespace internal
3161 } // namespace v8
3162
3163 #endif // V8_TARGET_ARCH_X87
3164