// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_IA32

#include "src/api/api-arguments.h"
#include "src/base/bits-iterator.h"
#include "src/base/iterator.h"
#include "src/codegen/code-factory.h"
// For interpreter_entry_return_pc_offset. TODO(jkummerow): Drop.
#include "src/codegen/macro-assembler-inl.h"
#include "src/codegen/register-configuration.h"
#include "src/debug/debug.h"
#include "src/deoptimizer/deoptimizer.h"
#include "src/execution/frame-constants.h"
#include "src/execution/frames.h"
#include "src/heap/heap-inl.h"
#include "src/logging/counters.h"
#include "src/objects/cell.h"
#include "src/objects/foreign.h"
#include "src/objects/heap-number.h"
#include "src/objects/js-generator.h"
#include "src/objects/objects-inl.h"
#include "src/objects/smi.h"
#include "src/wasm/wasm-linkage.h"
#include "src/wasm/wasm-objects.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address) {
  __ Move(kJavaScriptCallExtraArg1Register,
          Immediate(ExternalReference::Create(address)));
  __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
          RelocInfo::CODE_TARGET);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- eax : actual argument count
  //  -- edx : new target (preserved for callee)
  //  -- edi : target function (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push a copy of the target function, the new target and the actual
    // argument count.
    __ push(kJavaScriptCallTargetRegister);
    __ push(kJavaScriptCallNewTargetRegister);
    __ SmiTag(kJavaScriptCallArgCountRegister);
    __ push(kJavaScriptCallArgCountRegister);
    // Function is also the parameter to the runtime call.
    __ push(kJavaScriptCallTargetRegister);

    __ CallRuntime(function_id, 1);
    __ mov(ecx, eax);

    // Restore target function, new target and actual argument count.
    __ pop(kJavaScriptCallArgCountRegister);
    __ SmiUntag(kJavaScriptCallArgCountRegister);
    __ pop(kJavaScriptCallNewTargetRegister);
    __ pop(kJavaScriptCallTargetRegister);
  }

  static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
  __ JumpCodeObject(ecx);
}
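
// Note on the SmiTag/SmiUntag pair above: on ia32, kSmiTagSize == 1 and
// kSmiTag == 0, so tagging is a single left shift (the value 5 becomes the
// Smi 0b1010) and untagging is an arithmetic right shift. Tagging the
// argument count before pushing it means the GC sees a valid Smi in that
// stack slot rather than a raw integer that could be mistaken for a heap
// pointer.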

namespace {

void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax: number of arguments
  //  -- edi: constructor function
  //  -- edx: new target
  //  -- esi: context
  // -----------------------------------

  Label stack_overflow;

  __ StackOverflowCheck(eax, ecx, &stack_overflow);

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ SmiTag(eax);
    __ push(esi);
    __ push(eax);
    __ SmiUntag(eax);

    // TODO(victorgomes): When the arguments adaptor is completely removed, we
    // should get the formal parameter count and copy the arguments in their
    // correct positions (including any undefined), instead of delaying this
    // to InvokeFunction.

    // Set up pointer to first argument (skip receiver).
    __ lea(esi, Operand(ebp, StandardFrameConstants::kCallerSPOffset +
                                 kSystemPointerSize));
    // Copy arguments to the expression stack.
    __ PushArray(esi, eax, ecx);
    // The receiver for the builtin/api call.
    __ PushRoot(RootIndex::kTheHoleValue);

    // Call the function.
    // eax: number of arguments (untagged)
    // edi: constructor function
    // edx: new target
    // Reload context from the frame.
    __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
    __ InvokeFunction(edi, edx, eax, CALL_FUNCTION);

    // Restore context from the frame.
    __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
    // Restore smi-tagged arguments count from the frame.
    __ mov(edx, Operand(ebp, ConstructFrameConstants::kLengthOffset));
    // Leave construct frame.
  }

  // Remove caller arguments from the stack and return.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ PopReturnAddressTo(ecx);
  __ lea(esp, Operand(esp, edx, times_half_system_pointer_size,
                      1 * kSystemPointerSize));  // 1 ~ receiver
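  // The lea above drops the arguments in one step: edx holds the smi-tagged
  // count (i.e. count << 1, per the STATIC_ASSERT above), and the scale
  // times_half_system_pointer_size (2 on ia32) turns that into
  // count * kSystemPointerSize bytes; the extra kSystemPointerSize skips the
  // receiver slot. For example, with 3 arguments edx holds 6, and
  // esp advances by 6 * 2 + 4 = 16 bytes, i.e. three arguments plus the
  // receiver.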
  __ PushReturnAddressFrom(ecx);
  __ ret(0);

  __ bind(&stack_overflow);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();  // This should be unreachable.
  }
}

}  // namespace

// The construct stub for ES5 constructor functions and ES6 class constructors.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax: number of arguments (untagged)
  //  -- edi: constructor function
  //  -- edx: new target
  //  -- esi: context
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  FrameScope scope(masm, StackFrame::MANUAL);
  // Enter a construct frame.
  __ EnterFrame(StackFrame::CONSTRUCT);

  Label post_instantiation_deopt_entry, not_create_implicit_receiver;

  // Preserve the incoming parameters on the stack.
  __ mov(ecx, eax);
  __ SmiTag(ecx);
  __ Push(esi);
  __ Push(ecx);
  __ Push(edi);
  __ PushRoot(RootIndex::kTheHoleValue);
  __ Push(edx);

  // ----------- S t a t e -------------
  //  --         sp[0*kSystemPointerSize]: new target
  //  --         sp[1*kSystemPointerSize]: padding
  //  -- edi and sp[2*kSystemPointerSize]: constructor function
  //  --         sp[3*kSystemPointerSize]: argument count
  //  --         sp[4*kSystemPointerSize]: context
  // -----------------------------------

  __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kFlagsOffset));
  __ DecodeField<SharedFunctionInfo::FunctionKindBits>(eax);
  __ JumpIfIsInRange(eax, kDefaultDerivedConstructor, kDerivedConstructor, ecx,
                     &not_create_implicit_receiver, Label::kNear);

  // If not a derived class constructor: Allocate the new receiver object.
  __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
                      eax);
  __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject), RelocInfo::CODE_TARGET);
  __ jmp(&post_instantiation_deopt_entry, Label::kNear);

  // Else: use TheHoleValue as receiver for the constructor call.
  __ bind(&not_create_implicit_receiver);
  __ LoadRoot(eax, RootIndex::kTheHoleValue);

  // ----------- S t a t e -------------
  //  --                         eax: implicit receiver
  //  -- Slot 4 / sp[0*kSystemPointerSize]: new target
  //  -- Slot 3 / sp[1*kSystemPointerSize]: padding
  //  -- Slot 2 / sp[2*kSystemPointerSize]: constructor function
  //  -- Slot 1 / sp[3*kSystemPointerSize]: number of arguments (tagged)
  //  -- Slot 0 / sp[4*kSystemPointerSize]: context
  // -----------------------------------
  // Deoptimizer enters here.
  masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
      masm->pc_offset());
  __ bind(&post_instantiation_deopt_entry);

  // Restore new target.
  __ Pop(edx);

  // Push the allocated receiver to the stack.
  __ Push(eax);

  // We need two copies because we may have to return the original one
  // and the calling conventions dictate that the called function pops the
  // receiver. The second copy is pushed after the arguments; we spill it to
  // xmm0 for now, since eax needs to hold the number of arguments before
  // invoking the function.
  __ movd(xmm0, eax);

  // Set up pointer to first argument (skip receiver).
  __ lea(edi, Operand(ebp, StandardFrameConstants::kCallerSPOffset +
                               kSystemPointerSize));

  // Restore argument count.
  __ mov(eax, Operand(ebp, ConstructFrameConstants::kLengthOffset));
  __ SmiUntag(eax);

  // Check if we have enough stack space to push all arguments.
  // Argument count in eax. Clobbers ecx.
  Label stack_overflow;
  __ StackOverflowCheck(eax, ecx, &stack_overflow);

  // TODO(victorgomes): When the arguments adaptor is completely removed, we
  // should get the formal parameter count and copy the arguments in their
  // correct positions (including any undefined), instead of delaying this
  // to InvokeFunction.

  // Copy arguments to the expression stack.
  __ PushArray(edi, eax, ecx);

  // Push implicit receiver.
  __ movd(ecx, xmm0);
  __ Push(ecx);

  // Restore and call the constructor function.
  __ mov(edi, Operand(ebp, ConstructFrameConstants::kConstructorOffset));
  __ InvokeFunction(edi, edx, eax, CALL_FUNCTION);

  // ----------- S t a t e -------------
  //  --                eax: constructor result
  //  -- sp[0*kSystemPointerSize]: implicit receiver
  //  -- sp[1*kSystemPointerSize]: padding
  //  -- sp[2*kSystemPointerSize]: constructor function
  //  -- sp[3*kSystemPointerSize]: number of arguments
  //  -- sp[4*kSystemPointerSize]: context
  // -----------------------------------

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
      masm->pc_offset());

  // If the result is an object (in the ECMA sense), we should get rid
  // of the receiver and use the result; see ECMA-262 section 13.2.2-7
  // on page 74.

  Label check_result, use_receiver, do_throw, leave_and_return;
  // If the result is undefined, we jump out to using the implicit receiver.
  __ JumpIfNotRoot(eax, RootIndex::kUndefinedValue, &check_result,
                   Label::kNear);

  // Throw away the result of the constructor invocation and use the
  // on-stack receiver as the result.
  __ bind(&use_receiver);
  __ mov(eax, Operand(esp, 0 * kSystemPointerSize));
  __ JumpIfRoot(eax, RootIndex::kTheHoleValue, &do_throw);

  __ bind(&leave_and_return);
  // Restore smi-tagged arguments count from the frame.
  __ mov(edx, Operand(ebp, ConstructFrameConstants::kLengthOffset));
  __ LeaveFrame(StackFrame::CONSTRUCT);

  // Remove caller arguments from the stack and return (the same smi-scaled
  // lea trick as in Generate_JSBuiltinsConstructStubHelper above).
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ pop(ecx);
  __ lea(esp, Operand(esp, edx, times_half_system_pointer_size,
                      1 * kSystemPointerSize));  // 1 ~ receiver
  __ push(ecx);
  __ ret(0);

  // Otherwise we do a smi check and fall through to check if the return value
  // is a valid receiver.
  __ bind(&check_result);

  // If the result is a smi, it is *not* an object in the ECMA sense.
  __ JumpIfSmi(eax, &use_receiver, Label::kNear);

  // If the type of the result (stored in its map) is less than
  // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
  __ j(above_equal, &leave_and_return, Label::kNear);
  __ jmp(&use_receiver, Label::kNear);

  __ bind(&do_throw);
  // Restore context from the frame.
  __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
  __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
  // This should be unreachable.
  __ int3();

  __ bind(&stack_overflow);
  // Restore context from the frame.
  __ mov(esi, Operand(ebp, ConstructFrameConstants::kContextOffset));
  __ CallRuntime(Runtime::kThrowStackOverflow);
  // This should be unreachable.
  __ int3();
}

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSBuiltinsConstructStubHelper(masm);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ push(edi);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

namespace {

// Called with the native C calling convention. The corresponding function
// signature is either:
//
//   using JSEntryFunction = GeneratedCode<Address(
//       Address root_register_value, Address new_target, Address target,
//       Address receiver, intptr_t argc, Address** argv)>;
// or
//   using JSEntryFunction = GeneratedCode<Address(
//       Address root_register_value, MicrotaskQueue* microtask_queue)>;
void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
                             Builtins::Name entry_trampoline) {
  Label invoke, handler_entry, exit;
  Label not_outermost_js, not_outermost_js_2;

  {  // NOLINT. Scope block confuses linter.
    NoRootArrayScope uninitialized_root_register(masm);

    // Set up frame.
    __ push(ebp);
    __ mov(ebp, esp);

    // Push marker in two places.
    __ push(Immediate(StackFrame::TypeToMarker(type)));
    // Reserve a slot for the context. It is filled after the root register has
    // been set up.
    __ AllocateStackSpace(kSystemPointerSize);
    // Save callee-saved registers (C calling conventions).
    __ push(edi);
    __ push(esi);
    __ push(ebx);

    // Initialize the root register based on the given Isolate* argument.
    // C calling convention. The first argument is passed on the stack.
    __ mov(kRootRegister,
           Operand(ebp, EntryFrameConstants::kRootRegisterValueOffset));
  }
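
  // At this point the entry frame looks like this (offsets from ebp):
  //   ebp + 4 : return address
  //   ebp + 0 : saved ebp
  //   ebp - 4 : frame-type marker
  //   ebp - 8 : reserved context slot (filled in below)
  // followed by the saved edi, esi and ebx, which is why
  // kOffsetToContextSlot below is -2 * kSystemPointerSize.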

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp = ExternalReference::Create(
      IsolateAddressId::kCEntryFPAddress, masm->isolate());
  __ push(__ ExternalReferenceAsOperand(c_entry_fp, edi));

  // Store the context address in the previously-reserved slot.
  ExternalReference context_address = ExternalReference::Create(
      IsolateAddressId::kContextAddress, masm->isolate());
  __ mov(edi, __ ExternalReferenceAsOperand(context_address, edi));
  static constexpr int kOffsetToContextSlot = -2 * kSystemPointerSize;
  __ mov(Operand(ebp, kOffsetToContextSlot), edi);

  // If this is the outermost JS call, set js_entry_sp value.
  ExternalReference js_entry_sp = ExternalReference::Create(
      IsolateAddressId::kJSEntrySPAddress, masm->isolate());
  __ cmp(__ ExternalReferenceAsOperand(js_entry_sp, edi), Immediate(0));
  __ j(not_equal, &not_outermost_js, Label::kNear);
  __ mov(__ ExternalReferenceAsOperand(js_entry_sp, edi), ebp);
  __ push(Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ jmp(&invoke, Label::kNear);
  __ bind(&not_outermost_js);
  __ push(Immediate(StackFrame::INNER_JSENTRY_FRAME));

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);

  // Store the current pc as the handler offset. It's used later to create the
  // handler table.
  masm->isolate()->builtins()->SetJSEntryHandlerOffset(handler_entry.pos());

  // Caught exception: Store result (exception) in the pending exception
  // field in the JSEnv and return a failure sentinel.
  ExternalReference pending_exception = ExternalReference::Create(
      IsolateAddressId::kPendingExceptionAddress, masm->isolate());
  __ mov(__ ExternalReferenceAsOperand(pending_exception, edi), eax);
  __ Move(eax, masm->isolate()->factory()->exception());
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushStackHandler(edi);

  // Invoke the function by calling through the JS entry trampoline builtin
  // and pop the faked function when we return.
  Handle<Code> trampoline_code =
      masm->isolate()->builtins()->builtin_handle(entry_trampoline);
  __ Call(trampoline_code, RelocInfo::CODE_TARGET);

  // Unlink this frame from the handler chain.
  __ PopStackHandler(edi);

  __ bind(&exit);

  // Check if the current stack frame is marked as the outermost JS frame.
  __ pop(edi);
  __ cmp(edi, Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ j(not_equal, &not_outermost_js_2);
  __ mov(__ ExternalReferenceAsOperand(js_entry_sp, edi), Immediate(0));
  __ bind(&not_outermost_js_2);

  // Restore the top frame descriptor from the stack.
  __ pop(__ ExternalReferenceAsOperand(c_entry_fp, edi));

  // Restore callee-saved registers (C calling conventions).
  __ pop(ebx);
  __ pop(esi);
  __ pop(edi);
  // Remove the frame-type marker and the reserved context slot.
  __ add(esp, Immediate(2 * kSystemPointerSize));

  // Restore frame pointer and return.
  __ pop(ebp);
  __ ret(0);
}

}  // namespace

void Builtins::Generate_JSEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY,
                          Builtins::kJSEntryTrampoline);
}

void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
                          Builtins::kJSConstructEntryTrampoline);
}

void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
  Generate_JSEntryVariant(masm, StackFrame::ENTRY,
                          Builtins::kRunMicrotasksTrampoline);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    const Register scratch1 = edx;
    const Register scratch2 = edi;

    // Set up the context (we need to use the caller context from the
    // isolate).
    ExternalReference context_address = ExternalReference::Create(
        IsolateAddressId::kContextAddress, masm->isolate());
    __ mov(esi, __ ExternalReferenceAsOperand(context_address, scratch1));

    // Load the previous frame pointer (edx) to access C arguments.
    __ mov(scratch1, Operand(ebp, 0));

    // Push the function.
    __ push(Operand(scratch1, EntryFrameConstants::kFunctionArgOffset));

    // Load the number of arguments and set up the pointer to the arguments.
    __ mov(eax, Operand(scratch1, EntryFrameConstants::kArgcOffset));
    __ mov(scratch1, Operand(scratch1, EntryFrameConstants::kArgvOffset));

    // Check if we have enough stack space to push all arguments.
    // Argument count in eax. Clobbers ecx.
    Label enough_stack_space, stack_overflow;
    __ StackOverflowCheck(eax, ecx, &stack_overflow);
    __ jmp(&enough_stack_space);

    __ bind(&stack_overflow);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    // This should be unreachable.
    __ int3();

    __ bind(&enough_stack_space);

    // Copy arguments to the stack in a loop.
    Label loop, entry;
    __ Move(ecx, eax);
    __ jmp(&entry, Label::kNear);
    __ bind(&loop);
    // Push the parameter from argv.
    __ mov(scratch2, Operand(scratch1, ecx, times_system_pointer_size, 0));
    __ push(Operand(scratch2, 0));  // dereference handle
    __ bind(&entry);
    __ dec(ecx);
    __ j(greater_equal, &loop);

    // Load the previous frame pointer to access C arguments.
    __ mov(scratch2, Operand(ebp, 0));

    // Push the receiver onto the stack.
    __ push(Operand(scratch2, EntryFrameConstants::kReceiverArgOffset));

    // Get the new.target and function from the frame.
    __ mov(edx, Operand(scratch2, EntryFrameConstants::kNewTargetArgOffset));
    __ mov(edi, Operand(scratch2, EntryFrameConstants::kFunctionArgOffset));

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? BUILTIN_CODE(masm->isolate(), Construct)
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the internal frame. Notice that this also removes the empty
    // context and the function left on the stack by the code invocation.
  }
  __ ret(0);
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

void Builtins::Generate_RunMicrotasksTrampoline(MacroAssembler* masm) {
  // This expects two C++ function parameters passed by Invoke() in
  // execution.cc.
  //   r1: microtask_queue
  __ mov(RunMicrotasksDescriptor::MicrotaskQueueRegister(),
         Operand(ebp, EntryFrameConstants::kMicrotaskQueueArgOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), RunMicrotasks), RelocInfo::CODE_TARGET);
}

static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
                                          Register sfi_data,
                                          Register scratch1) {
  Label done;

  __ CmpObjectType(sfi_data, INTERPRETER_DATA_TYPE, scratch1);
  __ j(not_equal, &done, Label::kNear);
  __ mov(sfi_data,
         FieldOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));

  __ bind(&done);
}
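
// When the debugger is active, the SharedFunctionInfo's function_data slot
// may hold an InterpreterData (which pairs the BytecodeArray with a debug
// copy of the interpreter entry trampoline) instead of the BytecodeArray
// itself. The helper above unwraps that indirection so callers always end
// up with the BytecodeArray.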

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : the value to pass to the generator
  //  -- edx    : the JSGeneratorObject to resume
  //  -- esp[0] : return address
  // -----------------------------------
  __ AssertGeneratorObject(edx);

  // Store input value into generator object.
  __ mov(FieldOperand(edx, JSGeneratorObject::kInputOrDebugPosOffset), eax);
  __ RecordWriteField(edx, JSGeneratorObject::kInputOrDebugPosOffset, eax, ecx,
                      kDontSaveFPRegs);

  // Load suspended function and context.
  __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference debug_hook =
      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
  __ cmpb(__ ExternalReferenceAsOperand(debug_hook, ecx), Immediate(0));
  __ j(not_equal, &prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ cmp(edx, __ ExternalReferenceAsOperand(debug_suspended_generator, ecx));
  __ j(equal, &prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Check the stack for overflow. We are not trying to catch interruptions
  // (i.e. debug break and preemption) here, so check the "real stack limit".
  Label stack_overflow;
  __ CompareStackLimit(esp, StackLimitKind::kRealStackLimit);
  __ j(below, &stack_overflow);

  // Pop return address.
  __ PopReturnAddressTo(eax);

  // ----------- S t a t e -------------
  //  -- eax    : return address
  //  -- edx    : the JSGeneratorObject to resume
  //  -- edi    : generator function
  //  -- esi    : generator context
  // -----------------------------------

  {
    __ movd(xmm0, ebx);

    // Copy the function arguments from the generator object's register file.
    __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    __ movzx_w(ecx, FieldOperand(
                        ecx, SharedFunctionInfo::kFormalParameterCountOffset));
    __ mov(ebx,
           FieldOperand(edx, JSGeneratorObject::kParametersAndRegistersOffset));
    {
      Label done_loop, loop;
      __ mov(edi, ecx);

      __ bind(&loop);
      __ dec(edi);
      __ j(less, &done_loop);
      __ Push(
          FieldOperand(ebx, edi, times_tagged_size, FixedArray::kHeaderSize));
      __ jmp(&loop);

      __ bind(&done_loop);
    }

    // Push receiver.
    __ Push(FieldOperand(edx, JSGeneratorObject::kReceiverOffset));

    // Restore registers.
    __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
    __ movd(ebx, xmm0);
  }

  // Underlying function needs to have bytecode available.
  if (FLAG_debug_code) {
    __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kFunctionDataOffset));
    __ Push(eax);
    GetSharedFunctionInfoBytecode(masm, ecx, eax);
    __ Pop(eax);
    __ CmpObjectType(ecx, BYTECODE_ARRAY_TYPE, ecx);
    __ Assert(equal, AbortReason::kMissingBytecodeArray);
  }

  // Resume (Ignition/TurboFan) generator object.
  {
    __ PushReturnAddressFrom(eax);
    __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
    __ movzx_w(eax, FieldOperand(
                        eax, SharedFunctionInfo::kFormalParameterCountOffset));
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
    __ mov(ecx, FieldOperand(edi, JSFunction::kCodeOffset));
    __ JumpCodeObject(ecx);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(edx);
    __ Push(edi);
    // Push hole as receiver since we do not use it for stepping.
    __ PushRoot(RootIndex::kTheHoleValue);
    __ CallRuntime(Runtime::kDebugOnFunctionCall);
    __ Pop(edx);
    __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(edx);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(edx);
    __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
  }
  __ jmp(&stepping_prepared);

  __ bind(&stack_overflow);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ int3();  // This should be unreachable.
  }
}

static void ReplaceClosureCodeWithOptimizedCode(MacroAssembler* masm,
                                                Register optimized_code,
                                                Register closure,
                                                Register scratch1,
                                                Register scratch2) {
  // Store the optimized code in the closure.
  __ mov(FieldOperand(closure, JSFunction::kCodeOffset), optimized_code);
  __ mov(scratch1, optimized_code);  // Write barrier clobbers scratch1 below.
  __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
                      kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch1,
                                  Register scratch2) {
  Register params_size = scratch1;
  // Get the size of the formal parameters + receiver (in bytes).
  __ mov(params_size,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ mov(params_size,
         FieldOperand(params_size, BytecodeArray::kParameterSizeOffset));

#ifdef V8_NO_ARGUMENTS_ADAPTOR
  Register actual_params_size = scratch2;
  // Compute the size of the actual parameters + receiver (in bytes).
  __ mov(actual_params_size, Operand(ebp, StandardFrameConstants::kArgCOffset));
  __ lea(actual_params_size,
         Operand(actual_params_size, times_system_pointer_size,
                 kSystemPointerSize));

  // If actual is bigger than formal, then we should use it to free up the
  // stack arguments.
  Label corrected_args_count;
  __ cmp(params_size, actual_params_size);
  __ j(greater_equal, &corrected_args_count, Label::kNear);
  __ mov(params_size, actual_params_size);
  __ bind(&corrected_args_count);
#endif

  // Leave the frame (also dropping the register file).
  __ leave();

  // Drop receiver + arguments.
  Register return_pc = scratch2;
  __ PopReturnAddressTo(return_pc);
  __ add(esp, params_size);
  __ PushReturnAddressFrom(return_pc);
}
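
// In effect the frame is torn down with max(formal, actual) parameter bytes
// dropped: without argument adaptor frames a caller may push more arguments
// than the callee declares, and those extra slots also belong to this frame,
// so popping only the formal count would leave them behind.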

// Tail-call |function_id| if |actual_marker| == |expected_marker|.
static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
                                          Register actual_marker,
                                          OptimizationMarker expected_marker,
                                          Runtime::FunctionId function_id) {
  Label no_match;
  __ cmp(actual_marker, expected_marker);
  __ j(not_equal, &no_match, Label::kNear);
  GenerateTailCallToReturnedCode(masm, function_id);
  __ bind(&no_match);
}

static void TailCallOptimizedCodeSlot(MacroAssembler* masm,
                                      Register optimized_code_entry) {
  // ----------- S t a t e -------------
  //  -- eax : actual argument count
  //  -- edx : new target (preserved for callee if needed, and caller)
  //  -- edi : target function (preserved for callee if needed, and caller)
  // -----------------------------------
  DCHECK(!AreAliased(edx, edi, optimized_code_entry));

  Register closure = edi;
  __ movd(xmm0, eax);
  __ movd(xmm1, edx);
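  // ia32 is register-poor, so the low halves of xmm0/xmm1 serve as spill
  // slots here: eax and edx must survive the code-validity checks below
  // (which clobber both), and spilling via movd avoids extra stack traffic
  // while keeping ecx free for the tail call.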

  Label heal_optimized_code_slot;

  // If the optimized code is cleared, go to runtime to update the optimization
  // marker field.
  __ LoadWeakValue(optimized_code_entry, &heal_optimized_code_slot);

  // Check if the optimized code is marked for deopt. If it is, bail out to
  // the heal-optimized-code-slot label.
  __ mov(eax,
         FieldOperand(optimized_code_entry, Code::kCodeDataContainerOffset));
  __ test(FieldOperand(eax, CodeDataContainer::kKindSpecificFlagsOffset),
          Immediate(1 << Code::kMarkedForDeoptimizationBit));
  __ j(not_zero, &heal_optimized_code_slot);

  // Optimized code is good, get it into the closure and link the closure
  // into the optimized functions list, then tail call the optimized code.
  ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure, edx,
                                      eax);
  static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
  __ LoadCodeObjectEntry(ecx, optimized_code_entry);
  __ movd(edx, xmm1);
  __ movd(eax, xmm0);
  __ jmp(ecx);

  // The optimized code slot contains deoptimized code, or the code is cleared
  // and the optimized code marker hasn't been updated. Evict the code, update
  // the marker and re-enter the closure's code.
  __ bind(&heal_optimized_code_slot);
  __ movd(edx, xmm1);
  __ movd(eax, xmm0);
  GenerateTailCallToReturnedCode(masm, Runtime::kHealOptimizedCodeSlot);
}

static void MaybeOptimizeCode(MacroAssembler* masm,
                              Register optimization_marker) {
  // ----------- S t a t e -------------
  //  -- eax : actual argument count
  //  -- edx : new target (preserved for callee if needed, and caller)
  //  -- edi : target function (preserved for callee if needed, and caller)
  //  -- optimization_marker : a Smi containing a non-zero optimization marker.
  // -----------------------------------
  DCHECK(!AreAliased(edx, edi, optimization_marker));

  // TODO(v8:8394): The logging of first execution will break if
  // feedback vectors are not allocated. We need to find a different way of
  // logging these events if required.
  TailCallRuntimeIfMarkerEquals(masm, optimization_marker,
                                OptimizationMarker::kLogFirstExecution,
                                Runtime::kFunctionFirstExecution);
  TailCallRuntimeIfMarkerEquals(masm, optimization_marker,
                                OptimizationMarker::kCompileOptimized,
                                Runtime::kCompileOptimized_NotConcurrent);
  TailCallRuntimeIfMarkerEquals(masm, optimization_marker,
                                OptimizationMarker::kCompileOptimizedConcurrent,
                                Runtime::kCompileOptimized_Concurrent);

  // The marker should be one of LogFirstExecution / CompileOptimized /
  // CompileOptimizedConcurrent. InOptimizationQueue and None shouldn't reach
  // here.
  if (FLAG_debug_code) {
    __ int3();
  }
}

// Advance the current bytecode offset. This simulates what all bytecode
// handlers do upon completion of the underlying operation. Will bail out to a
// label if the bytecode (without prefix) is a return bytecode. Will not advance
// the bytecode offset if the current bytecode is a JumpLoop, instead just
// re-executing the JumpLoop to jump to the correct bytecode.
static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
                                          Register bytecode_array,
                                          Register bytecode_offset,
                                          Register scratch1, Register scratch2,
                                          Register scratch3, Label* if_return) {
  Register bytecode_size_table = scratch1;
  Register bytecode = scratch2;

  // The bytecode offset value will be increased by one in wide and extra wide
  // cases. In the case of having a wide or extra wide JumpLoop bytecode, we
  // will restore the original bytecode. In order to simplify the code, we keep
  // a backup of it.
  Register original_bytecode_offset = scratch3;
  DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
                     bytecode, original_bytecode_offset));
  __ Move(bytecode_size_table,
          Immediate(ExternalReference::bytecode_size_table_address()));

  // Load the current bytecode.
  __ movzx_b(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
  __ Move(original_bytecode_offset, bytecode_offset);

  // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
  Label process_bytecode, extra_wide;
  STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
  STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
  STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
  STATIC_ASSERT(3 ==
                static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
  __ cmp(bytecode, Immediate(0x3));
  __ j(above, &process_bytecode, Label::kNear);
  // The code to load the next bytecode is common to both wide and extra wide.
  // We can hoist it up here. The inc has to happen before the test since it
  // modifies the ZF flag.
  __ inc(bytecode_offset);
  __ test(bytecode, Immediate(0x1));
  __ movzx_b(bytecode, Operand(bytecode_array, bytecode_offset, times_1, 0));
  __ j(not_equal, &extra_wide, Label::kNear);

  // Load the next bytecode and update table to the wide scaled table.
  __ add(bytecode_size_table,
         Immediate(kIntSize * interpreter::Bytecodes::kBytecodeCount));
  __ jmp(&process_bytecode, Label::kNear);

  __ bind(&extra_wide);
  // Update table to the extra wide scaled table.
  __ add(bytecode_size_table,
         Immediate(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));
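  // The size table is laid out as three consecutive arrays of
  // Bytecodes::kBytecodeCount int entries: sizes for single-width, then
  // wide, then extra-wide operands. The adds above simply re-base the table
  // pointer to the matching array, so the lookup below behaves roughly like
  //   size = table[scale_index * kBytecodeCount + bytecode];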

  __ bind(&process_bytecode);

// Bail out to the return label if this is a return bytecode.
#define JUMP_IF_EQUAL(NAME)                                            \
  __ cmp(bytecode,                                                     \
         Immediate(static_cast<int>(interpreter::Bytecode::k##NAME))); \
  __ j(equal, if_return);
  RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
#undef JUMP_IF_EQUAL
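  // RETURN_BYTECODE_LIST enumerates every bytecode that ends dispatch (e.g.
  // Return), so the macro expands to one compare-and-jump per entry; any hit
  // transfers control to the caller-provided if_return label.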

  // If this is a JumpLoop, re-execute it to perform the jump to the beginning
  // of the loop.
  Label end, not_jump_loop;
  __ cmp(bytecode,
         Immediate(static_cast<int>(interpreter::Bytecode::kJumpLoop)));
  __ j(not_equal, &not_jump_loop, Label::kNear);
  // If this is a wide or extra wide JumpLoop, we need to restore the original
  // bytecode_offset since we might have increased it to skip the wide /
  // extra-wide prefix bytecode.
  __ Move(bytecode_offset, original_bytecode_offset);
  __ jmp(&end, Label::kNear);

  __ bind(&not_jump_loop);
  // Otherwise, load the size of the current bytecode and advance the offset.
  __ add(bytecode_offset,
         Operand(bytecode_size_table, bytecode, times_int_size, 0));

  __ bind(&end);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.
//
// The live registers are:
//   o eax: actual argument count (not including the receiver)
//   o edi: the JS function object being called
//   o edx: the incoming new target or generator object
//   o esi: our context
//   o ebp: the caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  Register closure = edi;

  __ movd(xmm0, eax);  // Spill actual argument count.

  // The bytecode array could have been flushed from the shared function info;
  // if so, call into CompileLazy.
  Label compile_lazy;
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kFunctionDataOffset));
  GetSharedFunctionInfoBytecode(masm, ecx, eax);
  __ CmpObjectType(ecx, BYTECODE_ARRAY_TYPE, eax);
  __ j(not_equal, &compile_lazy);

  Register feedback_vector = ecx;
  Label push_stack_frame;
  // Load the feedback vector and check if it is valid. If valid, check for
  // optimized code and update the invocation count. Otherwise, set up the
  // stack frame.
  __ mov(feedback_vector,
         FieldOperand(closure, JSFunction::kFeedbackCellOffset));
  __ mov(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
  __ mov(eax, FieldOperand(feedback_vector, HeapObject::kMapOffset));
  __ CmpInstanceType(eax, FEEDBACK_VECTOR_TYPE);
  __ j(not_equal, &push_stack_frame);

  // Load the optimization state from the feedback vector and reuse the
  // register.
  Register optimization_state = ecx;
  // Save feedback_vector; we may need it if we have to load the optimized
  // code slot entry.
  __ movd(xmm1, feedback_vector);
  __ mov(optimization_state,
         FieldOperand(feedback_vector, FeedbackVector::kFlagsOffset));

  // Check if there is optimized code or an optimization marker that needs to
  // be processed.
  Label has_optimized_code_or_marker;
  __ test(
      optimization_state,
      Immediate(FeedbackVector::kHasOptimizedCodeOrCompileOptimizedMarkerMask));
  __ j(not_zero, &has_optimized_code_or_marker);

  Label not_optimized;
  __ bind(&not_optimized);

  // Load the feedback vector and increment the invocation count.
  __ mov(feedback_vector,
         FieldOperand(closure, JSFunction::kFeedbackCellOffset));
  __ mov(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
  __ inc(FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set
  // up the frame (that is done below).
  __ bind(&push_stack_frame);
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(kContextRegister);               // Callee's context.
  __ push(kJavaScriptCallTargetRegister);  // Callee's JS function.
  __ movd(kJavaScriptCallArgCountRegister, xmm0);
  __ push(kJavaScriptCallArgCountRegister);  // Actual argument count.

  // Get the bytecode array from the function object and load it into
  // kInterpreterBytecodeArrayRegister.
  __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(kInterpreterBytecodeArrayRegister,
         FieldOperand(eax, SharedFunctionInfo::kFunctionDataOffset));
  GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, eax);

  // Check that the function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     eax);
    __ Assert(
        equal,
        AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Reset the code age and the OSR arming. The OSR field and
  // BytecodeAgeOffset are 8-bit fields next to each other, so we can
  // optimize by writing a single 16-bit value. These static asserts guard
  // that this assumption is valid.
  STATIC_ASSERT(BytecodeArray::kBytecodeAgeOffset ==
                BytecodeArray::kOsrNestingLevelOffset + kCharSize);
  STATIC_ASSERT(BytecodeArray::kNoAgeBytecodeAge == 0);
  __ mov_w(FieldOperand(kInterpreterBytecodeArrayRegister,
                        BytecodeArray::kOsrNestingLevelOffset),
           Immediate(0));

  // Push bytecode array.
  __ push(kInterpreterBytecodeArrayRegister);
  // Push Smi tagged initial bytecode array offset.
  __ push(Immediate(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag)));
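  // Bytecode offsets are expressed relative to the BytecodeArray pointer with
  // the heap-object tag stripped, so kHeaderSize - kHeapObjectTag is the
  // offset of the very first bytecode; the same constant seeds
  // kInterpreterBytecodeOffsetRegister below.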

  // Allocate the local and temporary register file on the stack.
  Label stack_overflow;
  {
    // Load the frame size from the BytecodeArray object.
    Register frame_size = ecx;
    __ mov(frame_size, FieldOperand(kInterpreterBytecodeArrayRegister,
                                    BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    __ mov(eax, esp);
    __ sub(eax, frame_size);
    __ CompareStackLimit(eax, StackLimitKind::kRealStackLimit);
    __ j(below, &stack_overflow);

    // If ok, push undefined as the initial value for all register file
    // entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
    __ jmp(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(kInterpreterAccumulatorRegister);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ sub(frame_size, Immediate(kSystemPointerSize));
    __ j(greater_equal, &loop_header);
  }

  // If the bytecode array has a valid incoming new target or generator object
  // register, initialize it with the incoming value, which was passed in edx.
  Label no_incoming_new_target_or_generator_register;
  __ mov(ecx, FieldOperand(
                  kInterpreterBytecodeArrayRegister,
                  BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
  __ test(ecx, ecx);
  __ j(zero, &no_incoming_new_target_or_generator_register);
  __ mov(Operand(ebp, ecx, times_system_pointer_size, 0), edx);
  __ bind(&no_incoming_new_target_or_generator_register);

  // Perform interrupt stack check.
  // TODO(solanes): Merge with the real stack limit check above.
  Label stack_check_interrupt, after_stack_check_interrupt;
  __ CompareStackLimit(esp, StackLimitKind::kInterruptStackLimit);
  __ j(below, &stack_check_interrupt);
  __ bind(&after_stack_check_interrupt);

  // The accumulator is already loaded with undefined.

  __ mov(kInterpreterBytecodeOffsetRegister,
         Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Load the dispatch table into a register and dispatch to the bytecode
  // handler at the current bytecode offset.
  Label do_dispatch;
  __ bind(&do_dispatch);
  __ Move(kInterpreterDispatchTableRegister,
          Immediate(ExternalReference::interpreter_dispatch_table_address(
              masm->isolate())));
  __ movzx_b(ecx, Operand(kInterpreterBytecodeArrayRegister,
                          kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ mov(kJavaScriptCallCodeStartRegister,
         Operand(kInterpreterDispatchTableRegister, ecx,
                 times_system_pointer_size, 0));
  __ call(kJavaScriptCallCodeStartRegister);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
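  // Recording this pc offset is what the interpreter_entry_return_pc_offset
  // include comment at the top of the file refers to: it lets the rest of
  // the system (e.g. the deoptimizer, when it materializes interpreted
  // frames) recognize return addresses that point back into this trampoline.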

  // Any returns to the entry trampoline are either due to the return bytecode
  // or the interpreter tail calling a builtin and then a dispatch.

  // Get bytecode array and bytecode offset from the stack frame.
  __ mov(kInterpreterBytecodeArrayRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Either return, or advance to the next bytecode and dispatch.
  Label do_return;
  __ Push(eax);
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, ecx,
                                kInterpreterDispatchTableRegister, eax,
                                &do_return);
  __ Pop(eax);
  __ jmp(&do_dispatch);

  __ bind(&do_return);
  __ Pop(eax);
  // The return value is in eax.
  LeaveInterpreterFrame(masm, edx, ecx);
  __ ret(0);

  __ bind(&stack_check_interrupt);
  // Modify the bytecode offset in the stack to be kFunctionEntryBytecodeOffset
  // for the call to the StackGuard.
  __ mov(Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp),
         Immediate(Smi::FromInt(BytecodeArray::kHeaderSize - kHeapObjectTag +
                                kFunctionEntryBytecodeOffset)));
  __ CallRuntime(Runtime::kStackGuard);

  // After the call, restore the bytecode array, bytecode offset and
  // accumulator registers again. Also, restore the bytecode offset in the
  // stack to its previous value.
  __ mov(kInterpreterBytecodeArrayRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ mov(kInterpreterBytecodeOffsetRegister,
         Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);

  // It's ok to clobber kInterpreterBytecodeOffsetRegister since we are setting
  // it again after continuing.
  __ SmiTag(kInterpreterBytecodeOffsetRegister);
  __ mov(Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp),
         kInterpreterBytecodeOffsetRegister);

  __ jmp(&after_stack_check_interrupt);

  __ bind(&has_optimized_code_or_marker);
  Label maybe_has_optimized_code;
  // Restore actual argument count.
  __ movd(eax, xmm0);

  // Check if optimized code is available.
  __ test(
      optimization_state,
      Immediate(FeedbackVector::kHasCompileOptimizedOrLogFirstExecutionMarker));
  __ j(zero, &maybe_has_optimized_code);

  Register optimization_marker = optimization_state;
  __ DecodeField<FeedbackVector::OptimizationMarkerBits>(optimization_marker);
  MaybeOptimizeCode(masm, optimization_marker);
  // Fall through if there's no runnable optimized code.
  __ jmp(&not_optimized);

  __ bind(&maybe_has_optimized_code);
  Register optimized_code_entry = optimization_marker;
  __ movd(optimized_code_entry, xmm1);
  __ mov(
      optimized_code_entry,
      FieldOperand(feedback_vector, FeedbackVector::kMaybeOptimizedCodeOffset));
  TailCallOptimizedCodeSlot(masm, optimized_code_entry);

  __ bind(&compile_lazy);
  // Restore actual argument count.
  __ movd(eax, xmm0);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);

  __ bind(&stack_overflow);
  __ CallRuntime(Runtime::kThrowStackOverflow);
  __ int3();  // Should not return.
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register array_limit,
                                         Register start_address) {
  // ----------- S t a t e -------------
  //  -- start_address : Pointer to the last argument in the args array.
  //  -- array_limit : Pointer to one before the first argument in the
  //                   args array.
  // -----------------------------------
  Label loop_header, loop_check;
  __ jmp(&loop_check);
  __ bind(&loop_header);
  __ Push(Operand(array_limit, 0));
  __ bind(&loop_check);
  __ add(array_limit, Immediate(kSystemPointerSize));
  __ cmp(array_limit, start_address);
  __ j(below_equal, &loop_header, Label::kNear);
}
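
// The loop above pushes from array_limit + kSystemPointerSize up to
// start_address inclusive, i.e. from the first argument to the last, so the
// last argument ends up on top of the (downward-growing) machine stack.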
1197 
1198 // static
Generate_InterpreterPushArgsThenCallImpl(MacroAssembler * masm,ConvertReceiverMode receiver_mode,InterpreterPushArgsMode mode)1199 void Builtins::Generate_InterpreterPushArgsThenCallImpl(
1200     MacroAssembler* masm, ConvertReceiverMode receiver_mode,
1201     InterpreterPushArgsMode mode) {
1202   DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
1203   // ----------- S t a t e -------------
1204   //  -- eax : the number of arguments (not including the receiver)
1205   //  -- ecx : the address of the first argument to be pushed. Subsequent
1206   //           arguments should be consecutive above this, in the same order as
1207   //           they are to be pushed onto the stack.
1208   //  -- edi : the target to call (can be any Object).
1209   // -----------------------------------
1210 
1211   const Register scratch = edx;
1212   const Register argv = ecx;
1213 
1214   Label stack_overflow;
1215   if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1216     // The spread argument should not be pushed.
1217     __ dec(eax);
1218   }
1219 
1220   // Add a stack check before pushing the arguments.
1221   __ StackOverflowCheck(eax, scratch, &stack_overflow, true);
1222 
1223   __ movd(xmm0, eax);  // Spill number of arguments.
1224 
1225   // Compute the expected number of arguments.
1226   __ mov(scratch, eax);
1227 
1228   // Pop return address to allow tail-call after pushing arguments.
1229   __ PopReturnAddressTo(eax);
1230 
1231   if (receiver_mode != ConvertReceiverMode::kNullOrUndefined) {
1232     __ add(scratch, Immediate(1));  // Add one for receiver.
1233   }
1234 
1235   // Find the address of the last argument.
1236   __ shl(scratch, kSystemPointerSizeLog2);
1237   __ neg(scratch);
1238   __ add(scratch, argv);

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ movd(xmm1, scratch);
    Generate_InterpreterPushArgs(masm, scratch, argv);
    // Pass the spread in the register ecx.
    __ movd(ecx, xmm1);
    __ mov(ecx, Operand(ecx, 0));
  } else {
    Generate_InterpreterPushArgs(masm, scratch, argv);
  }

  // Push "undefined" as the receiver arg if we need to.
  if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
    __ PushRoot(RootIndex::kUndefinedValue);
  }

  __ PushReturnAddressFrom(eax);
  __ movd(eax, xmm0);  // Restore number of arguments.

  // Call the target.
  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);

    // This should be unreachable.
    __ int3();
  }
}

namespace {

// This function modifies start_addr, and only reads the contents of the
// num_args register. scratch1 and scratch2 are used as temporary registers.
void Generate_InterpreterPushZeroAndArgsAndReturnAddress(
    MacroAssembler* masm, Register num_args, Register start_addr,
    Register scratch1, Register scratch2, int num_slots_to_move,
    Label* stack_overflow) {
  // We have to move the return address and the temporary registers above it
  // before we can copy arguments onto the stack. To achieve this:
  // Step 1: Grow the stack by num_args + 1 slots (one extra for the receiver).
  // Step 2: Move the return address and the values around it to the top of
  //         the stack.
  // Step 3: Copy the arguments into the correct locations.
  //  current stack    =====>    required stack layout
  // |             |            | return addr   | (2) <-- esp (1)
  // |             |            | addtl. slot   |
  // |             |            | arg N         | (3)
  // |             |            | ....          |
  // |             |            | arg 1         |
  // | return addr | <-- esp    | arg 0         |
  // | addtl. slot |            | receiver slot |
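  //
  // Illustrative walk-through (assumed values, not from the source): with
  // num_args == 2 and num_slots_to_move == 1, step 1 grows the stack by
  // three slots, step 2 copies the return address plus one spill slot to the
  // new top, and step 3 zeroes the receiver slot and copies the two
  // arguments above it.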

  // Check for stack overflow before we adjust the stack pointer.
  __ StackOverflowCheck(num_args, scratch1, stack_overflow, true);

  // Step 1 - Update the stack pointer.

  __ lea(scratch1,
         Operand(num_args, times_system_pointer_size, kSystemPointerSize));
  __ AllocateStackSpace(scratch1);

  // Step 2 - Move the return address and the slots around it to their new
  // locations. Copy from top to bottom so we never overwrite a slot before it
  // has been read (the source and destination overlap when num_args is 0 or
  // 1). The receiver slot always provides at least one slot of slack, so no
  // additional overlap checks are required.
  for (int i = 0; i < num_slots_to_move + 1; i++) {
    __ mov(scratch1, Operand(esp, num_args, times_system_pointer_size,
                             (i + 1) * kSystemPointerSize));
    __ mov(Operand(esp, i * kSystemPointerSize), scratch1);
  }

  // Step 3 - Copy the arguments into the correct locations.
  // The slot meant for the receiver currently contains the return address.
  // Reset it so that we will not incorrectly interpret the return address as
  // an object.
  __ mov(Operand(esp, (num_slots_to_move + 1) * kSystemPointerSize),
         Immediate(0));
  __ mov(scratch1, Immediate(0));

  Label loop_header, loop_check;
  __ jmp(&loop_check);
  __ bind(&loop_header);
  __ mov(scratch2, Operand(start_addr, 0));
  __ mov(Operand(esp, scratch1, times_system_pointer_size,
                 (num_slots_to_move + 1) * kSystemPointerSize),
         scratch2);
  __ sub(start_addr, Immediate(kSystemPointerSize));
  __ bind(&loop_check);
  __ inc(scratch1);
  __ cmp(scratch1, eax);
  __ j(less_equal, &loop_header, Label::kNear);
}

}  // anonymous namespace

// static
void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
    MacroAssembler* masm, InterpreterPushArgsMode mode) {
  // ----------- S t a t e -------------
  //  -- eax     : the number of arguments (not including the receiver)
  //  -- ecx     : the address of the first argument to be pushed. Subsequent
  //               arguments should be consecutive above this, in the same order
  //               as they are to be pushed onto the stack.
  //  -- esp[0]  : return address
  //  -- esp[4]  : allocation site feedback (if available or undefined)
  //  -- esp[8]  : the new target
  //  -- esp[12] : the constructor
  // -----------------------------------
  Label stack_overflow;

  if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    // The spread argument should not be pushed.
    __ dec(eax);
  }

  // Push the arguments and move the return address and the stack spill slots
  // to the top of the stack. The eax register is read-only; the ecx register
  // will be modified. edx and edi are used as scratch registers.
  Generate_InterpreterPushZeroAndArgsAndReturnAddress(
      masm, eax, ecx, edx, edi,
      InterpreterPushArgsThenConstructDescriptor::kStackArgumentsCount,
      &stack_overflow);

  // Call the appropriate constructor. eax and ecx already contain the
  // intended values; the remaining registers still need to be initialized
  // from the stack.

  if (mode == InterpreterPushArgsMode::kArrayFunction) {
    // Tail call to the array construct stub (still in the caller context at
    // this point).

    __ movd(xmm0, eax);  // Spill number of arguments.
    __ PopReturnAddressTo(eax);
    __ Pop(kJavaScriptCallExtraArg1Register);
    __ Pop(kJavaScriptCallNewTargetRegister);
    __ Pop(kJavaScriptCallTargetRegister);
    __ PushReturnAddressFrom(eax);

    __ AssertFunction(kJavaScriptCallTargetRegister);
    __ AssertUndefinedOrAllocationSite(kJavaScriptCallExtraArg1Register, eax);

    __ movd(eax, xmm0);  // Reload number of arguments.
    __ Jump(BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl),
            RelocInfo::CODE_TARGET);
  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
    __ movd(xmm0, eax);  // Spill number of arguments.
    __ PopReturnAddressTo(eax);
    __ Drop(1);  // The allocation site is unused.
    __ Pop(kJavaScriptCallNewTargetRegister);
    __ Pop(kJavaScriptCallTargetRegister);
    // Pass the spread in the register ecx, overwriting the address it
    // currently holds.
    __ mov(ecx, Operand(ecx, 0));
    __ PushReturnAddressFrom(eax);
    __ movd(eax, xmm0);  // Reload number of arguments.
    __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
    __ PopReturnAddressTo(ecx);
    __ Drop(1);  // The allocation site is unused.
    __ Pop(kJavaScriptCallNewTargetRegister);
    __ Pop(kJavaScriptCallTargetRegister);
    __ PushReturnAddressFrom(ecx);

    __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
  __ int3();
}

static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Label builtin_trampoline, trampoline_loaded;
  Smi interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::zero());

  static constexpr Register scratch = ecx;

  // If the SFI function_data is an InterpreterData, the function will have a
  // custom copy of the interpreter entry trampoline for profiling. If so,
  // get the custom trampoline, otherwise grab the entry address of the global
  // trampoline.
  __ mov(scratch, Operand(ebp, StandardFrameConstants::kFunctionOffset));
  __ mov(scratch, FieldOperand(scratch, JSFunction::kSharedFunctionInfoOffset));
  __ mov(scratch,
         FieldOperand(scratch, SharedFunctionInfo::kFunctionDataOffset));
  __ Push(eax);
  __ CmpObjectType(scratch, INTERPRETER_DATA_TYPE, eax);
  __ j(not_equal, &builtin_trampoline, Label::kNear);

  __ mov(scratch,
         FieldOperand(scratch, InterpreterData::kInterpreterTrampolineOffset));
  __ add(scratch, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(&trampoline_loaded, Label::kNear);

  __ bind(&builtin_trampoline);
  __ mov(scratch,
         __ ExternalReferenceAsOperand(
             ExternalReference::
                 address_of_interpreter_entry_trampoline_instruction_start(
                     masm->isolate()),
             scratch));

  __ bind(&trampoline_loaded);
  __ Pop(eax);
  __ add(scratch, Immediate(interpreter_entry_return_pc_offset.value()));
  __ push(scratch);

  // Initialize the dispatch table register.
  __ Move(kInterpreterDispatchTableRegister,
          Immediate(ExternalReference::interpreter_dispatch_table_address(
              masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ mov(kInterpreterBytecodeArrayRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ AssertNotSmi(kInterpreterBytecodeArrayRegister);
    __ CmpObjectType(kInterpreterBytecodeArrayRegister, BYTECODE_ARRAY_TYPE,
                     scratch);
    __ Assert(
        equal,
        AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  if (FLAG_debug_code) {
    Label okay;
    __ cmp(kInterpreterBytecodeOffsetRegister,
           Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
    __ j(greater_equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
  }

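  // The dispatch below is roughly, in C-like pseudocode (illustrative):
  //   handler = dispatch_table[bytecode_array[offset]];
  //   goto handler;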
  // Dispatch to the target bytecode.
  __ movzx_b(scratch, Operand(kInterpreterBytecodeArrayRegister,
                              kInterpreterBytecodeOffsetRegister, times_1, 0));
  __ mov(kJavaScriptCallCodeStartRegister,
         Operand(kInterpreterDispatchTableRegister, scratch,
                 times_system_pointer_size, 0));
  __ jmp(kJavaScriptCallCodeStartRegister);
}

void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Get bytecode array and bytecode offset from the stack frame.
  __ mov(kInterpreterBytecodeArrayRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  Label enter_bytecode, function_entry_bytecode;
  __ cmp(kInterpreterBytecodeOffsetRegister,
         Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag +
                   kFunctionEntryBytecodeOffset));
  __ j(equal, &function_entry_bytecode);

  // Advance to the next bytecode.
  Label if_return;
  __ Push(eax);
  AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
                                kInterpreterBytecodeOffsetRegister, ecx, esi,
                                eax, &if_return);
  __ Pop(eax);

  __ bind(&enter_bytecode);
  // Convert the new bytecode offset to a Smi and save it in the stack frame.
  __ mov(ecx, kInterpreterBytecodeOffsetRegister);
  __ SmiTag(ecx);
  __ mov(Operand(ebp, InterpreterFrameConstants::kBytecodeOffsetFromFp), ecx);

  Generate_InterpreterEnterBytecode(masm);

  __ bind(&function_entry_bytecode);
  // If the code deoptimizes during the implicit function entry stack interrupt
  // check, it will have a bailout ID of kFunctionEntryBytecodeOffset, which is
  // not a valid bytecode offset. Detect this case and advance to the first
  // actual bytecode.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Immediate(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ jmp(&enter_bytecode);

  // We should never take the if_return path.
  __ bind(&if_return);
  // No need to pop eax here since we will be aborting anyway.
  __ Abort(AbortReason::kInvalidBytecodeAdvance);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}

namespace {
void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
                                      bool java_script_builtin,
                                      bool with_result) {
  const RegisterConfiguration* config(RegisterConfiguration::Default());
  int allocatable_register_count = config->num_allocatable_general_registers();
  if (with_result) {
    if (java_script_builtin) {
      // xmm0 is not included in the allocatable registers.
      __ movd(xmm0, eax);
    } else {
      // Overwrite the hole inserted by the deoptimizer with the return value
      // from the LAZY deopt point.
      __ mov(
          Operand(esp, config->num_allocatable_general_registers() *
                               kSystemPointerSize +
                           BuiltinContinuationFrameConstants::kFixedFrameSize),
          eax);
    }
  }

  // Replace the builtin index Smi on the stack with the start address of the
  // builtin loaded from the builtins table. The ret below will return to this
  // address.
  int offset_to_builtin_index = allocatable_register_count * kSystemPointerSize;
  __ mov(eax, Operand(esp, offset_to_builtin_index));
  __ LoadEntryFromBuiltinIndex(eax);
  __ mov(Operand(esp, offset_to_builtin_index), eax);
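  // Stack sketch at this point (illustrative): the next N slots hold the N
  // saved allocatable registers (popped below), and the slot above them now
  // holds the builtin's entry address instead of its index Smi.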

  for (int i = allocatable_register_count - 1; i >= 0; --i) {
    int code = config->GetAllocatableGeneralCode(i);
    __ pop(Register::from_code(code));
    if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
      __ SmiUntag(Register::from_code(code));
    }
  }
  if (with_result && java_script_builtin) {
    // Overwrite the hole inserted by the deoptimizer with the return value
    // from the LAZY deopt point. eax contains the arguments count; the return
    // value from LAZY is always the last argument.
    __ movd(Operand(esp, eax, times_system_pointer_size,
                    BuiltinContinuationFrameConstants::kFixedFrameSize),
            xmm0);
  }
  __ mov(
      ebp,
      Operand(esp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
  const int offsetToPC =
      BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp -
      kSystemPointerSize;
  __ pop(Operand(esp, offsetToPC));
  __ Drop(offsetToPC / kSystemPointerSize);
  __ ret(0);
}
}  // namespace

void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, false);
}

void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, false, true);
}

void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, false);
}

void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
    MacroAssembler* masm) {
  Generate_ContinueToBuiltinHelper(masm, true, true);
}

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
    // Tear down internal frame.
  }

  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), eax.code());
  __ mov(eax, Operand(esp, 1 * kSystemPointerSize));
  __ ret(1 * kSystemPointerSize);  // Remove eax.
}

// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax     : argc
  //  -- esp[0]  : return address
  //  -- esp[4]  : receiver
  //  -- esp[8]  : thisArg
  //  -- esp[12] : argArray
  // -----------------------------------

  // 1. Load receiver into xmm0, argArray into edx (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
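  // E.g. (illustrative) for fn.apply(thisArg, arr) this block leaves thisArg
  // in the receiver slot at esp[4], arr in edx, and fn (the original
  // receiver) in edi.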
  {
    Label no_arg_array, no_this_arg;
    StackArgumentsAccessor args(eax);
    // Spill receiver to allow the usage of edi as a scratch register.
    __ movd(xmm0, args[0]);

    __ LoadRoot(edx, RootIndex::kUndefinedValue);
    __ mov(edi, edx);
    __ test(eax, eax);
    __ j(zero, &no_this_arg, Label::kNear);
    {
      __ mov(edi, args[1]);
      __ cmp(eax, Immediate(1));
      __ j(equal, &no_arg_array, Label::kNear);
      __ mov(edx, args[2]);
      __ bind(&no_arg_array);
    }
    __ bind(&no_this_arg);
    __ PopReturnAddressTo(ecx);
    __ lea(esp,
           Operand(esp, eax, times_system_pointer_size, kSystemPointerSize));
    __ Push(edi);
    __ PushReturnAddressFrom(ecx);

    // Restore receiver to edi.
    __ movd(edi, xmm0);
  }

  // ----------- S t a t e -------------
  //  -- edx    : argArray
  //  -- edi    : receiver
  //  -- esp[0] : return address
  //  -- esp[4] : thisArg
  // -----------------------------------

  // 2. We don't need to check explicitly for callable receiver here,
  // since that's the first thing the Call/CallWithArrayLike builtins
  // will do.

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(edx, RootIndex::kNullValue, &no_arguments, Label::kNear);
  __ JumpIfRoot(edx, RootIndex::kUndefinedValue, &no_arguments, Label::kNear);

  // 4a. Apply the receiver to the given argArray.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ Set(eax, 0);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }
}

// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // Stack Layout:
  // esp[0]           : Return address
  // esp[4]           : Argument 0 (receiver: callable to call)
  // esp[8]           : Argument 1
  //  ...
  // esp[4 * n]       : Argument n-1
  // esp[4 * (n + 1)] : Argument n
  // eax contains the number of arguments, n, not counting the receiver.
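  //
  // E.g. (illustrative) fn.call(a, b) arrives here with eax == 2, fn in the
  // receiver slot, and arguments a and b; after the shuffle below eax == 1
  // and fn is invoked with receiver a and argument b.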

  // 1. Get the callable to call (passed as receiver) from the stack.
  {
    StackArgumentsAccessor args(eax);
    __ mov(edi, args.GetReceiverOperand());
  }

  // 2. Save the return address and drop the callable.
  __ PopReturnAddressTo(edx);
  __ Pop(ecx);

  // 3. Make sure we have at least one argument.
  {
    Label done;
    __ test(eax, eax);
    __ j(not_zero, &done, Label::kNear);
    __ PushRoot(RootIndex::kUndefinedValue);
    __ inc(eax);
    __ bind(&done);
  }

  // 4. Push back the return address one slot down on the stack (overwriting
  // the original callable), making the original first argument the new
  // receiver.
  __ PushReturnAddressFrom(edx);
  __ dec(eax);  // One fewer argument (first argument is new receiver).

  // 5. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax     : argc
  //  -- esp[0]  : return address
  //  -- esp[4]  : receiver
  //  -- esp[8]  : target         (if argc >= 1)
  //  -- esp[12] : thisArgument   (if argc >= 2)
  //  -- esp[16] : argumentsList  (if argc == 3)
  // -----------------------------------

  // 1. Load target into edi (if present), argumentsList into edx (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
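  // E.g. (illustrative) Reflect.apply(target, thisArg, argsList) leaves
  // target in edi, argsList in edx, and thisArg in the receiver slot.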
  {
    Label done;
    StackArgumentsAccessor args(eax);
    __ LoadRoot(edi, RootIndex::kUndefinedValue);
    __ mov(edx, edi);
    __ mov(ecx, edi);
    __ cmp(eax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ mov(edi, args[1]);  // target
    __ j(equal, &done, Label::kNear);
    __ mov(ecx, args[2]);  // thisArgument
    __ cmp(eax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ mov(edx, args[3]);  // argumentsList
    __ bind(&done);

    // Spill argumentsList to use edx as a scratch register.
    __ movd(xmm0, edx);

    __ PopReturnAddressTo(edx);
    __ lea(esp,
           Operand(esp, eax, times_system_pointer_size, kSystemPointerSize));
    __ Push(ecx);
    __ PushReturnAddressFrom(edx);

    // Restore argumentsList.
    __ movd(edx, xmm0);
  }

  // ----------- S t a t e -------------
  //  -- edx    : argumentsList
  //  -- edi    : target
  //  -- esp[0] : return address
  //  -- esp[4] : thisArgument
  // -----------------------------------

  // 2. We don't need to check explicitly for callable target here,
  // since that's the first thing the Call/CallWithArrayLike builtins
  // will do.

  // 3. Apply the target to the given argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
          RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax     : argc
  //  -- esp[0]  : return address
  //  -- esp[4]  : receiver
  //  -- esp[8]  : target
  //  -- esp[12] : argumentsList
  //  -- esp[16] : new.target (optional)
  // -----------------------------------

  // 1. Load target into edi (if present), argumentsList into ecx (if present),
  // new.target into edx (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push undefined as
  // the receiver instead.
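  // E.g. (illustrative) Reflect.construct(C, argsList) leaves C in both edi
  // and edx (no explicit new.target), argsList in ecx, and undefined in the
  // receiver slot.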
  {
    Label done;
    StackArgumentsAccessor args(eax);
    __ LoadRoot(edi, RootIndex::kUndefinedValue);
    __ mov(edx, edi);
    __ mov(ecx, edi);
    __ cmp(eax, Immediate(1));
    __ j(below, &done, Label::kNear);
    __ mov(edi, args[1]);  // target
    __ mov(edx, edi);
    __ j(equal, &done, Label::kNear);
    __ mov(ecx, args[2]);  // argumentsList
    __ cmp(eax, Immediate(3));
    __ j(below, &done, Label::kNear);
    __ mov(edx, args[3]);  // new.target
    __ bind(&done);

    // Spill argumentsList to use ecx as a scratch register.
    __ movd(xmm0, ecx);

    __ PopReturnAddressTo(ecx);
    __ lea(esp,
           Operand(esp, eax, times_system_pointer_size, kSystemPointerSize));
    __ PushRoot(RootIndex::kUndefinedValue);
    __ PushReturnAddressFrom(ecx);

    // Restore argumentsList.
    __ movd(ecx, xmm0);
  }

  // ----------- S t a t e -------------
  //  -- ecx    : argumentsList
  //  -- edx    : new.target
  //  -- edi    : target
  //  -- esp[0] : return address
  //  -- esp[4] : receiver (undefined)
  // -----------------------------------

  // 2. We don't need to check explicitly for constructor target here,
  // since that's the first thing the Construct/ConstructWithArrayLike
  // builtins will do.

  // 3. We don't need to check explicitly for constructor new.target here,
  // since that's the second thing the Construct/ConstructWithArrayLike
  // builtins will do.

  // 4. Construct the target with the given new.target and argumentsList.
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
          RelocInfo::CODE_TARGET);
}

static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ push(ebp);
  __ mov(ebp, esp);

  // Store the arguments adaptor context sentinel.
  __ push(Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));

  // Push the function on the stack.
  __ push(edi);

  // Preserve the number of arguments on the stack. Must preserve eax,
  // ebx and ecx because these registers are used when copying the
  // arguments and the receiver.
  STATIC_ASSERT(kSmiTagSize == 1);
  __ lea(edi, Operand(eax, eax, times_1, kSmiTag));
  __ push(edi);
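  // (The lea above is a branch-free SmiTag: with a one-bit Smi tag and a
  // zero tag value, the Smi encoding of eax is simply eax * 2.)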

  __ Push(Immediate(0));  // Padding.
}

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Retrieve the number of arguments from the stack.
  __ mov(edi, Operand(ebp, ArgumentsAdaptorFrameConstants::kLengthOffset));

  // Leave the frame.
  __ leave();

  // Remove caller arguments from the stack.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
  __ PopReturnAddressTo(ecx);
  __ lea(esp, Operand(esp, edi, times_half_system_pointer_size,
                      1 * kSystemPointerSize));  // 1 ~ receiver
  __ PushReturnAddressFrom(ecx);
}

// static
void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
                                               Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- edi    : target
  //  -- esi    : context for the Call / Construct builtin
  //  -- eax    : number of parameters on the stack (not including the receiver)
  //  -- ecx    : len (number of elements to push from args)
  //  -- edx    : new.target (checked to be constructor or undefined)
  //  -- esp[4] : arguments list (a FixedArray)
  //  -- esp[0] : return address.
  // -----------------------------------

  // We need to preserve eax, edx, edi and esi.
  __ movd(xmm0, edx);
  __ movd(xmm1, edi);
  __ movd(xmm2, eax);
  __ movd(xmm3, esi);  // Spill the context.

  const Register kArgumentsList = esi;
  const Register kArgumentsLength = ecx;

  __ PopReturnAddressTo(edx);
  __ pop(kArgumentsList);
  __ PushReturnAddressFrom(edx);

  if (masm->emit_debug_code()) {
    // Allow kArgumentsList to be a FixedArray, or a FixedDoubleArray if
    // kArgumentsLength == 0.
    Label ok, fail;
    __ AssertNotSmi(kArgumentsList);
    __ mov(edx, FieldOperand(kArgumentsList, HeapObject::kMapOffset));
    __ CmpInstanceType(edx, FIXED_ARRAY_TYPE);
    __ j(equal, &ok);
    __ CmpInstanceType(edx, FIXED_DOUBLE_ARRAY_TYPE);
    __ j(not_equal, &fail);
    __ cmp(kArgumentsLength, 0);
    __ j(equal, &ok);
    // Fall through.
    __ bind(&fail);
    __ Abort(AbortReason::kOperandIsNotAFixedArray);

    __ bind(&ok);
  }

  // Check the stack for overflow. We are not trying to catch interruptions
  // (i.e. debug break and preemption) here, so check the "real stack limit".
  Label stack_overflow;
  __ StackOverflowCheck(kArgumentsLength, edx, &stack_overflow);

  __ movd(xmm4, kArgumentsList);  // Spill the arguments list.

  // Move the arguments already in the stack,
  // including the receiver and the return address.
  {
    Label copy, check;
    Register src = edx, current = edi, tmp = esi;
    // Update stack pointer.
    __ mov(src, esp);
    __ lea(tmp, Operand(kArgumentsLength, times_system_pointer_size, 0));
    __ AllocateStackSpace(tmp);
    // Include return address and receiver.
    __ add(eax, Immediate(2));
    __ mov(current, Immediate(0));
    __ jmp(&check);
    // Loop.
    __ bind(&copy);
    __ mov(tmp, Operand(src, current, times_system_pointer_size, 0));
    __ mov(Operand(esp, current, times_system_pointer_size, 0), tmp);
    __ inc(current);
    __ bind(&check);
    __ cmp(current, eax);
    __ j(less, &copy);
    __ lea(edx, Operand(esp, eax, times_system_pointer_size, 0));
  }
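  // Illustrative effect (assuming one stack argument, eax == 1): the loop
  // above moved three slots (return address, receiver, argument) down by len
  // slots, and edx now points at the gap where the unpacked arguments are
  // written next.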

  __ movd(kArgumentsList, xmm4);  // Recover arguments list.

  // Push additional arguments onto the stack.
  {
    __ Move(eax, Immediate(0));
    Label done, push, loop;
    __ bind(&loop);
    __ cmp(eax, kArgumentsLength);
    __ j(equal, &done, Label::kNear);
    // Turn the hole into undefined as we go.
    __ mov(edi, FieldOperand(kArgumentsList, eax, times_tagged_size,
                             FixedArray::kHeaderSize));
    __ CompareRoot(edi, RootIndex::kTheHoleValue);
    __ j(not_equal, &push, Label::kNear);
    __ LoadRoot(edi, RootIndex::kUndefinedValue);
    __ bind(&push);
    __ mov(Operand(edx, 0), edi);
    __ add(edx, Immediate(kSystemPointerSize));
    __ inc(eax);
    __ jmp(&loop);
    __ bind(&done);
  }

  // Restore eax, edi and edx.
  __ movd(esi, xmm3);  // Restore the context.
  __ movd(eax, xmm2);
  __ movd(edi, xmm1);
  __ movd(edx, xmm0);

  // Compute the actual parameter count.
  __ add(eax, kArgumentsLength);

  // Tail-call to the actual Call or Construct builtin.
  __ Jump(code, RelocInfo::CODE_TARGET);

  __ bind(&stack_overflow);
  __ movd(esi, xmm3);  // Restore the context.
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
}

// static
void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
                                                      CallOrConstructMode mode,
                                                      Handle<Code> code) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the target to call (can be any Object)
  //  -- esi : context for the Call / Construct builtin
  //  -- edx : the new target (for [[Construct]] calls)
  //  -- ecx : start index (to support rest parameters)
  // -----------------------------------

  __ movd(xmm0, esi);  // Spill the context.

  Register scratch = esi;

  // Check if new.target has a [[Construct]] internal method.
  if (mode == CallOrConstructMode::kConstruct) {
    Label new_target_constructor, new_target_not_constructor;
    __ JumpIfSmi(edx, &new_target_not_constructor, Label::kNear);
    __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
    __ test_b(FieldOperand(scratch, Map::kBitFieldOffset),
              Immediate(Map::Bits1::IsConstructorBit::kMask));
    __ j(not_zero, &new_target_constructor, Label::kNear);
    __ bind(&new_target_not_constructor);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ Push(edx);
      __ movd(esi, xmm0);  // Restore the context.
      __ CallRuntime(Runtime::kThrowNotConstructor);
    }
    __ bind(&new_target_constructor);
  }

  __ movd(xmm1, edx);  // Preserve new.target (in case of [[Construct]]).

#ifdef V8_NO_ARGUMENTS_ADAPTOR
  // TODO(victorgomes): Remove this copy when all the arguments adaptor frame
  // code is erased.
  __ mov(scratch, ebp);
  __ mov(edx, Operand(ebp, StandardFrameConstants::kArgCOffset));
#else
  // Check if we have an arguments adaptor frame below the function frame.
  Label arguments_adaptor, arguments_done;
  __ mov(scratch, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(scratch, CommonFrameConstants::kContextOrFrameTypeOffset),
         Immediate(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &arguments_adaptor, Label::kNear);
  {
    __ mov(edx, Operand(ebp, StandardFrameConstants::kFunctionOffset));
    __ mov(edx, FieldOperand(edx, JSFunction::kSharedFunctionInfoOffset));
    __ movzx_w(edx, FieldOperand(
                        edx, SharedFunctionInfo::kFormalParameterCountOffset));
    __ mov(scratch, ebp);
  }
  __ jmp(&arguments_done, Label::kNear);
  __ bind(&arguments_adaptor);
  {
    // Just load the length from the ArgumentsAdaptorFrame.
    __ mov(edx,
           Operand(scratch, ArgumentsAdaptorFrameConstants::kLengthOffset));
    __ SmiUntag(edx);
  }
  __ bind(&arguments_done);
#endif

  Label stack_done, stack_overflow;
  __ sub(edx, ecx);
  __ j(less_equal, &stack_done);
  {
    // ----------- S t a t e -------------
    //  -- eax : the number of arguments already in the stack (not including
    //           the receiver)
    //  -- ecx : start index (to support rest parameters)
    //  -- edx : number of arguments to copy, i.e. arguments count - start index
    //  -- edi : the target to call (can be any Object)
    //  -- esi : pointer to the caller stack frame
    //  -- xmm0 : context for the Call / Construct builtin
    //  -- xmm1 : the new target (for [[Construct]] calls)
    // -----------------------------------
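
    // E.g. (illustrative) forwarding rest parameters with five caller
    // arguments and a start index of 2 arrives here with edx == 3: the three
    // trailing caller arguments are copied below.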

    // Forward the arguments from the caller frame.
    __ movd(xmm2, edi);  // Preserve the target to call.
    __ StackOverflowCheck(edx, edi, &stack_overflow);
    __ movd(xmm3, ebx);  // Preserve root register.

    Register scratch = ebx;

    // Point to the first argument to copy (skipping receiver).
    __ lea(ecx, Operand(ecx, times_system_pointer_size,
                        CommonFrameConstants::kFixedFrameSizeAboveFp +
                            kSystemPointerSize));
    __ add(esi, ecx);

    // Move the arguments already in the stack,
    // including the receiver and the return address.
    {
      Label copy, check;
      Register src = ecx, current = edi;
      // Update stack pointer.
      __ mov(src, esp);
      __ lea(scratch, Operand(edx, times_system_pointer_size, 0));
      __ AllocateStackSpace(scratch);
      // Include return address and receiver.
      __ add(eax, Immediate(2));
      __ Set(current, 0);
      __ jmp(&check);
      // Loop.
      __ bind(&copy);
      __ mov(scratch, Operand(src, current, times_system_pointer_size, 0));
      __ mov(Operand(esp, current, times_system_pointer_size, 0), scratch);
      __ inc(current);
      __ bind(&check);
      __ cmp(current, eax);
      __ j(less, &copy);
      __ lea(ecx, Operand(esp, eax, times_system_pointer_size, 0));
    }

    // Update total number of arguments.
    __ sub(eax, Immediate(2));
    __ add(eax, edx);

    // Copy the additional caller arguments onto the stack.
    // TODO(victorgomes): Consider using forward order as potentially more cache
    // friendly.
    {
      Register src = esi, dest = ecx, num = edx;
      Label loop;
      __ bind(&loop);
      __ dec(num);
      __ mov(scratch, Operand(src, num, times_system_pointer_size, 0));
      __ mov(Operand(dest, num, times_system_pointer_size, 0), scratch);
      __ j(not_zero, &loop);
    }

    __ movd(ebx, xmm3);  // Restore root register.
    __ movd(edi, xmm2);  // Restore the target to call.
  }
  __ bind(&stack_done);

  __ movd(edx, xmm1);  // Restore new.target (in case of [[Construct]]).
  __ movd(esi, xmm0);  // Restore the context.

  // Tail-call to the {code} handler.
  __ Jump(code, RelocInfo::CODE_TARGET);

  __ bind(&stack_overflow);
  __ movd(edi, xmm2);  // Restore the target to call.
  __ movd(esi, xmm0);  // Restore the context.
  __ TailCallRuntime(Runtime::kThrowStackOverflow);
}

// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the function to call (checked to be a JSFunction)
  // -----------------------------------
  StackArgumentsAccessor args(eax);
  __ AssertFunction(edi);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ test(FieldOperand(edx, SharedFunctionInfo::kFlagsOffset),
          Immediate(SharedFunctionInfo::IsClassConstructorBit::kMask));
  __ j(not_zero, &class_constructor);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
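  // E.g. (illustrative): in sloppy mode a call like (0, fn)() arrives with an
  // undefined receiver, which is replaced by the global proxy below, while a
  // primitive receiver such as a Smi is boxed via ToObject.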
  Label done_convert;
  __ test(FieldOperand(edx, SharedFunctionInfo::kFlagsOffset),
          Immediate(SharedFunctionInfo::IsNativeBit::kMask |
                    SharedFunctionInfo::IsStrictBit::kMask));
  __ j(not_zero, &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- eax : the number of arguments (not including the receiver)
    //  -- edx : the shared function info.
    //  -- edi : the function to call (checked to be a JSFunction)
    //  -- esi : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(ecx);
    } else {
      Label convert_to_object, convert_receiver;
      __ mov(ecx, args.GetReceiverOperand());
      __ JumpIfSmi(ecx, &convert_to_object, Label::kNear);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CmpObjectType(ecx, FIRST_JS_RECEIVER_TYPE, ecx);  // Clobbers ecx.
      __ j(above_equal, &done_convert);
      // Reload the receiver (it was clobbered by CmpObjectType).
      __ mov(ecx, args.GetReceiverOperand());
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(ecx, RootIndex::kUndefinedValue, &convert_global_proxy,
                      Label::kNear);
        __ JumpIfNotRoot(ecx, RootIndex::kNullValue, &convert_to_object,
                         Label::kNear);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(ecx);
        }
        __ jmp(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(eax);
        __ Push(eax);
        __ Push(edi);
        __ mov(eax, ecx);
        __ Push(esi);
        __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
                RelocInfo::CODE_TARGET);
        __ Pop(esi);
        __ mov(ecx, eax);
        __ Pop(edi);
        __ Pop(eax);
        __ SmiUntag(eax);
      }
      __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ mov(args.GetReceiverOperand(), ecx);
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the shared function info.
  //  -- edi : the function to call (checked to be a JSFunction)
  //  -- esi : the function context.
  // -----------------------------------

  __ movzx_w(
      ecx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  __ InvokeFunctionCode(edi, no_reg, ecx, eax, JUMP_FUNCTION);
  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ push(edi);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}

namespace {

void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : new.target (only in case of [[Construct]])
  //  -- edi : target (checked to be a JSBoundFunction)
  // -----------------------------------
  __ movd(xmm0, edx);  // Spill edx.

  // Load [[BoundArguments]] into ecx and its length into edx.
  Label no_bound_arguments;
  __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
  __ mov(edx, FieldOperand(ecx, FixedArray::kLengthOffset));
  __ SmiUntag(edx);
  __ test(edx, edx);
  __ j(zero, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- eax  : the number of arguments (not including the receiver)
    //  -- xmm0 : new.target (only in case of [[Construct]])
    //  -- edi  : target (checked to be a JSBoundFunction)
    //  -- ecx  : the [[BoundArguments]] (implemented as FixedArray)
    //  -- edx  : the number of [[BoundArguments]]
    // -----------------------------------

    // Check the stack for overflow.
    {
      Label done, stack_overflow;
      __ StackOverflowCheck(edx, ecx, &stack_overflow);
      __ jmp(&done);
      __ bind(&stack_overflow);
      {
        FrameScope frame(masm, StackFrame::MANUAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
        __ int3();
      }
      __ bind(&done);
    }

    // Spill the context.
    __ movd(xmm3, esi);

    // Save the return address and the receiver into registers.
    __ pop(esi);
    __ movd(xmm1, esi);
    __ pop(esi);
    __ movd(xmm2, esi);

    // Push [[BoundArguments]] to the stack.
    {
      Label loop;
      __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundArgumentsOffset));
      __ mov(edx, FieldOperand(ecx, FixedArray::kLengthOffset));
      __ SmiUntag(edx);
      // Adjust the effective number of arguments: eax now contains the number
      // of arguments from the call site, not including the receiver, plus the
      // number of [[BoundArguments]].
      __ add(eax, edx);
      __ bind(&loop);
      __ dec(edx);
      __ mov(esi, FieldOperand(ecx, edx, times_tagged_size,
                               FixedArray::kHeaderSize));
      __ push(esi);
      __ j(greater, &loop);
    }
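
    // Illustrative order (not from the source): for bound = fn.bind(t, a, b)
    // the loop above pushes b first and a last, so the bound arguments end up
    // in source order (a, then b) between the receiver and the call-site
    // arguments.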

    // Restore the receiver and the return address.
    __ movd(esi, xmm2);
    __ push(esi);
    __ movd(esi, xmm1);
    __ push(esi);

    // Restore the context.
    __ movd(esi, xmm3);
  }

  __ bind(&no_bound_arguments);
  __ movd(edx, xmm0);  // Reload edx.
}

}  // namespace

// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(edi);

  // Patch the receiver to [[BoundThis]].
  StackArgumentsAccessor args(eax);
  __ mov(ecx, FieldOperand(edi, JSBoundFunction::kBoundThisOffset));
  __ mov(args.GetReceiverOperand(), ecx);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edi : the target to call (can be any Object).
  // -----------------------------------
  StackArgumentsAccessor args(eax);

  Label non_callable, non_function, non_smi, non_jsfunction,
      non_jsboundfunction;
  __ JumpIfSmi(edi, &non_callable);
  __ bind(&non_smi);
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &non_jsfunction);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
          RelocInfo::CODE_TARGET);

  __ bind(&non_jsfunction);
  __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
  __ j(not_equal, &non_jsboundfunction);
  __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
          RelocInfo::CODE_TARGET);

  // Check that the target is callable at all.
  __ bind(&non_jsboundfunction);
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(Map::Bits1::IsCallableBit::kMask));
  __ j(zero, &non_callable);

  // If the target is a proxy, call the CallProxy builtin.
  __ CmpInstanceType(ecx, JS_PROXY_TYPE);
  __ j(not_equal, &non_function);
  __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ mov(args.GetReceiverOperand(), edi);
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(edi, Context::CALL_AS_FUNCTION_DELEGATE_INDEX);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(edi);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}

// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the new target (checked to be a constructor)
  //  -- edi : the constructor to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertConstructor(edi);
  __ AssertFunction(edi);

  Label call_generic_stub;

  // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ test(FieldOperand(ecx, SharedFunctionInfo::kFlagsOffset),
          Immediate(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
  __ j(zero, &call_generic_stub, Label::kNear);

  // The calling convention for function-specific ConstructStubs requires
  // ecx to contain either an AllocationSite or undefined.
  __ LoadRoot(ecx, RootIndex::kUndefinedValue);
  __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
          RelocInfo::CODE_TARGET);

  __ bind(&call_generic_stub);
  // The calling convention for function-specific ConstructStubs requires
  // ecx to contain either an AllocationSite or undefined.
  __ LoadRoot(ecx, RootIndex::kUndefinedValue);
  __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the new target (checked to be a constructor)
  //  -- edi : the constructor to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertConstructor(edi);
  __ AssertBoundFunction(edi);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label done;
    __ cmp(edi, edx);
    __ j(not_equal, &done, Label::kNear);
    __ mov(edx, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&done);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ mov(edi, FieldOperand(edi, JSBoundFunction::kBoundTargetFunctionOffset));
  __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : the number of arguments (not including the receiver)
  //  -- edx : the new target (either the same as the constructor or
  //           the JSFunction on which new was invoked initially)
  //  -- edi : the constructor to call (can be any Object)
  // -----------------------------------
  StackArgumentsAccessor args(eax);

  // Check if target is a Smi.
  Label non_constructor, non_proxy, non_jsfunction, non_jsboundfunction;
  __ JumpIfSmi(edi, &non_constructor);

  // Check if target has a [[Construct]] internal method.
  __ mov(ecx, FieldOperand(edi, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
            Immediate(Map::Bits1::IsConstructorBit::kMask));
  __ j(zero, &non_constructor);

  // Dispatch based on instance type.
  __ CmpInstanceType(ecx, JS_FUNCTION_TYPE);
  __ j(not_equal, &non_jsfunction);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
          RelocInfo::CODE_TARGET);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ bind(&non_jsfunction);
  __ CmpInstanceType(ecx, JS_BOUND_FUNCTION_TYPE);
  __ j(not_equal, &non_jsboundfunction);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
          RelocInfo::CODE_TARGET);

  // Only dispatch to proxies after checking whether they are constructors.
  __ bind(&non_jsboundfunction);
  __ CmpInstanceType(ecx, JS_PROXY_TYPE);
  __ j(not_equal, &non_proxy);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
          RelocInfo::CODE_TARGET);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  __ bind(&non_proxy);
  {
    // Overwrite the original receiver with the (original) target.
    __ mov(args.GetReceiverOperand(), edi);
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(edi, Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
          RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : actual number of arguments
  //  -- ecx : expected number of arguments
  //  -- edx : new target (passed through to callee)
  //  -- edi : function (passed through to callee)
  // -----------------------------------

  const Register kExpectedNumberOfArgumentsRegister = ecx;

  Label invoke, dont_adapt_arguments, stack_overflow, enough, too_few;
  __ cmp(kExpectedNumberOfArgumentsRegister, kDontAdaptArgumentsSentinel);
  __ j(equal, &dont_adapt_arguments);
  __ cmp(eax, kExpectedNumberOfArgumentsRegister);
  __ j(less, &too_few);

  {  // Enough parameters: Actual >= expected.
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    // edi is used as a scratch register. It should be restored from the frame
    // when needed.
    __ StackOverflowCheck(kExpectedNumberOfArgumentsRegister, edi,
                          &stack_overflow);

    // Copy receiver and all expected arguments.
    const int offset = StandardFrameConstants::kCallerSPOffset;
    __ lea(edi, Operand(ebp, ecx, times_system_pointer_size, offset));
    __ mov(eax, -1);  // account for receiver

    Label copy;
    __ bind(&copy);
    __ inc(eax);
    __ push(Operand(edi, 0));
    __ sub(edi, Immediate(kSystemPointerSize));
    __ cmp(eax, kExpectedNumberOfArgumentsRegister);
    __ j(less, &copy);
    // eax now contains the expected number of arguments.
    __ jmp(&invoke);
  }
2592 
2593   {  // Too few parameters: Actual < expected.
2594     __ bind(&too_few);
2595     EnterArgumentsAdaptorFrame(masm);
2596     // edi is used as a scratch register. It should be restored from the frame
2597     // when needed.
2598     __ StackOverflowCheck(kExpectedNumberOfArgumentsRegister, edi,
2599                           &stack_overflow);
2600 
2601     // Remember expected arguments in xmm0.
2602     __ movd(xmm0, kExpectedNumberOfArgumentsRegister);
2603 
2604     // Remember new target.
2605     __ movd(xmm1, edx);
2606 
2607     // Fill remaining expected arguments with undefined values.
2608     Label fill;
2609     __ mov(edx, ecx);
2610     __ sub(edx, eax);
2611     __ bind(&fill);
2612     __ Push(Immediate(masm->isolate()->factory()->undefined_value()));
2613     __ dec(edx);
2614     __ j(greater, &fill);
2615 
2616     // Copy receiver and all actual arguments.
2617     const int offset = StandardFrameConstants::kCallerSPOffset;
2618     __ lea(edi, Operand(ebp, eax, times_system_pointer_size, offset));
2619     __ mov(edx, Immediate(-1));
2620 
2621     Label copy;
2622     __ bind(&copy);
2623     __ inc(edx);
2624     __ push(Operand(edi, 0));
2625     __ sub(edi, Immediate(kSystemPointerSize));
2626     __ cmp(edx, eax);
2627     __ j(less, &copy);
2628 
2629     // Restore new.target
2630     __ movd(edx, xmm1);
2631 
2632     // Restore expected arguments.
2633     __ movd(eax, xmm0);
2634   }
2635 
2636   // Call the entry point.
2637   __ bind(&invoke);
2638   // Restore function pointer.
2639   __ mov(edi, Operand(ebp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
2640   // eax : expected number of arguments
2641   // edx : new target (passed through to callee)
2642   // edi : function (passed through to callee)
2643   static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
2644   __ mov(ecx, FieldOperand(edi, JSFunction::kCodeOffset));
2645   __ CallCodeObject(ecx);
2646 
2647   // Store offset of return address for deoptimizer.
2648   masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2649 
2650   // Leave frame and return.
2651   LeaveArgumentsAdaptorFrame(masm);
2652   __ ret(0);
2653 
2654   // -------------------------------------------
2655   // Dont adapt arguments.
2656   // -------------------------------------------
2657   __ bind(&dont_adapt_arguments);
2658   static_assert(kJavaScriptCallCodeStartRegister == ecx, "ABI mismatch");
2659   __ mov(ecx, FieldOperand(edi, JSFunction::kCodeOffset));
2660   __ JumpCodeObject(ecx);
2661 
2662   __ bind(&stack_overflow);
2663   {
2664     FrameScope frame(masm, StackFrame::MANUAL);
2665     __ CallRuntime(Runtime::kThrowStackOverflow);
2666     __ int3();
2667   }
2668 }
2669 
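// On-stack replacement: invoked when a function running in the interpreter
// becomes hot. Compiles an optimized version via the runtime and, if that
// succeeds, redirects the return address to the OSR entry point of the
// optimized code.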
void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  Label skip;
  // If the code object is null, just return to the caller.
  __ cmp(eax, Immediate(0));
  __ j(not_equal, &skip, Label::kNear);
  __ ret(0);

  __ bind(&skip);

  // Drop the handler frame that is sitting on top of the actual
  // JavaScript frame. This is the case when OSR is triggered from bytecode.
  __ leave();

  // Load deoptimization data from the code object.
  __ mov(ecx, Operand(eax, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  __ mov(ecx, Operand(ecx, FixedArray::OffsetOfElementAt(
                               DeoptimizationData::kOsrPcOffsetIndex) -
                               kHeapObjectTag));
  __ SmiUntag(ecx);

  // Compute the target address = code_obj + header_size + osr_offset.
  __ lea(eax, Operand(eax, ecx, times_1, Code::kHeaderSize - kHeapObjectTag));

  // Overwrite the return address on the stack.
  __ mov(Operand(esp, 0), eax);

  // And "return" to the OSR entry point of the function.
  __ ret(0);
}

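// Lazy compilation stub for WebAssembly: saves all wasm parameter registers,
// calls into the runtime to compile the function, then tail-calls the freshly
// compiled code with the original parameters restored.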
void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  // The function index was put in edi by the jump table trampoline.
  // Convert to Smi for the runtime call.
  __ SmiTag(kWasmCompileLazyFuncIndexRegister);
  {
    HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
    FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);

    // Save all parameter registers (see wasm-linkage.cc). They might be
    // overwritten in the runtime call below. We don't have any callee-saved
    // registers in wasm, so no need to store anything else.
    static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedGpParamRegs ==
                      arraysize(wasm::kGpParamRegisters),
                  "frame size mismatch");
    for (Register reg : wasm::kGpParamRegisters) {
      __ Push(reg);
    }
    static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedFpParamRegs ==
                      arraysize(wasm::kFpParamRegisters),
                  "frame size mismatch");
    __ AllocateStackSpace(kSimd128Size * arraysize(wasm::kFpParamRegisters));
    int offset = 0;
    for (DoubleRegister reg : wasm::kFpParamRegisters) {
      __ movdqu(Operand(esp, offset), reg);
      offset += kSimd128Size;
    }

    // Push the Wasm instance as an explicit argument to WasmCompileLazy.
    __ Push(kWasmInstanceRegister);
    // Push the function index as second argument.
    __ Push(kWasmCompileLazyFuncIndexRegister);
    // Initialize the JavaScript context with 0. CEntry will use it to
    // set the current context on the isolate.
    __ Move(kContextRegister, Smi::zero());
    {
      // At this point, ebx has been spilled to the stack but is not yet
      // overwritten with another value. We can still use it as kRootRegister.
      __ CallRuntime(Runtime::kWasmCompileLazy, 2);
    }
    // The entrypoint address is the return value.
    __ mov(edi, kReturnRegister0);

    // Restore registers.
    for (DoubleRegister reg : base::Reversed(wasm::kFpParamRegisters)) {
      offset -= kSimd128Size;
      __ movdqu(reg, Operand(esp, offset));
    }
    DCHECK_EQ(0, offset);
    __ add(esp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
    for (Register reg : base::Reversed(wasm::kGpParamRegisters)) {
      __ Pop(reg);
    }
  }
  // Finally, jump to the entrypoint.
  __ jmp(edi);
}

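// Breakpoint support for WebAssembly: spills every register a wasm frame
// might hold live values in, calls Runtime::kWasmDebugBreak so the debugger
// can inspect the frame, then restores the registers and resumes.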
void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
  HardAbortScope hard_abort(masm);  // Avoid calls to Abort.
  {
    FrameScope scope(masm, StackFrame::WASM_DEBUG_BREAK);

    // Save all parameter registers. They might hold live values; we restore
    // them after the runtime call.
    for (int reg_code : base::bits::IterateBitsBackwards(
             WasmDebugBreakFrameConstants::kPushedGpRegs)) {
      __ Push(Register::from_code(reg_code));
    }

    constexpr int kFpStackSize =
        kSimd128Size * WasmDebugBreakFrameConstants::kNumPushedFpRegisters;
    __ AllocateStackSpace(kFpStackSize);
    int offset = kFpStackSize;
    for (int reg_code : base::bits::IterateBitsBackwards(
             WasmDebugBreakFrameConstants::kPushedFpRegs)) {
      offset -= kSimd128Size;
      __ movdqu(Operand(esp, offset), DoubleRegister::from_code(reg_code));
    }

    // Initialize the JavaScript context with 0. CEntry will use it to
    // set the current context on the isolate.
    __ Move(kContextRegister, Smi::zero());
    __ CallRuntime(Runtime::kWasmDebugBreak, 0);

    // Restore registers.
    for (int reg_code :
         base::bits::IterateBits(WasmDebugBreakFrameConstants::kPushedFpRegs)) {
      __ movdqu(DoubleRegister::from_code(reg_code), Operand(esp, offset));
      offset += kSimd128Size;
    }
    __ add(esp, Immediate(kFpStackSize));
    for (int reg_code :
         base::bits::IterateBits(WasmDebugBreakFrameConstants::kPushedGpRegs)) {
      __ Pop(Register::from_code(reg_code));
    }
  }

  __ ret(0);
}

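// The C entry stub: transitions from JavaScript to C++, calls the target C
// function (a runtime function or API callback), and dispatches to the
// exception handler path if the call returns the exception sentinel.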
void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
                               SaveFPRegsMode save_doubles, ArgvMode argv_mode,
                               bool builtin_exit_frame) {
  // eax: number of arguments including receiver
  // edx: pointer to C function
  // ebp: frame pointer  (restored after C call)
  // esp: stack pointer  (restored after C call)
  // esi: current context (C callee-saved)
  // edi: JS function of the caller (C callee-saved)
  //
  // If argv_mode == kArgvInRegister:
  // ecx: pointer to the first argument

  STATIC_ASSERT(eax == kRuntimeCallArgCountRegister);
  STATIC_ASSERT(ecx == kRuntimeCallArgvRegister);
  STATIC_ASSERT(edx == kRuntimeCallFunctionRegister);
  STATIC_ASSERT(esi == kContextRegister);
  STATIC_ASSERT(edi == kJSFunctionRegister);

  DCHECK(!AreAliased(kRuntimeCallArgCountRegister, kRuntimeCallArgvRegister,
                     kRuntimeCallFunctionRegister, kContextRegister,
                     kJSFunctionRegister, kRootRegister));

  // Reserve space on the stack for the three arguments passed to the call. If
  // the result size is greater than can be returned in registers, also reserve
  // space for the hidden argument for the result location, and space for the
  // result itself.
  int arg_stack_space = 3;

  // Enter the exit frame that transitions from JavaScript to C++.
  if (argv_mode == kArgvInRegister) {
    DCHECK(save_doubles == kDontSaveFPRegs);
    DCHECK(!builtin_exit_frame);
    __ EnterApiExitFrame(arg_stack_space, edi);

    // Move argc and argv into the correct registers.
    __ mov(esi, ecx);
    __ mov(edi, eax);
  } else {
    __ EnterExitFrame(
        arg_stack_space, save_doubles == kSaveFPRegs,
        builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
  }

  // edx: pointer to C function
  // ebp: frame pointer  (restored after C call)
  // esp: stack pointer  (restored after C call)
  // edi: number of arguments including receiver  (C callee-saved)
  // esi: pointer to the first argument (C callee-saved)

  // Result returned in eax, or eax+edx if result size is 2.

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }
  // Call C function.
  __ mov(Operand(esp, 0 * kSystemPointerSize), edi);  // argc.
  __ mov(Operand(esp, 1 * kSystemPointerSize), esi);  // argv.
  __ Move(ecx, Immediate(ExternalReference::isolate_address(masm->isolate())));
  __ mov(Operand(esp, 2 * kSystemPointerSize), ecx);
  __ call(kRuntimeCallFunctionRegister);

  // Result is in eax or edx:eax - do not destroy these registers!

  // Check result for exception sentinel.
  Label exception_returned;
  __ CompareRoot(eax, RootIndex::kException);
  __ j(equal, &exception_returned);

  // Check that there is no pending exception, otherwise we
  // should have returned the exception sentinel.
  if (FLAG_debug_code) {
    __ push(edx);
    __ LoadRoot(edx, RootIndex::kTheHoleValue);
    Label okay;
    ExternalReference pending_exception_address = ExternalReference::Create(
        IsolateAddressId::kPendingExceptionAddress, masm->isolate());
    __ cmp(edx, __ ExternalReferenceAsOperand(pending_exception_address, ecx));
    // Cannot use Check here, as it attempts to generate a call into the
    // runtime.
    __ j(equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
    __ pop(edx);
  }

  // Exit the JavaScript to C++ exit frame.
  __ LeaveExitFrame(save_doubles == kSaveFPRegs, argv_mode == kArgvOnStack);
  __ ret(0);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
  ExternalReference pending_handler_entrypoint_address =
      ExternalReference::Create(
          IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
  ExternalReference pending_handler_fp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
  ExternalReference pending_handler_sp_address = ExternalReference::Create(
      IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());

  // Ask the runtime for help to determine the handler. This will set eax to
  // contain the current pending exception; don't clobber it.
  ExternalReference find_handler =
      ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(3, eax);
    __ mov(Operand(esp, 0 * kSystemPointerSize), Immediate(0));  // argc.
    __ mov(Operand(esp, 1 * kSystemPointerSize), Immediate(0));  // argv.
    __ Move(esi,
            Immediate(ExternalReference::isolate_address(masm->isolate())));
    __ mov(Operand(esp, 2 * kSystemPointerSize), esi);
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ mov(esp, __ ExternalReferenceAsOperand(pending_handler_sp_address, esi));
  __ mov(ebp, __ ExternalReferenceAsOperand(pending_handler_fp_address, esi));
  __ mov(esi,
         __ ExternalReferenceAsOperand(pending_handler_context_address, esi));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (esi == 0) for non-JS frames.
  Label skip;
  __ test(esi, esi);
  __ j(zero, &skip, Label::kNear);
  __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  __ bind(&skip);

  // Compute the handler entry address and jump to it.
  __ mov(edi, __ ExternalReferenceAsOperand(pending_handler_entrypoint_address,
                                            edi));
  __ jmp(edi);
}

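// Truncates a double (passed on the stack) to a 32-bit integer, keeping the
// low 32 bits of the truncated value, as required for ToInt32 semantics.
// Uses the SSE3 fisttp instruction when available; otherwise the result is
// extracted from the mantissa bits by shifting.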
void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
  Label check_negative, process_64_bits, done;

  // Account for return address and saved regs.
  const int kArgumentOffset = 4 * kSystemPointerSize;

  MemOperand mantissa_operand(MemOperand(esp, kArgumentOffset));
  MemOperand exponent_operand(
      MemOperand(esp, kArgumentOffset + kDoubleSize / 2));

  // The result is returned on the stack.
  MemOperand return_operand = mantissa_operand;

  Register scratch1 = ebx;

  // Since we must use ecx for shifts below, use some other register (eax)
  // to calculate the result.
  Register result_reg = eax;
  // Save ecx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead
  // for the result.
  Register save_reg = eax;
  __ push(ecx);
  __ push(scratch1);
  __ push(save_reg);

  __ mov(scratch1, mantissa_operand);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    // Load x87 register with heap number.
    __ fld_d(mantissa_operand);
  }
  __ mov(ecx, exponent_operand);

  __ and_(ecx, HeapNumber::kExponentMask);
  __ shr(ecx, HeapNumber::kExponentShift);
  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32 bits of mantissa.
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  if (CpuFeatures::IsSupported(SSE3)) {
    __ fstp(0);
  }
  __ sub(ecx, Immediate(delta));
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ j(above, &done);
  __ shl_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    // Reserve space for 64 bit answer.
    __ AllocateStackSpace(kDoubleSize);  // Nolint.
    // Do conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(result_reg, Operand(esp, 0));  // Load low word of answer as result.
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done);
  } else {
    // Result must be extracted from shifted 32-bit mantissa.
    __ sub(ecx, Immediate(delta));
    __ neg(ecx);
    __ mov(result_reg, exponent_operand);
    __ and_(result_reg,
            Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
    __ add(result_reg,
           Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
    __ shrd_cl(scratch1, result_reg);
    __ shr_cl(result_reg);
    __ test(ecx, Immediate(32));
    __ cmov(not_equal, scratch1, result_reg);
  }

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  __ neg(result_reg);
  __ cmp(exponent_operand, Immediate(0));
  __ cmov(greater, result_reg, scratch1);

  // Restore registers.
  __ bind(&done);
  __ mov(return_operand, result_reg);
  __ pop(save_reg);
  __ pop(scratch1);
  __ pop(ecx);
  __ ret(0);
}

void Builtins::Generate_GenericJSToWasmWrapper(MacroAssembler* masm) {
  // TODO(v8:10701): Implement for this platform.
  __ Trap();
}

namespace {

// Generates an Operand for saving parameters after PrepareCallApiFunction.
Operand ApiParameterOperand(int index) {
  return Operand(esp, index * kSystemPointerSize);
}

// Prepares the stack to put arguments (aligns and so on). Reserves space for
// the return value if needed (assumes the return value is a handle).
// Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
// etc. Saves the context (esi). If space was reserved for the return value,
// stores the pointer to the reserved slot into esi.
void PrepareCallApiFunction(MacroAssembler* masm, int argc, Register scratch) {
  __ EnterApiExitFrame(argc, scratch);
  if (__ emit_debug_code()) {
    __ mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}

// Calls an API function. Allocates a HandleScope, extracts the returned value
// from the handle, and propagates exceptions. Clobbers esi, edi and
// caller-save registers. Restores the context. On return, removes
// stack_space * kSystemPointerSize (GCed).
void CallApiFunctionAndReturn(MacroAssembler* masm, Register function_address,
                              ExternalReference thunk_ref,
                              Operand thunk_last_arg, int stack_space,
                              Operand* stack_space_operand,
                              Operand return_value_operand) {
  Isolate* isolate = masm->isolate();

  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate);
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address(isolate);
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address(isolate);

  DCHECK(edx == function_address);
  // Allocate HandleScope in callee-save registers.
  __ add(__ ExternalReferenceAsOperand(level_address, esi), Immediate(1));
  __ mov(esi, __ ExternalReferenceAsOperand(next_address, esi));
  __ mov(edi, __ ExternalReferenceAsOperand(limit_address, edi));

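  // If the CPU profiler or runtime call stats are active, route the call
  // through a thunk that records the callback address; otherwise call the
  // API function directly.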
  Label profiler_enabled, end_profiler_check;
  __ Move(eax, Immediate(ExternalReference::is_profiling_address(isolate)));
  __ cmpb(Operand(eax, 0), Immediate(0));
  __ j(not_zero, &profiler_enabled);
  __ Move(eax, Immediate(ExternalReference::address_of_runtime_stats_flag()));
  __ cmp(Operand(eax, 0), Immediate(0));
  __ j(not_zero, &profiler_enabled);
  {
    // Call the api function directly.
    __ mov(eax, function_address);
    __ jmp(&end_profiler_check);
  }
  __ bind(&profiler_enabled);
  {
    // Additional parameter is the address of the actual getter function.
    __ mov(thunk_last_arg, function_address);
    __ Move(eax, Immediate(thunk_ref));
  }
  __ bind(&end_profiler_check);

  // Call the api function.
  __ call(eax);

  Label prologue;
  // Load the value from ReturnValue.
  __ mov(eax, return_value_operand);

  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  __ bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  __ mov(__ ExternalReferenceAsOperand(next_address, ecx), esi);
  __ sub(__ ExternalReferenceAsOperand(level_address, ecx), Immediate(1));
  __ Assert(above_equal, AbortReason::kInvalidHandleScopeLevel);
  __ cmp(edi, __ ExternalReferenceAsOperand(limit_address, ecx));
  __ j(not_equal, &delete_allocated_handles);

  // Leave the API exit frame.
  __ bind(&leave_exit_frame);
  if (stack_space_operand != nullptr) {
    DCHECK_EQ(stack_space, 0);
    __ mov(edx, *stack_space_operand);
  }
  __ LeaveApiExitFrame();

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate);
  __ mov(ecx, __ ExternalReferenceAsOperand(scheduled_exception_address, ecx));
  __ CompareRoot(ecx, RootIndex::kTheHoleValue);
  __ j(not_equal, &promote_scheduled_exception);

#if DEBUG
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = eax;
  Register map = ecx;

  __ JumpIfSmi(return_value, &ok, Label::kNear);
  __ mov(map, FieldOperand(return_value, HeapObject::kMapOffset));

  __ CmpInstanceType(map, LAST_NAME_TYPE);
  __ j(below_equal, &ok, Label::kNear);

  __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
  __ j(above_equal, &ok, Label::kNear);

  __ CompareRoot(map, RootIndex::kHeapNumberMap);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(map, RootIndex::kBigIntMap);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, RootIndex::kUndefinedValue);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, RootIndex::kTrueValue);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, RootIndex::kFalseValue);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, RootIndex::kNullValue);
  __ j(equal, &ok, Label::kNear);

  __ Abort(AbortReason::kAPICallReturnedInvalidObject);

  __ bind(&ok);
#endif

  if (stack_space_operand == nullptr) {
    DCHECK_NE(stack_space, 0);
    __ ret(stack_space * kSystemPointerSize);
  } else {
    DCHECK_EQ(0, stack_space);
    __ pop(ecx);
    __ add(esp, edx);
    __ jmp(ecx);
  }

  // Re-throw by promoting a scheduled exception.
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException);

  // HandleScope limit has changed. Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions();
  __ bind(&delete_allocated_handles);
  __ mov(__ ExternalReferenceAsOperand(limit_address, ecx), edi);
  __ mov(edi, eax);
  __ Move(eax, Immediate(ExternalReference::isolate_address(isolate)));
  __ mov(Operand(esp, 0), eax);
  __ Move(eax, Immediate(delete_extensions));
  __ call(eax);
  __ mov(eax, edi);
  __ jmp(&leave_exit_frame);
}

}  // namespace

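// Builds the v8::FunctionCallbackInfo structure expected by API callbacks:
// the implicit arguments (holder, isolate, return value slots, call data,
// new.target) go on the GCed stack, while the FunctionCallbackInfo object
// itself lives in the non-GCed API exit frame.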
void Builtins::Generate_CallApiCallback(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- esi                 : context
  //  -- edx                 : api function address
  //  -- ecx                 : arguments count (not including the receiver)
  //  -- eax                 : call data
  //  -- edi                 : holder
  //  -- esp[0]              : return address
  //  -- esp[4]              : argument 0 (receiver)
  //  -- esp[8]              : argument 1
  //  -- ...
  //  -- esp[argc * 4]       : argument (argc - 1)
  //  -- esp[(argc + 1) * 4] : argument argc
  // -----------------------------------

  Register api_function_address = edx;
  Register argc = ecx;
  Register call_data = eax;
  Register holder = edi;

  // Park argc in xmm0.
  __ movd(xmm0, argc);

  DCHECK(!AreAliased(api_function_address, argc, holder));

  using FCA = FunctionCallbackArguments;

  STATIC_ASSERT(FCA::kArgsLength == 6);
  STATIC_ASSERT(FCA::kNewTargetIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);

  // Set up FunctionCallbackInfo's implicit_args on the stack as follows:
  //
  // Current state:
  //   esp[0]: return address
  //
  // Target state:
  //   esp[0 * kSystemPointerSize]: return address
  //   esp[1 * kSystemPointerSize]: kHolder
  //   esp[2 * kSystemPointerSize]: kIsolate
  //   esp[3 * kSystemPointerSize]: undefined (kReturnValueDefaultValue)
  //   esp[4 * kSystemPointerSize]: undefined (kReturnValue)
  //   esp[5 * kSystemPointerSize]: kData
  //   esp[6 * kSystemPointerSize]: undefined (kNewTarget)

  __ PopReturnAddressTo(ecx);
  __ PushRoot(RootIndex::kUndefinedValue);
  __ Push(call_data);
  __ PushRoot(RootIndex::kUndefinedValue);
  __ PushRoot(RootIndex::kUndefinedValue);
  __ Push(Immediate(ExternalReference::isolate_address(masm->isolate())));
  __ Push(holder);
  __ PushReturnAddressFrom(ecx);

  // Reload argc from xmm0.
  __ movd(argc, xmm0);

  // Keep a pointer to kHolder (= implicit_args) in a scratch register.
  // We use it below to set up the FunctionCallbackInfo object.
  Register scratch = eax;
  __ lea(scratch, Operand(esp, 1 * kSystemPointerSize));

  // The API function takes a reference to v8::Arguments. If the CPU profiler
  // is enabled, a wrapper function will be called and we need to pass
  // the address of the callback as an additional parameter. Always allocate
  // space for it.
  static constexpr int kApiArgc = 1 + 1;

  // Allocate the v8::Arguments structure in the arguments' space, since
  // it's not controlled by GC.
  static constexpr int kApiStackSpace = 4;

  PrepareCallApiFunction(masm, kApiArgc + kApiStackSpace, edi);

  // FunctionCallbackInfo::implicit_args_ (points at kHolder as set up above).
  __ mov(ApiParameterOperand(kApiArgc + 0), scratch);

  // FunctionCallbackInfo::values_ (points at the first varargs argument passed
  // on the stack).
  __ lea(scratch,
         Operand(scratch, (FCA::kArgsLength + 1) * kSystemPointerSize));
  __ mov(ApiParameterOperand(kApiArgc + 1), scratch);

  // FunctionCallbackInfo::length_.
  __ mov(ApiParameterOperand(kApiArgc + 2), argc);

  // We also store the number of bytes to drop from the stack after returning
  // from the API function here.
  __ lea(scratch,
         Operand(argc, times_system_pointer_size,
                 (FCA::kArgsLength + 1 /* receiver */) * kSystemPointerSize));
  __ mov(ApiParameterOperand(kApiArgc + 3), scratch);

  // v8::InvocationCallback's argument.
  __ lea(scratch, ApiParameterOperand(kApiArgc + 0));
  __ mov(ApiParameterOperand(0), scratch);

  ExternalReference thunk_ref = ExternalReference::invoke_function_callback();

  // There are two stack slots above the arguments we constructed on the stack:
  // the stored ebp (pushed by EnterApiExitFrame), and the return address.
  static constexpr int kStackSlotsAboveFCA = 2;
  Operand return_value_operand(
      ebp,
      (kStackSlotsAboveFCA + FCA::kReturnValueOffset) * kSystemPointerSize);

  static constexpr int kUseStackSpaceOperand = 0;
  Operand stack_space_operand = ApiParameterOperand(kApiArgc + 3);
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           ApiParameterOperand(1), kUseStackSpaceOperand,
                           &stack_space_operand, return_value_operand);
}

void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
  // Build v8::PropertyCallbackInfo::args_ array on the stack and push the
  // property name below the exit frame to make GC aware of them.
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);

  Register receiver = ApiGetterDescriptor::ReceiverRegister();
  Register holder = ApiGetterDescriptor::HolderRegister();
  Register callback = ApiGetterDescriptor::CallbackRegister();
  Register scratch = edi;
  DCHECK(!AreAliased(receiver, holder, callback, scratch));

  __ pop(scratch);  // Pop return address to extend the frame.
  __ push(receiver);
  __ push(FieldOperand(callback, AccessorInfo::kDataOffset));
  __ PushRoot(RootIndex::kUndefinedValue);  // ReturnValue
  // ReturnValue default value.
  __ PushRoot(RootIndex::kUndefinedValue);
  __ Push(Immediate(ExternalReference::isolate_address(masm->isolate())));
  __ push(holder);
  __ push(Immediate(Smi::zero()));  // should_throw_on_error -> false
  __ push(FieldOperand(callback, AccessorInfo::kNameOffset));
  __ push(scratch);  // Restore return address.

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate v8::PropertyCallbackInfo object, arguments for the callback and
  // space for the optional callback address parameter (in case the CPU
  // profiler is active) in non-GCed stack space.
  const int kApiArgc = 3 + 1;

  PrepareCallApiFunction(masm, kApiArgc, scratch);

  // Load address of v8::PropertyAccessorInfo::args_ array. The value in ebp
  // here corresponds to esp + kSystemPointerSize before PrepareCallApiFunction.
  __ lea(scratch, Operand(ebp, kSystemPointerSize + 2 * kSystemPointerSize));
  // Create v8::PropertyCallbackInfo object on the stack and initialize
  // its args_ field.
  Operand info_object = ApiParameterOperand(3);
  __ mov(info_object, scratch);

  // Name as handle.
  __ sub(scratch, Immediate(kSystemPointerSize));
  __ mov(ApiParameterOperand(0), scratch);
  // Arguments pointer.
  __ lea(scratch, info_object);
  __ mov(ApiParameterOperand(1), scratch);
  // Reserve space for optional callback address parameter.
  Operand thunk_last_arg = ApiParameterOperand(2);

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback();

  __ mov(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
  Register function_address = edx;
  __ mov(function_address,
         FieldOperand(scratch, Foreign::kForeignAddressOffset));
  // +3 is to skip prolog, return address and name handle.
  Operand return_value_operand(
      ebp,
      (PropertyCallbackArguments::kReturnValueOffset + 3) * kSystemPointerSize);
  Operand* const kUseStackSpaceConstant = nullptr;
  CallApiFunctionAndReturn(masm, function_address, thunk_ref, thunk_last_arg,
                           kStackUnwindSpace, kUseStackSpaceConstant,
                           return_value_operand);
}

void Builtins::Generate_DirectCEntry(MacroAssembler* masm) {
  __ int3();  // Unused on this architecture.
}

namespace {

enum Direction { FORWARD, BACKWARD };
enum Alignment { MOVE_ALIGNED, MOVE_UNALIGNED };

// Expects registers:
// esi - source, aligned if alignment == MOVE_ALIGNED
// edi - destination, always aligned
// ecx - count (copy size in bytes)
// edx - loop count (number of 64 byte chunks)
void MemMoveEmitMainLoop(MacroAssembler* masm, Label* move_last_15,
                         Direction direction, Alignment alignment) {
  Register src = esi;
  Register dst = edi;
  Register count = ecx;
  Register loop_count = edx;
  Label loop, move_last_31, move_last_63;
  __ cmp(loop_count, 0);
  __ j(equal, &move_last_63);
  __ bind(&loop);
  // Main loop. Copy in 64 byte chunks.
  if (direction == BACKWARD) __ sub(src, Immediate(0x40));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
  __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
  __ movdq(alignment == MOVE_ALIGNED, xmm2, Operand(src, 0x20));
  __ movdq(alignment == MOVE_ALIGNED, xmm3, Operand(src, 0x30));
  if (direction == FORWARD) __ add(src, Immediate(0x40));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x40));
  __ movdqa(Operand(dst, 0x00), xmm0);
  __ movdqa(Operand(dst, 0x10), xmm1);
  __ movdqa(Operand(dst, 0x20), xmm2);
  __ movdqa(Operand(dst, 0x30), xmm3);
  if (direction == FORWARD) __ add(dst, Immediate(0x40));
  __ dec(loop_count);
  __ j(not_zero, &loop);
  // At most 63 bytes left to copy.
  __ bind(&move_last_63);
  __ test(count, Immediate(0x20));
  __ j(zero, &move_last_31);
  if (direction == BACKWARD) __ sub(src, Immediate(0x20));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0x00));
  __ movdq(alignment == MOVE_ALIGNED, xmm1, Operand(src, 0x10));
  if (direction == FORWARD) __ add(src, Immediate(0x20));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x20));
  __ movdqa(Operand(dst, 0x00), xmm0);
  __ movdqa(Operand(dst, 0x10), xmm1);
  if (direction == FORWARD) __ add(dst, Immediate(0x20));
  // At most 31 bytes left to copy.
  __ bind(&move_last_31);
  __ test(count, Immediate(0x10));
  __ j(zero, move_last_15);
  if (direction == BACKWARD) __ sub(src, Immediate(0x10));
  __ movdq(alignment == MOVE_ALIGNED, xmm0, Operand(src, 0));
  if (direction == FORWARD) __ add(src, Immediate(0x10));
  if (direction == BACKWARD) __ sub(dst, Immediate(0x10));
  __ movdqa(Operand(dst, 0), xmm0);
  if (direction == FORWARD) __ add(dst, Immediate(0x10));
}

void MemMoveEmitPopAndReturn(MacroAssembler* masm) {
  __ pop(esi);
  __ pop(edi);
  __ ret(0);
}

}  // namespace

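// SSE2-based memmove: dispatches on copy size (0-8 bytes, 9-63 bytes, or
// 64+ bytes) and on the degree of overlap between source and destination,
// aligning the destination to 16 bytes before entering the main copy loop.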
void Builtins::Generate_MemMove(MacroAssembler* masm) {
  // Generated code is put into a fixed, unmovable buffer, and not into
  // the V8 heap. We can't, and don't, refer to any relocatable addresses
  // (e.g. the JavaScript nan-object).

  // 32-bit C declaration function calls pass arguments on stack.

  // Stack layout:
  // esp[12]: Third argument, size.
  // esp[8]: Second argument, source pointer.
  // esp[4]: First argument, destination pointer.
  // esp[0]: return address

  const int kDestinationOffset = 1 * kSystemPointerSize;
  const int kSourceOffset = 2 * kSystemPointerSize;
  const int kSizeOffset = 3 * kSystemPointerSize;

  // When copying up to this many bytes, use special "small" handlers.
  const size_t kSmallCopySize = 8;
  // When copying up to this many bytes, use special "medium" handlers.
  const size_t kMediumCopySize = 63;
  // When non-overlapping region of src and dst is less than this,
  // use a more careful implementation (slightly slower).
  const size_t kMinMoveDistance = 16;
  // Note that these values are dictated by the implementation below,
  // do not just change them and hope things will work!

  int stack_offset = 0;  // Update if we change the stack height.

  Label backward, backward_much_overlap;
  Label forward_much_overlap, small_size, medium_size, pop_and_return;
  __ push(edi);
  __ push(esi);
  stack_offset += 2 * kSystemPointerSize;
  Register dst = edi;
  Register src = esi;
  Register count = ecx;
  Register loop_count = edx;
  __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
  __ mov(src, Operand(esp, stack_offset + kSourceOffset));
  __ mov(count, Operand(esp, stack_offset + kSizeOffset));

  __ cmp(dst, src);
  __ j(equal, &pop_and_return);

  __ prefetch(Operand(src, 0), 1);
  __ cmp(count, kSmallCopySize);
  __ j(below_equal, &small_size);
  __ cmp(count, kMediumCopySize);
  __ j(below_equal, &medium_size);
  __ cmp(dst, src);
  __ j(above, &backward);

  {
    // |dst| is a lower address than |src|. Copy front-to-back.
    Label unaligned_source, move_last_15, skip_last_move;
    __ mov(eax, src);
    __ sub(eax, dst);
    __ cmp(eax, kMinMoveDistance);
    __ j(below, &forward_much_overlap);
    // Copy first 16 bytes.
    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(Operand(dst, 0), xmm0);
    // Determine distance to alignment: 16 - (dst & 0xF).
    __ mov(edx, dst);
    __ and_(edx, 0xF);
    __ neg(edx);
    __ add(edx, Immediate(16));
    __ add(dst, edx);
    __ add(src, edx);
    __ sub(count, edx);
    // dst is now aligned. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    // Check if src is also aligned.
    __ test(src, Immediate(0xF));
    __ j(not_zero, &unaligned_source);
    // Copy loop for aligned source and destination.
    MemMoveEmitMainLoop(masm, &move_last_15, FORWARD, MOVE_ALIGNED);
    // At most 15 bytes to copy. Copy 16 bytes at end of string.
    __ bind(&move_last_15);
    __ and_(count, 0xF);
    __ j(zero, &skip_last_move, Label::kNear);
    __ movdqu(xmm0, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm0);
    __ bind(&skip_last_move);
    MemMoveEmitPopAndReturn(masm);

    // Copy loop for unaligned source and aligned destination.
    __ bind(&unaligned_source);
    MemMoveEmitMainLoop(masm, &move_last_15, FORWARD, MOVE_UNALIGNED);
    __ jmp(&move_last_15);

    // Less than kMinMoveDistance offset between dst and src.
    Label loop_until_aligned, last_15_much_overlap;
    __ bind(&loop_until_aligned);
    __ mov_b(eax, Operand(src, 0));
    __ inc(src);
    __ mov_b(Operand(dst, 0), eax);
    __ inc(dst);
    __ dec(count);
    __ bind(&forward_much_overlap);  // Entry point into this block.
    __ test(dst, Immediate(0xF));
    __ j(not_zero, &loop_until_aligned);
    // dst is now aligned, src can't be. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    MemMoveEmitMainLoop(masm, &last_15_much_overlap, FORWARD, MOVE_UNALIGNED);
    __ bind(&last_15_much_overlap);
    __ and_(count, 0xF);
    __ j(zero, &pop_and_return);
    __ cmp(count, kSmallCopySize);
    __ j(below_equal, &small_size);
    __ jmp(&medium_size);
  }

  {
    // |dst| is a higher address than |src|. Copy backwards.
    Label unaligned_source, move_first_15, skip_last_move;
    __ bind(&backward);
    // |dst| and |src| always point to the end of what's left to copy.
    __ add(dst, count);
    __ add(src, count);
    __ mov(eax, dst);
    __ sub(eax, src);
    __ cmp(eax, kMinMoveDistance);
    __ j(below, &backward_much_overlap);
    // Copy last 16 bytes.
    __ movdqu(xmm0, Operand(src, -0x10));
    __ movdqu(Operand(dst, -0x10), xmm0);
    // Find distance to alignment: dst & 0xF.
    __ mov(edx, dst);
    __ and_(edx, 0xF);
    __ sub(dst, edx);
    __ sub(src, edx);
    __ sub(count, edx);
    // dst is now aligned. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    // Check if src is also aligned.
    __ test(src, Immediate(0xF));
    __ j(not_zero, &unaligned_source);
    // Copy loop for aligned source and destination.
    MemMoveEmitMainLoop(masm, &move_first_15, BACKWARD, MOVE_ALIGNED);
    // At most 15 bytes to copy. Copy 16 bytes at beginning of string.
    __ bind(&move_first_15);
    __ and_(count, 0xF);
    __ j(zero, &skip_last_move, Label::kNear);
    __ sub(src, count);
    __ sub(dst, count);
    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(Operand(dst, 0), xmm0);
    __ bind(&skip_last_move);
    MemMoveEmitPopAndReturn(masm);

    // Copy loop for unaligned source and aligned destination.
    __ bind(&unaligned_source);
    MemMoveEmitMainLoop(masm, &move_first_15, BACKWARD, MOVE_UNALIGNED);
    __ jmp(&move_first_15);

    // Less than kMinMoveDistance offset between dst and src.
    Label loop_until_aligned, first_15_much_overlap;
    __ bind(&loop_until_aligned);
    __ dec(src);
    __ dec(dst);
    __ mov_b(eax, Operand(src, 0));
    __ mov_b(Operand(dst, 0), eax);
    __ dec(count);
    __ bind(&backward_much_overlap);  // Entry point into this block.
    __ test(dst, Immediate(0xF));
    __ j(not_zero, &loop_until_aligned);
    // dst is now aligned, src can't be. Main copy loop.
    __ mov(loop_count, count);
    __ shr(loop_count, 6);
    MemMoveEmitMainLoop(masm, &first_15_much_overlap, BACKWARD, MOVE_UNALIGNED);
    __ bind(&first_15_much_overlap);
    __ and_(count, 0xF);
    __ j(zero, &pop_and_return);
    // Small/medium handlers expect dst/src to point to the beginning.
    __ sub(dst, count);
    __ sub(src, count);
    __ cmp(count, kSmallCopySize);
    __ j(below_equal, &small_size);
    __ jmp(&medium_size);
  }
  {
    // Special handlers for 9 <= copy_size < 64. No assumptions about
    // alignment or move distance, so all reads must be unaligned and
    // must happen before any writes.
    Label f9_16, f17_32, f33_48, f49_63;

    __ bind(&f9_16);
    __ movsd(xmm0, Operand(src, 0));
    __ movsd(xmm1, Operand(src, count, times_1, -8));
    __ movsd(Operand(dst, 0), xmm0);
    __ movsd(Operand(dst, count, times_1, -8), xmm1);
    MemMoveEmitPopAndReturn(masm);

    __ bind(&f17_32);
    __ movdqu(xmm0, Operand(src, 0));
    __ movdqu(xmm1, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, 0x00), xmm0);
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm1);
    MemMoveEmitPopAndReturn(masm);

    __ bind(&f33_48);
    __ movdqu(xmm0, Operand(src, 0x00));
    __ movdqu(xmm1, Operand(src, 0x10));
    __ movdqu(xmm2, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, 0x00), xmm0);
    __ movdqu(Operand(dst, 0x10), xmm1);
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm2);
    MemMoveEmitPopAndReturn(masm);

    __ bind(&f49_63);
    __ movdqu(xmm0, Operand(src, 0x00));
    __ movdqu(xmm1, Operand(src, 0x10));
    __ movdqu(xmm2, Operand(src, 0x20));
    __ movdqu(xmm3, Operand(src, count, times_1, -0x10));
    __ movdqu(Operand(dst, 0x00), xmm0);
    __ movdqu(Operand(dst, 0x10), xmm1);
    __ movdqu(Operand(dst, 0x20), xmm2);
    __ movdqu(Operand(dst, count, times_1, -0x10), xmm3);
    MemMoveEmitPopAndReturn(masm);

    __ bind(&medium_size);  // Entry point into this block.
    __ mov(eax, count);
    __ dec(eax);
    __ shr(eax, 4);
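    // eax now indexes the handler: (count - 1) >> 4 maps 9..16 -> 0,
    // 17..32 -> 1, 33..48 -> 2, and 49..63 -> 3.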
    if (FLAG_debug_code) {
      Label ok;
      __ cmp(eax, 3);
      __ j(below_equal, &ok);
      __ int3();
      __ bind(&ok);
    }

    // Dispatch to handlers.
    Label eax_is_2_or_3;

    __ cmp(eax, 1);
    __ j(greater, &eax_is_2_or_3);
    __ j(less, &f9_16);  // eax == 0.
    __ jmp(&f17_32);     // eax == 1.

    __ bind(&eax_is_2_or_3);
    __ cmp(eax, 3);
    __ j(less, &f33_48);  // eax == 2.
    __ jmp(&f49_63);      // eax == 3.
  }
  {
    // Specialized copiers for copy_size <= 8 bytes.
    Label f0, f1, f2, f3, f4, f5_8;
    __ bind(&f0);
    MemMoveEmitPopAndReturn(masm);

    __ bind(&f1);
    __ mov_b(eax, Operand(src, 0));
    __ mov_b(Operand(dst, 0), eax);
    MemMoveEmitPopAndReturn(masm);

    __ bind(&f2);
    __ mov_w(eax, Operand(src, 0));
    __ mov_w(Operand(dst, 0), eax);
    MemMoveEmitPopAndReturn(masm);

    __ bind(&f3);
    __ mov_w(eax, Operand(src, 0));
    __ mov_b(edx, Operand(src, 2));
    __ mov_w(Operand(dst, 0), eax);
    __ mov_b(Operand(dst, 2), edx);
    MemMoveEmitPopAndReturn(masm);

    __ bind(&f4);
    __ mov(eax, Operand(src, 0));
    __ mov(Operand(dst, 0), eax);
    MemMoveEmitPopAndReturn(masm);

    __ bind(&f5_8);
    __ mov(eax, Operand(src, 0));
    __ mov(edx, Operand(src, count, times_1, -4));
    __ mov(Operand(dst, 0), eax);
    __ mov(Operand(dst, count, times_1, -4), edx);
    MemMoveEmitPopAndReturn(masm);

    __ bind(&small_size);  // Entry point into this block.
    if (FLAG_debug_code) {
      Label ok;
      __ cmp(count, 8);
      __ j(below_equal, &ok);
      __ int3();
      __ bind(&ok);
    }

    // Dispatch to handlers.
    Label count_is_above_3, count_is_2_or_3;

    __ cmp(count, 3);
    __ j(greater, &count_is_above_3);

    __ cmp(count, 1);
    __ j(greater, &count_is_2_or_3);
    __ j(less, &f0);  // count == 0.
    __ jmp(&f1);      // count == 1.

    __ bind(&count_is_2_or_3);
    __ cmp(count, 3);
    __ j(less, &f2);  // count == 2.
    __ jmp(&f3);      // count == 3.

    __ bind(&count_is_above_3);
    __ cmp(count, 5);
    __ j(less, &f4);  // count == 4.
    __ jmp(&f5_8);    // count in [5, 8].
  }

  __ bind(&pop_and_return);
  MemMoveEmitPopAndReturn(masm);
}

namespace {

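// Entry point for deoptimization: saves all GP and allocatable XMM registers
// into a FrameDescription, asks the C++ Deoptimizer to translate the
// optimized frame into one or more unoptimized output frames, materializes
// those frames on the stack, and resumes at the continuation of the last one.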
void Generate_DeoptimizationEntry(MacroAssembler* masm,
                                  DeoptimizeKind deopt_kind) {
  Isolate* isolate = masm->isolate();

  // Save all general purpose registers before messing with them.
  const int kNumberOfRegisters = Register::kNumRegisters;

  const int kDoubleRegsSize = kDoubleSize * XMMRegister::kNumRegisters;
  __ AllocateStackSpace(kDoubleRegsSize);
  const RegisterConfiguration* config = RegisterConfiguration::Default();
  for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
    int code = config->GetAllocatableDoubleCode(i);
    XMMRegister xmm_reg = XMMRegister::from_code(code);
    int offset = code * kDoubleSize;
    __ movsd(Operand(esp, offset), xmm_reg);
  }

  __ pushad();

  ExternalReference c_entry_fp_address =
      ExternalReference::Create(IsolateAddressId::kCEntryFPAddress, isolate);
  __ mov(masm->ExternalReferenceAsOperand(c_entry_fp_address, esi), ebp);

  const int kSavedRegistersAreaSize =
      kNumberOfRegisters * kSystemPointerSize + kDoubleRegsSize;

  // Get the address of the location in the code object
  // and compute the fp-to-sp delta in register edx.
  __ mov(ecx, Operand(esp, kSavedRegistersAreaSize));
  __ lea(edx, Operand(esp, kSavedRegistersAreaSize + 1 * kSystemPointerSize));

  __ sub(edx, ebp);
  __ neg(edx);

  // Allocate a new deoptimizer object.
  __ PrepareCallCFunction(6, eax);
  __ mov(eax, Immediate(0));
  Label context_check;
  __ mov(edi, Operand(ebp, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ JumpIfSmi(edi, &context_check);
  __ mov(eax, Operand(ebp, StandardFrameConstants::kFunctionOffset));
  __ bind(&context_check);
  __ mov(Operand(esp, 0 * kSystemPointerSize), eax);  // Function.
  __ mov(Operand(esp, 1 * kSystemPointerSize),
         Immediate(static_cast<int>(deopt_kind)));
  __ mov(Operand(esp, 2 * kSystemPointerSize),
         Immediate(Deoptimizer::kFixedExitSizeMarker));  // Bailout id.
  __ mov(Operand(esp, 3 * kSystemPointerSize), ecx);     // Code address or 0.
  __ mov(Operand(esp, 4 * kSystemPointerSize), edx);     // Fp-to-sp delta.
  __ Move(Operand(esp, 5 * kSystemPointerSize),
          Immediate(ExternalReference::isolate_address(masm->isolate())));
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ CallCFunction(ExternalReference::new_deoptimizer_function(), 6);
  }

  // Preserve deoptimizer object in register eax and get the input
  // frame descriptor pointer.
  __ mov(esi, Operand(eax, Deoptimizer::input_offset()));

  // Fill in the input registers.
  for (int i = kNumberOfRegisters - 1; i >= 0; i--) {
    int offset =
        (i * kSystemPointerSize) + FrameDescription::registers_offset();
    __ pop(Operand(esi, offset));
  }

  int double_regs_offset = FrameDescription::double_registers_offset();
  // Fill in the double input registers.
  for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
    int code = config->GetAllocatableDoubleCode(i);
    int dst_offset = code * kDoubleSize + double_regs_offset;
    int src_offset = code * kDoubleSize;
    __ movsd(xmm0, Operand(esp, src_offset));
    __ movsd(Operand(esi, dst_offset), xmm0);
  }

  // Clear all FPU exceptions.
  // TODO(ulan): Find out why the TOP register is not zero here in some cases,
  // and check that the generated code never deoptimizes with unbalanced stack.
  __ fnclex();

  // Mark the stack as not iterable for the CPU profiler, which won't be able
  // to walk the stack without the return address.
  __ mov_b(__ ExternalReferenceAsOperand(
               ExternalReference::stack_is_iterable_address(isolate), edx),
           Immediate(0));

  // Remove the return address and the double registers.
  __ add(esp, Immediate(kDoubleRegsSize + 1 * kSystemPointerSize));

  // Compute a pointer to the unwinding limit in register ecx; that is
  // the first stack slot not part of the input frame.
  __ mov(ecx, Operand(esi, FrameDescription::frame_size_offset()));
  __ add(ecx, esp);

  // Unwind the stack down to - but not including - the unwinding
  // limit and copy the contents of the activation frame to the input
  // frame description.
  __ lea(edx, Operand(esi, FrameDescription::frame_content_offset()));
  Label pop_loop_header;
  __ jmp(&pop_loop_header);
  Label pop_loop;
  __ bind(&pop_loop);
  __ pop(Operand(edx, 0));
  __ add(edx, Immediate(sizeof(uint32_t)));
  __ bind(&pop_loop_header);
  __ cmp(ecx, esp);
  __ j(not_equal, &pop_loop);

  // Compute the output frame in the deoptimizer.
  __ push(eax);
  __ PrepareCallCFunction(1, esi);
  __ mov(Operand(esp, 0 * kSystemPointerSize), eax);
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ CallCFunction(ExternalReference::compute_output_frames_function(), 1);
  }
  __ pop(eax);

  __ mov(esp, Operand(eax, Deoptimizer::caller_frame_top_offset()));

  // Replace the current (input) frame with the output frames.
  Label outer_push_loop, inner_push_loop, outer_loop_header, inner_loop_header;
  // Outer loop state: eax = current FrameDescription**, edx = one
  // past the last FrameDescription**.
  __ mov(edx, Operand(eax, Deoptimizer::output_count_offset()));
  __ mov(eax, Operand(eax, Deoptimizer::output_offset()));
  __ lea(edx, Operand(eax, edx, times_system_pointer_size, 0));
  __ jmp(&outer_loop_header);
  __ bind(&outer_push_loop);
  // Inner loop state: esi = current FrameDescription*, ecx = loop
  // index.
  __ mov(esi, Operand(eax, 0));
  __ mov(ecx, Operand(esi, FrameDescription::frame_size_offset()));
  __ jmp(&inner_loop_header);
  __ bind(&inner_push_loop);
  __ sub(ecx, Immediate(sizeof(uint32_t)));
  __ push(Operand(esi, ecx, times_1, FrameDescription::frame_content_offset()));
  __ bind(&inner_loop_header);
  __ test(ecx, ecx);
  __ j(not_zero, &inner_push_loop);
  __ add(eax, Immediate(kSystemPointerSize));
  __ bind(&outer_loop_header);
  __ cmp(eax, edx);
  __ j(below, &outer_push_loop);

  // In case of a failed STUB, we have to restore the XMM registers.
  for (int i = 0; i < config->num_allocatable_double_registers(); ++i) {
    int code = config->GetAllocatableDoubleCode(i);
    XMMRegister xmm_reg = XMMRegister::from_code(code);
    int src_offset = code * kDoubleSize + double_regs_offset;
    __ movsd(xmm_reg, Operand(esi, src_offset));
  }

  // Push pc and continuation from the last output frame.
  __ push(Operand(esi, FrameDescription::pc_offset()));
  __ push(Operand(esi, FrameDescription::continuation_offset()));

  // Push the registers from the last output frame.
  for (int i = 0; i < kNumberOfRegisters; i++) {
    int offset =
        (i * kSystemPointerSize) + FrameDescription::registers_offset();
    __ push(Operand(esi, offset));
  }

  __ mov_b(__ ExternalReferenceAsOperand(
               ExternalReference::stack_is_iterable_address(isolate), edx),
           Immediate(1));

  // Restore the registers from the stack.
  __ popad();

  __ InitializeRootRegister();

  // Return to the continuation point.
  __ ret(0);
}

}  // namespace

void Builtins::Generate_DeoptimizationEntry_Eager(MacroAssembler* masm) {
  Generate_DeoptimizationEntry(masm, DeoptimizeKind::kEager);
}

void Builtins::Generate_DeoptimizationEntry_Soft(MacroAssembler* masm) {
  Generate_DeoptimizationEntry(masm, DeoptimizeKind::kSoft);
}

void Builtins::Generate_DeoptimizationEntry_Bailout(MacroAssembler* masm) {
  Generate_DeoptimizationEntry(masm, DeoptimizeKind::kBailout);
}

void Builtins::Generate_DeoptimizationEntry_Lazy(MacroAssembler* masm) {
  Generate_DeoptimizationEntry(masm, DeoptimizeKind::kLazy);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_IA32