// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments excluding receiver
  //  -- r1                 : target
  //  -- r3                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(r1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // JumpToExternalReference expects r0 to contain the number of arguments
  // including the receiver and the extra arguments.
  const int num_extra_args = 3;
  __ add(r0, r0, Operand(num_extra_args + 1));

  // Insert extra arguments.
  __ SmiTag(r0);
  __ Push(r0, r1, r3);
  __ SmiUntag(r0);
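  // r0 was smi-tagged only for the push above; JumpToExternalReference below
  // expects the raw (untagged) argument count back in r0.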

  __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
                             exit_frame_type == BUILTIN_EXIT);
}

// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the current native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}

// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the current native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}

void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(r2);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction);
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  __ mov(r3, r1);
  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Condition const cc_done = (kind == MathMaxMinKind::kMin) ? mi : gt;
  Condition const cc_swap = (kind == MathMaxMinKind::kMin) ? gt : mi;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1;
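  // cc_done keeps the accumulator when it already wins the comparison;
  // cc_swap takes the parameter instead. reg names the value whose sign word
  // is inspected when the operands compare equal: the parameter (d2) for Min,
  // where -0 should win, and the accumulator (d1) for Max, where +0 should
  // win.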

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in r5 and the double value in d1.
  __ LoadRoot(r5, root_index);
  __ vldr(d1, FieldMemOperand(r5, HeapNumber::kValueOffset));

  Label done_loop, loop;
  __ mov(r4, r0);
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ sub(r4, r4, Operand(1), SetCC);
    __ b(lt, &done_loop);

    // Load the next parameter tagged value into r2.
    __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2));

    // Load the double value of the parameter into d2, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(r2, &convert_smi);
    __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset));
    __ JumpIfRoot(r3, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      DCHECK(!FLAG_enable_embedded_constant_pool);
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(r0);
      __ SmiTag(r4);
      __ EnterBuiltinFrame(cp, r1, r0);
      __ Push(r4, r5);
      __ mov(r0, r2);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ mov(r2, r0);
      __ Pop(r4, r5);
      __ LeaveBuiltinFrame(cp, r1, r0);
      __ SmiUntag(r4);
      __ SmiUntag(r0);
      {
        // Restore the double accumulator value (d1).
        Label done_restore;
        __ SmiToDouble(d1, r5);
        __ JumpIfSmi(r5, &done_restore);
        __ vldr(d1, FieldMemOperand(r5, HeapNumber::kValueOffset));
        __ bind(&done_restore);
      }
    }
    __ b(&convert);
    __ bind(&convert_number);
    __ vldr(d2, FieldMemOperand(r2, HeapNumber::kValueOffset));
    __ b(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDouble(d2, r2);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left
    // hand side (d1) and the next parameter value on the right hand side (d2).
    Label compare_nan, compare_swap;
    __ VFPCompareAndSetFlags(d1, d2);
    __ b(cc_done, &loop);
    __ b(cc_swap, &compare_swap);
    __ b(vs, &compare_nan);

    // Left and right hand side are equal, check for -0 vs. +0.
    __ VmovHigh(ip, reg);
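    // ip now holds the upper (sign) word of the double: 0x80000000 for -0.0,
    // zero for +0.0.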
    __ cmp(ip, Operand(0x80000000));
    __ b(ne, &loop);

    // Result is on the right hand side.
    __ bind(&compare_swap);
    __ vmov(d1, d2);
    __ mov(r5, r2);
    __ b(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    __ bind(&compare_nan);
    __ LoadRoot(r5, Heap::kNanValueRootIndex);
    __ vldr(d1, FieldMemOperand(r5, HeapNumber::kValueOffset));
    __ b(&loop);
  }

  __ bind(&done_loop);
  // Drop all slots, including the receiver.
  __ add(r0, r0, Operand(1));
  __ Drop(r0);
  __ mov(r0, r5);
  __ Ret();
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r0.
  Label no_arguments;
  {
    __ mov(r2, r0);  // Store argc in r2.
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  }

  // 2a. Convert the first argument to a number.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(r2);
    __ EnterBuiltinFrame(cp, r1, r2);
    __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, r1, r2);
    __ SmiUntag(r2);
  }

  {
    // Drop all arguments including the receiver.
    __ Drop(r2);
    __ Ret(1);
  }

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ Move(r0, Smi::kZero);
  __ Ret(1);
}

// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- r3                     : new target
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // 2. Load the first argument into r2.
  {
    Label no_arguments, done;
    __ mov(r6, r0);  // Store argc in r6.
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    __ b(&done);
    __ bind(&no_arguments);
    __ Move(r2, Smi::kZero);
    __ bind(&done);
  }

  // 3. Make sure r2 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(r2, &done_convert);
    __ CompareObjectType(r2, r4, r4, HEAP_NUMBER_TYPE);
    __ b(eq, &done_convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(r6);
      __ EnterBuiltinFrame(cp, r1, r6);
      __ Push(r3);
      __ Move(r0, r2);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ Move(r2, r0);
      __ Pop(r3);
      __ LeaveBuiltinFrame(cp, r1, r6);
      __ SmiUntag(r6);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ cmp(r1, r3);
  __ b(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(r0, r1, r2, r4, r5, &new_object);
  __ b(&drop_frame_and_ret);

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    FastNewObjectStub stub(masm->isolate());
    __ SmiTag(r6);
    __ EnterBuiltinFrame(cp, r1, r6);
    __ Push(r2);  // first argument
    __ CallStub(&stub);
    __ Pop(r2);
    __ LeaveBuiltinFrame(cp, r1, r6);
    __ SmiUntag(r6);
  }
  __ str(r2, FieldMemOperand(r0, JSValue::kValueOffset));

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r6);
    __ Ret(1);
  }
}

// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r0.
  Label no_arguments;
  {
    __ mov(r2, r0);  // Store argc in r2.
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  }

  // 2a. At least one argument, return r0 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label drop_frame_and_ret, to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(r0, &to_string);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(r0, r3, r3, FIRST_NONSTRING_TYPE);
    __ b(hi, &to_string);
    __ b(eq, &symbol_descriptive_string);
    __ b(&drop_frame_and_ret);
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(r0, Heap::kempty_stringRootIndex);
    __ Ret(1);
  }

  // 3a. Convert r0 to a string.
  __ bind(&to_string);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(r2);
    __ EnterBuiltinFrame(cp, r1, r2);
    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, r1, r2);
    __ SmiUntag(r2);
  }
  __ b(&drop_frame_and_ret);

  // 3b. Convert symbol in r0 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Drop(r2);
    __ Drop(1);
    __ Push(r0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r2);
    __ Ret(1);
  }
}

// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- r3                     : new target
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

  // 2. Load the first argument into r2.
  {
    Label no_arguments, done;
    __ mov(r6, r0);  // Store argc in r6.
    __ sub(r0, r0, Operand(1), SetCC);
    __ b(lo, &no_arguments);
    __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadRoot(r2, Heap::kempty_stringRootIndex);
    __ bind(&done);
  }

  // 3. Make sure r2 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(r2, &convert);
    __ CompareObjectType(r2, r4, r4, FIRST_NONSTRING_TYPE);
    __ b(lo, &done_convert);
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(r6);
      __ EnterBuiltinFrame(cp, r1, r6);
      __ Push(r3);
      __ Move(r0, r2);
      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
      __ Move(r2, r0);
      __ Pop(r3);
      __ LeaveBuiltinFrame(cp, r1, r6);
      __ SmiUntag(r6);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ cmp(r1, r3);
  __ b(ne, &new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(r0, r1, r2, r4, r5, &new_object);
  __ b(&drop_frame_and_ret);

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    FastNewObjectStub stub(masm->isolate());
    __ SmiTag(r6);
    __ EnterBuiltinFrame(cp, r1, r6);
    __ Push(r2);  // first argument
    __ CallStub(&stub);
    __ Pop(r2);
    __ LeaveBuiltinFrame(cp, r1, r6);
    __ SmiUntag(r6);
  }
  __ str(r2, FieldMemOperand(r0, JSValue::kValueOffset));

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r6);
    __ Ret(1);
  }
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- r0 : argument count (preserved for callee)
  //  -- r1 : target function (preserved for callee)
  //  -- r3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    __ SmiTag(r0);
    __ push(r0);
    // Push a copy of the target function and the new target.
    __ push(r1);
    __ push(r3);
    // Push function as parameter to the runtime call.
    __ Push(r1);

    __ CallRuntime(function_id, 1);
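    // The runtime call leaves the code object to jump to in r0; stash it in
    // r2 while the saved registers are popped below.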
    __ mov(r2, r0);

    // Restore target function and new target.
    __ pop(r3);
    __ pop(r1);
    __ pop(r0);
    __ SmiUntag(r0, r0);
  }
  __ add(r2, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r2);
}

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmp(sp, Operand(ip));
  __ b(hs, &ok);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

namespace {

void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
                                    bool create_implicit_receiver,
                                    bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- r3     : new target
  //  -- cp     : context
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ SmiTag(r0);
    __ Push(cp, r0);

    if (create_implicit_receiver) {
      // Allocate the new receiver object.
      __ Push(r1, r3);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mov(r4, r0);
      __ Pop(r1, r3);

      // ----------- S t a t e -------------
      //  -- r1: constructor function
      //  -- r3: new target
      //  -- r4: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ ldr(r0, MemOperand(sp));
    }

    __ SmiUntag(r0);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ push(r4);
      __ push(r4);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r0: number of arguments
    // r1: constructor function
    // r2: address of last argument (caller sp)
    // r3: new target
    // r4: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, entry;
    __ SmiTag(r4, r0);
    __ b(&entry);
    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, r4, LSL, kPointerSizeLog2 - 1));
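    // r4 is smi-tagged (value << 1), so LSL #(kPointerSizeLog2 - 1) scales it
    // directly to a byte offset; the Operand(2) below likewise steps the
    // smi-tagged counter down by one.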
    __ push(ip);
    __ bind(&entry);
    __ sub(r4, r4, Operand(2), SetCC);
    __ b(ge, &loop);

    // Call the function.
    // r0: number of arguments
    // r1: constructor function
    // r3: new target
    ParameterCount actual(r0);
    __ InvokeFunction(r1, r3, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r0: result
    // sp[0]: receiver
    // sp[1]: number of arguments (smi-tagged)
    __ ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // r0: result
      // sp[0]: receiver
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(r0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CompareObjectType(r0, r1, r3, FIRST_JS_RECEIVER_TYPE);
      __ b(ge, &exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ ldr(r0, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // r0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ ldr(r1, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ ldr(r1, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(r0, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

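  // r1 holds the smi-tagged argument count taken from the frame; drop the
  // arguments (smi-scaled shift, as above) plus one slot for the receiver.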
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
  }
  __ Jump(lr);
}

}  // namespace

void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}

void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the value to pass to the generator
  //  -- r1 : the JSGeneratorObject to resume
  //  -- r2 : the resume mode (tagged)
  //  -- lr : return address
  // -----------------------------------
  __ AssertGeneratorObject(r1);

  // Store input value into generator object.
  __ str(r0, FieldMemOperand(r1, JSGeneratorObject::kInputOrDebugPosOffset));
  __ RecordWriteField(r1, JSGeneratorObject::kInputOrDebugPosOffset, r0, r3,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kResumeModeOffset));

  // Load suspended function and context.
  __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
  __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(masm->isolate());
  STATIC_ASSERT(StepFrame > StepIn);
  __ mov(ip, Operand(last_step_action));
  __ ldrsb(ip, MemOperand(ip));
  __ cmp(ip, Operand(StepIn));
  __ b(ge, &prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ mov(ip, Operand(debug_suspended_generator));
  __ ldr(ip, MemOperand(ip));
  __ cmp(ip, Operand(r1));
  __ b(eq, &prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Push receiver.
  __ ldr(ip, FieldMemOperand(r1, JSGeneratorObject::kReceiverOffset));
  __ Push(ip);

  // ----------- S t a t e -------------
  //  -- r1    : the JSGeneratorObject to resume
  //  -- r2    : the resume mode (tagged)
  //  -- r4    : generator function
  //  -- cp    : generator context
  //  -- lr    : return address
  //  -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r3,
         FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    Label done_loop, loop;
    __ bind(&loop);
    __ sub(r3, r3, Operand(Smi::FromInt(1)), SetCC);
    __ b(mi, &done_loop);
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ b(&loop);
    __ bind(&done_loop);
  }

  // Dispatch on the kind of generator object.
  Label old_generator;
  __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
  __ CompareObjectType(r3, r3, r3, BYTECODE_ARRAY_TYPE);
  __ b(ne, &old_generator);

  // New-style (ignition/turbofan) generator object
  {
    __ ldr(r0, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r0, FieldMemOperand(
                   r0, SharedFunctionInfo::kFormalParameterCountOffset));
    __ SmiUntag(r0);
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(r3, r1);
    __ Move(r1, r4);
    __ ldr(r5, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
    __ Jump(r5);
  }

  // Old-style (full-codegen) generator object
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    DCHECK(!FLAG_enable_embedded_constant_pool);
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(lr, fp);
    __ Move(fp, sp);
    __ Push(cp, r4);

    // Restore the operand stack.
    __ ldr(r0, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
    __ ldr(r3, FieldMemOperand(r0, FixedArray::kLengthOffset));
    __ add(r0, r0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ add(r3, r0, Operand(r3, LSL, kPointerSizeLog2 - 1));
    {
      Label done_loop, loop;
      __ bind(&loop);
      __ cmp(r0, r3);
      __ b(eq, &done_loop);
      __ ldr(ip, MemOperand(r0, kPointerSize, PostIndex));
      __ Push(ip);
      __ b(&loop);
      __ bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(ip, Heap::kEmptyFixedArrayRootIndex);
    __ str(ip, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));

    // Resume the generator function at the continuation.
    __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset));
    __ add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
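    // The continuation offset is a smi; ASR #1 untags it as it is added.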
    __ add(r3, r3, Operand(r2, ASR, 1));
    __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
    __ Move(r0, r1);  // Continuation expects generator object in r0.
    __ Jump(r3);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r1, r2, r4);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(r1, r2);
    __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r1, r2);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(r1, r2);
    __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ push(r1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };

// Clobbers r2; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
  // Make r2 the space we have left. The stack might already be overflowed
  // here which will cause r2 to become negative.
  __ sub(r2, sp, r2);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
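    // PointerOffsetFromSmiKey scales the smi-tagged count to a byte size.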
    __ cmp(r2, Operand::PointerOffsetFromSmiKey(argc));
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ cmp(r2, Operand(argc, LSL, kPointerSizeLog2));
  }
  __ b(gt, &okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r0: new.target
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  // r5-r6, r8 (if !FLAG_enable_embedded_constant_pool) and cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ mov(cp, Operand(context_address));
    __ ldr(cp, MemOperand(cp));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(r1, r2);

    // Check if we have enough stack space to push all arguments.
    // Clobbers r2.
    Generate_CheckStackOverflow(masm, r3, kArgcIsUntaggedInt);

    // Remember new.target.
    __ mov(r5, r0);

    // Copy arguments to the stack in a loop.
    // r1: function
    // r3: argc
    // r4: argv, i.e. points to first arg
    Label loop, entry;
    __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
    // r2 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex));  // read next parameter
    __ ldr(r0, MemOperand(r0));                           // dereference handle
    __ push(r0);                                          // push parameter
    __ bind(&entry);
    __ cmp(r4, r2);
    __ b(ne, &loop);

    // Setup new.target and argc.
    __ mov(r0, Operand(r3));
    __ mov(r3, Operand(r5));

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mov(r5, Operand(r4));
    __ mov(r6, Operand(r4));
    if (!FLAG_enable_embedded_constant_pool) {
      __ mov(r8, Operand(r4));
    }
    if (kR9Available == 1) {
      __ mov(r9, Operand(r4));
    }

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame and remove the parameters (except function), and
    // return.
    // Respect ABI stack constraint.
  }
  __ Jump(lr);

  // r0: result
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ ldr(args_count,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ ldr(args_count,
         FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments.
  __ add(sp, sp, args_count, LeaveCC);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o r1: the JS function object being called.
//   o r3: the new target
//   o cp: our context
//   o pp: the caller's constant pool pointer (if enabled)
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(r1);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ ldr(r0, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  Register debug_info = kInterpreterBytecodeArrayRegister;
  DCHECK(!debug_info.is(r0));
  __ ldr(debug_info, FieldMemOperand(r0, SharedFunctionInfo::kDebugInfoOffset));
  __ cmp(debug_info, Operand(DebugInfo::uninitialized()));
  // Load original bytecode array or the debug copy.
  __ ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(r0, SharedFunctionInfo::kFunctionDataOffset), eq);
  __ ldr(kInterpreterBytecodeArrayRegister,
         FieldMemOperand(debug_info, DebugInfo::kDebugBytecodeArrayIndex), ne);
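  // The eq/ne suffixes predicate these two loads on the cmp above, so exactly
  // one of them takes effect.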

  // Check whether we should continue to use the interpreter.
  Label switch_to_different_code_kind;
  __ ldr(r0, FieldMemOperand(r0, SharedFunctionInfo::kCodeOffset));
  __ cmp(r0, Operand(masm->CodeObject()));  // Self-reference to this code.
  __ b(ne, &switch_to_different_code_kind);

  // Increment invocation count for the function.
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kLiteralsOffset));
  __ ldr(r2, FieldMemOperand(r2, LiteralsArray::kFeedbackVectorOffset));
  __ ldr(r9, FieldMemOperand(
                 r2, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
                         TypeFeedbackVector::kHeaderSize));
  __ add(r9, r9, Operand(Smi::FromInt(1)));
  __ str(r9, FieldMemOperand(
                 r2, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
                         TypeFeedbackVector::kHeaderSize));

  // Check function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ SmiTst(kInterpreterBytecodeArrayRegister);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r0, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Load the initial bytecode offset.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(r0, kInterpreterBytecodeOffsetRegister);
  __ Push(r3, kInterpreterBytecodeArrayRegister, r0);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size from the BytecodeArray object.
    __ ldr(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                               BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ sub(r9, sp, Operand(r4));
    __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
    __ cmp(r9, Operand(r2));
    __ b(hs, &ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
    __ b(&loop_check, al);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(r9);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ sub(r4, r4, Operand(kPointerSize), SetCC);
    __ b(&loop_header, ge);
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ ldrb(r1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
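  // r1 now holds the current bytecode; it indexes the dispatch table below.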
  __ ldr(ip, MemOperand(kInterpreterDispatchTableRegister, r1, LSL,
                        kPointerSizeLog2));
  __ Call(ip);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in r0.
  LeaveInterpreterFrame(masm, r2);
  __ Jump(lr);

  // If the shared code is no longer this entry trampoline, then the underlying
  // function has been switched to a different kind of code and we heal the
  // closure by switching the code entry field over to the new code as well.
  __ bind(&switch_to_different_code_kind);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kCodeOffset));
  __ add(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ str(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(r1, r4, r5);
  __ Jump(r4);
}

static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
                                        Register scratch,
                                        Label* stack_overflow) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
  // Make scratch the space we have left. The stack might already be overflowed
  // here which will cause scratch to become negative.
  __ sub(scratch, sp, scratch);
  // Check if the arguments will overflow the stack.
  __ cmp(scratch, Operand(num_args, LSL, kPointerSizeLog2));
  __ b(le, stack_overflow);  // Signed comparison.
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args, Register index,
                                         Register limit, Register scratch,
                                         Label* stack_overflow) {
  // Add a stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, num_args, scratch, stack_overflow);

  // Find the address of the last argument.
  __ mov(limit, num_args);
  __ mov(limit, Operand(limit, LSL, kPointerSizeLog2));
  __ sub(limit, index, limit);
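  // limit now points one slot below the last value to push; the loop below
  // walks index down to limit, pushing one slot at a time.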

  Label loop_header, loop_check;
  __ b(al, &loop_check);
  __ bind(&loop_header);
  __ ldr(scratch, MemOperand(index, -kPointerSize, PostIndex));
  __ push(scratch);
  __ bind(&loop_check);
  __ cmp(index, limit);
  __ b(gt, &loop_header);
}

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode,
    CallableType function_type) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- r1 : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;

  __ add(r3, r0, Operand(1));  // Add one for receiver.

  // Push the arguments. r2, r4, r5 will be modified.
  Generate_InterpreterPushArgs(masm, r3, r2, r4, r5, &stack_overflow);

  // Call the target.
  if (function_type == CallableType::kJSFunction) {
    __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                                      tail_call_mode),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(function_type, CallableType::kAny);
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                              tail_call_mode),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
    MacroAssembler* masm, CallableType construct_type) {
  // ----------- S t a t e -------------
  // -- r0 : argument count (not including receiver)
  // -- r3 : new target
  // -- r1 : constructor to call
  // -- r2 : allocation site feedback if available, undefined otherwise.
  // -- r4 : address of the first argument
  // -----------------------------------
  Label stack_overflow;

  // Push a slot for the receiver to be constructed.
  __ mov(ip, Operand::Zero());
  __ push(ip);

  // Push the arguments. r5, r4, r6 will be modified.
  Generate_InterpreterPushArgs(masm, r0, r4, r5, r6, &stack_overflow);

  __ AssertUndefinedOrAllocationSite(r2, r5);
  if (construct_type == CallableType::kJSFunction) {
    __ AssertFunction(r1);

    // Tail call to the function-specific construct stub (still in the caller
    // context at this point).
    __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kConstructStubOffset));
    // Jump to the construct function.
    __ add(pc, r4, Operand(Code::kHeaderSize - kHeapObjectTag));

  } else {
    DCHECK_EQ(construct_type, CallableType::kAny);
    // Call the constructor with r0, r1, and r3 unmodified.
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstructArray(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r0 : argument count (not including receiver)
  // -- r1 : target to call verified to be Array function
  // -- r2 : allocation site feedback if available, undefined otherwise.
  // -- r3 : address of the first argument
  // -----------------------------------
  Label stack_overflow;

  __ add(r4, r0, Operand(1));  // Add one for receiver.

  // TODO(mythria): Add a stack check before pushing arguments.
  // Push the arguments. r3, r5, r6 will be modified.
  Generate_InterpreterPushArgs(masm, r4, r3, r5, r6, &stack_overflow);

  // Array constructor expects constructor in r3. It is same as r1 here.
  __ mov(r3, r1);

  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);
  }
}

static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
  __ Move(r2, masm->isolate()->builtins()->InterpreterEntryTrampoline());
  __ add(lr, r2, Operand(interpreter_entry_return_pc_offset->value() +
                         Code::kHeaderSize - kHeapObjectTag));

  // Initialize the dispatch table register.
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ ldr(kInterpreterBytecodeArrayRegister,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ SmiTst(kInterpreterBytecodeArrayRegister);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r1, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ ldr(kInterpreterBytecodeOffsetRegister,
         MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ ldrb(r1, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ldr(ip, MemOperand(kInterpreterDispatchTableRegister, r1, LSL,
                        kPointerSizeLog2));
  __ mov(pc, ip);
}

void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Advance the current bytecode offset stored within the given interpreter
  // stack frame. This simulates what all bytecode handlers do upon completion
  // of the underlying operation.
  __ ldr(r1, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ ldr(r2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(kInterpreterAccumulatorRegister, r1, r2);
    __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset);
    __ mov(r2, r0);  // Result is the new bytecode offset.
    __ Pop(kInterpreterAccumulatorRegister);
  }
  __ str(r2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));

  Generate_InterpreterEnterBytecode(masm);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : argument count (preserved for callee)
  //  -- r3 : new target (preserved for callee)
  //  -- r1 : target function (preserved for callee)
  // -----------------------------------
  // First lookup code, maybe we don't need to compile!
  Label gotta_call_runtime, gotta_call_runtime_no_stack;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register argument_count = r0;
  Register closure = r1;
  Register new_target = r3;
  __ push(argument_count);
  __ push(new_target);
  __ push(closure);

  Register map = argument_count;
  Register index = r2;
  __ ldr(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(map,
         FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ ldr(index, FieldMemOperand(map, FixedArray::kLengthOffset));
1387   __ cmp(index, Operand(Smi::FromInt(2)));
1388   __ b(lt, &gotta_call_runtime);
1389 
1390   // Find literals.
1391   // r3  : native context
1392   // r2  : length / index
1393   // r0  : optimized code map
1394   // stack[0] : new target
1395   // stack[4] : closure
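  // The optimized code map is a FixedArray of fixed-size entries (roughly:
  // context, literals, code, and OSR ast id -- SharedFunctionInfo::kEntryLength
  // slots each); the loop below walks it from the back, one entry at a time.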
  Register native_context = r3;
  __ ldr(native_context, NativeContextMemOperand());

  __ bind(&loop_top);
  Register temp = r1;
  Register array_pointer = r5;

  // Does the native context match?
  __ add(array_pointer, map, Operand::PointerOffsetFromSmiKey(index));
  __ ldr(temp, FieldMemOperand(array_pointer,
                               SharedFunctionInfo::kOffsetToPreviousContext));
  __ ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ cmp(temp, native_context);
  __ b(ne, &loop_bottom);
  // OSR id set to none?
  __ ldr(temp, FieldMemOperand(array_pointer,
                               SharedFunctionInfo::kOffsetToPreviousOsrAstId));
  const int bailout_id = BailoutId::None().ToInt();
  __ cmp(temp, Operand(Smi::FromInt(bailout_id)));
  __ b(ne, &loop_bottom);
  // Literals available?
  __ ldr(temp, FieldMemOperand(array_pointer,
                               SharedFunctionInfo::kOffsetToPreviousLiterals));
  __ ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ JumpIfSmi(temp, &gotta_call_runtime);

  // Save the literals in the closure.
  __ ldr(r4, MemOperand(sp, 0));
  __ str(temp, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
  __ push(index);
  __ RecordWriteField(r4, JSFunction::kLiteralsOffset, temp, index,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ pop(index);

  // Code available?
  Register entry = r4;
  __ ldr(entry,
         FieldMemOperand(array_pointer,
                         SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Found literals and code. Get them into the closure and return.
  __ pop(closure);
  // Store code entry in the closure.
  __ add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(closure, entry, r5);

  // Link the closure into the optimized function list.
  // r4 : code entry
  // r3 : native context
  // r1 : closure
  __ ldr(r5,
         ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ str(r5, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, r5, r0,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ str(closure,
         ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  // Save closure before the write barrier.
  __ mov(r5, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, closure, r0,
                            kLRHasNotBeenSaved, kDontSaveFPRegs);
  __ mov(closure, r5);
  __ pop(new_target);
  __ pop(argument_count);
  __ Jump(entry);

  __ bind(&loop_bottom);
  __ sub(index, index, Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
  __ cmp(index, Operand(Smi::FromInt(1)));
  __ b(gt, &loop_top);

  // We found neither literals nor code.
  __ jmp(&gotta_call_runtime);

  __ bind(&try_shared);
  __ pop(closure);
  __ pop(new_target);
  __ pop(argument_count);
  __ ldr(entry,
         FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  // Is the shared function marked for tier up?
  __ ldrb(r5, FieldMemOperand(entry,
                              SharedFunctionInfo::kMarkedForTierUpByteOffset));
  __ tst(r5, Operand(1 << SharedFunctionInfo::kMarkedForTierUpBitWithinByte));
  __ b(ne, &gotta_call_runtime_no_stack);
  // Is the full code valid?
  __ ldr(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
  __ ldr(r5, FieldMemOperand(entry, Code::kFlagsOffset));
  __ and_(r5, r5, Operand(Code::KindField::kMask));
  __ mov(r5, Operand(r5, LSR, Code::KindField::kShift));
  __ cmp(r5, Operand(Code::BUILTIN));
  __ b(eq, &gotta_call_runtime_no_stack);
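  // (If the shared code is a builtin, it is presumably the lazy-compile stub
  // itself, i.e. there is no real code to install yet.)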
  // Yes, install the full code.
  __ add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(closure, entry, r5);
  __ Jump(entry);

  __ bind(&gotta_call_runtime);
  __ pop(closure);
  __ pop(new_target);
  __ pop(argument_count);
  __ bind(&gotta_call_runtime_no_stack);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}

void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}

void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}

void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : argument count (preserved for callee)
  //  -- r1 : target function (preserved for callee)
  //  -- r3 : new target (preserved for callee)
  // -----------------------------------
  Label failed;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve argument count for later compare.
    __ Move(r4, r0);
    // Push the number of arguments to the callee.
    __ SmiTag(r0);
    __ push(r0);
    // Push a copy of the target function and the new target.
    __ push(r1);
    __ push(r3);

    // The function.
    __ push(r1);
    // Copy arguments from caller (stdlib, foreign, heap).
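    // The runtime call expects exactly three such arguments: the unrolled
    // loop below pushes the j arguments the caller actually passed
    // (j == argc, at most 3) and pads the remainder with undefined.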
    Label args_done;
    for (int j = 0; j < 4; ++j) {
      Label over;
      if (j < 3) {
        __ cmp(r4, Operand(j));
        __ b(ne, &over);
      }
      for (int i = j - 1; i >= 0; --i) {
        __ ldr(r4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
                                      i * kPointerSize));
        __ push(r4);
      }
      for (int i = 0; i < 3 - j; ++i) {
        __ PushRoot(Heap::kUndefinedValueRootIndex);
      }
      if (j < 3) {
        __ jmp(&args_done);
        __ bind(&over);
      }
    }
    __ bind(&args_done);

    // Call the runtime; on success, unwind this frame and the parent frame.
    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
    // A smi 0 is returned on failure, an object on success.
    __ JumpIfSmi(r0, &failed);

    __ Drop(2);
    __ pop(r4);
    __ SmiUntag(r4);
    scope.GenerateLeaveFrame();

    __ add(r4, r4, Operand(1));
    __ Drop(r4);
    __ Ret();

    __ bind(&failed);
    // Restore target function and new target.
    __ pop(r3);
    __ pop(r1);
    __ pop(r0);
    __ SmiUntag(r0);
  }
  // On failure, tail call back to regular JS.
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}

static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r0 - contains return address (beginning of patch sequence)
  //   r1 - isolate
  //   r3 - new target
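  // Sketch of the C call below: make_code_young(return_address, isolate) --
  // two arguments, so r0 (the return address, i.e. the start of the patch
  // sequence) is already in place, and r1 is loaded with the isolate.
  // Execution then resumes at the rejuvenated code via the final mov pc, r0.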
  FrameScope scope(masm, StackFrame::MANUAL);
  __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
  __ PrepareCallCFunction(2, 0, r2);
  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ ldm(ia_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
  __ mov(pc, r0);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR

void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
  // that make_code_young doesn't do any garbage collection which allows us to
  // save/restore the registers without worrying about which of them contain
  // pointers.

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r0 - contains return address (beginning of patch sequence)
  //   r1 - isolate
  //   r3 - new target
  FrameScope scope(masm, StackFrame::MANUAL);
  __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());
  __ PrepareCallCFunction(2, 0, r2);
  __ mov(r1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ ldm(ia_w, sp, r0.bit() | r1.bit() | r3.bit() | fp.bit() | lr.bit());

  // Perform prologue operations usually performed by the young code stub.
  __ PushStandardFrame(r1);

  // Jump to point after the code-age stub.
  __ add(r0, r0, Operand(kNoCodeAgeSequenceLength));
  __ mov(pc, r0);
}

void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}

void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}

static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification; this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved);
    // Notify the runtime of the stub failure.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved);
  }

  __ add(sp, sp, Operand(kPointerSize));  // Ignore state
  __ mov(pc, lr);                         // Jump to miss handler
}

void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}

void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}

static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system.
    __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(r0);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it -> r6.
  __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r6);
  // Switch on the state.
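  // Two states are expected here (a sketch): NO_REGISTERS means only the
  // state smi is on the stack; TOS_REGISTER means the top-of-stack value
  // (the accumulator) was additionally saved and must be reloaded into r0.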
  Label with_tos_register, unknown_state;
  __ cmp(r6,
         Operand(static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS)));
  __ b(ne, &with_tos_register);
  __ add(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r0.code());
  __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
  __ cmp(r6,
         Operand(static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER)));
  __ b(ne, &unknown_state);
  __ add(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}

void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}

void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}

static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                    Register function_template_info,
                                    Register scratch0, Register scratch1,
                                    Register scratch2,
                                    Label* receiver_check_failed) {
  Register signature = scratch0;
  Register map = scratch1;
  Register constructor = scratch2;

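  // Roughly, the check below: starting from the receiver, walk up the
  // prototype chain; at each step, follow the constructor's chain of parent
  // function templates looking for one whose signature matches. Only hidden
  // prototypes may be crossed; anything else fails the check.
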
  // If there is no signature, return the holder.
  __ ldr(signature, FieldMemOperand(function_template_info,
                                    FunctionTemplateInfo::kSignatureOffset));
  __ CompareRoot(signature, Heap::kUndefinedValueRootIndex);
  Label receiver_check_passed;
  __ b(eq, &receiver_check_passed);

  // Walk the prototype chain.
  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any.
  __ GetMapConstructor(constructor, map, ip, ip);
  __ cmp(ip, Operand(JS_FUNCTION_TYPE));
  Label next_prototype;
  __ b(ne, &next_prototype);
  Register type = constructor;
  __ ldr(type,
         FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ cmp(signature, type);
  __ b(eq, &receiver_check_passed);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype);
  __ CompareObjectType(type, ip, ip, FUNCTION_TEMPLATE_INFO_TYPE);

  // Otherwise load the parent function template and iterate.
  __ ldr(type,
         FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset),
         eq);
  __ b(&function_template_loop, eq);

  // Load the next prototype.
  __ bind(&next_prototype);
  __ ldr(ip, FieldMemOperand(map, Map::kBitField3Offset));
  __ tst(ip, Operand(Map::HasHiddenPrototype::kMask));
  __ b(eq, receiver_check_failed);
  __ ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Iterate.
  __ b(&prototype_loop_start);

  __ bind(&receiver_check_passed);
}

void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments excluding receiver
  //  -- r1                 : callee
  //  -- lr                 : return address
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ ldr(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  CompatibleReceiverCheck(masm, r2, r3, r4, r5, r6, &receiver_check_failed);

  // Get the fast handler from the FunctionTemplateInfo's call code, and jump
  // to the beginning of its code.
  __ ldr(r4, FieldMemOperand(r3, FunctionTemplateInfo::kCallCodeOffset));
  __ ldr(r4, FieldMemOperand(r4, CallHandlerInfo::kFastHandlerOffset));
  __ add(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(r4);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  // Drop the arguments (including the receiver).
  __ add(r0, r0, Operand(1));
  __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}

static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
                                              bool has_handler_frame) {
  // Lookup the function in the JavaScript frame.
  if (has_handler_frame) {
    __ ldr(r0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ ldr(r0, MemOperand(r0, JavaScriptFrameConstants::kFunctionOffset));
  } else {
    __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }

  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the caller.
  Label skip;
  __ cmp(r0, Operand(Smi::kZero));
  __ b(ne, &skip);
  __ Ret();

  __ bind(&skip);

  // Drop any potential handler frame that may be sitting on top of the actual
  // JavaScript frame. This is the case when OSR is triggered from bytecode.
  if (has_handler_frame) {
    __ LeaveFrame(StackFrame::STUB);
  }

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ ldr(r1, FieldMemOperand(r0, Code::kDeoptimizationDataOffset));

  {
    ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start

    if (FLAG_enable_embedded_constant_pool) {
      __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r0);
    }

    // Load the OSR entrypoint offset from the deoptimization data.
    // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
    __ ldr(r1, FieldMemOperand(
                   r1, FixedArray::OffsetOfElementAt(
                           DeoptimizationInputData::kOsrPcOffsetIndex)));

    // Compute the target address = code start + osr_offset
    __ add(lr, r0, Operand::SmiUntag(r1));

    // And "return" to the OSR entry point of the function.
    __ Ret();
  }
}

void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, false);
}

void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, true);
}

// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : argc
  //  -- sp[0] : argArray
  //  -- sp[4] : thisArg
  //  -- sp[8] : receiver
  // -----------------------------------
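  // In JS terms, this implements (roughly) receiver.apply(thisArg, argArray):
  // call `receiver` with `thisArg` as the receiver and the elements of
  // argArray spread as the arguments.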

  // 1. Load receiver into r1, argArray into r0 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ mov(r3, r2);
    __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));  // receiver
    __ sub(r4, r0, Operand(1), SetCC);
    __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // thisArg
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // argArray
    __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ str(r2, MemOperand(sp, 0));
    __ mov(r0, r3);
  }

  // ----------- S t a t e -------------
  //  -- r0    : argArray
  //  -- r1    : receiver
  //  -- sp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(r1, &receiver_not_callable);
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsCallable));
  __ b(eq, &receiver_not_callable);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(r0, Heap::kNullValueRootIndex, &no_arguments);
  __ JumpIfRoot(r0, Heap::kUndefinedValueRootIndex, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ mov(r0, Operand(0));
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    __ str(r1, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}

// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
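  // In JS terms, this implements (roughly) receiver.call(arg1, ..., argN):
  // the receiver itself is the callable, the first argument becomes the new
  // receiver, and the remaining arguments are passed through.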
  // 1. Make sure we have at least one argument.
  // r0: actual number of arguments
  {
    Label done;
    __ cmp(r0, Operand::Zero());
    __ b(ne, &done);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ add(r0, r0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  // r0: actual number of arguments
  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  // r0: actual number of arguments
  // r1: callable
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));

    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, -kPointerSize));
    __ str(ip, MemOperand(r2));
    __ sub(r2, r2, Operand(kPointerSize));
    __ cmp(r2, sp);
    __ b(ne, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ sub(r0, r0, Operand(1));
    __ pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : argc
  //  -- sp[0]  : argumentsList
  //  -- sp[4]  : thisArgument
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------
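  // In JS terms, this implements (roughly)
  // Reflect.apply(target, thisArgument, argumentsList).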

  // 1. Load target into r1 (if present), argumentsList into r0 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
    __ mov(r2, r1);
    __ mov(r3, r1);
    __ sub(r4, r0, Operand(1), SetCC);
    __ ldr(r1, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // target
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // thisArgument
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // argumentsList
    __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ str(r2, MemOperand(sp, 0));
    __ mov(r0, r3);
  }

  // ----------- S t a t e -------------
  //  -- r0    : argumentsList
  //  -- r1    : target
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(r1, &target_not_callable);
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsCallable));
  __ b(eq, &target_not_callable);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    __ str(r1, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}

void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : argc
  //  -- sp[0]  : new.target (optional)
  //  -- sp[4]  : argumentsList
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------
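  // In JS terms, this implements (roughly)
  // Reflect.construct(target, argumentsList[, newTarget]).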

  // 1. Load target into r1 (if present), argumentsList into r0 (if present),
  // new.target into r3 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and leave undefined on
  // the stack as the receiver.
  {
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
    __ mov(r2, r1);
    __ str(r2, MemOperand(sp, r0, LSL, kPointerSizeLog2));  // receiver
    __ sub(r4, r0, Operand(1), SetCC);
    __ ldr(r1, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // target
    __ mov(r3, r1);  // new.target defaults to target
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r2, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // argumentsList
    __ sub(r4, r4, Operand(1), SetCC, ge);
    __ ldr(r3, MemOperand(sp, r4, LSL, kPointerSizeLog2), ge);  // new.target
    __ add(sp, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ mov(r0, r2);
  }

  // ----------- S t a t e -------------
  //  -- r0    : argumentsList
  //  -- r3    : new.target
  //  -- r1    : target
  //  -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(r1, &target_not_constructor);
  __ ldr(r4, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsConstructor));
  __ b(eq, &target_not_constructor);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(r3, &new_target_not_constructor);
  __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsConstructor));
  __ b(eq, &new_target_not_constructor);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    __ str(r1, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ str(r3, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
2125 
EnterArgumentsAdaptorFrame(MacroAssembler * masm)2126 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
2127   __ SmiTag(r0);
2128   __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2129   __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() |
2130                        (FLAG_enable_embedded_constant_pool ? pp.bit() : 0) |
2131                        fp.bit() | lr.bit());
2132   __ add(fp, sp,
2133          Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
2134 }
2135 
LeaveArgumentsAdaptorFrame(MacroAssembler * masm)2136 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
2137   // ----------- S t a t e -------------
2138   //  -- r0 : result being passed through
2139   // -----------------------------------
2140   // Get the number of arguments passed (as a smi), tear down the frame and
2141   // then tear down the parameters.
2142   __ ldr(r1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
2143                               kPointerSize)));
2144 
2145   __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR);
2146   __ add(sp, sp, Operand::PointerOffsetFromSmiKey(r1));
2147   __ add(sp, sp, Operand(kPointerSize));  // adjust for receiver
2148 }

// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : argumentsList
  //  -- r1    : target
  //  -- r3    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
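  // Three paths below (a sketch): an unmodified arguments object or a fast
  // JSArray can have its elements (a FixedArray) used directly; anything
  // else goes through Runtime::kCreateListFromArrayLike.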
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(r0, &create_runtime);

    // Load the map of argumentsList into r2.
    __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));

    // Load native context into r4.
    __ ldr(r4, NativeContextMemOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ ldr(ip, ContextMemOperand(r4, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r2);
    __ b(eq, &create_arguments);
    __ ldr(ip, ContextMemOperand(r4, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r2);
    __ b(eq, &create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CompareInstanceType(r2, ip, JS_ARRAY_TYPE);
    __ b(eq, &create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r1, r3, r0);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ Pop(r1, r3);
      __ ldr(r2, FieldMemOperand(r0, FixedArray::kLengthOffset));
      __ SmiUntag(r2);
    }
    __ jmp(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    __ ldr(r2, FieldMemOperand(r0, JSArgumentsObject::kLengthOffset));
    __ ldr(r4, FieldMemOperand(r0, JSObject::kElementsOffset));
    __ ldr(ip, FieldMemOperand(r4, FixedArray::kLengthOffset));
    __ cmp(r2, ip);
    __ b(ne, &create_runtime);
    __ SmiUntag(r2);
    __ mov(r0, r4);
    __ b(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ ldr(r2, FieldMemOperand(r2, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(r2);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    __ cmp(r2, Operand(FAST_ELEMENTS));
    __ b(hi, &create_runtime);
    __ cmp(r2, Operand(FAST_HOLEY_SMI_ELEMENTS));
    __ b(eq, &create_runtime);
    __ ldr(r2, FieldMemOperand(r0, JSArray::kLengthOffset));
    __ ldr(r0, FieldMemOperand(r0, JSArray::kElementsOffset));
    __ SmiUntag(r2);

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
    // Make ip the space we have left. The stack might already be overflowed
    // here which will cause ip to become negative.
    __ sub(ip, sp, ip);
    // Check if the arguments will overflow the stack.
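    // i.e. (sketch): throw unless (sp - real_stack_limit) > len * kPointerSize.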
    __ cmp(ip, Operand(r2, LSL, kPointerSizeLog2));
    __ b(gt, &done);  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- r1    : target
  //  -- r0    : args (a FixedArray built from argumentsList)
  //  -- r2    : len (number of elements to push from args)
  //  -- r3    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    __ mov(r4, Operand(0));
    Label done, loop;
    __ bind(&loop);
    __ cmp(r4, r2);
    __ b(eq, &done);
    __ add(ip, r0, Operand(r4, LSL, kPointerSizeLog2));
    __ ldr(ip, FieldMemOperand(ip, FixedArray::kHeaderSize));
    __ Push(ip);
    __ add(r4, r4, Operand(1));
    __ b(&loop);
    __ bind(&done);
    __ Move(r0, r4);
  }

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    __ CompareRoot(r3, Heap::kUndefinedValueRootIndex);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}

namespace {

// Drops top JavaScript frame and an arguments adaptor frame below it (if
// present) preserving all the arguments prepared for current call.
// Does nothing if debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp      <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is enabled.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ mov(scratch1, Operand(is_tail_call_elimination_enabled));
  __ ldrb(scratch1, MemOperand(scratch1));
  __ cmp(scratch1, Operand(0));
  __ b(eq, &done);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ ldr(scratch3,
           MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
    __ cmp(scratch3, Operand(Smi::FromInt(StackFrame::STUB)));
    __ b(ne, &no_interpreter_frame);
    __ ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ ldr(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ ldr(scratch3,
         MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ cmp(scratch3, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ b(ne, &no_arguments_adaptor);

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ mov(fp, scratch2);
  __ ldr(caller_args_count_reg,
         MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);
  __ b(&formal_parameter_count_loaded);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count
  __ ldr(scratch1,
         MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  __ ldr(scratch1,
         FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(caller_args_count_reg,
         FieldMemOperand(scratch1,
                         SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(caller_args_count_reg);

  __ bind(&formal_parameter_count_loaded);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
}  // namespace

// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(r1);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldrb(r3, FieldMemOperand(r2, SharedFunctionInfo::kFunctionKindByteOffset));
  __ tst(r3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte));
  __ b(ne, &class_constructor);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
                SharedFunctionInfo::kStrictModeByteOffset);
  __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
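  // Sketch of the conversion: in sloppy mode a null or undefined receiver
  // becomes the global proxy, and a primitive receiver is boxed via ToObject;
  // strict-mode and native functions take the receiver as-is.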
  Label done_convert;
  __ ldrb(r3, FieldMemOperand(r2, SharedFunctionInfo::kNativeByteOffset));
  __ tst(r3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
                     (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
  __ b(ne, &done_convert);
  {
    // ----------- S t a t e -------------
    //  -- r0 : the number of arguments (not including the receiver)
    //  -- r1 : the function to call (checked to be a JSFunction)
    //  -- r2 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(r3);
    } else {
      Label convert_to_object, convert_receiver;
      __ ldr(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
      __ JumpIfSmi(r3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE);
      __ b(hs, &done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(r3, Heap::kNullValueRootIndex, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(r3);
        }
        __ b(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(r0);
        __ Push(r0, r1);
        __ mov(r0, r3);
        __ Push(cp);
        __ Call(masm->isolate()->builtins()->ToObject(),
                RelocInfo::CODE_TARGET);
        __ Pop(cp);
        __ mov(r3, r0);
        __ Pop(r0, r1);
        __ SmiUntag(r0);
      }
      __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ str(r3, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the function to call (checked to be a JSFunction)
  //  -- r2 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r0, r3, r4, r5);
  }

  __ ldr(r2,
         FieldMemOperand(r2, SharedFunctionInfo::kFormalParameterCountOffset));
  __ SmiUntag(r2);
  ParameterCount actual(r0);
  ParameterCount expected(r2);
  __ InvokeFunctionCode(r1, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ push(r1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}

namespace {

void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : target (checked to be a JSBoundFunction)
  //  -- r3 : new.target (only in case of [[Construct]])
  // -----------------------------------

  // Load [[BoundArguments]] into r2 and its length into r4.
  Label no_bound_arguments;
  __ ldr(r2, FieldMemOperand(r1, JSBoundFunction::kBoundArgumentsOffset));
  __ ldr(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
  __ SmiUntag(r4);
  __ cmp(r4, Operand(0));
  __ b(eq, &no_bound_arguments);
  {
    // ----------- S t a t e -------------
    //  -- r0 : the number of arguments (not including the receiver)
    //  -- r1 : target (checked to be a JSBoundFunction)
    //  -- r2 : the [[BoundArguments]] (implemented as FixedArray)
    //  -- r3 : new.target (only in case of [[Construct]])
    //  -- r4 : the number of [[BoundArguments]]
    // -----------------------------------
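    // Sketch of the resulting stack layout: the receiver stays deepest, then
    // bound arg 1..N, then the original arguments -- i.e. the target ends up
    // being called with (bound args..., original args...).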

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ sub(sp, sp, Operand(r4, LSL, kPointerSizeLog2));
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
      __ b(gt, &done);  // Signed comparison.
      // Restore the stack pointer.
      __ add(sp, sp, Operand(r4, LSL, kPointerSizeLog2));
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Relocate arguments down the stack.
    {
      Label loop, done_loop;
      __ mov(r5, Operand(0));
      __ bind(&loop);
      __ cmp(r5, r0);
      __ b(gt, &done_loop);
      __ ldr(ip, MemOperand(sp, r4, LSL, kPointerSizeLog2));
      __ str(ip, MemOperand(sp, r5, LSL, kPointerSizeLog2));
      __ add(r4, r4, Operand(1));
      __ add(r5, r5, Operand(1));
      __ b(&loop);
      __ bind(&done_loop);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ ldr(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
      __ SmiUntag(r4);
      __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ bind(&loop);
      __ sub(r4, r4, Operand(1), SetCC);
      __ ldr(ip, MemOperand(r2, r4, LSL, kPointerSizeLog2));
      __ str(ip, MemOperand(sp, r0, LSL, kPointerSizeLog2));
      __ add(r0, r0, Operand(1));
      __ b(gt, &loop);
    }
  }
  __ bind(&no_bound_arguments);
}

}  // namespace

// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(r1);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r0, r3, r4, r5);
  }

  // Patch the receiver to [[BoundThis]].
  __ ldr(ip, FieldMemOperand(r1, JSBoundFunction::kBoundThisOffset));
  __ str(ip, MemOperand(sp, r0, LSL, kPointerSizeLog2));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                       masm->isolate())));
  __ ldr(ip, MemOperand(ip));
  __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
}

// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the target to call (can be any Object).
  // -----------------------------------
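  // Dispatch sketch: JSFunction -> CallFunction, JSBoundFunction ->
  // CallBoundFunction, callable JSProxy -> runtime [[Call]], any other
  // callable object -> CallFunction on its call-as-function delegate;
  // everything else throws.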

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(r1, &non_callable);
  __ bind(&non_smi);
  __ CompareObjectType(r1, r4, r5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq);
  __ cmp(r5, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Call]] internal method.
  __ ldrb(r4, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r4, Operand(1 << Map::kIsCallable));
  __ b(eq, &non_callable);

  __ cmp(r5, Operand(JS_PROXY_TYPE));
  __ b(ne, &non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r0, r3, r4, r5);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(r1);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ add(r0, r0, Operand(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r1);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r1);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}

// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the constructor to call (checked to be a JSFunction)
  //  -- r3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(r1);

  // The calling convention for function-specific ConstructStubs requires
  // r2 to contain either an AllocationSite or undefined.
  __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kConstructStubOffset));
  __ add(pc, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
}

2661 // static
Generate_ConstructBoundFunction(MacroAssembler * masm)2662 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2663   // ----------- S t a t e -------------
2664   //  -- r0 : the number of arguments (not including the receiver)
2665   //  -- r1 : the function to call (checked to be a JSBoundFunction)
2666   //  -- r3 : the new target (checked to be a constructor)
2667   // -----------------------------------
2668   __ AssertBoundFunction(r1);
2669 
2670   // Push the [[BoundArguments]] onto the stack.
2671   Generate_PushBoundArguments(masm);
2672 
2673   // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2674   __ cmp(r1, r3);
2675   __ ldr(r3, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset),
2676          eq);
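  // (The ldr above is predicated on eq, so r3 is only rewritten when
  // new.target and the bound function are the same object, i.e. for a plain
  // `new bound(...)` as opposed to a Reflect.construct call with a foreign
  // new.target.)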

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ ldr(r1, FieldMemOperand(r1, JSBoundFunction::kBoundTargetFunctionOffset));
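  // The builtin is reached indirectly: ip <- address of the builtins table
  // entry for Builtins::kConstruct, ip <- the Code object stored there, then
  // jump past the Code header as in Generate_ConstructFunction above.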
  __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ ldr(ip, MemOperand(ip));
  __ add(pc, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
}

// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the constructor to call (checked to be a JSProxy)
  //  -- r3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(r1);
  __ Push(r3);
  // Include the pushed new_target, constructor and the receiver.
  __ add(r0, r0, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}
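// Illustrative JS (an assumption, not executed here) that reaches the runtime
// fallback above:
//   var p = new Proxy(class {}, {construct(target, args, newTarget) {
//     return Reflect.construct(target, args, newTarget);
//   }});
//   new p();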

// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : the number of arguments (not including the receiver)
  //  -- r1 : the constructor to call (can be any Object)
  //  -- r3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(r1, &non_constructor);

  // Dispatch based on instance type.
  __ CompareObjectType(r1, r4, r5, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Construct]] internal method.
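  // (tst leaves Z set when the IsConstructor bit is clear, so the eq branch
  // below is the "not a constructor" path.)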
  __ ldrb(r2, FieldMemOperand(r4, Map::kBitFieldOffset));
  __ tst(r2, Operand(1 << Map::kIsConstructor));
  __ b(eq, &non_constructor);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ cmp(r5, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ cmp(r5, Operand(JS_PROXY_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r1 : requested object size (untagged)
  //  -- lr : return address
  // -----------------------------------
  __ SmiTag(r1);
  __ Push(r1);
  __ Move(cp, Smi::kZero);
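  // (Smi::kZero in cp acts as a dummy "no context" marker for the runtime
  // entry; the same trick is used by the OldSpace and Abort builtins below.)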
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}

// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r1 : requested object size (untagged)
  //  -- lr : return address
  // -----------------------------------
  __ SmiTag(r1);
  __ Move(r2, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ Push(r1, r2);
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}

// static
void Builtins::Generate_Abort(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r1 : message_id as Smi
  //  -- lr : return address
  // -----------------------------------
  __ Push(r1);
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAbort);
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : actual number of arguments
  //  -- r1 : function (passed through to callee)
  //  -- r2 : expected number of arguments
  //  -- r3 : new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ cmp(r0, r2);
  __ b(lt, &too_few);
  __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ b(eq, &dont_adapt_arguments);
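  // (The sentinel marks functions whose code copes with any actual argument
  // count, so no adaptor frame is needed for them.)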

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, r2, r5, &stack_overflow);

    // Calculate copy start address into r0 and copy end address into r4.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: new target (passed through to callee)
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));
    // adjust for return address and receiver
    __ add(r0, r0, Operand(2 * kPointerSize));
    __ sub(r4, r0, Operand(r2, LSL, kPointerSizeLog2));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: expected number of arguments
    // r3: new target (passed through to callee)
    // r4: copy end address

    Label copy;
    __ bind(&copy);
    __ ldr(ip, MemOperand(r0, 0));
    __ push(ip);
    __ cmp(r0, r4);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    __ b(&invoke);
  }
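  // Illustrative JS (an assumption, not executed here) for the case above:
  //   function f(a, b) {}  // expected == 2
  //   f(1, 2, 3);          // actual == 3; only the receiver and the first
  //                        // two arguments are copied into the new frame.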

  {  // Too few parameters: Actual < expected
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, r2, r5, &stack_overflow);

    // Calculate the copy start address into r0; the copy end address is fp.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: new target (passed through to callee)
    __ add(r0, fp, Operand::PointerOffsetFromSmiKey(r0));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: expected number of arguments
    // r3: new target (passed through to callee)
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
    __ push(ip);
    __ cmp(r0, fp);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    // Fill the remaining expected arguments with undefined.
    // r1: function
    // r2: expected number of arguments
    // r3: new target (passed through to callee)
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ sub(r4, fp, Operand(r2, LSL, kPointerSizeLog2));
    // Adjust for frame.
    __ sub(r4, r4, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                           2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(ip);
    __ cmp(sp, r4);
    __ b(ne, &fill);
  }
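  // Illustrative JS (an assumption, not executed here) for the case above:
  //   function f(a, b, c) {}  // expected == 3
  //   f(1);                   // actual == 1; b and c are filled with
  //                           // undefined by the fill loop.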

  // Call the entry point.
  __ bind(&invoke);
  __ mov(r0, r2);
  // r0 : expected number of arguments
  // r1 : function (passed through to callee)
  // r3 : new target (passed through to callee)
  __ ldr(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ Call(r4);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Jump(lr);

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ ldr(r4, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ Jump(r4);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);
  }
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM