// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "codegen.h"
#include "debug.h"
#include "deoptimizer.h"
#include "full-codegen.h"
#include "runtime.h"

namespace v8 {
namespace internal {


#define __ ACCESS_MASM(masm)
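// The '__' shorthand makes the MacroAssembler calls below read like an
// assembly listing.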


void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- r0                 : number of arguments excluding receiver
  //  -- r1                 : called function (only guaranteed when
  //                          extra_args requires it)
  //  -- cp                 : context
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument (argc == r0)
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Insert extra arguments.
  int num_extra_args = 0;
  if (extra_args == NEEDS_CALLED_FUNCTION) {
    num_extra_args = 1;
    __ push(r1);
  } else {
    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
  }

  // JumpToExternalReference expects r0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ add(r0, r0, Operand(num_extra_args + 1));
  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}


// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the global context.
  __ ldr(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ ldr(result,
         FieldMemOperand(result, GlobalObject::kGlobalContextOffset));
  // Load the InternalArray function from the global context.
  __ ldr(result,
         MemOperand(result,
                    Context::SlotOffset(
                        Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
}


// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the global context.
  __ ldr(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ ldr(result,
         FieldMemOperand(result, GlobalObject::kGlobalContextOffset));
  // Load the Array function from the global context.
  __ ldr(result,
         MemOperand(result,
                    Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}


// Allocate an empty JSArray. The allocated array is put into the result
// register. An elements backing store is allocated with size initial_capacity
// and filled with the hole values.
static void AllocateEmptyJSArray(MacroAssembler* masm,
                                 Register array_function,
                                 Register result,
                                 Register scratch1,
                                 Register scratch2,
                                 Register scratch3,
                                 Label* gc_required) {
  const int initial_capacity = JSArray::kPreallocatedArrayElements;
  STATIC_ASSERT(initial_capacity >= 0);
  __ LoadInitialArrayMap(array_function, scratch2, scratch1);

  // Allocate the JSArray object together with space for a fixed array with the
  // requested elements.
  int size = JSArray::kSize;
  if (initial_capacity > 0) {
    size += FixedArray::SizeFor(initial_capacity);
  }
  __ AllocateInNewSpace(size,
                        result,
                        scratch2,
                        scratch3,
                        gc_required,
                        TAG_OBJECT);

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // scratch1: initial map
  // scratch2: start of next object
  __ str(scratch1, FieldMemOperand(result, JSObject::kMapOffset));
  __ LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex);
  __ str(scratch1, FieldMemOperand(result, JSArray::kPropertiesOffset));
  // Field JSArray::kElementsOffset is initialized later.
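  // Write the length as the smi zero (raw 0 is a valid smi since kSmiTag == 0).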
  __ mov(scratch3, Operand(0, RelocInfo::NONE));
  __ str(scratch3, FieldMemOperand(result, JSArray::kLengthOffset));

  if (initial_capacity == 0) {
    __ str(scratch1, FieldMemOperand(result, JSArray::kElementsOffset));
    return;
  }

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // scratch2: start of next object
  __ add(scratch1, result, Operand(JSArray::kSize));
  __ str(scratch1, FieldMemOperand(result, JSArray::kElementsOffset));

  // Clear the heap tag on the elements array.
  __ sub(scratch1, scratch1, Operand(kHeapObjectTag));

  // Initialize the FixedArray and fill it with holes. FixedArray length is
  // stored as a smi.
  // result: JSObject
  // scratch1: elements array (untagged)
  // scratch2: start of next object
  __ LoadRoot(scratch3, Heap::kFixedArrayMapRootIndex);
  STATIC_ASSERT(0 * kPointerSize == FixedArray::kMapOffset);
  __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
  __ mov(scratch3, Operand(Smi::FromInt(initial_capacity)));
  STATIC_ASSERT(1 * kPointerSize == FixedArray::kLengthOffset);
  __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));

  // Fill the FixedArray with the hole value. Inline the code if short.
  STATIC_ASSERT(2 * kPointerSize == FixedArray::kHeaderSize);
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  static const int kLoopUnfoldLimit = 4;
  if (initial_capacity <= kLoopUnfoldLimit) {
    for (int i = 0; i < initial_capacity; i++) {
      __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
    }
  } else {
    Label loop, entry;
    __ add(scratch2, scratch1, Operand(initial_capacity * kPointerSize));
    __ b(&entry);
    __ bind(&loop);
    __ str(scratch3, MemOperand(scratch1, kPointerSize, PostIndex));
    __ bind(&entry);
    __ cmp(scratch1, scratch2);
    __ b(lt, &loop);
  }
}

// Allocate a JSArray with the number of elements stored in a register. The
// register array_function holds the built-in Array function and the register
// array_size holds the size of the array as a smi. The allocated array is put
// into the result register, and the beginning and end of the FixedArray
// elements storage are put into registers elements_array_storage and
// elements_array_end (see below for when that is not the case). If the
// parameter fill_with_hole is true, the allocated elements backing store is
// filled with the hole values; otherwise it is left uninitialized. When the
// backing store is filled, the register elements_array_storage is scratched.
static void AllocateJSArray(MacroAssembler* masm,
                            Register array_function,  // Array function.
                            Register array_size,  // As a smi, cannot be 0.
                            Register result,
                            Register elements_array_storage,
                            Register elements_array_end,
                            Register scratch1,
                            Register scratch2,
                            bool fill_with_hole,
                            Label* gc_required) {
  // Load the initial map from the array function.
  __ LoadInitialArrayMap(array_function, scratch2, elements_array_storage);

  if (FLAG_debug_code) {  // Assert that array size is not zero.
    __ tst(array_size, array_size);
    __ Assert(ne, "array size is unexpectedly 0");
  }

  // Allocate the JSArray object together with space for a FixedArray with the
  // requested number of elements.
  STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
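  // The allocation size in words is the fixed header size plus the element
  // count; array_size is a smi, so ASR by kSmiTagSize recovers the raw count.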
  __ mov(elements_array_end,
         Operand((JSArray::kSize + FixedArray::kHeaderSize) / kPointerSize));
  __ add(elements_array_end,
         elements_array_end,
         Operand(array_size, ASR, kSmiTagSize));
  __ AllocateInNewSpace(
      elements_array_end,
      result,
      scratch1,
      scratch2,
      gc_required,
      static_cast<AllocationFlags>(TAG_OBJECT | SIZE_IN_WORDS));

  // Allocated the JSArray. Now initialize the fields except for the elements
  // array.
  // result: JSObject
  // elements_array_storage: initial map
  // array_size: size of array (smi)
  __ str(elements_array_storage, FieldMemOperand(result, JSObject::kMapOffset));
  __ LoadRoot(elements_array_storage, Heap::kEmptyFixedArrayRootIndex);
  __ str(elements_array_storage,
         FieldMemOperand(result, JSArray::kPropertiesOffset));
  // Field JSArray::kElementsOffset is initialized later.
  __ str(array_size, FieldMemOperand(result, JSArray::kLengthOffset));

  // Calculate the location of the elements array and set elements array member
  // of the JSArray.
  // result: JSObject
  // array_size: size of array (smi)
  __ add(elements_array_storage, result, Operand(JSArray::kSize));
  __ str(elements_array_storage,
         FieldMemOperand(result, JSArray::kElementsOffset));

  // Clear the heap tag on the elements array.
  STATIC_ASSERT(kSmiTag == 0);
  __ sub(elements_array_storage,
         elements_array_storage,
         Operand(kHeapObjectTag));
  // Initialize the fixed array and fill it with holes. FixedArray length is
  // stored as a smi.
  // result: JSObject
  // elements_array_storage: elements array (untagged)
  // array_size: size of array (smi)
  __ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex);
  ASSERT_EQ(0 * kPointerSize, FixedArray::kMapOffset);
  __ str(scratch1, MemOperand(elements_array_storage, kPointerSize, PostIndex));
  STATIC_ASSERT(kSmiTag == 0);
  ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
  __ str(array_size,
         MemOperand(elements_array_storage, kPointerSize, PostIndex));

  // Calculate elements array and elements array end.
  // result: JSObject
  // elements_array_storage: elements array element storage
  // array_size: smi-tagged size of elements array
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
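  // LSL by (kPointerSizeLog2 - kSmiTagSize) turns the smi element count
  // directly into a byte offset.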
  __ add(elements_array_end,
         elements_array_storage,
         Operand(array_size, LSL, kPointerSizeLog2 - kSmiTagSize));

  // Fill the allocated FixedArray with the hole value if requested.
  // result: JSObject
  // elements_array_storage: elements array element storage
  // elements_array_end: start of next object
  if (fill_with_hole) {
    Label loop, entry;
    __ LoadRoot(scratch1, Heap::kTheHoleValueRootIndex);
    __ jmp(&entry);
    __ bind(&loop);
    __ str(scratch1,
           MemOperand(elements_array_storage, kPointerSize, PostIndex));
    __ bind(&entry);
    __ cmp(elements_array_storage, elements_array_end);
    __ b(lt, &loop);
  }
}

// Create a new array for the built-in Array function. This function allocates
// the JSArray object and the FixedArray elements array and initializes these.
// If the Array cannot be constructed in native code, the runtime is called.
// This function assumes the following state:
//   r0: argc
//   r1: constructor (built-in Array function)
//   lr: return address
//   sp[0]: last argument
// This function is used for both construct and normal calls of Array. The only
// difference between handling a construct call and a normal call is that for a
// construct call the constructor function in r1 needs to be preserved for
// entering the generic code. In both cases argc in r0 needs to be preserved.
// Both registers are preserved by this code, so there is no need to
// differentiate between a construct call and a normal call.
static void ArrayNativeCode(MacroAssembler* masm,
                            Label* call_generic_code) {
  Counters* counters = masm->isolate()->counters();
  Label argc_one_or_more, argc_two_or_more, not_empty_array, empty_array,
      has_non_smi_element, finish, cant_transition_map, not_double;

  // Check for array construction with zero or one argument.
  __ cmp(r0, Operand(0, RelocInfo::NONE));
  __ b(ne, &argc_one_or_more);

  // Handle construction of an empty array.
  __ bind(&empty_array);
  AllocateEmptyJSArray(masm,
                       r1,
                       r2,
                       r3,
                       r4,
                       r5,
                       call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1, r3, r4);
  // Set up return value, remove receiver from stack and return.
  __ mov(r0, r2);
  __ add(sp, sp, Operand(kPointerSize));
  __ Jump(lr);

  // Check for one argument. Bail out if the argument is not a smi or if it
  // is negative.
  __ bind(&argc_one_or_more);
  __ cmp(r0, Operand(1));
  __ b(ne, &argc_two_or_more);
  STATIC_ASSERT(kSmiTag == 0);
  __ ldr(r2, MemOperand(sp));  // Get the argument from the stack.
  __ tst(r2, r2);
  __ b(ne, &not_empty_array);
  __ Drop(1);  // Adjust stack.
  __ mov(r0, Operand(0));  // Treat this as a call with argc of zero.
  __ b(&empty_array);

  __ bind(&not_empty_array);
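  // A valid array length is a non-negative smi: test the sign bit and the
  // smi tag bit with a single mask.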
  __ and_(r3, r2, Operand(kIntptrSignBit | kSmiTagMask), SetCC);
  __ b(ne, call_generic_code);

  // Handle construction of an empty array of a certain size. Bail out if size
  // is too large to actually allocate an elements array.
  STATIC_ASSERT(kSmiTag == 0);
  __ cmp(r2, Operand(JSObject::kInitialMaxFastElementArray << kSmiTagSize));
  __ b(ge, call_generic_code);

  // r0: argc
  // r1: constructor
  // r2: array_size (smi)
  // sp[0]: argument
  AllocateJSArray(masm,
                  r1,
                  r2,
                  r3,
                  r4,
                  r5,
                  r6,
                  r7,
                  true,
                  call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1, r2, r4);
  // Set up return value, remove receiver and argument from stack and return.
  __ mov(r0, r3);
  __ add(sp, sp, Operand(2 * kPointerSize));
  __ Jump(lr);

  // Handle construction of an array from a list of arguments.
  __ bind(&argc_two_or_more);
  __ mov(r2, Operand(r0, LSL, kSmiTagSize));  // Convert argc to a smi.

  // r0: argc
  // r1: constructor
  // r2: array_size (smi)
  // sp[0]: last argument
  AllocateJSArray(masm,
                  r1,
                  r2,
                  r3,
                  r4,
                  r5,
                  r6,
                  r7,
                  false,
                  call_generic_code);
  __ IncrementCounter(counters->array_function_native(), 1, r2, r6);

  // Fill arguments as array elements. Copy from the top of the stack (last
  // element) to the array backing store, filling it backwards. Note:
  // elements_array_end points after the backing store, therefore PreIndex is
  // used when filling the backing store.
  // r0: argc
  // r3: JSArray
  // r4: elements_array storage start (untagged)
  // r5: elements_array_end (untagged)
  // sp[0]: last argument
  Label loop, entry;
  __ mov(r7, sp);
  __ jmp(&entry);
  __ bind(&loop);
  __ ldr(r2, MemOperand(r7, kPointerSize, PostIndex));
  if (FLAG_smi_only_arrays) {
    __ JumpIfNotSmi(r2, &has_non_smi_element);
  }
  __ str(r2, MemOperand(r5, -kPointerSize, PreIndex));
  __ bind(&entry);
  __ cmp(r4, r5);
  __ b(lt, &loop);

  __ bind(&finish);
  __ mov(sp, r7);

  // Remove caller arguments and receiver from the stack, set up the return
  // value, and return.
  // r0: argc
  // r3: JSArray
  // sp[0]: receiver
  __ add(sp, sp, Operand(kPointerSize));
  __ mov(r0, r3);
  __ Jump(lr);

  __ bind(&has_non_smi_element);
  // Double values are handled by the runtime.
  __ CheckMap(
      r2, r9, Heap::kHeapNumberMapRootIndex, &not_double, DONT_DO_SMI_CHECK);
  __ bind(&cant_transition_map);
  __ UndoAllocationInNewSpace(r3, r4);
  __ b(call_generic_code);

  __ bind(&not_double);
  // Transition FAST_SMI_ONLY_ELEMENTS to FAST_ELEMENTS.
  // r3: JSArray
  __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
                                         FAST_ELEMENTS,
                                         r2,
                                         r9,
                                         &cant_transition_map);
  __ str(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ RecordWriteField(r3,
                      HeapObject::kMapOffset,
                      r2,
                      r9,
                      kLRHasNotBeenSaved,
                      kDontSaveFPRegs,
                      EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  Label loop2;
  __ sub(r7, r7, Operand(kPointerSize));
  __ bind(&loop2);
  __ ldr(r2, MemOperand(r7, kPointerSize, PostIndex));
  __ str(r2, MemOperand(r5, -kPointerSize, PreIndex));
  __ cmp(r4, r5);
  __ b(lt, &loop2);
  __ b(&finish);
}


void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for InternalArray function");
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, "Unexpected initial map for InternalArray function");
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code if the specialized code cannot handle the
  // construction.
  __ bind(&generic_array_code);

  Handle<Code> array_code =
      masm->isolate()->builtins()->InternalArrayCodeGeneric();
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}


void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for Array function");
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a normal function.
  ArrayNativeCode(masm, &generic_array_code);

  // Jump to the generic array code if the specialized code cannot handle
  // the construction.
  __ bind(&generic_array_code);

  Handle<Code> array_code =
      masm->isolate()->builtins()->ArrayCodeGeneric();
  __ Jump(array_code, RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_constructor;

  if (FLAG_debug_code) {
    // The array construct code is only set for the builtin and internal
    // Array functions which always have a map.
    // Initial map for the builtin Array function should be a map.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
    __ tst(r2, Operand(kSmiTagMask));
    __ Assert(ne, "Unexpected initial map for Array function");
    __ CompareObjectType(r2, r3, r4, MAP_TYPE);
    __ Assert(eq, "Unexpected initial map for Array function");
  }

  // Run the native code for the Array function called as a constructor.
  ArrayNativeCode(masm, &generic_constructor);

  // Jump to the generic construct code in case the specialized code cannot
  // handle the construction.
  __ bind(&generic_constructor);
  Handle<Code> generic_construct_stub =
      masm->isolate()->builtins()->JSConstructStubGeneric();
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}

void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0                     : number of arguments
  //  -- r1                     : constructor function
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_ctor_calls(), 1, r2, r3);

  Register function = r1;
  if (FLAG_debug_code) {
    __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, r2);
    __ cmp(function, Operand(r2));
    __ Assert(eq, "Unexpected String function");
  }

  // Load the first argument in r0 and get rid of the rest.
  Label no_arguments;
  __ cmp(r0, Operand(0, RelocInfo::NONE));
  __ b(eq, &no_arguments);
  // First argument = sp[(argc - 1) * 4].
  __ sub(r0, r0, Operand(1));
  __ ldr(r0, MemOperand(sp, r0, LSL, kPointerSizeLog2, PreIndex));
  // sp now points to args[0]; drop args[0] and the receiver.
  __ Drop(2);

  Register argument = r2;
  Label not_cached, argument_is_string;
  NumberToStringStub::GenerateLookupNumberStringCache(
      masm,
      r0,        // Input.
      argument,  // Result.
      r3,        // Scratch.
      r4,        // Scratch.
      r5,        // Scratch.
      false,     // Is it a Smi?
      &not_cached);
  __ IncrementCounter(counters->string_ctor_cached_number(), 1, r3, r4);
  __ bind(&argument_is_string);

  // ----------- S t a t e -------------
  //  -- r2     : argument converted to string
  //  -- r1     : constructor function
  //  -- lr     : return address
  // -----------------------------------

  Label gc_required;
  __ AllocateInNewSpace(JSValue::kSize,
                        r0,  // Result.
                        r3,  // Scratch.
                        r4,  // Scratch.
                        &gc_required,
                        TAG_OBJECT);

  // Initialise the String object.
  Register map = r3;
  __ LoadGlobalFunctionInitialMap(function, map, r4);
  if (FLAG_debug_code) {
    __ ldrb(r4, FieldMemOperand(map, Map::kInstanceSizeOffset));
    __ cmp(r4, Operand(JSValue::kSize >> kPointerSizeLog2));
    __ Assert(eq, "Unexpected string wrapper instance size");
    __ ldrb(r4, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
    __ cmp(r4, Operand(0, RelocInfo::NONE));
    __ Assert(eq, "Unexpected unused properties of string wrapper");
  }
  __ str(map, FieldMemOperand(r0, HeapObject::kMapOffset));

  __ LoadRoot(r3, Heap::kEmptyFixedArrayRootIndex);
  __ str(r3, FieldMemOperand(r0, JSObject::kPropertiesOffset));
  __ str(r3, FieldMemOperand(r0, JSObject::kElementsOffset));

  __ str(argument, FieldMemOperand(r0, JSValue::kValueOffset));

  // Ensure the object is fully initialized.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);

  __ Ret();

  // The argument was not found in the number to string cache. Check
  // if it's a string already before calling the conversion builtin.
  Label convert_argument;
  __ bind(&not_cached);
  __ JumpIfSmi(r0, &convert_argument);

  // Is it a String?
  __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  __ tst(r3, Operand(kIsNotStringMask));
  __ b(ne, &convert_argument);
  __ mov(argument, r0);
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  __ b(&argument_is_string);

  // Invoke the conversion builtin and put the result into r2.
  __ bind(&convert_argument);
  __ push(function);  // Preserve the function.
  __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(r0);
    __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
  }
  __ pop(function);
  __ mov(argument, r0);
  __ b(&argument_is_string);

  // Load the empty string into r2, remove the receiver from the
  // stack, and jump back to the case where the argument is a string.
  __ bind(&no_arguments);
  __ LoadRoot(argument, Heap::kEmptyStringRootIndex);
  __ Drop(1);
  __ b(&argument_is_string);

  // At this point the argument is already a string. Call runtime to
  // create a string wrapper.
  __ bind(&gc_required);
  __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(argument);
    __ CallRuntime(Runtime::kNewStringWrapper, 1);
  }
  __ Ret();
}


static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool count_constructions) {
  // ----------- S t a t e -------------
  //  -- r0     : number of arguments
  //  -- r1     : constructor function
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  // Should never count constructions for api objects.
  ASSERT(!is_api_function || !count_constructions);

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the two incoming parameters on the stack.
    __ mov(r0, Operand(r0, LSL, kSmiTagSize));
    __ push(r0);  // Smi-tagged arguments count.
    __ push(r1);  // Constructor function.

    // Try to allocate the object without transitioning into C code. If any of
    // the preconditions is not met, the code bails out to the runtime call.
    Label rt_call, allocated;
    if (FLAG_inline_new) {
      Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
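      // Bail out to the runtime if the debugger is stepping, so that step-in
      // can intercept the constructor call.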
      ExternalReference debug_step_in_fp =
          ExternalReference::debug_step_in_fp_address(isolate);
      __ mov(r2, Operand(debug_step_in_fp));
      __ ldr(r2, MemOperand(r2));
      __ tst(r2, r2);
      __ b(ne, &rt_call);
#endif

      // Load the initial map and verify that it is in fact a map.
      // r1: constructor function
      __ ldr(r2, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset));
      __ JumpIfSmi(r2, &rt_call);
      __ CompareObjectType(r2, r3, r4, MAP_TYPE);
      __ b(ne, &rt_call);

      // Check that the constructor is not constructing a JSFunction (see
      // comments in Runtime_NewObject in runtime.cc), in which case the
      // initial map's instance type would be JS_FUNCTION_TYPE.
      // r1: constructor function
      // r2: initial map
      __ CompareInstanceType(r2, r3, JS_FUNCTION_TYPE);
      __ b(eq, &rt_call);

      if (count_constructions) {
        Label allocate;
        // Decrease generous allocation count.
        __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
        MemOperand constructor_count =
            FieldMemOperand(r3, SharedFunctionInfo::kConstructionCountOffset);
        __ ldrb(r4, constructor_count);
        __ sub(r4, r4, Operand(1), SetCC);
        __ strb(r4, constructor_count);
        __ b(ne, &allocate);

        __ Push(r1, r2);

        __ push(r1);  // constructor
        // The call will replace the stub, so the countdown is only done once.
        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);

        __ pop(r2);
        __ pop(r1);

        __ bind(&allocate);
      }

      // Now allocate the JSObject on the heap.
      // r1: constructor function
      // r2: initial map
      __ ldrb(r3, FieldMemOperand(r2, Map::kInstanceSizeOffset));
      __ AllocateInNewSpace(r3, r4, r5, r6, &rt_call, SIZE_IN_WORDS);

      // Allocated the JSObject, now initialize the fields. Map is set to
      // initial map and properties and elements are set to empty fixed array.
      // r1: constructor function
      // r2: initial map
      // r3: object size
      // r4: JSObject (not tagged)
      __ LoadRoot(r6, Heap::kEmptyFixedArrayRootIndex);
      __ mov(r5, r4);
      ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
      __ str(r2, MemOperand(r5, kPointerSize, PostIndex));
      ASSERT_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
      __ str(r6, MemOperand(r5, kPointerSize, PostIndex));
      ASSERT_EQ(2 * kPointerSize, JSObject::kElementsOffset);
      __ str(r6, MemOperand(r5, kPointerSize, PostIndex));

      // Fill all the in-object properties with the appropriate filler.
      // r1: constructor function
      // r2: initial map
      // r3: object size (in words)
      // r4: JSObject (not tagged)
      // r5: First in-object property of JSObject (not tagged)
      __ add(r6, r4, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
      ASSERT_EQ(3 * kPointerSize, JSObject::kHeaderSize);
      __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
      if (count_constructions) {
        __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
        __ Ubfx(r0, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
                kBitsPerByte);
        __ add(r0, r5, Operand(r0, LSL, kPointerSizeLog2));
        // r0: offset of first field after pre-allocated fields
        if (FLAG_debug_code) {
          __ cmp(r0, r6);
          __ Assert(le, "Unexpected number of pre-allocated property fields.");
        }
        __ InitializeFieldsWithFiller(r5, r0, r7);
        // To allow for truncation.
        __ LoadRoot(r7, Heap::kOnePointerFillerMapRootIndex);
      }
      __ InitializeFieldsWithFiller(r5, r6, r7);

      // Add the object tag to make the JSObject real, so that we can continue
      // and jump into the continuation code at any time from now on. Any
      // failures need to undo the allocation, so that the heap is in a
      // consistent state and verifiable.
      __ add(r4, r4, Operand(kHeapObjectTag));

      // Check if a non-empty properties array is needed. Continue with the
      // allocated object if not; fall through to the runtime call if it is.
      // r1: constructor function
      // r4: JSObject
      // r5: start of next object (not tagged)
      __ ldrb(r3, FieldMemOperand(r2, Map::kUnusedPropertyFieldsOffset));
      // The instance-sizes field contains both pre-allocated property fields
      // and in-object properties.
      __ ldr(r0, FieldMemOperand(r2, Map::kInstanceSizesOffset));
      __ Ubfx(r6, r0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte,
              kBitsPerByte);
      __ add(r3, r3, Operand(r6));
      __ Ubfx(r6, r0, Map::kInObjectPropertiesByte * kBitsPerByte,
              kBitsPerByte);
      __ sub(r3, r3, Operand(r6), SetCC);

      // Done if no extra properties are to be allocated.
      __ b(eq, &allocated);
      __ Assert(pl, "Property allocation count failed.");

      // Scale the number of elements by pointer size and add the header for
      // FixedArrays to the start of the next object calculation from above.
      // r1: constructor
      // r3: number of elements in properties array
      // r4: JSObject
      // r5: start of next object
      __ add(r0, r3, Operand(FixedArray::kHeaderSize / kPointerSize));
      __ AllocateInNewSpace(
          r0,
          r5,
          r6,
          r2,
          &undo_allocation,
          static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));

      // Initialize the FixedArray.
      // r1: constructor
      // r3: number of elements in properties array
      // r4: JSObject
      // r5: FixedArray (not tagged)
      __ LoadRoot(r6, Heap::kFixedArrayMapRootIndex);
      __ mov(r2, r5);
      ASSERT_EQ(0 * kPointerSize, JSObject::kMapOffset);
      __ str(r6, MemOperand(r2, kPointerSize, PostIndex));
      ASSERT_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
      __ mov(r0, Operand(r3, LSL, kSmiTagSize));
      __ str(r0, MemOperand(r2, kPointerSize, PostIndex));

      // Initialize the fields to undefined.
      // r1: constructor function
      // r2: First element of FixedArray (not tagged)
      // r3: number of elements in properties array
      // r4: JSObject
      // r5: FixedArray (not tagged)
      __ add(r6, r2, Operand(r3, LSL, kPointerSizeLog2));  // End of object.
      ASSERT_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
      { Label loop, entry;
        if (count_constructions) {
          __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
        } else if (FLAG_debug_code) {
          __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
          __ cmp(r7, r8);
          __ Assert(eq, "Undefined value not loaded.");
        }
        __ b(&entry);
        __ bind(&loop);
        __ str(r7, MemOperand(r2, kPointerSize, PostIndex));
        __ bind(&entry);
        __ cmp(r2, r6);
        __ b(lt, &loop);
      }

      // Store the initialized FixedArray into the properties field of
      // the JSObject.
      // r1: constructor function
      // r4: JSObject
      // r5: FixedArray (not tagged)
      __ add(r5, r5, Operand(kHeapObjectTag));  // Add the heap tag.
      __ str(r5, FieldMemOperand(r4, JSObject::kPropertiesOffset));

      // Continue with JSObject being successfully allocated.
      // r1: constructor function
      // r4: JSObject
      __ jmp(&allocated);

      // Undo the setting of the new top so that the heap is verifiable. For
      // example, the map's unused properties potentially do not match the
      // allocated object's unused properties.
      // r4: JSObject (previous new top)
      __ bind(&undo_allocation);
      __ UndoAllocationInNewSpace(r4, r5);
    }

    // Allocate the new receiver object using the runtime call.
    // r1: constructor function
    __ bind(&rt_call);
    __ push(r1);  // argument for Runtime_NewObject
    __ CallRuntime(Runtime::kNewObject, 1);
    __ mov(r4, r0);

    // Receiver for constructor call allocated.
    // r4: JSObject
    __ bind(&allocated);
    __ push(r4);
    __ push(r4);

    // Reload the number of arguments and the constructor from the stack.
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    __ ldr(r1, MemOperand(sp, 2 * kPointerSize));
    __ ldr(r3, MemOperand(sp, 3 * kPointerSize));

    // Set up pointer to last argument.
    __ add(r2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Set up number of arguments for function call below.
    __ mov(r0, Operand(r3, LSR, kSmiTagSize));

    // Copy arguments and receiver to the expression stack.
    // r0: number of arguments
    // r1: constructor function
    // r2: address of last argument (caller sp)
    // r3: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: constructor function
    // sp[3]: number of arguments (smi-tagged)
    Label loop, entry;
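    // r3 is the smi-tagged count, so the index below scales by
    // (kPointerSizeLog2 - kSmiTagSize) and decrements by one smi (2) per copy.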
    __ b(&entry);
    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, r3, LSL, kPointerSizeLog2 - 1));
    __ push(ip);
    __ bind(&entry);
    __ sub(r3, r3, Operand(2), SetCC);
    __ b(ge, &loop);

    // Call the function.
    // r0: number of arguments
    // r1: constructor function
    if (is_api_function) {
      __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      ParameterCount expected(0);
      __ InvokeCode(code, expected, expected,
                    RelocInfo::CODE_TARGET, CALL_FUNCTION, CALL_AS_METHOD);
    } else {
      ParameterCount actual(r0);
      __ InvokeFunction(r1, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }

    // Store offset of return address for deoptimizer.
    if (!is_api_function && !count_constructions) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r0: result
    // sp[0]: receiver
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    // If the result is an object (in the ECMA sense), we should get rid
    // of the receiver and use the result; see ECMA-262 section 13.2.2-7
    // on page 74.
    Label use_receiver, exit;

    // If the result is a smi, it is *not* an object in the ECMA sense.
    // r0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ JumpIfSmi(r0, &use_receiver);

    // If the type of the result (stored in its map) is less than
    // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
    __ CompareObjectType(r0, r3, r3, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &exit);

    // Throw away the result of the constructor invocation and use the
    // on-stack receiver as the result.
    __ bind(&use_receiver);
    __ ldr(r0, MemOperand(sp));

    // Remove receiver from the stack, remove caller arguments, and
    // return.
    __ bind(&exit);
    // r0: result
    // sp[0]: receiver (newly allocated object)
    // sp[1]: constructor function
    // sp[2]: number of arguments (smi-tagged)
    __ ldr(r1, MemOperand(sp, 2 * kPointerSize));

    // Leave construct frame.
  }

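  // r1 holds the smi-tagged argument count: shifting by (kPointerSizeLog2 - 1)
  // scales it to a byte offset, and the extra kPointerSize drops the receiver.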
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
  __ add(sp, sp, Operand(kPointerSize));
  __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
  __ Jump(lr);
}


void Builtins::Generate_JSConstructStubCountdown(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}


void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}


void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false);
}


static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry.
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  // r5-r7, cp may be clobbered

  // Clear the context before we push it when entering the internal frame.
  __ mov(cp, Operand(0, RelocInfo::NONE));

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context from the function argument.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ push(r1);
    __ push(r2);

    // Copy arguments to the stack in a loop.
    // r1: function
    // r3: argc
    // r4: argv, i.e. points to first arg
    Label loop, entry;
    __ add(r2, r4, Operand(r3, LSL, kPointerSizeLog2));
    // r2 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ ldr(r0, MemOperand(r4, kPointerSize, PostIndex));  // read next parameter
    __ ldr(r0, MemOperand(r0));  // dereference handle
    __ push(r0);  // push parameter
    __ bind(&entry);
    __ cmp(r4, r2);
    __ b(ne, &loop);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mov(r5, Operand(r4));
    __ mov(r6, Operand(r4));
    __ mov(r7, Operand(r4));
    if (kR9Available == 1) {
      __ mov(r9, Operand(r4));
    }

    // Invoke the code and pass argc as r0.
    __ mov(r0, Operand(r3));
    if (is_construct) {
      CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
      __ CallStub(&stub);
    } else {
      ParameterCount actual(r0);
      __ InvokeFunction(r1, actual, CALL_FUNCTION,
                        NullCallWrapper(), CALL_AS_METHOD);
    }
    // Exit the JS frame, remove the parameters (except the function), and
    // return, respecting the ABI stack constraint.
  }
  __ Jump(lr);

  // r0: result
}


void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}


void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}


void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve the function.
    __ push(r1);
    // Push call kind information.
    __ push(r5);

    // Push the function on the stack as the argument to the runtime function.
    __ push(r1);
    __ CallRuntime(Runtime::kLazyCompile, 1);
    // Calculate the entry point.
    __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));

    // Restore call kind information.
    __ pop(r5);
    // Restore saved function.
    __ pop(r1);

    // Tear down internal frame.
  }

  // Do a tail-call of the compiled function.
  __ Jump(r2);
}


void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve the function.
    __ push(r1);
    // Push call kind information.
    __ push(r5);

    // Push the function on the stack as the argument to the runtime function.
    __ push(r1);
    __ CallRuntime(Runtime::kLazyRecompile, 1);
    // Calculate the entry point.
    __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));

    // Restore call kind information.
    __ pop(r5);
    // Restore saved function.
    __ pop(r1);

    // Tear down internal frame.
  }

  // Do a tail-call of the compiled function.
  __ Jump(r2);
}

static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ mov(r0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(r0);
    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
  }

  // Get the full codegen state from the stack and untag it -> r6.
  __ ldr(r6, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r6);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ cmp(r6, Operand(FullCodeGenerator::NO_REGISTERS));
  __ b(ne, &with_tos_register);
  __ add(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  __ ldr(r0, MemOperand(sp, 1 * kPointerSize));
  __ cmp(r6, Operand(FullCodeGenerator::TOS_REG));
  __ b(ne, &unknown_state);
  __ add(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}


void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}


void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_NotifyOSR(MacroAssembler* masm) {
  // For now, we are relying on the fact that Runtime::NotifyOSR
  // doesn't do any garbage collection, which allows us to save/restore
  // the registers without worrying about which of them contain
  // pointers. This seems a bit fragile.
  __ stm(db_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kNotifyOSR, 0);
  }
  __ ldm(ia_w, sp, kJSCallerSaved | kCalleeSaved | lr.bit() | fp.bit());
  __ Ret();
}


void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  CpuFeatures::TryForceFeatureScope scope(VFP3);
  if (!CpuFeatures::IsSupported(VFP3)) {
    __ Abort("Unreachable code: Cannot optimize without VFP3 support.");
    return;
  }

  // Look up the function in the JavaScript frame and push it as an
  // argument to the on-stack replacement function.
  __ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(r0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
  }

  // If the result was -1 it means that we couldn't optimize the
  // function. Just return and continue in the unoptimized version.
  Label skip;
  __ cmp(r0, Operand(Smi::FromInt(-1)));
  __ b(ne, &skip);
  __ Ret();

  __ bind(&skip);
  // Untag the AST id and push it on the stack.
  __ SmiUntag(r0);
  __ push(r0);

  // Generate the code for doing the frame-to-frame translation using
  // the deoptimizer infrastructure.
  Deoptimizer::EntryGenerator generator(masm, Deoptimizer::OSR);
  generator.Generate();
}


void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r0: actual number of arguments
  { Label done;
    __ cmp(r0, Operand(0));
    __ b(ne, &done);
    __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
    __ push(r2);
    __ add(r0, r0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack, check
  //    if it is a function.
  // r0: actual number of arguments
  Label slow, non_function;
  __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
  __ JumpIfSmi(r1, &non_function);
  __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
  __ b(ne, &slow);

  // 3a. Patch the first argument if necessary when calling a function.
  // r0: actual number of arguments
  // r1: function
  Label shift_arguments;
  __ mov(r4, Operand(0, RelocInfo::NONE));  // indicate regular JS_FUNCTION
  { Label convert_to_object, use_global_receiver, patch_receiver;
    // Change context eagerly in case we need the global receiver.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));

    // Do not transform the receiver for strict mode functions.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(r3, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
    __ tst(r3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                             kSmiTagSize)));
    __ b(ne, &shift_arguments);

    // Do not transform the receiver for natives (compiler hints already in
    // r3).
    __ tst(r3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ b(ne, &shift_arguments);

    // Compute the receiver in non-strict mode.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
    __ ldr(r2, MemOperand(r2, -kPointerSize));
    // r0: actual number of arguments
    // r1: function
    // r2: first argument
    __ JumpIfSmi(r2, &convert_to_object);

    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_receiver);
    __ LoadRoot(r3, Heap::kNullValueRootIndex);
    __ cmp(r2, r3);
    __ b(eq, &use_global_receiver);

    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r2, r3, r3, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &shift_arguments);

    __ bind(&convert_to_object);

    {
      // Enter an internal frame in order to preserve argument count.
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ mov(r0, Operand(r0, LSL, kSmiTagSize));  // Smi-tagged.
      __ push(r0);

      __ push(r2);
      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
      __ mov(r2, r0);

      __ pop(r0);
      __ mov(r0, Operand(r0, ASR, kSmiTagSize));

      // Exit the internal frame.
    }

    // Restore the function to r1, and the flag to r4.
    __ ldr(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2));
    __ mov(r4, Operand(0, RelocInfo::NONE));
    __ jmp(&patch_receiver);

    // Use the global receiver object from the called function as the
    // receiver.
    __ bind(&use_global_receiver);
    const int kGlobalIndex =
        Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
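    // Follow the chain: current context -> global object -> global context
    // -> global object -> global receiver.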
1369     __ ldr(r2, FieldMemOperand(cp, kGlobalIndex));
1370     __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
1371     __ ldr(r2, FieldMemOperand(r2, kGlobalIndex));
1372     __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
1373 
1374     __ bind(&patch_receiver);
1375     __ add(r3, sp, Operand(r0, LSL, kPointerSizeLog2));
1376     __ str(r2, MemOperand(r3, -kPointerSize));
1377 
1378     __ jmp(&shift_arguments);
1379   }
1380 
1381   // 3b. Check for function proxy.
1382   __ bind(&slow);
1383   __ mov(r4, Operand(1, RelocInfo::NONE));  // indicate function proxy
1384   __ cmp(r2, Operand(JS_FUNCTION_PROXY_TYPE));
1385   __ b(eq, &shift_arguments);
1386   __ bind(&non_function);
1387   __ mov(r4, Operand(2, RelocInfo::NONE));  // indicate non-function
1388 
1389   // 3c. Patch the first argument when calling a non-function.  The
1390   //     CALL_NON_FUNCTION builtin expects the non-function callee as
1391   //     receiver, so overwrite the first argument which will ultimately
1392   //     become the receiver.
1393   // r0: actual number of arguments
1394   // r1: function
1395   // r4: call type (0: JS function, 1: function proxy, 2: non-function)
1396   __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));
1397   __ str(r1, MemOperand(r2, -kPointerSize));
1398 
  // 4. Shift arguments one slot down on the stack (overwriting the
  //    original receiver).  There is no return address on the stack on
  //    ARM, so only the arguments move.  Adjust argument count to make
  //    the original first argument the new receiver.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  __ bind(&shift_arguments);
  { Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r2, sp, Operand(r0, LSL, kPointerSizeLog2));

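    // Each iteration copies the word below r2 up into [r2], so every
    // argument moves one slot toward the receiver position; the word at
    // sp ends up duplicated, which is why it is popped afterwards.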
    __ bind(&loop);
    __ ldr(ip, MemOperand(r2, -kPointerSize));
    __ str(ip, MemOperand(r2));
    __ sub(r2, r2, Operand(kPointerSize));
    __ cmp(r2, sp);
    __ b(ne, &loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ sub(r0, r0, Operand(1));
    __ pop();
  }

  // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
  //     or a function proxy via CALL_FUNCTION_PROXY.
  // r0: actual number of arguments
  // r1: function
  // r4: call type (0: JS function, 1: function proxy, 2: non-function)
  { Label function, non_proxy;
    __ tst(r4, r4);
    __ b(eq, &function);
    // Expected number of arguments is 0 for CALL_NON_FUNCTION.
    __ mov(r2, Operand(0, RelocInfo::NONE));
    __ SetCallKind(r5, CALL_AS_METHOD);
    __ cmp(r4, Operand(1));
    __ b(ne, &non_proxy);

    __ push(r1);  // re-add proxy object as additional argument
    __ add(r0, r0, Operand(1));
    __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    __ bind(&non_proxy);
    __ GetBuiltinEntry(r3, Builtins::CALL_NON_FUNCTION);
    __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);
    __ bind(&function);
  }

  // 5b. Get the code to call from the function and check that the number of
  //     expected arguments matches what we're providing.  If so, jump
  //     (tail-call) to the code in register r3 without checking arguments.
  // r0: actual number of arguments
  // r1: function
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
  __ ldr(r2,
         FieldMemOperand(r3, SharedFunctionInfo::kFormalParameterCountOffset));
  __ mov(r2, Operand(r2, ASR, kSmiTagSize));
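  // (The formal parameter count is stored as a smi, hence the untagging
  // shift above.)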
  __ ldr(r3, FieldMemOperand(r1, JSFunction::kCodeEntryOffset));
  __ SetCallKind(r5, CALL_AS_METHOD);
  __ cmp(r2, r0);  // Check formal and actual parameter counts.
  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
          RelocInfo::CODE_TARGET,
          ne);

  ParameterCount expected(0);
  __ InvokeCode(r3, expected, expected, JUMP_FUNCTION,
                NullCallWrapper(), CALL_AS_METHOD);
}


void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
  const int kIndexOffset    = -5 * kPointerSize;
  const int kLimitOffset    = -4 * kPointerSize;
  const int kArgsOffset     =  2 * kPointerSize;
  const int kRecvOffset     =  3 * kPointerSize;
  const int kFunctionOffset =  4 * kPointerSize;
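  // All offsets are relative to the fp of the internal frame entered
  // below: the apply call's arguments array, receiver and function live
  // above the frame, while the smi-tagged loop limit and index are
  // pushed inside it once APPLY_PREPARE has returned.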

  {
    FrameScope frame_scope(masm, StackFrame::INTERNAL);

    __ ldr(r0, MemOperand(fp, kFunctionOffset));  // get the function
    __ push(r0);
    __ ldr(r0, MemOperand(fp, kArgsOffset));  // get the args array
    __ push(r0);
    __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);

    // Check the stack for overflow. We are not trying to catch
    // interruptions (e.g. debug break and preemption) here, so the "real stack
    // limit" is checked.
    Label okay;
    __ LoadRoot(r2, Heap::kRealStackLimitRootIndex);
    // Make r2 the space we have left. The stack might already be overflowed
    // here which will cause r2 to become negative.
    __ sub(r2, sp, r2);
    // Check if the arguments will overflow the stack.
    __ cmp(r2, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
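    // r0 holds the smi-tagged argument count, so shifting it left by
    // kPointerSizeLog2 - kSmiTagSize yields the argument size in bytes.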
    __ b(gt, &okay);  // Signed comparison.

    // Out of stack space.
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ push(r1);
    __ push(r0);
    __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
    // End of stack check.

    // Push current limit and index.
    __ bind(&okay);
    __ push(r0);  // limit
    __ mov(r1, Operand(0, RelocInfo::NONE));  // initial index
    __ push(r1);

    // Get the receiver.
    __ ldr(r0, MemOperand(fp, kRecvOffset));

    // Check that the function is a JS function (otherwise it must be a proxy).
    Label push_receiver;
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
    __ b(ne, &push_receiver);

    // Change context eagerly to get the right global object if necessary.
    __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
    // Load the shared function info while the function is still in r1.
    __ ldr(r2, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));

    // Compute the receiver.
    // Do not transform the receiver for strict mode functions.
    Label call_to_object, use_global_receiver;
    __ ldr(r2, FieldMemOperand(r2, SharedFunctionInfo::kCompilerHintsOffset));
    __ tst(r2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
                             kSmiTagSize)));
    __ b(ne, &push_receiver);

    // Do not transform the receiver for native functions (compiler hints
    // are still in r2).
    __ tst(r2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
    __ b(ne, &push_receiver);

    // Compute the receiver in non-strict mode.
    __ JumpIfSmi(r0, &call_to_object);
    __ LoadRoot(r1, Heap::kNullValueRootIndex);
    __ cmp(r0, r1);
    __ b(eq, &use_global_receiver);
    __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
    __ cmp(r0, r1);
    __ b(eq, &use_global_receiver);

    // Check if the receiver is already a JavaScript object.
    // r0: receiver
    STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
    __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
    __ b(ge, &push_receiver);

    // Convert the receiver to a regular object.
    // r0: receiver
    __ bind(&call_to_object);
    __ push(r0);
    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
    __ b(&push_receiver);

    // Use the current global receiver object as the receiver.
    __ bind(&use_global_receiver);
    const int kGlobalOffset =
        Context::kHeaderSize + Context::GLOBAL_INDEX * kPointerSize;
    __ ldr(r0, FieldMemOperand(cp, kGlobalOffset));
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalContextOffset));
    __ ldr(r0, FieldMemOperand(r0, kGlobalOffset));
    __ ldr(r0, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));

    // Push the receiver.
    // r0: receiver
    __ bind(&push_receiver);
    __ push(r0);

    // Copy all arguments from the array to the stack.
    Label entry, loop;
    __ ldr(r0, MemOperand(fp, kIndexOffset));
    __ b(&entry);

    // Load the current argument from the arguments array and push it to the
    // stack.
    // r0: current argument index
    __ bind(&loop);
    __ ldr(r1, MemOperand(fp, kArgsOffset));
    __ push(r1);
    __ push(r0);

    // Call the runtime to access the property in the arguments array.
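    // A generic property load is used here, presumably because the
    // arguments value can be any array-like object (an arguments object,
    // an array with holes, etc.), which a specialized elements load could
    // not handle uniformly.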
    __ CallRuntime(Runtime::kGetProperty, 2);
    __ push(r0);

    // Advance the index (kept as a smi, so add 1 << kSmiTagSize).
    __ ldr(r0, MemOperand(fp, kIndexOffset));
    __ add(r0, r0, Operand(1 << kSmiTagSize));
    __ str(r0, MemOperand(fp, kIndexOffset));

    // Test if the copy loop has finished copying all the elements from the
    // arguments object.
    __ bind(&entry);
    __ ldr(r1, MemOperand(fp, kLimitOffset));
    __ cmp(r0, r1);
    __ b(ne, &loop);

    // Invoke the function.
    Label call_proxy;
    ParameterCount actual(r0);
    __ mov(r0, Operand(r0, ASR, kSmiTagSize));
    __ ldr(r1, MemOperand(fp, kFunctionOffset));
    __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
    __ b(ne, &call_proxy);
    __ InvokeFunction(r1, actual, CALL_FUNCTION,
                      NullCallWrapper(), CALL_AS_METHOD);

    frame_scope.GenerateLeaveFrame();
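    // Drop the apply call's function, receiver and arguments array before
    // returning to the caller.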
    __ add(sp, sp, Operand(3 * kPointerSize));
    __ Jump(lr);

    // Invoke the function proxy.
    __ bind(&call_proxy);
    __ push(r1);  // add function proxy as last argument
    __ add(r0, r0, Operand(1));
    __ mov(r2, Operand(0, RelocInfo::NONE));
    __ SetCallKind(r5, CALL_AS_METHOD);
    __ GetBuiltinEntry(r3, Builtins::CALL_FUNCTION_PROXY);
    __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
            RelocInfo::CODE_TARGET);

    // Tear down the internal frame and remove function, receiver and args.
  }
  __ add(sp, sp, Operand(3 * kPointerSize));
  __ Jump(lr);
}


static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ mov(r0, Operand(r0, LSL, kSmiTagSize));
  __ mov(r4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ stm(db_w, sp, r0.bit() | r1.bit() | r4.bit() | fp.bit() | lr.bit());
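  // The stm stores, from low to high address: smi-tagged argument count,
  // function, ARGUMENTS_ADAPTOR marker, caller's fp, and lr.  fp is then
  // pointed at the saved-fp slot, so the argument count ends up at
  // fp - 3 * kPointerSize (see LeaveArgumentsAdaptorFrame below).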
  __ add(fp, sp, Operand(3 * kPointerSize));
}


static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ ldr(r1, MemOperand(fp, -3 * kPointerSize));
  __ mov(sp, fp);
  __ ldm(ia_w, sp, fp.bit() | lr.bit());
  __ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
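  // r1 is a smi, so the shift above converts it straight from a smi count
  // into a byte offset.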
  __ add(sp, sp, Operand(kPointerSize));  // adjust for receiver
}


void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0 : actual number of arguments
  //  -- r1 : function (passed through to callee)
  //  -- r2 : expected number of arguments
  //  -- r3 : code entry to call
  //  -- r5 : call kind information
  // -----------------------------------

  Label invoke, dont_adapt_arguments;

  Label enough, too_few;
  __ cmp(r0, r2);
  __ b(lt, &too_few);
  __ cmp(r2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ b(eq, &dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r0 and copy end address into r2.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));
    // adjust for return address and receiver
    __ add(r0, r0, Operand(2 * kPointerSize));
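    // r0 now points at the receiver slot in the caller's frame, the
    // highest slot that needs to be copied.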
    __ sub(r2, r0, Operand(r2, LSL, kPointerSizeLog2));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: copy end address
    // r3: code entry to call

    Label copy;
    __ bind(&copy);
    __ ldr(ip, MemOperand(r0, 0));
    __ push(ip);
    __ cmp(r0, r2);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    __ b(&invoke);
  }

  {  // Too few parameters: Actual < expected
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);

    // Calculate copy start address into r0 and copy end address is fp.
    // r0: actual number of arguments as a smi
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ add(r0, fp, Operand(r0, LSL, kPointerSizeLog2 - kSmiTagSize));

    // Copy the arguments (including the receiver) to the new stack frame.
    // r0: copy start address
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ ldr(ip, MemOperand(r0, 2 * kPointerSize));
    __ push(ip);
    __ cmp(r0, fp);  // Compare before moving to next argument.
    __ sub(r0, r0, Operand(kPointerSize));
    __ b(ne, &copy);

    // Fill the remaining expected arguments with undefined.
    // r1: function
    // r2: expected number of arguments
    // r3: code entry to call
    __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
    __ sub(r2, fp, Operand(r2, LSL, kPointerSizeLog2));
    __ sub(r2, r2, Operand(4 * kPointerSize));  // Adjust for frame.
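    // r2 marks the sp value to stop at: room for the expected arguments
    // and the receiver below the three bookkeeping slots of the adaptor
    // frame.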

    Label fill;
    __ bind(&fill);
    __ push(ip);
    __ cmp(sp, r2);
    __ b(ne, &fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ Call(r3);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
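  // (When the deoptimizer later materializes an arguments adaptor frame,
  // it presumably uses this pc offset as that frame's return address.)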

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Jump(lr);


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ Jump(r3);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM