• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #if V8_TARGET_ARCH_PPC
6 
7 #include "src/codegen.h"
8 #include "src/debug/debug.h"
9 #include "src/deoptimizer.h"
10 #include "src/full-codegen/full-codegen.h"
11 #include "src/runtime/runtime.h"
12 
13 namespace v8 {
14 namespace internal {
15 
16 
17 #define __ ACCESS_MASM(masm)
18 
19 
// Trampoline for builtins implemented in C++: pushes the requested extra
// arguments (target and/or new.target), fixes up the argument count in r3 to
// include the receiver and the extras, then tail-calls the C++ entry |id|.
Generate_Adaptor(MacroAssembler * masm,CFunctionId id,BuiltinExtraArguments extra_args)20 void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
21                                 BuiltinExtraArguments extra_args) {
22   // ----------- S t a t e -------------
23   //  -- r3                 : number of arguments excluding receiver
24   //  -- r4                 : target
25   //  -- r6                 : new.target
26   //  -- sp[0]              : last argument
27   //  -- ...
28   //  -- sp[4 * (argc - 1)] : first argument
29   //  -- sp[4 * argc]       : receiver
30   // -----------------------------------
31   __ AssertFunction(r4);
32 
33   // Make sure we operate in the context of the called function (for example
34   // ConstructStubs implemented in C++ will be run in the context of the caller
35   // instead of the callee, due to the way that [[Construct]] is defined for
36   // ordinary functions).
37   __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
38 
39   // Insert extra arguments. The count pushed depends on the enum value; the
40   // extras become ordinary stack arguments visible to the C++ builtin.
41   int num_extra_args = 0;
42   switch (extra_args) {
43     case BuiltinExtraArguments::kTarget:
44       __ Push(r4);
45       ++num_extra_args;
46       break;
47     case BuiltinExtraArguments::kNewTarget:
48       __ Push(r6);
49       ++num_extra_args;
50       break;
51     case BuiltinExtraArguments::kTargetAndNewTarget:
52       __ Push(r4, r6);
53       num_extra_args += 2;
54       break;
55     case BuiltinExtraArguments::kNone:
56       break;
57   }
58 
59   // JumpToExternalReference expects r3 to contain the number of arguments
60   // including the receiver and the extra arguments.
61   __ addi(r3, r3, Operand(num_extra_args + 1));
62 
63   __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
64 }
64 
65 
66 // Load the built-in InternalArray function from the current context.
// Loads the InternalArray constructor function from the current native
// context into |result|.
GenerateLoadInternalArrayFunction(MacroAssembler * masm,Register result)67 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
68                                               Register result) {
69   // Load the InternalArray function from the current native context.
70   __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
71 }
72 
73 
74 // Load the built-in Array function from the current context.
// Loads the Array constructor function from the current native context into
// |result|.
GenerateLoadArrayFunction(MacroAssembler * masm,Register result)75 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
76   // Load the Array function from the current native context.
77   __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
78 }
79 
80 
// Builtin entry for InternalArray called as a normal function: loads the
// InternalArray function, sanity-checks its initial map under --debug-code,
// and tail-calls the InternalArrayConstructorStub.
Generate_InternalArrayCode(MacroAssembler * masm)81 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
82   // ----------- S t a t e -------------
83   //  -- r3     : number of arguments
84   //  -- lr     : return address
85   //  -- sp[...]: constructor arguments
86   // -----------------------------------
87   Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
88 
89   // Get the InternalArray function.
90   GenerateLoadInternalArrayFunction(masm, r4);
91 
92   if (FLAG_debug_code) {
93     // Initial map for the builtin InternalArray functions should be maps.
94     __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
95     __ TestIfSmi(r5, r0);
96     __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);
97     __ CompareObjectType(r5, r6, r7, MAP_TYPE);
98     __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
99   }
100 
101   // Run the native code for the InternalArray function called as a normal
102   // function.
103   // tail call a stub
104   InternalArrayConstructorStub stub(masm->isolate());
105   __ TailCallStub(&stub);
106 }
107 
108 
// Builtin entry for Array called as a normal function (not via 'new'):
// loads the Array function, checks its initial map under --debug-code, sets
// r6 (new target) to the function itself and r5 (allocation site) to
// undefined, then tail-calls the ArrayConstructorStub.
Generate_ArrayCode(MacroAssembler * masm)109 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
110   // ----------- S t a t e -------------
111   //  -- r3     : number of arguments
112   //  -- lr     : return address
113   //  -- sp[...]: constructor arguments
114   // -----------------------------------
115   Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
116 
117   // Get the Array function.
118   GenerateLoadArrayFunction(masm, r4);
119 
120   if (FLAG_debug_code) {
121     // Initial map for the builtin Array functions should be maps.
122     __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
123     __ TestIfSmi(r5, r0);
124     __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0);
125     __ CompareObjectType(r5, r6, r7, MAP_TYPE);
126     __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
127   }
128 
129   __ mr(r6, r4);
130   // Run the native code for the Array function called as a normal function.
131   // tail call a stub
132   __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
133   ArrayConstructorStub stub(masm->isolate());
134   __ TailCallStub(&stub);
135 }
136 
137 
138 // static
// Number(value) called as a function (not as a constructor): converts the
// first argument to a number via ToNumberStub, or returns +0 when called
// with no arguments.
// static
Generate_NumberConstructor(MacroAssembler * masm)139 void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
140   // ----------- S t a t e -------------
141   //  -- r3                     : number of arguments
142   //  -- r4                     : constructor function
143   //  -- lr                     : return address
144   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
145   //  -- sp[argc * 4]           : receiver
146   // -----------------------------------
147 
148   // 1. Load the first argument into r3 and get rid of the rest (including the
149   // receiver).
150   Label no_arguments;
151   {
152     __ cmpi(r3, Operand::Zero());
153     __ beq(&no_arguments);
154     __ subi(r3, r3, Operand(1));
155     __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
156     // Load-with-update: fetches the first argument and bumps sp past the
157     // arguments below it in one instruction.
158     __ LoadPUX(r3, MemOperand(sp, r3));
159     __ Drop(2);
160   }
161 
162   // 2a. Convert the first argument to a number.
163   ToNumberStub stub(masm->isolate());
164   __ TailCallStub(&stub);
165 
166   // 2b. No arguments, return +0.
167   __ bind(&no_arguments);
168   __ LoadSmiLiteral(r3, Smi::FromInt(0));
169   __ Ret(1);
170 }
169 
170 
171 // static
// new Number(value): converts the first argument to a number, then wraps it
// in a JSValue. Falls back to Runtime::kNewObject when new.target differs
// from the constructor (subclassing) or inline allocation fails.
// static
Generate_NumberConstructor_ConstructStub(MacroAssembler * masm)172 void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
173   // ----------- S t a t e -------------
174   //  -- r3                     : number of arguments
175   //  -- r4                     : constructor function
176   //  -- r6                     : new target
177   //  -- lr                     : return address
178   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
179   //  -- sp[argc * 4]           : receiver
180   // -----------------------------------
181 
182   // 1. Make sure we operate in the context of the called function.
183   __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
184 
185   // 2. Load the first argument into r5 and get rid of the rest (including the
186   // receiver). With no arguments the value defaults to Smi 0.
187   {
188     Label no_arguments, done;
189     __ cmpi(r3, Operand::Zero());
190     __ beq(&no_arguments);
191     __ subi(r3, r3, Operand(1));
192     __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
193     __ LoadPUX(r5, MemOperand(sp, r5));
194     __ Drop(2);
195     __ b(&done);
196     __ bind(&no_arguments);
197     __ LoadSmiLiteral(r5, Smi::FromInt(0));
198     __ Drop(1);
199     __ bind(&done);
200   }
201 
202   // 3. Make sure r5 is a number. Smis and HeapNumbers pass through; anything
203   // else is converted with ToNumberStub (constructor and new target are
204   // saved around the call since the stub may clobber them).
205   {
206     Label done_convert;
207     __ JumpIfSmi(r5, &done_convert);
208     __ CompareObjectType(r5, r7, r7, HEAP_NUMBER_TYPE);
209     __ beq(&done_convert);
210     {
211       FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
212       __ Push(r4, r6);
213       __ mr(r3, r5);
214       ToNumberStub stub(masm->isolate());
215       __ CallStub(&stub);
216       __ mr(r5, r3);
217       __ Pop(r4, r6);
218     }
219     __ bind(&done_convert);
220   }
221 
222   // 4. Check if new target and constructor differ.
223   Label new_object;
224   __ cmp(r4, r6);
225   __ bne(&new_object);
226 
227   // 5. Allocate a JSValue wrapper for the number.
228   __ AllocateJSValue(r3, r4, r5, r7, r8, &new_object);
229   __ Ret();
230 
231   // 6. Fallback to the runtime to create new object, then store the number
232   // into the freshly created wrapper's value slot.
233   __ bind(&new_object);
234   {
235     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
236     __ Push(r5, r4, r6);  // first argument, constructor, new target
237     __ CallRuntime(Runtime::kNewObject);
238     __ Pop(r5);
239   }
240   __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0);
241   __ Ret();
242 }
240 
241 
242 // static
// String(value) called as a function (not as a constructor): returns the
// first argument unchanged if it is already a string, converts symbols via
// Runtime::kSymbolDescriptiveString, converts everything else via
// ToStringStub, and returns the empty string when called with no arguments.
// static
Generate_StringConstructor(MacroAssembler * masm)243 void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
244   // ----------- S t a t e -------------
245   //  -- r3                     : number of arguments
246   //  -- r4                     : constructor function
247   //  -- lr                     : return address
248   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
249   //  -- sp[argc * 4]           : receiver
250   // -----------------------------------
251 
252   // 1. Load the first argument into r3 and get rid of the rest (including the
253   // receiver).
254   Label no_arguments;
255   {
256     __ cmpi(r3, Operand::Zero());
257     __ beq(&no_arguments);
258     __ subi(r3, r3, Operand(1));
259     __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
260     __ LoadPUX(r3, MemOperand(sp, r3));
261     __ Drop(2);
262   }
263 
264   // 2a. At least one argument, return r3 if it's a string, otherwise
265   // dispatch to appropriate conversion.
266   Label to_string, symbol_descriptive_string;
267   {
268     __ JumpIfSmi(r3, &to_string);
269     // Instance types at or above FIRST_NONSTRING_TYPE are not strings; the
270     // boundary type itself is SYMBOL_TYPE (asserted here), which gets its
271     // own descriptive-string conversion.
272     STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
273     __ CompareObjectType(r3, r4, r4, FIRST_NONSTRING_TYPE);
274     __ bgt(&to_string);
275     __ beq(&symbol_descriptive_string);
276     __ Ret();
277   }
278 
279   // 2b. No arguments, return the empty string (and pop the receiver).
280   __ bind(&no_arguments);
281   {
282     __ LoadRoot(r3, Heap::kempty_stringRootIndex);
283     __ Ret(1);
284   }
285 
286   // 3a. Convert r3 to a string.
287   __ bind(&to_string);
288   {
289     ToStringStub stub(masm->isolate());
290     __ TailCallStub(&stub);
291   }
292 
293   // 3b. Convert symbol in r3 to a string.
294   __ bind(&symbol_descriptive_string);
295   {
296     __ Push(r3);
297     __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
298   }
299 }
297 
298 
299 // static
// new String(value): converts the first argument to a string, then wraps it
// in a JSValue. Falls back to Runtime::kNewObject when new.target differs
// from the constructor (subclassing) or inline allocation fails.
// static
Generate_StringConstructor_ConstructStub(MacroAssembler * masm)300 void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
301   // ----------- S t a t e -------------
302   //  -- r3                     : number of arguments
303   //  -- r4                     : constructor function
304   //  -- r6                     : new target
305   //  -- lr                     : return address
306   //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
307   //  -- sp[argc * 4]           : receiver
308   // -----------------------------------
309 
310   // 1. Make sure we operate in the context of the called function.
311   __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
312 
313   // 2. Load the first argument into r5 and get rid of the rest (including the
314   // receiver). With no arguments the value defaults to the empty string.
315   {
316     Label no_arguments, done;
317     __ cmpi(r3, Operand::Zero());
318     __ beq(&no_arguments);
319     __ subi(r3, r3, Operand(1));
320     __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
321     __ LoadPUX(r5, MemOperand(sp, r5));
322     __ Drop(2);
323     __ b(&done);
324     __ bind(&no_arguments);
325     __ LoadRoot(r5, Heap::kempty_stringRootIndex);
326     __ Drop(1);
327     __ bind(&done);
328   }
329 
330   // 3. Make sure r5 is a string. Non-strings (including smis) are converted
331   // with ToStringStub; constructor and new target are preserved around the
332   // call.
333   {
334     Label convert, done_convert;
335     __ JumpIfSmi(r5, &convert);
336     __ CompareObjectType(r5, r7, r7, FIRST_NONSTRING_TYPE);
337     __ blt(&done_convert);
338     __ bind(&convert);
339     {
340       FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
341       ToStringStub stub(masm->isolate());
342       __ Push(r4, r6);
343       __ mr(r3, r5);
344       __ CallStub(&stub);
345       __ mr(r5, r3);
346       __ Pop(r4, r6);
347     }
348     __ bind(&done_convert);
349   }
350 
351   // 4. Check if new target and constructor differ.
352   Label new_object;
353   __ cmp(r4, r6);
354   __ bne(&new_object);
355 
356   // 5. Allocate a JSValue wrapper for the string.
357   __ AllocateJSValue(r3, r4, r5, r7, r8, &new_object);
358   __ Ret();
359 
360   // 6. Fallback to the runtime to create new object, then store the string
361   // into the freshly created wrapper's value slot.
362   __ bind(&new_object);
363   {
364     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
365     __ Push(r5, r4, r6);  // first argument, constructor, new target
366     __ CallRuntime(Runtime::kNewObject);
367     __ Pop(r5);
368   }
369   __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0);
370   __ Ret();
371 }
369 
370 
// Calls the runtime function |function_id| with the current function (r4) as
// its single argument, preserving both the target function and the new
// target across the call. Used by the lazy-compile/optimize entry points.
CallRuntimePassFunction(MacroAssembler * masm,Runtime::FunctionId function_id)371 static void CallRuntimePassFunction(MacroAssembler* masm,
372                                     Runtime::FunctionId function_id) {
373   // ----------- S t a t e -------------
374   //  -- r4 : target function (preserved for callee)
375   //  -- r6 : new target (preserved for callee)
376   // -----------------------------------
377 
378   FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
379   // Push a copy of the target function and the new target.
380   // Push function as parameter to the runtime call.
381   __ Push(r4, r6, r4);
382 
383   __ CallRuntime(function_id, 1);
384   // Restore target function and new target.
385   __ Pop(r4, r6);
386 }
387 
388 
// Tail-calls the code object stored on the function's SharedFunctionInfo,
// skipping past the Code header to the first instruction.
GenerateTailCallToSharedCode(MacroAssembler * masm)389 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
390   __ LoadP(ip, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
391   __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
392   __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
393   __ JumpToJSEntry(ip);
394 }
395 
396 
// Tail-calls the code object a preceding runtime call returned in r3,
// skipping past the Code header to the first instruction.
GenerateTailCallToReturnedCode(MacroAssembler * masm)397 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
398   __ addi(ip, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
399   __ JumpToJSEntry(ip);
400 }
401 
402 
// Entry used while a function sits in the optimization queue: tries to
// install already-finished optimized code, but only when the stack limit
// indicates no pending interrupt; otherwise falls through to the shared
// (unoptimized) code.
Generate_InOptimizationQueue(MacroAssembler * masm)403 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
404   // Checking whether the queued function is ready for install is optional,
405   // since we come across interrupts and stack checks elsewhere.  However,
406   // not checking may delay installing ready functions, and always checking
407   // would be quite expensive.  A good compromise is to first check against
408   // stack limit as a cue for an interrupt signal.
409   Label ok;
410   __ LoadRoot(ip, Heap::kStackLimitRootIndex);
411   __ cmpl(sp, ip);
412   __ bge(&ok);
413 
414   CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
415   GenerateTailCallToReturnedCode(masm);
416 
417   __ bind(&ok);
418   GenerateTailCallToSharedCode(masm);
419 }
420 
421 
// Shared body of the JS construct stubs. Sets up a CONSTRUCT frame,
// (optionally) allocates the implicit receiver — inline when possible, via
// Runtime::kNewObject otherwise — copies the arguments onto the expression
// stack, invokes the constructor, and applies the ECMA-262 rule that a
// non-object return value is replaced by the receiver.
// |is_api_function| selects the API-call path; |create_implicit_receiver|
// is false for derived/builtin construct stubs that get `the hole` instead.
Generate_JSConstructStubHelper(MacroAssembler * masm,bool is_api_function,bool create_implicit_receiver)422 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
423                                            bool is_api_function,
424                                            bool create_implicit_receiver) {
425   // ----------- S t a t e -------------
426   //  -- r3     : number of arguments
427   //  -- r4     : constructor function
428   //  -- r5     : allocation site or undefined
429   //  -- r6     : new target
430   //  -- lr     : return address
431   //  -- sp[...]: constructor arguments
432   // -----------------------------------
433 
434   Isolate* isolate = masm->isolate();
435 
436   // Enter a construct frame.
437   {
438     FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
439 
440     // Preserve the incoming parameters on the stack.
441     __ AssertUndefinedOrAllocationSite(r5, r7);
442 
443     if (!create_implicit_receiver) {
444       // No receiver needed: push the hole as a placeholder. SetRC records
445       // whether the (smi-tagged) argc is zero for the copy loop below.
446       __ SmiTag(r7, r3, SetRC);
447       __ Push(r5, r7);
448       __ PushRoot(Heap::kTheHoleValueRootIndex);
449     } else {
450       __ SmiTag(r3);
451       __ Push(r5, r3);
452 
453       // Try to allocate the object without transitioning into C code. If any of
454       // the preconditions is not met, the code bails out to the runtime call.
455       Label rt_call, allocated;
456       if (FLAG_inline_new) {
457         // Verify that the new target is a JSFunction.
458         __ CompareObjectType(r6, r8, r7, JS_FUNCTION_TYPE);
459         __ bne(&rt_call);
460 
461         // Load the initial map and verify that it is in fact a map.
462         // r6: new target
463         __ LoadP(r5,
464                  FieldMemOperand(r6, JSFunction::kPrototypeOrInitialMapOffset));
465         __ JumpIfSmi(r5, &rt_call);
466         __ CompareObjectType(r5, r8, r7, MAP_TYPE);
467         __ bne(&rt_call);
468 
469         // Fall back to runtime if the expected base constructor and base
470         // constructor differ.
471         __ LoadP(r8, FieldMemOperand(r5, Map::kConstructorOrBackPointerOffset));
472         __ cmp(r4, r8);
473         __ bne(&rt_call);
474 
475         // Check that the constructor is not constructing a JSFunction (see
476         // comments in Runtime_NewObject in runtime.cc). In which case the
477         // initial map's instance type would be JS_FUNCTION_TYPE.
478         // r4: constructor function
479         // r5: initial map
480         // r6: new target
481         __ CompareInstanceType(r5, r8, JS_FUNCTION_TYPE);
482         __ beq(&rt_call);
483 
484         // Now allocate the JSObject on the heap.
485         // r4: constructor function
486         // r5: initial map
487         // r6: new target
488         __ lbz(r10, FieldMemOperand(r5, Map::kInstanceSizeOffset));
489 
490         __ Allocate(r10, r7, r10, r9, &rt_call, SIZE_IN_WORDS);
491 
492         // Allocated the JSObject, now initialize the fields. Map is set to
493         // initial map and properties and elements are set to empty fixed array.
494         // r4: constructor function
495         // r5: initial map
496         // r6: new target
497         // r7: JSObject (not HeapObject tagged - the actual address).
498         // r10: start of next object
499         __ LoadRoot(r9, Heap::kEmptyFixedArrayRootIndex);
500         __ StoreP(r5, MemOperand(r7, JSObject::kMapOffset));
501         __ StoreP(r9, MemOperand(r7, JSObject::kPropertiesOffset));
502         __ StoreP(r9, MemOperand(r7, JSObject::kElementsOffset));
503         __ addi(r8, r7, Operand(JSObject::kElementsOffset + kPointerSize));
504 
505         // Add the object tag to make the JSObject real, so that we can continue
506         // and jump into the continuation code at any time from now on.
507         __ addi(r7, r7, Operand(kHeapObjectTag));
508 
509         // Fill all the in-object properties with the appropriate filler.
510         // r7: JSObject (tagged)
511         // r8: First in-object property of JSObject (not tagged)
512         __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
513 
514         if (!is_api_function) {
515           Label no_inobject_slack_tracking;
516 
517           MemOperand bit_field3 = FieldMemOperand(r5, Map::kBitField3Offset);
518           // Check if slack tracking is enabled.
519           __ lwz(r3, bit_field3);
520           __ DecodeField<Map::ConstructionCounter>(r11, r3);
521           // r11: slack tracking counter
522           __ cmpi(r11, Operand(Map::kSlackTrackingCounterEnd));
523           __ blt(&no_inobject_slack_tracking);
524           // Decrease generous allocation count.
525           __ Add(r3, r3, -(1 << Map::ConstructionCounter::kShift), r0);
526           __ stw(r3, bit_field3);
527 
528           // Allocate object with a slack.
529           __ lbz(r3, FieldMemOperand(r5, Map::kUnusedPropertyFieldsOffset));
530           __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
531           __ sub(r3, r10, r3);
532           // r3: offset of first field after pre-allocated fields
533           if (FLAG_debug_code) {
534             __ cmp(r8, r3);
535             __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields);
536           }
537           __ InitializeFieldsWithFiller(r8, r3, r9);
538 
539           // To allow truncation fill the remaining fields with one pointer
540           // filler map.
541           __ LoadRoot(r9, Heap::kOnePointerFillerMapRootIndex);
542           __ InitializeFieldsWithFiller(r8, r10, r9);
543 
544           // r11: slack tracking counter value before decreasing.
545           __ cmpi(r11, Operand(Map::kSlackTrackingCounterEnd));
546           __ bne(&allocated);
547 
548           // Push the constructor, new_target and the object to the stack,
549           // and then the initial map as an argument to the runtime call.
550           __ Push(r4, r6, r7, r5);
551           __ CallRuntime(Runtime::kFinalizeInstanceSize);
552           __ Pop(r4, r6, r7);
553 
554           // Continue with JSObject being successfully allocated
555           // r4: constructor function
556           // r6: new target
557           // r7: JSObject
558           __ b(&allocated);
559 
560           __ bind(&no_inobject_slack_tracking);
561         }
562 
563         __ InitializeFieldsWithFiller(r8, r10, r9);
564 
565         // Continue with JSObject being successfully allocated
566         // r4: constructor function
567         // r6: new target
568         // r7: JSObject
569         __ b(&allocated);
570       }
571 
572       // Allocate the new receiver object using the runtime call.
573       // r4: constructor function
574       // r6: new target
575       __ bind(&rt_call);
576 
577       // Push the constructor and new_target twice, second pair as arguments
578       // to the runtime call.
579       __ Push(r4, r6, r4, r6);
580       __ CallRuntime(Runtime::kNewObject);
581       __ mr(r7, r3);
582       __ Pop(r4, r6);
583 
584       // Receiver for constructor call allocated.
585       // r4: constructor function
586       // r6: new target
587       // r7: JSObject
588       __ bind(&allocated);
589 
590       // Retrieve smi-tagged arguments count from the stack.
591       __ LoadP(r3, MemOperand(sp));
592       __ SmiUntag(r3, SetRC);
593 
594       // Push the allocated receiver to the stack. We need two copies
595       // because we may have to return the original one and the calling
596       // conventions dictate that the called function pops the receiver.
597       __ Push(r7, r7);
598     }
599 
600     // Set up pointer to last argument.
601     __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset));
602 
603     // Copy arguments and receiver to the expression stack.
604     // r3: number of arguments
605     // r4: constructor function
606     // r5: address of last argument (caller sp)
607     // r6: new target
608     // cr0: condition indicating whether r3 is zero
609     // sp[0]: receiver
610     // sp[1]: receiver
611     // sp[2]: number of arguments (smi-tagged)
612     Label loop, no_args;
613     __ beq(&no_args, cr0);
614     __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
615     __ sub(sp, sp, ip);
616     // Copy argc slots from the caller frame using the CTR register as the
617     // loop counter (bdnz decrements and branches while non-zero).
618     __ mtctr(r3);
619     __ bind(&loop);
620     __ subi(ip, ip, Operand(kPointerSize));
621     __ LoadPX(r0, MemOperand(r5, ip));
622     __ StorePX(r0, MemOperand(sp, ip));
623     __ bdnz(&loop);
624     __ bind(&no_args);
625 
626     // Call the function.
627     // r3: number of arguments
628     // r4: constructor function
629     // r6: new target
630     if (is_api_function) {
631       __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
631       Handle<Code> code = masm->isolate()->builtins()->HandleApiCallConstruct();
629       __ Call(code, RelocInfo::CODE_TARGET);
630     } else {
631       ParameterCount actual(r3);
632       __ InvokeFunction(r4, r6, actual, CALL_FUNCTION,
633                         CheckDebugStepCallWrapper());
634     }
635 
636     // Store offset of return address for deoptimizer.
637     if (create_implicit_receiver && !is_api_function) {
638       masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
639     }
640 
641     // Restore context from the frame.
642     // r3: result
643     // sp[0]: receiver
644     // sp[1]: number of arguments (smi-tagged)
645     __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
646 
647     if (create_implicit_receiver) {
648       // If the result is an object (in the ECMA sense), we should get rid
649       // of the receiver and use the result; see ECMA-262 section 13.2.2-7
650       // on page 74.
651       Label use_receiver, exit;
652 
653       // If the result is a smi, it is *not* an object in the ECMA sense.
654       // r3: result
655       // sp[0]: receiver
656       // sp[1]: number of arguments (smi-tagged)
657       __ JumpIfSmi(r3, &use_receiver);
658 
659       // If the type of the result (stored in its map) is less than
660       // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
661       __ CompareObjectType(r3, r4, r6, FIRST_JS_RECEIVER_TYPE);
662       __ bge(&exit);
663 
664       // Throw away the result of the constructor invocation and use the
665       // on-stack receiver as the result.
666       __ bind(&use_receiver);
667       __ LoadP(r3, MemOperand(sp));
668 
669       // Remove receiver from the stack, remove caller arguments, and
670       // return.
671       __ bind(&exit);
672       // r3: result
673       // sp[0]: receiver (newly allocated object)
674       // sp[1]: number of arguments (smi-tagged)
675       __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
676     } else {
677       __ LoadP(r4, MemOperand(sp));
678     }
679 
680     // Leave construct frame.
681   }
682 
683   // Drop the caller's arguments and the receiver using the smi-tagged argc
684   // retrieved above, then return.
683   __ SmiToPtrArrayOffset(r4, r4);
684   __ add(sp, sp, r4);
685   __ addi(sp, sp, Operand(kPointerSize));
686   if (create_implicit_receiver) {
687     __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r4, r5);
688   }
689   __ blr();
690 }
691 
692 
// Generic construct stub: not an API function, creates an implicit receiver.
Generate_JSConstructStubGeneric(MacroAssembler * masm)693 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
694   Generate_JSConstructStubHelper(masm, false, true);
695 }
696 
697 
// Construct stub for API functions: takes the HandleApiCallConstruct path,
// still creating an implicit receiver.
Generate_JSConstructStubApi(MacroAssembler * masm)698 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
699   Generate_JSConstructStubHelper(masm, true, true);
700 }
701 
702 
// Construct stub for builtins: no implicit receiver is created (the hole is
// pushed in its place).
Generate_JSBuiltinsConstructStub(MacroAssembler * masm)703 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
704   Generate_JSConstructStubHelper(masm, false, false);
705 }
706 
707 
// Throws a TypeError when something non-constructable was called with 'new'.
// Never returns; the runtime call raises the exception.
Generate_ConstructedNonConstructable(MacroAssembler * masm)708 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
709   FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
710   __ push(r4);
711   __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
712 }
713 
714 
715 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
716 
717 
718 // Clobbers r5; preserves all other registers.
// Checks that |argc| pointer-sized arguments fit in the remaining stack
// space (against the real stack limit) and calls the runtime to throw a
// stack-overflow error otherwise.
// Clobbers r5; preserves all other registers.
Generate_CheckStackOverflow(MacroAssembler * masm,Register argc,IsTagged argc_is_tagged)719 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
720                                         IsTagged argc_is_tagged) {
721   // Check the stack for overflow. We are not trying to catch
722   // interruptions (e.g. debug break and preemption) here, so the "real stack
723   // limit" is checked.
724   Label okay;
725   __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
726   // Make r5 the space we have left. The stack might already be overflowed
727   // here which will cause r5 to become negative.
728   __ sub(r5, sp, r5);
729   // Check if the arguments will overflow the stack.
730   if (argc_is_tagged == kArgcIsSmiTagged) {
731     __ SmiToPtrArrayOffset(r0, argc);
732   } else {
733     DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
734     __ ShiftLeftImm(r0, argc, Operand(kPointerSizeLog2));
735   }
736   __ cmp(r5, r0);
737   __ bgt(&okay);  // Signed comparison.
738 
739   // Out of stack space.
740   __ CallRuntime(Runtime::kThrowStackOverflow);
741 
742   __ bind(&okay);
743 }
744 
745 
// Shared body of the JS entry trampolines. Called from Generate_JS_Entry:
// sets up an INTERNAL frame and the caller context, pushes the function and
// receiver, copies the C-side argv (an array of handles) onto the stack,
// clears the JS callee-saved registers, and dispatches to either the
// Construct or the Call builtin depending on |is_construct|.
Generate_JSEntryTrampolineHelper(MacroAssembler * masm,bool is_construct)746 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
747                                              bool is_construct) {
748   // Called from Generate_JS_Entry
749   // r3: new.target
750   // r4: function
751   // r5: receiver
752   // r6: argc
753   // r7: argv
754   // r0,r8-r9, cp may be clobbered
755   ProfileEntryHookStub::MaybeCallEntryHook(masm);
756 
757   // Clear the context before we push it when entering the internal frame.
758   __ li(cp, Operand::Zero());
759 
760   // Enter an internal frame.
761   {
762     FrameScope scope(masm, StackFrame::INTERNAL);
763 
764     // Setup the context (we need to use the caller context from the isolate).
765     ExternalReference context_address(Isolate::kContextAddress,
766                                       masm->isolate());
767     __ mov(cp, Operand(context_address));
768     __ LoadP(cp, MemOperand(cp));
769 
770     __ InitializeRootRegister();
771 
772     // Push the function and the receiver onto the stack.
773     __ Push(r4, r5);
774 
775     // Check if we have enough stack space to push all arguments.
776     // Clobbers r5.
777     Generate_CheckStackOverflow(masm, r6, kArgcIsUntaggedInt);
778 
779     // Copy arguments to the stack in a loop. Each argv slot is a handle, so
780     // it is dereferenced once more before being pushed.
781     // r4: function
782     // r6: argc
783     // r7: argv, i.e. points to first arg
784     Label loop, entry;
785     __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2));
786     __ add(r5, r7, r0);
787     // r5 points past last arg.
788     __ b(&entry);
789     __ bind(&loop);
790     __ LoadP(r8, MemOperand(r7));  // read next parameter
791     __ addi(r7, r7, Operand(kPointerSize));
792     __ LoadP(r0, MemOperand(r8));  // dereference handle
793     __ push(r0);                   // push parameter
794     __ bind(&entry);
795     __ cmp(r7, r5);
796     __ bne(&loop);
797 
798     // Setup new.target and argc (swap r3 and r6 via r7).
799     __ mr(r7, r3);
800     __ mr(r3, r6);
801     __ mr(r6, r7);
802 
803     // Initialize all JavaScript callee-saved registers, since they will be seen
804     // by the garbage collector as part of handlers.
805     __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
806     __ mr(r14, r7);
807     __ mr(r15, r7);
808     __ mr(r16, r7);
809     __ mr(r17, r7);
810 
811     // Invoke the code.
812     Handle<Code> builtin = is_construct
813                                ? masm->isolate()->builtins()->Construct()
814                                : masm->isolate()->builtins()->Call();
815     __ Call(builtin, RelocInfo::CODE_TARGET);
816 
817     // Exit the JS frame and remove the parameters (except function), and
818     // return.
819   }
820   __ blr();
821 
822   // r3: result
823 }
823 
824 
// Entry trampoline for ordinary calls (dispatches to the Call builtin).
Generate_JSEntryTrampoline(MacroAssembler * masm)825 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
826   Generate_JSEntryTrampolineHelper(masm, false);
827 }
828 
829 
// Entry trampoline for construct calls (dispatches to the Construct builtin).
Generate_JSConstructEntryTrampoline(MacroAssembler * masm)830 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
831   Generate_JSEntryTrampolineHelper(masm, true);
832 }
833 
834 
835 // Generate code for entering a JS function with the interpreter.
836 // On entry to the function the receiver and arguments have been pushed on the
837 // stack left to right.  The actual argument count matches the formal parameter
838 // count expected by the function.
839 //
840 // The live registers are:
841 //   o r4: the JS function object being called.
842 //   o r6: the new target
843 //   o cp: our context
844 //   o pp: the caller's constant pool pointer (if enabled)
845 //   o fp: the caller's frame pointer
846 //   o sp: stack pointer
847 //   o lr: return address
848 //
849 // The function builds a JS frame.  Please see JavaScriptFrameConstants in
850 // frames-ppc.h for its layout.
851 // TODO(rmcilroy): We will need to include the current bytecode pointer in the
852 // frame.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  // Build the fixed frame by hand and push the function (r4) and the new
  // target (r6) on top of it.
  __ PushFixedFrame(r4);
  __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
  __ push(r6);

  // Push zero for bytecode array offset.
  __ li(r3, Operand::Zero());
  __ push(r3);

  // Get the bytecode array from the function object and load the pointer to the
  // first entry into kInterpreterBytecodeRegister.
  __ LoadP(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size (word) from the BytecodeArray object.
    __ lwz(r5, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                               BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ sub(r6, sp, r5);
    __ LoadRoot(r0, Heap::kRealStackLimitRootIndex);
    __ cmpl(r6, r0);  // unsigned compare against the real stack limit
    __ bge(&ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    Label loop, no_args;
    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
    // Convert the frame size in bytes to a slot count; SetRC lets us branch
    // around the loop when the count is zero.
    __ ShiftRightImm(r5, r5, Operand(kPointerSizeLog2), SetRC);
    __ beq(&no_args, cr0);
    __ mtctr(r5);  // drive the push loop with the count register
    __ bind(&loop);
    __ push(r6);
    __ bdnz(&loop);
    __ bind(&no_args);
  }

  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's prologue:
  //  - Support profiler (specifically profiling_counter).
  //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
  //  - Allow simulator stop operations if FLAG_stop_at is set.
  //  - Code aging of the BytecodeArray object.

  // Perform stack guard check.
  {
    Label ok;
    __ LoadRoot(r0, Heap::kStackLimitRootIndex);
    __ cmp(sp, r0);
    __ bge(&ok);
    // Preserve the bytecode array across the runtime call.
    __ push(kInterpreterBytecodeArrayRegister);
    __ CallRuntime(Runtime::kStackGuard);
    __ pop(kInterpreterBytecodeArrayRegister);
    __ bind(&ok);
  }

  // Load accumulator, register file, bytecode offset, dispatch table into
  // registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ addi(kInterpreterRegisterFileRegister, fp,
          Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ LoadRoot(kInterpreterDispatchTableRegister,
              Heap::kInterpreterTableRootIndex);
  __ addi(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag));

  // Dispatch to the first bytecode handler for the function: load the first
  // bytecode, index into the dispatch table with it, and call the handler.
  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftImm(ip, r4, Operand(kPointerSizeLog2));
  __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  // TODO(rmcilroy): Make dispatch table point to code entrys to avoid untagging
  // and header removal.
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Call(ip);
  __ bkpt(0);  // Does not return here.
}
951 
952 
void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // The return value is in accumulator, which is already in r3.

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments and return.  The parameter size is a byte
  // count: it is added to sp without scaling.
  __ lwz(r0, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                             BytecodeArray::kParameterSizeOffset));
  __ add(sp, sp, r0);
  __ blr();
}
971 
972 
// Pushes |count| values onto the stack, reading successively lower addresses
// starting at |index|.  |count| must be non-zero: the mtctr/bdnz loop body
// always executes at least once (callers either guard against zero or pass
// count >= 1).  Clobbers |index|, |scratch| and the CTR register.
static void Generate_InterpreterPushArgs(MacroAssembler* masm, Register index,
                                         Register count, Register scratch) {
  Label loop;
  __ addi(index, index, Operand(kPointerSize));  // Bias up for LoadPU
  __ mtctr(count);
  __ bind(&loop);
  __ LoadPU(scratch, MemOperand(index, -kPointerSize));  // pre-decrement load
  __ push(scratch);
  __ bdnz(&loop);
}
983 
984 
985 // static
// static
void Builtins::Generate_InterpreterPushArgsAndCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r5 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- r4 : the target to call (can be any Object).
  // -----------------------------------

  // Calculate number of arguments (add one for receiver).
  __ addi(r6, r3, Operand(1));

  // Push the arguments.  r6 >= 1 here, satisfying the non-zero-count
  // requirement of Generate_InterpreterPushArgs.
  Generate_InterpreterPushArgs(masm, r5, r6, r7);

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
1004 
1005 
1006 // static
// static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : argument count (not including receiver)
  // -- r6 : new target
  // -- r4 : constructor to call
  // -- r5 : address of the first argument
  // -----------------------------------

  // Push a slot for the receiver to be constructed.
  __ li(r0, Operand::Zero());
  __ push(r0);

  // Push the arguments (skip if none — Generate_InterpreterPushArgs requires
  // a non-zero count).
  Label skip;
  __ cmpi(r3, Operand::Zero());
  __ beq(&skip);
  Generate_InterpreterPushArgs(masm, r5, r3, r7);
  __ bind(&skip);

  // Call the constructor with r3, r4, and r6 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
1029 
1030 
// Notifies the runtime of a deoptimization of the given |type|, then
// re-enters the interpreter: the dispatch state (register file, dispatch
// table, context, bytecode array, bytecode offset) is re-materialized from
// the frame and control jumps to the handler of the target bytecode.
static void Generate_InterpreterNotifyDeoptimizedHelper(
    MacroAssembler* masm, Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Save accumulator register and pass the deoptimization type to
    // the runtime system.
    __ LoadSmiLiteral(r4, Smi::FromInt(static_cast<int>(type)));
    __ Push(kInterpreterAccumulatorRegister, r4);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
    __ pop(kInterpreterAccumulatorRegister);  // Restore accumulator register.
    // Tear down internal frame.
  }

  // Drop state (we don't use these for interpreter deopts).
  __ Drop(1);

  // Initialize register file register and dispatch table register.
  __ addi(kInterpreterRegisterFileRegister, fp,
          Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ LoadRoot(kInterpreterDispatchTableRegister,
              Heap::kInterpreterTableRootIndex);
  __ addi(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag));

  // Get the context from the frame.
  // TODO(rmcilroy): Update interpreter frame to expect current context at the
  // context slot instead of the function context.
  __ LoadP(kContextRegister,
           MemOperand(kInterpreterRegisterFileRegister,
                      InterpreterFrameConstants::kContextFromRegisterPointer));

  // Get the bytecode array pointer from the frame.
  __ LoadP(r4,
           MemOperand(kInterpreterRegisterFileRegister,
                      InterpreterFrameConstants::kFunctionFromRegisterPointer));
  __ LoadP(r4, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(r4, SharedFunctionInfo::kFunctionDataOffset));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r4, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ LoadP(kInterpreterBytecodeOffsetRegister,
           MemOperand(
               kInterpreterRegisterFileRegister,
               InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode: load it, index the dispatch table, and
  // jump to the handler's code entry.
  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftImm(ip, r4, Operand(kPointerSizeLog2));
  __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(ip);
}
1095 
1096 
void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) {
  // Eager deopt variant of the interpreter notify-deoptimized helper.
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
1100 
1101 
void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) {
  // Soft deopt variant of the interpreter notify-deoptimized helper.
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
1105 
1106 
void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) {
  // Lazy deopt variant of the interpreter notify-deoptimized helper.
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
1110 
1111 
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // Compile the function lazily via the runtime, then tail-call the code
  // object the runtime returns.
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}
1116 
1117 
void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  // Optimize the function on the main thread, then tail-call the returned
  // optimized code.
  CallRuntimePassFunction(masm, Runtime::kCompileOptimized_NotConcurrent);
  GenerateTailCallToReturnedCode(masm);
}
1122 
1123 
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  // Kick off concurrent optimization via the runtime, then tail-call the
  // code the runtime returns.
  CallRuntimePassFunction(masm, Runtime::kCompileOptimized_Concurrent);
  GenerateTailCallToReturnedCode(masm);
}
1128 
1129 
// Shared tail for the Make*CodeYoungAgain* builtins: calls the C function
// that rejuvenates the code object, then jumps back to the (now patched)
// code start.  On entry ip points at the start of the code-age sequence.
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r3 - contains return address (beginning of patch sequence)
  //   r4 - isolate
  //   r6 - new target
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);  // r0 carries the saved lr across the C call
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ mtlr(r0);
  // Re-enter the rejuvenated code at its start.
  __ mr(ip, r3);
  __ Jump(ip);
}
1158 
// Stamp out the Even/Odd-marking "make code young again" builtins for every
// code age in CODE_AGE_LIST; they all share GenerateMakeCodeYoungAgainCommon.
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1170 
1171 
// Like GenerateMakeCodeYoungAgainCommon, but calls the mark-as-executed C
// function and, instead of re-entering at the code start, performs the young
// prologue by hand and resumes just past the code-age sequence.
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r3 - contains return address (beginning of patch sequence)
  //   r4 - isolate
  //   r6 - new target
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);  // r0 carries the saved lr across the C call
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ mtlr(r0);
  __ mr(ip, r3);

  // Perform prologue operations usually performed by the young code stub.
  __ PushFixedFrame(r4);
  __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));

  // Jump to point after the code-age stub.
  __ addi(r3, ip, Operand(kNoCodeAgeSequenceLength));
  __ Jump(r3);
}
1208 
1209 
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  // Marking as executed twice makes the code young again.
  GenerateMakeCodeYoungAgainCommon(masm);
}
1213 
1214 
void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  // Shares the implementation of the executed-once builtin.
  Generate_MarkCodeAsExecutedOnce(masm);
}
1218 
1219 
// Notifies the runtime that a stub failed, preserving all JS caller-saved and
// callee-saved registers across the call, then returns to the miss handler
// whose address is on the stack (below one word of state that is dropped).
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Notify the runtime; no arguments are passed.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ addi(sp, sp, Operand(kPointerSize));  // Ignore state
  __ blr();                                // Jump to miss handler
}
1237 
1238 
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  // Variant that does not save double registers.
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}
1242 
1243 
void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  // Variant that additionally saves double registers across the call.
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
1247 
1248 
// Notifies the runtime of a full-codegen deoptimization of the given |type|,
// then returns according to the full-codegen state found on the stack
// (NO_REGISTERS: drop the state; TOS_REG: additionally restore the
// top-of-stack value into r3 before returning).
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass the deoptimization type to the runtime system.
    __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type)));
    __ push(r3);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it -> r9.
  __ LoadP(r9, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r9);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ cmpi(r9, Operand(FullCodeGenerator::NO_REGISTERS));
  __ bne(&with_tos_register);
  __ addi(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  // Restore the top-of-stack value into the return register.
  __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
  __ cmpi(r9, Operand(FullCodeGenerator::TOS_REG));
  __ bne(&unknown_state);
  __ addi(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}
1279 
1280 
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  // Eager deopt variant of the notify-deoptimized helper.
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
1284 
1285 
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  // Soft deopt variant of the notify-deoptimized helper.
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
1289 
1290 
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  // Lazy deopt variant of the notify-deoptimized helper.
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
1294 
1295 
// Checks that |receiver| is compatible with the signature of
// |function_template_info|, walking the receiver's chain of hidden
// prototypes.  Falls through when the check passes; jumps to
// |receiver_check_failed| otherwise.
// Clobbers registers {r7, r8, r9, r10}.
// NOTE(review): this helper has external linkage — consider making it static.
void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                             Register function_template_info,
                             Label* receiver_check_failed) {
  Register signature = r7;
  Register map = r8;
  Register constructor = r9;
  Register scratch = r10;

  // If there is no signature, return the holder.
  __ LoadP(signature, FieldMemOperand(function_template_info,
                                      FunctionTemplateInfo::kSignatureOffset));
  Label receiver_check_passed;
  __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
                &receiver_check_passed);

  // Walk the prototype chain.
  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any; only JSFunction constructors are inspected.
  __ GetMapConstructor(constructor, map, scratch, scratch);
  __ cmpi(scratch, Operand(JS_FUNCTION_TYPE));
  Label next_prototype;
  __ bne(&next_prototype);
  Register type = constructor;
  __ LoadP(type,
           FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(type,
           FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ cmp(signature, type);
  __ beq(&receiver_check_passed);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype);
  __ CompareObjectType(type, scratch, scratch, FUNCTION_TEMPLATE_INFO_TYPE);
  __ bne(&next_prototype);

  // Otherwise load the parent function template and iterate.
  __ LoadP(type,
           FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ b(&function_template_loop);

  // Load the next prototype.
  __ bind(&next_prototype);
  __ LoadP(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
  // End if the prototype is null or not hidden.
  __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, receiver_check_failed);
  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lwz(scratch, FieldMemOperand(map, Map::kBitField3Offset));
  __ DecodeField<Map::IsHiddenPrototype>(scratch, SetRC);
  __ beq(receiver_check_failed, cr0);
  // Iterate.
  __ b(&prototype_loop_start);

  __ bind(&receiver_check_passed);
}
1361 
1362 
// Checks receiver compatibility and, on success, tail-calls the fast handler
// code of the API function.  On failure, drops all arguments and throws an
// Illegal Invocation exception.
void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                 : number of arguments excluding receiver
  //  -- r4                 : callee
  //  -- lr                 : return address
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------


  // Load the FunctionTemplateInfo.
  __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  // r11 = argc * kPointerSize; it stays live for the failure path below.
  __ ShiftLeftImm(r11, r3, Operand(kPointerSizeLog2));
  __ LoadPX(r5, MemOperand(sp, r11));  // load the receiver
  CompatibleReceiverCheck(masm, r5, r6, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ LoadP(r7, FieldMemOperand(r6, FunctionTemplateInfo::kCallCodeOffset));
  __ LoadP(r7, FieldMemOperand(r7, CallHandlerInfo::kFastHandlerOffset));
  __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  // Drop the arguments (including the receiver);
  __ addi(r11, r11, Operand(kPointerSize));
  __ add(sp, sp, r11);
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}
1399 
1400 
// Requests OSR-compiled code for the current function and, if available,
// "returns" into its OSR entry point; otherwise returns to the unoptimized
// code.
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r3);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the unoptimized code.
  Label skip;
  __ CmpSmiLiteral(r3, Smi::FromInt(0), r0);
  __ bne(&skip);
  __ Ret();

  __ bind(&skip);

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset));

  {
    ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start

    if (FLAG_enable_embedded_constant_pool) {
      __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
    }

    // Load the OSR entrypoint offset from the deoptimization data.
    // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
    __ LoadP(r4, FieldMemOperand(
                     r4, FixedArray::OffsetOfElementAt(
                             DeoptimizationInputData::kOsrPcOffsetIndex)));
    __ SmiUntag(r4);

    // Compute the target address = code start + osr_offset
    __ add(r0, r3, r4);

    // And "return" to the OSR entry point of the function.
    __ mtlr(r0);
    __ blr();
  }
}
1446 
1447 
void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmpl(sp, ip);  // unsigned compare against the stack limit
  __ bge(&ok);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard);
  }
  // Stack limit was hit: attempt on-stack replacement.
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}
1464 
1465 
1466 // static
// static
// Loads the JSDate field |field_index| from the receiver.  Cached fields are
// served from the date object when its cache stamp matches the isolate's;
// otherwise (and for uncached fields) a C helper computes the value.
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  //  -- sp[0] : receiver
  // -----------------------------------

  // 1. Pop receiver into r3 and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    __ Pop(r3);
    __ JumpIfSmi(r3, &receiver_not_date);
    __ CompareObjectType(r3, r4, r5, JS_DATE_TYPE);
    __ bne(&receiver_not_date);
  }

  // 2. Load the specified date field, falling back to the runtime as necessary.
  if (field_index == JSDate::kDateValue) {
    // The date value itself is always stored on the object.
    __ LoadP(r3, FieldMemOperand(r3, JSDate::kValueOffset));
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      // Fast path: use the cached field if the object's cache stamp matches
      // the isolate's current date cache stamp.
      Label stamp_mismatch;
      __ mov(r4, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
      __ LoadP(r4, MemOperand(r4));
      __ LoadP(ip, FieldMemOperand(r3, JSDate::kCacheStampOffset));
      __ cmp(r4, ip);
      __ bne(&stamp_mismatch);
      __ LoadP(r3, FieldMemOperand(
                       r3, JSDate::kValueOffset + field_index * kPointerSize));
      __ Ret();
      __ bind(&stamp_mismatch);
    }
    // Slow path: compute the field with the C helper.
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2, r4);
    __ LoadSmiLiteral(r4, Smi::FromInt(field_index));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ Ret();

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  __ TailCallRuntime(Runtime::kThrowNotDateError);
}
1511 
1512 
1513 // static
// static
// Implements Function.prototype.apply: loads receiver/thisArg/argArray off
// the stack (missing arguments default to undefined), validates the
// receiver, and tail-calls either Apply or Call.
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3    : argc
  //  -- sp[0] : argArray
  //  -- sp[4] : thisArg
  //  -- sp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into r4, argArray into r3 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r6;
    Register scratch = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    // Default both argArray (r3) and thisArg (scratch) to undefined.
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ mr(scratch, r3);
    __ LoadP(r4, MemOperand(new_sp, 0));  // receiver
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);  // no arguments beyond the receiver
    __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize));  // thisArg
    __ beq(&skip);  // exactly one argument: no argArray
    __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize));  // argArray
    __ bind(&skip);
    __ mr(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));  // thisArg becomes the receiver slot
  }

  // ----------- S t a t e -------------
  //  -- r3    : argArray
  //  -- r4    : receiver
  //  -- sp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(r4, &receiver_not_callable);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&receiver_not_callable, cr0);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(r3, Heap::kNullValueRootIndex, &no_arguments);
  __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ li(r3, Operand::Zero());
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1584 
1585 
// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // r3: actual number of arguments
  {
    Label done;
    __ cmpi(r3, Operand::Zero());
    __ bne(&done);
    // Zero arguments: push undefined as a dummy so the shift below always has
    // a slot to promote to the receiver.
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ addi(r3, r3, Operand(1));
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  // r3: actual number of arguments
  __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
  __ LoadPX(r4, MemOperand(sp, r5));

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  // r3: actual number of arguments
  // r4: callable
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r5, sp, r5);

    // CTR-based loop (mtctr/bdnz): r3 iterations, moving each slot one
    // position towards the receiver end of the frame.
    __ mtctr(r3);
    __ bind(&loop);
    __ LoadP(ip, MemOperand(r5, -kPointerSize));
    __ StoreP(ip, MemOperand(r5));
    __ subi(r5, r5, Operand(kPointerSize));
    __ bdnz(&loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ subi(r3, r3, Operand(1));
    __ pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
1630 
1631 
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : argc
  //  -- sp[0]  : argumentsList
  //  -- sp[4]  : thisArgument
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r4 (if present), argumentsList into r3 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r6;
    Register scratch = r7;
    // arg_size = argc * kPointerSize; new_sp addresses the receiver slot.
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    // Default target (r4), thisArgument (scratch) and argumentsList (r3) to
    // undefined; the loads below fill them in depending on argc.
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mr(scratch, r4);
    __ mr(r3, r4);
    // argc == 0: blt skips all loads.  argc == 1: first beq leaves only the
    // target.  argc == 2: second compare/beq skips the argumentsList load.
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ beq(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize));  // thisArgument
    __ cmpi(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 3 * -kPointerSize));  // argumentsList
    __ bind(&skip);
    // Drop all arguments and store thisArgument into the receiver slot.
    __ mr(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }

  // ----------- S t a t e -------------
  //  -- r3    : argumentsList
  //  -- r4    : target
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(r4, &target_not_callable);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  // TestBit records its result in cr0, hence the cr0-qualified branch.
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&target_not_callable, cr0);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    // Place the target on the stack where the runtime expects the argument.
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1693 
1694 
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : argc
  //  -- sp[0]  : new.target (optional)
  //  -- sp[4]  : argumentsList
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------

  // 1. Load target into r4 (if present), argumentsList into r3 (if present),
  // new.target into r6 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r7;
    // arg_size = argc * kPointerSize; new_sp addresses the receiver slot.
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    // Default target (r4), argumentsList (r3) and new.target (r6) to
    // undefined; the loads below fill them in depending on argc.
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mr(r3, r4);
    __ mr(r6, r4);
    // Construct ignores the receiver, so store undefined there up front.
    __ StoreP(r4, MemOperand(new_sp, 0));  // receiver (undefined)
    // argc == 0: blt skips all loads.  argc == 1: first beq leaves new.target
    // defaulted to target.  argc == 2: second compare/beq keeps that default.
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ mr(r6, r4);  // new.target defaults to target
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize));  // argumentsList
    __ cmpi(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r6, MemOperand(new_sp, 3 * -kPointerSize));  // new.target
    __ bind(&skip);
    // Drop all arguments; the receiver slot already holds undefined.
    __ mr(sp, new_sp);
  }

  // ----------- S t a t e -------------
  //  -- r3    : argumentsList
  //  -- r6    : new.target
  //  -- r4    : target
  //  -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(r4, &target_not_constructor);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  // TestBit records its result in cr0, hence the cr0-qualified branches.
  __ TestBit(r7, Map::kIsConstructor, r0);
  __ beq(&target_not_constructor, cr0);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(r6, &new_target_not_constructor);
  __ LoadP(r7, FieldMemOperand(r6, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsConstructor, r0);
  __ beq(&new_target_not_constructor, cr0);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    // Place the offending object on the stack for the runtime to report.
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ StoreP(r6, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
1771 
1772 
// Branches to |stack_overflow| if pushing |r5| (expected) arguments would run
// past the real stack limit.  Does not clobber r3-r6.
static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- r3 : actual number of arguments
  //  -- r4 : function (passed through to callee)
  //  -- r5 : expected number of arguments
  //  -- r6 : new target (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(r8, Heap::kRealStackLimitRootIndex);
  // Make r8 the space we have left. The stack might already be overflowed
  // here which will cause r8 to become negative.
  __ sub(r8, sp, r8);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
  __ cmp(r8, r0);
  __ ble(stack_overflow);  // Signed comparison.
}
1793 
1794 
// Builds an arguments adaptor frame: saves lr and fp, pushes the frame-type
// marker (Smi-tagged StackFrame::ARGUMENTS_ADAPTOR), the function (r4) and
// the Smi-tagged actual argument count (r3), then points fp at the frame.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r3);
  __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ mflr(r0);
  __ push(r0);
  // The constant pool register is part of the frame layout only when
  // embedded constant pools are enabled.
  if (FLAG_enable_embedded_constant_pool) {
    __ Push(fp, kConstantPoolRegister, r7, r4, r3);
  } else {
    __ Push(fp, r7, r4, r3);
  }
  // The extra kPointerSize accounts for the Smi-tagged argc slot pushed
  // beyond the standard fixed frame.
  __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                          kPointerSize));
}
1808 
1809 
// Tears down the adaptor frame built by EnterArgumentsAdaptorFrame and drops
// the actual arguments (plus the receiver) from the caller's stack.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                                kPointerSize)));
  int stack_adjustment = kPointerSize;  // adjust for receiver
  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
  // r4 still holds the Smi-tagged argc; convert to a byte offset and pop.
  __ SmiToPtrArrayOffset(r0, r4);
  __ add(sp, sp, r0);
}
1823 
1824 
// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3    : argumentsList
  //  -- r4    : target
  //  -- r6    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(r3, &create_runtime);

    // Load the map of argumentsList into r5.
    __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));

    // Load native context into r7.
    __ LoadP(r7, NativeContextMemOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ LoadP(ip, ContextMemOperand(r7, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r5);
    __ beq(&create_arguments);
    __ LoadP(ip, ContextMemOperand(r7, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r5);
    __ beq(&create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CompareInstanceType(r5, ip, JS_ARRAY_TYPE);
    __ beq(&create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      // Preserve target and new.target across the runtime call; the runtime
      // returns the FixedArray of arguments in r3.
      __ Push(r4, r6, r3);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ Pop(r4, r6);
      __ LoadP(r5, FieldMemOperand(r3, FixedArray::kLengthOffset));
      __ SmiUntag(r5);
    }
    __ b(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    // Fast path only when the arguments length still matches the backing
    // store length, i.e. the object has not been modified.
    __ LoadP(r5, FieldMemOperand(
                     r3, JSObject::kHeaderSize +
                             Heap::kArgumentsLengthIndex * kPointerSize));
    __ LoadP(r7, FieldMemOperand(r3, JSObject::kElementsOffset));
    __ LoadP(ip, FieldMemOperand(r7, FixedArray::kLengthOffset));
    __ cmp(r5, ip);
    __ bne(&create_runtime);
    __ SmiUntag(r5);
    __ mr(r3, r7);  // use the elements FixedArray directly
    __ b(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ lbz(r5, FieldMemOperand(r5, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(r5);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    // Only FAST_SMI_ELEMENTS and FAST_ELEMENTS are handled inline; holey smi
    // arrays and every kind above FAST_ELEMENTS go to the runtime.
    __ cmpi(r5, Operand(FAST_ELEMENTS));
    __ bgt(&create_runtime);
    __ cmpi(r5, Operand(FAST_HOLEY_SMI_ELEMENTS));
    __ beq(&create_runtime);
    __ LoadP(r5, FieldMemOperand(r3, JSArray::kLengthOffset));
    __ LoadP(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
    __ SmiUntag(r5);

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
    // Make ip the space we have left. The stack might already be overflowed
    // here which will cause ip to become negative.
    __ sub(ip, sp, ip);
    // Check if the arguments will overflow the stack.
    __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
    __ cmp(ip, r0);  // Signed comparison.
    __ bgt(&done);
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- r4    : target
  //  -- r3    : args (a FixedArray built from argumentsList)
  //  -- r5    : len (number of elements to push from args)
  //  -- r6    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    Label loop, no_args;
    __ cmpi(r5, Operand::Zero());
    __ beq(&no_args);
    // Bias r3 one slot below the first element so LoadPU's pre-increment
    // addressing walks the FixedArray elements in order.
    __ addi(r3, r3,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
    __ mtctr(r5);
    __ bind(&loop);
    __ LoadPU(r0, MemOperand(r3, kPointerSize));
    __ push(r0);
    __ bdnz(&loop);
    __ bind(&no_args);
    // Call/Construct expect the argument count in r3.
    __ mr(r3, r5);
  }

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    __ CompareRoot(r6, Heap::kUndefinedValueRootIndex);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}
1948 
1949 
// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(r4);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
  // TestBitMask records its result in cr0, hence the cr0-qualified branch.
  __ TestBitMask(r6, SharedFunctionInfo::kClassConstructorBits, r0);
  __ bne(&class_constructor, cr0);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  __ andi(r0, r6, Operand((1 << SharedFunctionInfo::kStrictModeBit) |
                          (1 << SharedFunctionInfo::kNativeBit)));
  __ bne(&done_convert, cr0);
  {
    // ----------- S t a t e -------------
    //  -- r3 : the number of arguments (not including the receiver)
    //  -- r4 : the function to call (checked to be a JSFunction)
    //  -- r5 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(r6);
    } else {
      Label convert_to_object, convert_receiver;
      // Load the receiver from the stack into r6.
      __ ShiftLeftImm(r6, r3, Operand(kPointerSizeLog2));
      __ LoadPX(r6, MemOperand(sp, r6));
      __ JumpIfSmi(r6, &convert_to_object);
      // Any JSReceiver needs no conversion.
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(r6, r7, r7, FIRST_JS_RECEIVER_TYPE);
      __ bge(&done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(r6, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(r6, Heap::kNullValueRootIndex, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(r6);
        }
        __ b(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
        // Preserve argc (Smi-tagged) and the function across the stub call.
        __ SmiTag(r3);
        __ Push(r3, r4);
        __ mr(r3, r6);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ mr(r6, r3);
        __ Pop(r3, r4);
        __ SmiUntag(r3);
      }
      // Reload the shared function info; r5 was clobbered above.
      __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    // Write the converted receiver back into its stack slot.
    __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
    __ StorePX(r6, MemOperand(sp, r7));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSFunction)
  //  -- r5 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  __ LoadWordArith(
      r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_PPC64
  // On 32-bit the parameter count is stored as a Smi.
  __ SmiUntag(r5);
#endif
  ParameterCount actual(r3);
  ParameterCount expected(r5);
  __ InvokeFunctionCode(r4, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL);
    __ push(r4);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
2055 
2056 
namespace {

// Pushes a JSBoundFunction's [[BoundArguments]] onto the stack below the
// arguments already present, and bumps r3 (argc) by the number of bound
// arguments.  Shared by the call and construct paths for bound functions.
void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : target (checked to be a JSBoundFunction)
  //  -- r6 : new.target (only in case of [[Construct]])
  // -----------------------------------

  // Load [[BoundArguments]] into r5 and length of that into r7.
  Label no_bound_arguments;
  __ LoadP(r5, FieldMemOperand(r4, JSBoundFunction::kBoundArgumentsOffset));
  __ LoadP(r7, FieldMemOperand(r5, FixedArray::kLengthOffset));
  // SetRC makes SmiUntag record whether the length is zero in cr0.
  __ SmiUntag(r7, SetRC);
  __ beq(&no_bound_arguments, cr0);
  {
    // ----------- S t a t e -------------
    //  -- r3 : the number of arguments (not including the receiver)
    //  -- r4 : target (checked to be a JSBoundFunction)
    //  -- r5 : the [[BoundArguments]] (implemented as FixedArray)
    //  -- r6 : new.target (only in case of [[Construct]])
    //  -- r7 : the number of [[BoundArguments]]
    // -----------------------------------

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ mr(r9, sp);  // preserve previous stack pointer
      __ ShiftLeftImm(r10, r7, Operand(kPointerSizeLog2));
      __ sub(sp, sp, r10);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
      __ bgt(&done);  // Signed comparison.
      // Restore the stack pointer.
      __ mr(sp, r9);
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow, 0);
      }
      __ bind(&done);
    }

    // Relocate arguments down the stack.
    //  -- r3 : the number of arguments (not including the receiver)
    //  -- r9 : the previous stack pointer
    //  -- r10: the size of the [[BoundArguments]]
    {
      Label skip, loop;
      // r8 walks the byte offset; it is reused by the copy loop below.
      __ li(r8, Operand::Zero());
      __ cmpi(r3, Operand::Zero());
      __ beq(&skip);
      // Ascending copy is safe because the destination is below the source.
      __ mtctr(r3);
      __ bind(&loop);
      __ LoadPX(r0, MemOperand(r9, r8));
      __ StorePX(r0, MemOperand(sp, r8));
      __ addi(r8, r8, Operand(kPointerSize));
      __ bdnz(&loop);
      __ bind(&skip);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      // Point r5 just past the last element; LoadPU's pre-decrement then
      // walks the FixedArray from last to first, matching stack order.
      __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ add(r5, r5, r10);
      __ mtctr(r7);
      __ bind(&loop);
      __ LoadPU(r0, MemOperand(r5, -kPointerSize));
      __ StorePX(r0, MemOperand(sp, r8));
      __ addi(r8, r8, Operand(kPointerSize));
      __ bdnz(&loop);
      // Account for the bound arguments in the argument count.
      __ add(r3, r3, r7);
    }
  }
  __ bind(&no_bound_arguments);
}

}  // namespace
2138 
2139 
// static
void Builtins::Generate_CallBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(r4);

  // Patch the receiver to [[BoundThis]].
  __ LoadP(ip, FieldMemOperand(r4, JSBoundFunction::kBoundThisOffset));
  __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
  __ StorePX(ip, MemOperand(sp, r0));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ LoadP(r4,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
  // Load the Call builtin's code object through an external reference and
  // jump to its instruction start.
  __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                       masm->isolate())));
  __ LoadP(ip, MemOperand(ip));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}
2165 
2166 
// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(r4, &non_callable);
  __ bind(&non_smi);
  // CompareObjectType leaves the map in r7 and the instance type in r8, so
  // the subsequent cmpi checks reuse r8 without reloading.
  __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
          RelocInfo::CODE_TARGET, eq);
  __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(),
          RelocInfo::CODE_TARGET, eq);
  __ cmpi(r8, Operand(JS_PROXY_TYPE));
  __ bne(&non_function);

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(r4);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ addi(r3, r3, Operand(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Check if target has a [[Call]] internal method.
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  // TestBit records its result in cr0, hence the cr0-qualified branch.
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&non_callable, cr0);
  // Overwrite the original receiver with the (original) target.
  __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
  __ StorePX(r4, MemOperand(sp, r8));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r4);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
2219 
2220 
// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the constructor to call (checked to be a JSFunction)
  //  -- r6 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(r4);

  // Calling convention for function specific ConstructStubs require
  // r5 to contain either an AllocationSite or undefined.
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset));
  // Jump past the Code object header to the stub's first instruction.
  __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}
2241 
2242 
// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSBoundFunction)
  //  -- r6 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(r4);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  Label skip;
  __ cmp(r4, r6);
  __ bne(&skip);
  __ LoadP(r6,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
  __ bind(&skip);

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ LoadP(r4,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
  // Load the Construct builtin's code object through an external reference
  // and jump to its instruction start.
  __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ LoadP(ip, MemOperand(ip));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}
2271 
2272 
// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the constructor to call (checked to be a JSProxy)
  //  -- r6 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(r4, r6);
  // Include the pushed new_target, constructor and the receiver.
  __ addi(r3, r3, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}
2290 
2291 
// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the constructor to call (can be any Object)
  //  -- r6 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(r4, &non_constructor);

  // Dispatch based on instance type.
  // CompareObjectType leaves the map in r7 and the instance type in r8 for
  // the checks below.
  __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Construct]] internal method.
  __ lbz(r5, FieldMemOperand(r7, Map::kBitFieldOffset));
  // TestBit records its result in cr0, hence the cr0-qualified branch.
  __ TestBit(r5, Map::kIsConstructor, r0);
  __ beq(&non_constructor, cr0);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ cmpi(r8, Operand(JS_PROXY_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
    __ StorePX(r4, MemOperand(sp, r8));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r4);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
2343 
2344 
Generate_ArgumentsAdaptorTrampoline(MacroAssembler * masm)2345 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2346   // ----------- S t a t e -------------
2347   //  -- r3 : actual number of arguments
2348   //  -- r4 : function (passed through to callee)
2349   //  -- r5 : expected number of arguments
2350   //  -- r6 : new target (passed through to callee)
2351   // -----------------------------------
2352 
2353   Label invoke, dont_adapt_arguments, stack_overflow;
2354 
2355   Label enough, too_few;
2356   __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
2357   __ cmp(r3, r5);
2358   __ blt(&too_few);
2359   __ cmpi(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
2360   __ beq(&dont_adapt_arguments);
2361 
2362   {  // Enough parameters: actual >= expected
2363     __ bind(&enough);
2364     EnterArgumentsAdaptorFrame(masm);
2365     ArgumentAdaptorStackCheck(masm, &stack_overflow);
2366 
2367     // Calculate copy start address into r3 and copy end address into r7.
2368     // r3: actual number of arguments as a smi
2369     // r4: function
2370     // r5: expected number of arguments
2371     // r6: new target (passed through to callee)
2372     // ip: code entry to call
2373     __ SmiToPtrArrayOffset(r3, r3);
2374     __ add(r3, r3, fp);
2375     // adjust for return address and receiver
2376     __ addi(r3, r3, Operand(2 * kPointerSize));
2377     __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
2378     __ sub(r7, r3, r7);
2379 
2380     // Copy the arguments (including the receiver) to the new stack frame.
2381     // r3: copy start address
2382     // r4: function
2383     // r5: expected number of arguments
2384     // r6: new target (passed through to callee)
2385     // r7: copy end address
2386     // ip: code entry to call
2387 
2388     Label copy;
2389     __ bind(&copy);
2390     __ LoadP(r0, MemOperand(r3, 0));
2391     __ push(r0);
2392     __ cmp(r3, r7);  // Compare before moving to next argument.
2393     __ subi(r3, r3, Operand(kPointerSize));
2394     __ bne(&copy);
2395 
2396     __ b(&invoke);
2397   }
2398 
2399   {  // Too few parameters: Actual < expected
2400     __ bind(&too_few);
2401 
2402     // If the function is strong we need to throw an error.
2403     Label no_strong_error;
2404     __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2405     __ lwz(r8, FieldMemOperand(r7, SharedFunctionInfo::kCompilerHintsOffset));
2406     __ TestBit(r8, SharedFunctionInfo::kStrongModeBit, r0);
2407     __ beq(&no_strong_error, cr0);
2408 
2409     // What we really care about is the required number of arguments.
2410     __ lwz(r7, FieldMemOperand(r7, SharedFunctionInfo::kLengthOffset));
2411 #if V8_TARGET_ARCH_PPC64
2412     // See commment near kLenghtOffset in src/objects.h
2413     __ srawi(r7, r7, kSmiTagSize);
2414 #else
2415     __ SmiUntag(r7);
2416 #endif
2417     __ cmp(r3, r7);
2418     __ bge(&no_strong_error);
2419 
2420     {
2421       FrameScope frame(masm, StackFrame::MANUAL);
2422       EnterArgumentsAdaptorFrame(masm);
2423       __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments);
2424     }
2425 
2426     __ bind(&no_strong_error);
2427     EnterArgumentsAdaptorFrame(masm);
2428     ArgumentAdaptorStackCheck(masm, &stack_overflow);
2429 
2430     // Calculate copy start address into r0 and copy end address is fp.
2431     // r3: actual number of arguments as a smi
2432     // r4: function
2433     // r5: expected number of arguments
2434     // r6: new target (passed through to callee)
2435     // ip: code entry to call
2436     __ SmiToPtrArrayOffset(r3, r3);
2437     __ add(r3, r3, fp);
2438 
2439     // Copy the arguments (including the receiver) to the new stack frame.
2440     // r3: copy start address
2441     // r4: function
2442     // r5: expected number of arguments
2443     // r6: new target (passed through to callee)
2444     // ip: code entry to call
2445     Label copy;
2446     __ bind(&copy);
2447     // Adjust load for return address and receiver.
2448     __ LoadP(r0, MemOperand(r3, 2 * kPointerSize));
2449     __ push(r0);
2450     __ cmp(r3, fp);  // Compare before moving to next argument.
2451     __ subi(r3, r3, Operand(kPointerSize));
2452     __ bne(&copy);
2453 
2454     // Fill the remaining expected arguments with undefined.
2455     // r4: function
2456     // r5: expected number of arguments
2457     // r6: new target (passed through to callee)
2458     // ip: code entry to call
2459     __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2460     __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
2461     __ sub(r7, fp, r7);
2462     // Adjust for frame.
2463     __ subi(r7, r7, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
2464                             2 * kPointerSize));
2465 
2466     Label fill;
2467     __ bind(&fill);
2468     __ push(r0);
2469     __ cmp(sp, r7);
2470     __ bne(&fill);
2471   }
2472 
2473   // Call the entry point.
2474   __ bind(&invoke);
2475   __ mr(r3, r5);
2476   // r3 : expected number of arguments
2477   // r4 : function (passed through to callee)
2478   // r6 : new target (passed through to callee)
2479   __ CallJSEntry(ip);
2480 
2481   // Store offset of return address for deoptimizer.
2482   masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2483 
2484   // Exit frame and return.
2485   LeaveArgumentsAdaptorFrame(masm);
2486   __ blr();
2487 
2488 
2489   // -------------------------------------------
2490   // Dont adapt arguments.
2491   // -------------------------------------------
2492   __ bind(&dont_adapt_arguments);
2493   __ JumpToJSEntry(ip);
2494 
2495   __ bind(&stack_overflow);
2496   {
2497     FrameScope frame(masm, StackFrame::MANUAL);
2498     __ CallRuntime(Runtime::kThrowStackOverflow);
2499     __ bkpt(0);
2500   }
2501 }
2502 
2503 
2504 #undef __
2505 }  // namespace internal
2506 }  // namespace v8
2507 
2508 #endif  // V8_TARGET_ARCH_PPC
2509