// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_PPC

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  // ----------- S t a t e -------------
  //  -- r3                 : number of arguments excluding receiver
  //  -- r4                 : target
  //  -- r6                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(r4);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // JumpToExternalReference expects r3 to contain the number of arguments
  // including the receiver and the extra arguments.
  const int num_extra_args = 3;
  __ addi(r3, r3, Operand(num_extra_args + 1));

  // Insert extra arguments.
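  // (r3 is smi-tagged across these pushes so that the stack holds only
  // tagged values; a raw integer on the stack could be misread by the GC.)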
  __ SmiTag(r3);
  __ Push(r3, r4, r6);
  __ SmiUntag(r3);

  __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
                             exit_frame_type == BUILTIN_EXIT);
}

// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the current native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}

// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the current native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}

void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function, by tail calling a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r4);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r5, r0);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0);
    __ CompareObjectType(r5, r6, r7, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  __ mr(r6, r4);
  // Run the native code for the Array function called as a normal function,
  // by tail calling a stub.
  __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Condition const cond_done = (kind == MathMaxMinKind::kMin) ? lt : gt;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in r8 and the double value in d1.
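  // (This also covers the no-argument case: per the spec, Math.max() is
  // -Infinity and Math.min() is +Infinity.)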
  __ LoadRoot(r8, root_index);
  __ lfd(d1, FieldMemOperand(r8, HeapNumber::kValueOffset));

  // Set up the loop counter in r7; it is decremented before each use, so the
  // loop visits arguments argc - 1 down to 0.
  Label done_loop, loop;
  __ mr(r7, r3);
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ subi(r7, r7, Operand(1));
    __ cmpi(r7, Operand::Zero());
    __ blt(&done_loop);

    // Load the next parameter tagged value into r5.
    __ ShiftLeftImm(r5, r7, Operand(kPointerSizeLog2));
    __ LoadPX(r5, MemOperand(sp, r5));

    // Load the double value of the parameter into d2, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(r5, &convert_smi);
    __ LoadP(r6, FieldMemOperand(r5, HeapObject::kMapOffset));
    __ JumpIfRoot(r6, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(r3);
      __ SmiTag(r7);
      __ EnterBuiltinFrame(cp, r4, r3);
      __ Push(r7, r8);
      __ mr(r3, r5);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ mr(r5, r3);
      __ Pop(r7, r8);
      __ LeaveBuiltinFrame(cp, r4, r3);
      __ SmiUntag(r7);
      __ SmiUntag(r3);
      {
        // Restore the double accumulator value (d1).
        Label done_restore;
        __ SmiToDouble(d1, r8);
        __ JumpIfSmi(r8, &done_restore);
        __ lfd(d1, FieldMemOperand(r8, HeapNumber::kValueOffset));
        __ bind(&done_restore);
      }
    }
    __ b(&convert);
    __ bind(&convert_number);
    __ lfd(d2, FieldMemOperand(r5, HeapNumber::kValueOffset));
    __ b(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDouble(d2, r5);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left hand
    // side (d1) and the next parameter value on the right hand side (d2).
    Label compare_nan, compare_swap;
    __ fcmpu(d1, d2);
    __ bunordered(&compare_nan);
    __ b(cond_done, &loop);
    __ b(CommuteCondition(cond_done), &compare_swap);

    // Left and right hand side are equal, check for -0 vs. +0.
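    // IEEE comparison treats -0 and +0 as equal, but the spec does not:
    // Math.max(-0, +0) must be +0 and Math.min(+0, -0) must be -0, so the
    // sign decides whether the argument should replace the accumulator.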
    __ TestDoubleIsMinusZero(reg, r9, r0);
    __ bne(&loop);

    // Update accumulator. Result is on the right hand side.
    __ bind(&compare_swap);
    __ fmr(d1, d2);
    __ mr(r8, r5);
    __ b(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    // We still need to visit the rest of the arguments.
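    // (E.g. Math.max(NaN, x) is NaN for any x, yet ToNumber must still run
    // on the remaining arguments for its observable side effects.)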
    __ bind(&compare_nan);
    __ LoadRoot(r8, Heap::kNanValueRootIndex);
    __ lfd(d1, FieldMemOperand(r8, HeapNumber::kValueOffset));
    __ b(&loop);
  }

  __ bind(&done_loop);
  // Drop all slots, including the receiver.
  __ addi(r3, r3, Operand(1));
  __ Drop(r3);
  __ mr(r3, r8);
  __ Ret();
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

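  // Number called as a function coerces its argument: e.g. Number("42")
  // returns the number 42, and Number() with no arguments returns +0.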
  // 1. Load the first argument into r3.
  Label no_arguments;
  {
    __ mr(r5, r3);  // Store argc in r5.
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
    __ LoadPX(r3, MemOperand(sp, r3));
  }

  // 2a. Convert the first argument to a number.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(r5);
    __ EnterBuiltinFrame(cp, r4, r5);
    __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, r4, r5);
    __ SmiUntag(r5);
  }

  {
    // Drop all arguments including the receiver.
    __ Drop(r5);
    __ Ret(1);
  }

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ LoadSmiLiteral(r3, Smi::kZero);
  __ Ret(1);
}

// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- r6                     : new target
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

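  // Invoked for 'new Number(x)': the result is a JSValue wrapper, so e.g.
  // typeof new Number(1) is "object" while typeof Number(1) is "number".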
  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // 2. Load the first argument into r5.
  {
    Label no_arguments, done;
    __ mr(r9, r3);  // Store argc in r9.
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
    __ LoadPX(r5, MemOperand(sp, r5));
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadSmiLiteral(r5, Smi::kZero);
    __ bind(&done);
  }

  // 3. Make sure r5 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(r5, &done_convert);
    __ CompareObjectType(r5, r7, r7, HEAP_NUMBER_TYPE);
    __ beq(&done_convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(r9);
      __ EnterBuiltinFrame(cp, r4, r9);
      __ Push(r6);
      __ mr(r3, r5);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ mr(r5, r3);
      __ Pop(r6);
      __ LeaveBuiltinFrame(cp, r4, r9);
      __ SmiUntag(r9);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ cmp(r4, r6);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(r3, r4, r5, r7, r8, &new_object);
  __ b(&drop_frame_and_ret);

  // 6. Fall back to the runtime to create a new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(r9);
    __ EnterBuiltinFrame(cp, r4, r9);
    __ Push(r5);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r5);
    __ LeaveBuiltinFrame(cp, r4, r9);
    __ SmiUntag(r9);
  }
  __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0);

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r9);
    __ Ret(1);
  }
}

// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

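  // String called as a function, e.g. String(1) == "1". Symbols are special:
  // String(Symbol("a")) yields "Symbol(a)" even though implicit ToString on
  // a symbol throws, hence the separate descriptive-string path below.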
  // 1. Load the first argument into r3.
  Label no_arguments;
  {
    __ mr(r5, r3);  // Store argc in r5.
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2));
    __ LoadPX(r3, MemOperand(sp, r3));
  }

  // 2a. At least one argument, return r3 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label drop_frame_and_ret, to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(r3, &to_string);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(r3, r6, r6, FIRST_NONSTRING_TYPE);
    __ bgt(&to_string);
    __ beq(&symbol_descriptive_string);
    __ b(&drop_frame_and_ret);
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(r3, Heap::kempty_stringRootIndex);
    __ Ret(1);
  }

  // 3a. Convert r3 to a string.
  __ bind(&to_string);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(r5);
    __ EnterBuiltinFrame(cp, r4, r5);
    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, r4, r5);
    __ SmiUntag(r5);
  }
  __ b(&drop_frame_and_ret);

  // 3b. Convert symbol in r3 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Drop(r5);
    __ Drop(1);
    __ Push(r3);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r5);
    __ Ret(1);
  }
}

// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                     : number of arguments
  //  -- r4                     : constructor function
  //  -- r6                     : new target
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

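  // Invoked for 'new String(x)': wraps the (possibly converted) string in a
  // JSValue, so e.g. new String("a") is an object, not a primitive string.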
  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));

  // 2. Load the first argument into r5.
  {
    Label no_arguments, done;
    __ mr(r9, r3);  // Store argc in r9.
    __ cmpi(r3, Operand::Zero());
    __ beq(&no_arguments);
    __ subi(r3, r3, Operand(1));
    __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
    __ LoadPX(r5, MemOperand(sp, r5));
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadRoot(r5, Heap::kempty_stringRootIndex);
    __ bind(&done);
  }

  // 3. Make sure r5 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(r5, &convert);
    __ CompareObjectType(r5, r7, r7, FIRST_NONSTRING_TYPE);
    __ blt(&done_convert);
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(r9);
      __ EnterBuiltinFrame(cp, r4, r9);
      __ Push(r6);
      __ mr(r3, r5);
      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
      __ mr(r5, r3);
      __ Pop(r6);
      __ LeaveBuiltinFrame(cp, r4, r9);
      __ SmiUntag(r9);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ cmp(r4, r6);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(r3, r4, r5, r7, r8, &new_object);
  __ b(&drop_frame_and_ret);

  // 6. Fall back to the runtime to create a new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(r9);
    __ EnterBuiltinFrame(cp, r4, r9);
    __ Push(r5);  // first argument
    FastNewObjectStub stub(masm->isolate());
    __ CallStub(&stub);
    __ Pop(r5);
    __ LeaveBuiltinFrame(cp, r4, r9);
    __ SmiUntag(r9);
  }
  __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0);

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r9);
    __ Ret(1);
  }
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- r3 : argument count (preserved for callee)
  //  -- r4 : target function (preserved for callee)
  //  -- r6 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    // Push a copy of the target function and the new target.
    // Push function as parameter to the runtime call.
    __ SmiTag(r3);
    __ Push(r3, r4, r6, r4);

    __ CallRuntime(function_id, 1);
    __ mr(r5, r3);

    // Restore target function and new target.
    __ Pop(r3, r4, r6);
    __ SmiUntag(r3);
  }
  __ addi(ip, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(ip, Heap::kStackLimitRootIndex);
  __ cmpl(sp, ip);
  __ bge(&ok);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

namespace {

void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
                                    bool create_implicit_receiver,
                                    bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- r3     : number of arguments
  //  -- r4     : constructor function
  //  -- r6     : new target
  //  -- cp     : context
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.

    if (!create_implicit_receiver) {
      __ SmiTag(r7, r3, SetRC);
      __ Push(cp, r7);
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    } else {
      __ SmiTag(r3);
      __ Push(cp, r3);

      // Allocate the new receiver object.
      __ Push(r4, r6);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mr(r7, r3);
      __ Pop(r4, r6);

      // ----------- S t a t e -------------
      //  -- r4: constructor function
      //  -- r6: new target
      //  -- r7: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ LoadP(r3, MemOperand(sp));
      __ SmiUntag(r3, SetRC);

      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(r7, r7);
    }

    // Set up pointer to last argument.
    __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r3: number of arguments
    // r4: constructor function
    // r5: address of last argument (caller sp)
    // r6: new target
    // cr0: condition indicating whether r3 is zero
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
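    // The copy runs from the highest slot down, using the count register as
    // the loop counter (bdnz) and ip as the descending byte offset.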
    Label loop, no_args;
    __ beq(&no_args, cr0);
    __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
    __ sub(sp, sp, ip);
    __ mtctr(r3);
    __ bind(&loop);
    __ subi(ip, ip, Operand(kPointerSize));
    __ LoadPX(r0, MemOperand(r5, ip));
    __ StorePX(r0, MemOperand(sp, ip));
    __ bdnz(&loop);
    __ bind(&no_args);

    // Call the function.
    // r3: number of arguments
    // r4: constructor function
    // r6: new target

    ParameterCount actual(r3);
    __ InvokeFunction(r4, r6, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r3: result
    // sp[0]: receiver
    // sp[1]: number of arguments (smi-tagged)
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // r3: result
      // sp[0]: receiver
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(r3, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CompareObjectType(r3, r4, r6, FIRST_JS_RECEIVER_TYPE);
      __ bge(&exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ LoadP(r3, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // r3: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ LoadP(r4, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ LoadP(r4, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
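  // E.g. 'class B extends A { constructor() { return 1; } }' must throw a
  // TypeError when constructed.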
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(r3, &dont_throw);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

  __ SmiToPtrArrayOffset(r4, r4);
  __ add(sp, sp, r4);
  __ addi(sp, sp, Operand(kPointerSize));
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r4, r5);
  }
  __ blr();
}

}  // namespace

void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}

void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the value to pass to the generator
  //  -- r4 : the JSGeneratorObject to resume
  //  -- r5 : the resume mode (tagged)
  //  -- lr : return address
  // -----------------------------------
  __ AssertGeneratorObject(r4);

  // Store input value into generator object.
  __ StoreP(r3, FieldMemOperand(r4, JSGeneratorObject::kInputOrDebugPosOffset),
            r0);
  __ RecordWriteField(r4, JSGeneratorObject::kInputOrDebugPosOffset, r3, r6,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kResumeModeOffset), r0);

  // Load suspended function and context.
  __ LoadP(cp, FieldMemOperand(r4, JSGeneratorObject::kContextOffset));
  __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(masm->isolate());
  STATIC_ASSERT(StepFrame > StepIn);
  __ mov(ip, Operand(last_step_action));
  __ LoadByte(ip, MemOperand(ip), r0);
  __ extsb(ip, ip);
  __ cmpi(ip, Operand(StepIn));
  __ bge(&prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.

  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());

  __ mov(ip, Operand(debug_suspended_generator));
  __ LoadP(ip, MemOperand(ip));
  __ cmp(ip, r4);
  __ beq(&prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Push receiver.
  __ LoadP(ip, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
  __ Push(ip);

  // ----------- S t a t e -------------
  //  -- r4    : the JSGeneratorObject to resume
  //  -- r5    : the resume mode (tagged)
  //  -- r7    : generator function
  //  -- cp    : generator context
  //  -- lr    : return address
  //  -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
  __ LoadWordArith(
      r3, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    Label loop, done_loop;
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
#if V8_TARGET_ARCH_PPC64
    __ cmpi(r3, Operand::Zero());
    __ beq(&done_loop);
#else
    __ SmiUntag(r3, SetRC);
    __ beq(&done_loop, cr0);
#endif
    __ mtctr(r3);
    __ bind(&loop);
    __ push(ip);
    __ bdnz(&loop);
    __ bind(&done_loop);
  }

  // Dispatch on the kind of generator object.
  Label old_generator;
  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset));
  __ CompareObjectType(r6, r6, r6, BYTECODE_ARRAY_TYPE);
  __ bne(&old_generator);

  // New-style (ignition/turbofan) generator object
  {
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ mr(r6, r4);
    __ mr(r4, r7);
    __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
    __ JumpToJSEntry(ip);
  }

  // Old-style (full-codegen) generator object
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PushStandardFrame(r7);

    // Restore the operand stack.
    __ LoadP(r3, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset));
    __ LoadP(r6, FieldMemOperand(r3, FixedArray::kLengthOffset));
    __ addi(r3, r3,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
    {
      Label loop, done_loop;
      __ SmiUntag(r6, SetRC);
      __ beq(&done_loop, cr0);
      __ mtctr(r6);
      __ bind(&loop);
      __ LoadPU(ip, MemOperand(r3, kPointerSize));
      __ Push(ip);
      __ bdnz(&loop);
      __ bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(ip, Heap::kEmptyFixedArrayRootIndex);
    __ StoreP(ip, FieldMemOperand(r4, JSGeneratorObject::kOperandStackOffset),
              r0);

    // Resume the generator function at the continuation.
    __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
    __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kCodeOffset));
    __ addi(r6, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
    {
      ConstantPoolUnavailableScope constant_pool_unavailable(masm);
      if (FLAG_enable_embedded_constant_pool) {
        __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r6);
      }
      __ LoadP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset));
      __ SmiUntag(r5);
      __ add(r6, r6, r5);
      __ LoadSmiLiteral(r5,
                        Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
      __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kContinuationOffset),
                r0);
      __ mr(r3, r4);  // Continuation expects generator object in r3.
      __ Jump(r6);
    }
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4, r5, r7);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(r4, r5);
    __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4, r5);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(r4, r5);
    __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  __ push(r4);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };

// Clobbers r5; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
  // Make r5 the space we have left. The stack might already be overflowed
  // here which will cause r5 to become negative.
  __ sub(r5, sp, r5);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ SmiToPtrArrayOffset(r0, argc);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ ShiftLeftImm(r0, argc, Operand(kPointerSizeLog2));
  }
  __ cmp(r5, r0);
  __ bgt(&okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r3: new.target
  // r4: function
  // r5: receiver
  // r6: argc
  // r7: argv
  // r0, r8-r9, cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ mov(cp, Operand(context_address));
    __ LoadP(cp, MemOperand(cp));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(r4, r5);

    // Check if we have enough stack space to push all arguments.
    // Clobbers r5.
    Generate_CheckStackOverflow(masm, r6, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop.
    // r4: function
    // r6: argc
    // r7: argv, i.e. points to first arg
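    // Each argv slot holds a handle (an Object**): the first load below
    // fetches the handle, the second dereferences it to the actual object.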
    Label loop, entry;
    __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2));
    __ add(r5, r7, r0);
    // r5 points past last arg.
    __ b(&entry);
    __ bind(&loop);
    __ LoadP(r8, MemOperand(r7));  // read next parameter
    __ addi(r7, r7, Operand(kPointerSize));
    __ LoadP(r0, MemOperand(r8));  // dereference handle
    __ push(r0);                   // push parameter
    __ bind(&entry);
    __ cmp(r7, r5);
    __ bne(&loop);

    // Setup new.target and argc.
    __ mr(r7, r3);
    __ mr(r3, r6);
    __ mr(r6, r7);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
    __ mr(r14, r7);
    __ mr(r15, r7);
    __ mr(r16, r7);
    __ mr(r17, r7);

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame and remove the parameters (except function), and
    // return.
  }
  __ blr();

  // r3: result
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ LoadP(args_count,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ lwz(args_count,
         FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  __ add(sp, sp, args_count);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o r4: the JS function object being called.
//   o r6: the new target
//   o cp: our context
//   o pp: the caller's constant pool pointer (if enabled)
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(r4);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ LoadP(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  Label array_done;
  Register debug_info = r5;
  DCHECK(!debug_info.is(r3));
  __ LoadP(debug_info,
           FieldMemOperand(r3, SharedFunctionInfo::kDebugInfoOffset));
  // Load original bytecode array or the debug copy.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
  __ CmpSmiLiteral(debug_info, DebugInfo::uninitialized(), r0);
  __ beq(&array_done);
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(debug_info, DebugInfo::kDebugBytecodeArrayIndex));
  __ bind(&array_done);

  // Check whether we should continue to use the interpreter.
  Label switch_to_different_code_kind;
  __ LoadP(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset));
  __ mov(ip, Operand(masm->CodeObject()));  // Self-reference to this code.
  __ cmp(r3, ip);
  __ bne(&switch_to_different_code_kind);

  // Increment invocation count for the function.
  __ LoadP(r7, FieldMemOperand(r4, JSFunction::kLiteralsOffset));
  __ LoadP(r7, FieldMemOperand(r7, LiteralsArray::kFeedbackVectorOffset));
  __ LoadP(r8, FieldMemOperand(r7, TypeFeedbackVector::kInvocationCountIndex *
                                           kPointerSize +
                                       TypeFeedbackVector::kHeaderSize));
  __ AddSmiLiteral(r8, r8, Smi::FromInt(1), r0);
  __ StoreP(r8, FieldMemOperand(r7, TypeFeedbackVector::kInvocationCountIndex *
                                            kPointerSize +
                                        TypeFeedbackVector::kHeaderSize),
            r0);

  // Check function data field is actually a BytecodeArray object.

  if (FLAG_debug_code) {
    __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Load initial bytecode offset.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(r3, kInterpreterBytecodeOffsetRegister);
  __ Push(r6, kInterpreterBytecodeArrayRegister, r3);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size (word) from the BytecodeArray object.
    __ lwz(r5, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                               BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ sub(r6, sp, r5);
    __ LoadRoot(r0, Heap::kRealStackLimitRootIndex);
    __ cmpl(r6, r0);
    __ bge(&ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    Label loop, no_args;
    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
    __ ShiftRightImm(r5, r5, Operand(kPointerSizeLog2), SetRC);
    __ beq(&no_args, cr0);
    __ mtctr(r5);
    __ bind(&loop);
    __ push(r6);
    __ bdnz(&loop);
    __ bind(&no_args);
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
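  // The byte at [bytecode array + offset] indexes the dispatch table, whose
  // entries are the entry addresses of the bytecode handlers.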
  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftImm(ip, r4, Operand(kPointerSizeLog2));
  __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ Call(ip);

  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in r3.
  LeaveInterpreterFrame(masm, r5);
  __ blr();

  // If the shared code is no longer this entry trampoline, then the underlying
  // function has been switched to a different kind of code and we heal the
  // closure by switching the code entry field over to the new code as well.
  __ bind(&switch_to_different_code_kind);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kCodeOffset));
  __ addi(r7, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ StoreP(r7, FieldMemOperand(r4, JSFunction::kCodeEntryOffset), r0);
  __ RecordWriteCodeEntryField(r4, r7, r8);
  __ JumpToJSEntry(r7);
}

static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
                                        Register scratch,
                                        Label* stack_overflow) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
  // Make scratch the space we have left. The stack might already be overflowed
  // here which will cause scratch to become negative.
  __ sub(scratch, sp, scratch);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftImm(r0, num_args, Operand(kPointerSizeLog2));
  __ cmp(scratch, r0);
  __ ble(stack_overflow);  // Signed comparison.
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args, Register index,
                                         Register count, Register scratch,
                                         Label* stack_overflow) {
  // A stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, num_args, scratch, stack_overflow);

  Label loop;
  __ addi(index, index, Operand(kPointerSize));  // Bias up for LoadPU
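  // LoadPU is a load-with-update: each iteration loads from index -
  // kPointerSize and writes that address back into index, so the loop walks
  // down through the arguments as it pushes them.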
  __ mtctr(count);
  __ bind(&loop);
  __ LoadPU(scratch, MemOperand(index, -kPointerSize));
  __ push(scratch);
  __ bdnz(&loop);
}

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode,
    CallableType function_type) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r5 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- r4 : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;

  // Calculate number of arguments (add one for receiver).
  __ addi(r6, r3, Operand(1));

  // Push the arguments. r5, r7 and r0 will be modified.
  Generate_InterpreterPushArgs(masm, r6, r5, r6, r7, &stack_overflow);

  // Call the target.
  if (function_type == CallableType::kJSFunction) {
    __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                                      tail_call_mode),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(function_type, CallableType::kAny);
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                              tail_call_mode),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
    MacroAssembler* masm, CallableType construct_type) {
  // ----------- S t a t e -------------
  // -- r3 : argument count (not including receiver)
  // -- r6 : new target
  // -- r4 : constructor to call
  // -- r5 : allocation site feedback if available, undefined otherwise.
  // -- r7 : address of the first argument
  // -----------------------------------
  Label stack_overflow;

  // Push a slot for the receiver to be constructed.
  __ li(r0, Operand::Zero());
  __ push(r0);

  // Push the arguments (skip if none).
  Label skip;
  __ cmpi(r3, Operand::Zero());
  __ beq(&skip);
  // Push the arguments. r7, r8 and r0 will be modified.
  Generate_InterpreterPushArgs(masm, r3, r7, r3, r8, &stack_overflow);
  __ bind(&skip);

  __ AssertUndefinedOrAllocationSite(r5, r8);
  if (construct_type == CallableType::kJSFunction) {
    __ AssertFunction(r4);

    // Tail call to the function-specific construct stub (still in the caller
    // context at this point).
    __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
    __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset));
    // Jump to the construct function.
    __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ Jump(ip);

  } else {
    DCHECK_EQ(construct_type, CallableType::kAny);
    // Call the constructor with r3, r4, and r6 unmodified.
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstructArray(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- r3 : argument count (not including receiver)
  // -- r4 : target to call verified to be Array function
  // -- r5 : allocation site feedback if available, undefined otherwise.
  // -- r6 : address of the first argument
  // -----------------------------------
  Label stack_overflow;

  __ addi(r7, r3, Operand(1));  // Add one for receiver.

  // Push the arguments. r6, r8 and r0 will be modified.
  Generate_InterpreterPushArgs(masm, r7, r6, r7, r8, &stack_overflow);

  // The Array constructor expects the constructor in r6; it is the same as r4
  // here.
  __ mr(r6, r4);

  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ bkpt(0);
  }
}

static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
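  // This makes the frame appear to have been entered through the trampoline,
  // so stack walks and the deoptimizer see a normal interpreted frame.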
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
  __ Move(r5, masm->isolate()->builtins()->InterpreterEntryTrampoline());
  __ addi(r0, r5, Operand(interpreter_entry_return_pc_offset->value() +
                          Code::kHeaderSize - kHeapObjectTag));
  __ mtlr(r0);

  // Initialize the dispatch table register.
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r4, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ LoadP(kInterpreterBytecodeOffsetRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
                         kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftImm(ip, r4, Operand(kPointerSizeLog2));
  __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ Jump(ip);
}

void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Advance the current bytecode offset stored within the given interpreter
  // stack frame. This simulates what all bytecode handlers do upon completion
  // of the underlying operation.
  __ LoadP(r4, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ LoadP(r5,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(kInterpreterAccumulatorRegister, r4, r5);
    __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset);
    __ Move(r5, r3);  // Result is the new bytecode offset.
    __ Pop(kInterpreterAccumulatorRegister);
  }
  __ StoreP(r5,
            MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));

  Generate_InterpreterEnterBytecode(masm);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : argument count (preserved for callee)
  //  -- r6 : new target (preserved for callee)
  //  -- r4 : target function (preserved for callee)
  // -----------------------------------
  // First look up code; maybe we don't need to compile!
  Label gotta_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register closure = r4;
  Register map = r9;
  Register index = r5;
  __ LoadP(map,
           FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(map,
           FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ LoadP(index, FieldMemOperand(map, FixedArray::kLengthOffset));
  __ CmpSmiLiteral(index, Smi::FromInt(2), r0);
  __ blt(&gotta_call_runtime);

  // Find literals.
  // r10 : native context
  // r5  : length / index
  // r9  : optimized code map
  // r6  : new target
  // r4  : closure
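  // Layout assumed here: the optimized code map is a FixedArray whose entries
  // each describe one native context, holding weak cells for the context, the
  // OSR ast id, the literals and the cached code (see the
  // SharedFunctionInfo::kOffsetToPrevious* constants used below). The loop
  // walks the entries from the back of the array towards the front.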
  Register native_context = r10;
  __ LoadP(native_context, NativeContextMemOperand());

  __ bind(&loop_top);
  Register temp = r11;
  Register array_pointer = r8;

  // Does the native context match?
  __ SmiToPtrArrayOffset(array_pointer, index);
  __ add(array_pointer, map, array_pointer);
  __ LoadP(temp, FieldMemOperand(array_pointer,
                                 SharedFunctionInfo::kOffsetToPreviousContext));
  __ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ cmp(temp, native_context);
  __ bne(&loop_bottom);
  // OSR id set to none?
  __ LoadP(temp,
           FieldMemOperand(array_pointer,
                           SharedFunctionInfo::kOffsetToPreviousOsrAstId));
  const int bailout_id = BailoutId::None().ToInt();
  __ CmpSmiLiteral(temp, Smi::FromInt(bailout_id), r0);
  __ bne(&loop_bottom);
  // Literals available?
  __ LoadP(temp,
           FieldMemOperand(array_pointer,
                           SharedFunctionInfo::kOffsetToPreviousLiterals));
  __ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ JumpIfSmi(temp, &gotta_call_runtime);

  // Save the literals in the closure.
  __ StoreP(temp, FieldMemOperand(closure, JSFunction::kLiteralsOffset), r0);
  __ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, r7,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  // Code available?
  Register entry = r7;
  __ LoadP(entry,
           FieldMemOperand(array_pointer,
                           SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Found literals and code. Get them into the closure and return.
  // Store code entry in the closure.
  __ addi(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
  __ RecordWriteCodeEntryField(closure, entry, r8);

  // Link the closure into the optimized function list.
  // r7 : code entry
  // r10: native context
  // r4 : closure
  __ LoadP(
      r8, ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ StoreP(r8, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset),
            r0);
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, r8, temp,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ StoreP(
      closure,
      ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST), r0);
  // Save closure before the write barrier.
  __ mr(r8, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, r8, temp,
                            kLRHasNotBeenSaved, kDontSaveFPRegs);
  __ JumpToJSEntry(entry);

  __ bind(&loop_bottom);
  __ SubSmiLiteral(index, index, Smi::FromInt(SharedFunctionInfo::kEntryLength),
                   r0);
  __ CmpSmiLiteral(index, Smi::FromInt(1), r0);
  __ bgt(&loop_top);

  // We found neither literals nor code.
  __ b(&gotta_call_runtime);

  __ bind(&try_shared);
  __ LoadP(entry,
           FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  // Is the shared function marked for tier up?
  __ lbz(r8, FieldMemOperand(entry,
                             SharedFunctionInfo::kMarkedForTierUpByteOffset));
  __ TestBit(r8, SharedFunctionInfo::kMarkedForTierUpBitWithinByte, r0);
  __ bne(&gotta_call_runtime, cr0);
  // Is the full code valid?
  __ LoadP(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
  __ lwz(r8, FieldMemOperand(entry, Code::kFlagsOffset));
  __ DecodeField<Code::KindField>(r8);
  __ cmpi(r8, Operand(Code::BUILTIN));
  __ beq(&gotta_call_runtime);
  // Yes, install the full code.
  __ addi(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
  __ RecordWriteCodeEntryField(closure, entry, r8);
  __ JumpToJSEntry(entry);

  __ bind(&gotta_call_runtime);
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}

void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
}

void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm,
                                 Runtime::kCompileOptimized_NotConcurrent);
}

void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}

void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : argument count (preserved for callee)
  //  -- r4 : new target (preserved for callee)
  //  -- r6 : target function (preserved for callee)
  // -----------------------------------
  Label failed;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Preserve argument count for later compare.
    __ Move(r7, r3);
    // Push a copy of the target function and the new target.
    // Push function as parameter to the runtime call.
    __ SmiTag(r3);
    __ Push(r3, r4, r6, r4);

    // Copy arguments from caller (stdlib, foreign, heap).
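    // Illustration: an asm.js module is instantiated as
    //   module(stdlib, foreign, heap)
    // taking up to three arguments. The unrolled loops below push however
    // many of these the caller actually supplied (r7 holds the count) and
    // pad the rest with undefined, so the runtime always sees three.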
    Label args_done;
    for (int j = 0; j < 4; ++j) {
      Label over;
      if (j < 3) {
        __ cmpi(r7, Operand(j));
        __ bne(&over);
      }
      for (int i = j - 1; i >= 0; --i) {
        __ LoadP(r7, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
                                        i * kPointerSize));
        __ push(r7);
      }
      for (int i = 0; i < 3 - j; ++i) {
        __ PushRoot(Heap::kUndefinedValueRootIndex);
      }
      if (j < 3) {
        __ jmp(&args_done);
        __ bind(&over);
      }
    }
    __ bind(&args_done);

    // Call runtime, on success unwind frame, and parent frame.
    __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
    // A smi 0 is returned on failure, an object on success.
    __ JumpIfSmi(r3, &failed);

    __ Drop(2);
    __ pop(r7);
    __ SmiUntag(r7);
    scope.GenerateLeaveFrame();

    __ addi(r7, r7, Operand(1));
    __ Drop(r7);
    __ Ret();

    __ bind(&failed);
    // Restore target function and new target.
    __ Pop(r3, r4, r6);
    __ SmiUntag(r3);
  }
  // On failure, tail call back to regular js.
  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}

static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r3 - contains return address (beginning of patch sequence)
  //   r4 - isolate
  //   r6 - new target
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ mtlr(r0);
  __ mr(ip, r3);
  __ Jump(ip);
}

#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }                                                           \
  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
      MacroAssembler* masm) {                                 \
    GenerateMakeCodeYoungAgainCommon(masm);                   \
  }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR

void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Point r3 at the start of the PlatformCodeAge sequence.
  __ mr(r3, ip);

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   r3 - contains return address (beginning of patch sequence)
  //   r4 - isolate
  //   r6 - new target
  //   lr - return address
  FrameScope scope(masm, StackFrame::MANUAL);
  __ mflr(r0);
  __ MultiPush(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ PrepareCallCFunction(2, 0, r5);
  __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
      2);
  __ MultiPop(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit());
  __ mtlr(r0);
  __ mr(ip, r3);

  // Perform prologue operations usually performed by the young code stub.
  __ PushStandardFrame(r4);

  // Jump to point after the code-age stub.
  __ addi(r3, ip, Operand(kNoCodeAgeSequenceLength));
  __ Jump(r3);
}

void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}

void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}

static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ addi(sp, sp, Operand(kPointerSize));  // Ignore state
  __ blr();                                // Jump to miss handler
}

void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}

void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}

static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type)));
    __ push(r3);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it -> r9.
  __ LoadP(r9, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(r9);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ cmpi(
      r9,
      Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::NO_REGISTERS)));
  __ bne(&with_tos_register);
  __ addi(sp, sp, Operand(1 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&with_tos_register);
  DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r3.code());
  __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
  __ cmpi(
      r9,
      Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::TOS_REGISTER)));
  __ bne(&unknown_state);
  __ addi(sp, sp, Operand(2 * kPointerSize));  // Remove state.
  __ Ret();

  __ bind(&unknown_state);
  __ stop("no cases left");
}

void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}

void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}

void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}

// Clobbers registers {r7, r8, r9, r10}.
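// Illustrative pseudocode for the check below (not emitted code):
//   for (map = receiver.map;; map = map.prototype.map) {
//     if (map.constructor is JSFunction) {
//       for (t = constructor.shared.function_data;
//            t is FunctionTemplateInfo; t = t.parent_template) {
//         if (t == signature) return;  // compatible receiver
//       }
//     }
//     if (!map.has_hidden_prototype) goto receiver_check_failed;
//   }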
void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                             Register function_template_info,
                             Label* receiver_check_failed) {
  Register signature = r7;
  Register map = r8;
  Register constructor = r9;
  Register scratch = r10;

  // If there is no signature, return the holder.
  __ LoadP(signature, FieldMemOperand(function_template_info,
                                      FunctionTemplateInfo::kSignatureOffset));
  Label receiver_check_passed;
  __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
                &receiver_check_passed);

  // Walk the prototype chain.
  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any.
  __ GetMapConstructor(constructor, map, scratch, scratch);
  __ cmpi(scratch, Operand(JS_FUNCTION_TYPE));
  Label next_prototype;
  __ bne(&next_prototype);
  Register type = constructor;
  __ LoadP(type,
           FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(type,
           FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  __ cmp(signature, type);
  __ beq(&receiver_check_passed);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype);
  __ CompareObjectType(type, scratch, scratch, FUNCTION_TEMPLATE_INFO_TYPE);
  __ bne(&next_prototype);

  // Otherwise load the parent function template and iterate.
  __ LoadP(type,
           FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ b(&function_template_loop);

  // Load the next prototype.
  __ bind(&next_prototype);
  __ lwz(scratch, FieldMemOperand(map, Map::kBitField3Offset));
  __ DecodeField<Map::HasHiddenPrototype>(scratch, SetRC);
  __ beq(receiver_check_failed, cr0);

  __ LoadP(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
  __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Iterate.
  __ b(&prototype_loop_start);

  __ bind(&receiver_check_passed);
}

void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3                 : number of arguments excluding receiver
  //  -- r4                 : callee
  //  -- lr                 : return address
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.
  Label receiver_check_failed;
  __ ShiftLeftImm(r11, r3, Operand(kPointerSizeLog2));
  __ LoadPX(r5, MemOperand(sp, r11));
  CompatibleReceiverCheck(masm, r5, r6, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ LoadP(r7, FieldMemOperand(r6, FunctionTemplateInfo::kCallCodeOffset));
  __ LoadP(r7, FieldMemOperand(r7, CallHandlerInfo::kFastHandlerOffset));
  __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  // Drop the arguments (including the receiver).
  __ addi(r11, r11, Operand(kPointerSize));
  __ add(sp, sp, r11);
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}

static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
                                              bool has_handler_frame) {
  // Lookup the function in the JavaScript frame.
  if (has_handler_frame) {
    __ LoadP(r3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ LoadP(r3, MemOperand(r3, JavaScriptFrameConstants::kFunctionOffset));
  } else {
    __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  }

  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(r3);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the caller.
  Label skip;
  __ CmpSmiLiteral(r3, Smi::kZero, r0);
  __ bne(&skip);
  __ Ret();

  __ bind(&skip);

  // Drop any potential handler frame that may be sitting on top of the actual
  // JavaScript frame. This is the case when OSR is triggered from bytecode.
  if (has_handler_frame) {
    __ LeaveFrame(StackFrame::STUB);
  }

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset));

  {
    ConstantPoolUnavailableScope constant_pool_unavailable(masm);
    __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));  // Code start

    if (FLAG_enable_embedded_constant_pool) {
      __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
    }

    // Load the OSR entrypoint offset from the deoptimization data.
    // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
    __ LoadP(r4, FieldMemOperand(
                     r4, FixedArray::OffsetOfElementAt(
                             DeoptimizationInputData::kOsrPcOffsetIndex)));
    __ SmiUntag(r4);

    // Compute the target address = code start + osr_offset
    __ add(r0, r3, r4);

    // And "return" to the OSR entry point of the function.
    __ mtlr(r0);
    __ blr();
  }
}

void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, false);
}

void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
  Generate_OnStackReplacementHelper(masm, true);
}

// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3    : argc
  //  -- sp[0] : argArray
  //  -- sp[4] : thisArg
  //  -- sp[8] : receiver
  // -----------------------------------
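  // Implements Function.prototype.apply. At the JS level (illustrative):
  //   receiver.apply(thisArg, argArray)
  // calls the receiver with thisArg as receiver and the elements of argArray
  // as arguments.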

  // 1. Load receiver into r4, argArray into r3 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r6;
    Register scratch = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
    __ mr(scratch, r3);
    __ LoadP(r4, MemOperand(new_sp, 0));  // receiver
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize));  // thisArg
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize));  // argArray
    __ bind(&skip);
    __ mr(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }

  // ----------- S t a t e -------------
  //  -- r3    : argArray
  //  -- r4    : receiver
  //  -- sp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable.
  Label receiver_not_callable;
  __ JumpIfSmi(r4, &receiver_not_callable);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&receiver_not_callable, cr0);

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(r3, Heap::kNullValueRootIndex, &no_arguments);
  __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ li(r3, Operand::Zero());
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}

// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
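  // Implements Function.prototype.call. At the JS level (illustrative):
  //   receiver.call(thisArg, arg1, ..., argN)
  // calls the receiver with thisArg as receiver and the remaining arguments
  // passed through; below this is done by shifting every argument down one
  // slot over the old receiver.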
  // 1. Make sure we have at least one argument.
  // r3: actual number of arguments
  {
    Label done;
    __ cmpi(r3, Operand::Zero());
    __ bne(&done);
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ addi(r3, r3, Operand(1));
    __ bind(&done);
  }

  // 2. Get the callable to call (passed as receiver) from the stack.
  // r3: actual number of arguments
  __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
  __ LoadPX(r4, MemOperand(sp, r5));

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  // r3: actual number of arguments
  // r4: callable
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ add(r5, sp, r5);

    __ mtctr(r3);
    __ bind(&loop);
    __ LoadP(ip, MemOperand(r5, -kPointerSize));
    __ StoreP(ip, MemOperand(r5));
    __ subi(r5, r5, Operand(kPointerSize));
    __ bdnz(&loop);
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ subi(r3, r3, Operand(1));
    __ pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}

void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : argc
  //  -- sp[0]  : argumentsList
  //  -- sp[4]  : thisArgument
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------
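  // Implements Reflect.apply. At the JS level (illustrative):
  //   Reflect.apply(target, thisArgument, argumentsList)
  // calls target with thisArgument as receiver and the elements of
  // argumentsList as arguments.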

  // 1. Load target into r4 (if present), argumentsList into r3 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r6;
    Register scratch = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mr(scratch, r4);
    __ mr(r3, r4);
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ beq(&skip);
    __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize));  // thisArgument
    __ cmpi(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 3 * -kPointerSize));  // argumentsList
    __ bind(&skip);
    __ mr(sp, new_sp);
    __ StoreP(scratch, MemOperand(sp, 0));
  }

  // ----------- S t a t e -------------
  //  -- r3    : argumentsList
  //  -- r4    : target
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(r4, &target_not_callable);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&target_not_callable, cr0);

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}

void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3     : argc
  //  -- sp[0]  : new.target (optional)
  //  -- sp[4]  : argumentsList
  //  -- sp[8]  : target
  //  -- sp[12] : receiver
  // -----------------------------------
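  // Implements Reflect.construct. At the JS level (illustrative):
  //   Reflect.construct(target, argumentsList[, newTarget])
  // constructs target with the elements of argumentsList as arguments and
  // newTarget (defaulting to target) as new.target.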

  // 1. Load target into r4 (if present), argumentsList into r3 (if present),
  // new.target into r6 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label skip;
    Register arg_size = r5;
    Register new_sp = r7;
    __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
    __ add(new_sp, sp, arg_size);
    __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
    __ mr(r3, r4);
    __ mr(r6, r4);
    __ StoreP(r4, MemOperand(new_sp, 0));  // receiver (undefined)
    __ cmpi(arg_size, Operand(kPointerSize));
    __ blt(&skip);
    __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize));  // target
    __ mr(r6, r4);  // new.target defaults to target
    __ beq(&skip);
    __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize));  // argumentsList
    __ cmpi(arg_size, Operand(2 * kPointerSize));
    __ beq(&skip);
    __ LoadP(r6, MemOperand(new_sp, 3 * -kPointerSize));  // new.target
    __ bind(&skip);
    __ mr(sp, new_sp);
  }

  // ----------- S t a t e -------------
  //  -- r3    : argumentsList
  //  -- r6    : new.target
  //  -- r4    : target
  //  -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(r4, &target_not_constructor);
  __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsConstructor, r0);
  __ beq(&target_not_constructor, cr0);

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(r6, &new_target_not_constructor);
  __ LoadP(r7, FieldMemOperand(r6, HeapObject::kMapOffset));
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsConstructor, r0);
  __ beq(&new_target_not_constructor, cr0);

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    __ StoreP(r4, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ StoreP(r6, MemOperand(sp, 0));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}

static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  __ SmiTag(r3);
  __ LoadSmiLiteral(r7, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ mflr(r0);
  __ push(r0);
  if (FLAG_enable_embedded_constant_pool) {
    __ Push(fp, kConstantPoolRegister, r7, r4, r3);
  } else {
    __ Push(fp, r7, r4, r3);
  }
  __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                          kPointerSize));
}

static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                                kPointerSize)));
  int stack_adjustment = kPointerSize;  // adjust for receiver
  __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
  __ SmiToPtrArrayOffset(r0, r4);
  __ add(sp, sp, r0);
}

// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3    : argumentsList
  //  -- r4    : target
  //  -- r6    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(r3, &create_runtime);

    // Load the map of argumentsList into r5.
    __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset));

    // Load native context into r7.
    __ LoadP(r7, NativeContextMemOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ LoadP(ip, ContextMemOperand(r7, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r5);
    __ beq(&create_arguments);
    __ LoadP(ip, ContextMemOperand(r7, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ cmp(ip, r5);
    __ beq(&create_arguments);

    // Check if argumentsList is a fast JSArray.
    __ CompareInstanceType(r5, ip, JS_ARRAY_TYPE);
    __ beq(&create_array);

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ Push(r4, r6, r3);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ Pop(r4, r6);
      __ LoadP(r5, FieldMemOperand(r3, FixedArray::kLengthOffset));
      __ SmiUntag(r5);
    }
    __ b(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    __ LoadP(r5, FieldMemOperand(r3, JSArgumentsObject::kLengthOffset));
    __ LoadP(r7, FieldMemOperand(r3, JSObject::kElementsOffset));
    __ LoadP(ip, FieldMemOperand(r7, FixedArray::kLengthOffset));
    __ cmp(r5, ip);
    __ bne(&create_runtime);
    __ SmiUntag(r5);
    __ mr(r3, r7);
    __ b(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ lbz(r5, FieldMemOperand(r5, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(r5);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    __ cmpi(r5, Operand(FAST_ELEMENTS));
    __ bgt(&create_runtime);
    __ cmpi(r5, Operand(FAST_HOLEY_SMI_ELEMENTS));
    __ beq(&create_runtime);
    __ LoadP(r5, FieldMemOperand(r3, JSArray::kLengthOffset));
    __ LoadP(r3, FieldMemOperand(r3, JSArray::kElementsOffset));
    __ SmiUntag(r5);

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
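    // Illustratively (not emitted code), the check is:
    //   if (sp - real_stack_limit <= len * kPointerSize) throw;
    // The subtraction may wrap if the stack is already overflowed; the
    // signed comparison below treats that as an overflow as well.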
    Label done;
    __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
    // Make ip the space we have left. The stack might already be overflowed
    // here which will cause ip to become negative.
    __ sub(ip, sp, ip);
    // Check if the arguments will overflow the stack.
    __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2));
    __ cmp(ip, r0);  // Signed comparison.
    __ bgt(&done);
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- r4    : target
  //  -- r3    : args (a FixedArray built from argumentsList)
  //  -- r5    : len (number of elements to push from args)
  //  -- r6    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    Label loop, no_args;
    __ cmpi(r5, Operand::Zero());
    __ beq(&no_args);
    __ addi(r3, r3,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
    __ mtctr(r5);
    __ bind(&loop);
    __ LoadPU(r0, MemOperand(r3, kPointerSize));
    __ push(r0);
    __ bdnz(&loop);
    __ bind(&no_args);
    __ mr(r3, r5);
  }

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    __ CompareRoot(r6, Heap::kUndefinedValueRootIndex);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}

namespace {

// Drops top JavaScript frame and an arguments adaptor frame below it (if
// present) preserving all the arguments prepared for current call.
// Does nothing if debugger is currently active.
// ES6 14.6.3. PrepareForTailCall
//
// Stack structure for the function g() tail calling f():
//
// ------- Caller frame: -------
// |  ...
// |  g()'s arg M
// |  ...
// |  g()'s arg 1
// |  g()'s receiver arg
// |  g()'s caller pc
// ------- g()'s frame: -------
// |  g()'s caller fp      <- fp
// |  g()'s context
// |  function pointer: g
// |  -------------------------
// |  ...
// |  ...
// |  f()'s arg N
// |  ...
// |  f()'s arg 1
// |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
// ----------------------
//
void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
                        Register scratch1, Register scratch2,
                        Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Comment cmnt(masm, "[ PrepareForTailCall");

  // Prepare for tail call only if ES2015 tail call elimination is enabled.
  Label done;
  ExternalReference is_tail_call_elimination_enabled =
      ExternalReference::is_tail_call_elimination_enabled_address(
          masm->isolate());
  __ mov(scratch1, Operand(is_tail_call_elimination_enabled));
  __ lbz(scratch1, MemOperand(scratch1));
  __ cmpi(scratch1, Operand::Zero());
  __ beq(&done);

  // Drop possible interpreter handler/stub frame.
  {
    Label no_interpreter_frame;
    __ LoadP(scratch3,
             MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
    __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::STUB), r0);
    __ bne(&no_interpreter_frame);
    __ LoadP(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
    __ bind(&no_interpreter_frame);
  }

  // Check if next frame is an arguments adaptor frame.
  Register caller_args_count_reg = scratch1;
  Label no_arguments_adaptor, formal_parameter_count_loaded;
  __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  __ LoadP(
      scratch3,
      MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
  __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
  __ bne(&no_arguments_adaptor);

  // Drop current frame and load arguments count from arguments adaptor frame.
  __ mr(fp, scratch2);
  __ LoadP(caller_args_count_reg,
           MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);
  __ b(&formal_parameter_count_loaded);

  __ bind(&no_arguments_adaptor);
  // Load caller's formal parameter count
  __ LoadP(scratch1,
           MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
  __ LoadP(scratch1,
           FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
  __ LoadWordArith(
      caller_args_count_reg,
      FieldMemOperand(scratch1,
                      SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_PPC64
  __ SmiUntag(caller_args_count_reg);
#endif

  __ bind(&formal_parameter_count_loaded);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
}  // namespace

// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode,
                                     TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(r4);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that the function is not a "classConstructor".
  Label class_constructor;
  __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
  __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset));
  __ TestBitMask(r6, FunctionKind::kClassConstructor
                         << SharedFunctionInfo::kFunctionKindShift,
                 r0);
  __ bne(&class_constructor, cr0);

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
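  // Conversion rules applied below (illustrative): undefined or null becomes
  // the global proxy, other primitives are boxed via ToObject, and anything
  // that is already a JSReceiver is used as-is.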
  Label done_convert;
  __ andi(r0, r6, Operand((1 << SharedFunctionInfo::kStrictModeBit) |
                          (1 << SharedFunctionInfo::kNativeBit)));
  __ bne(&done_convert, cr0);
  {
    // ----------- S t a t e -------------
    //  -- r3 : the number of arguments (not including the receiver)
    //  -- r4 : the function to call (checked to be a JSFunction)
    //  -- r5 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(r6);
    } else {
      Label convert_to_object, convert_receiver;
      __ ShiftLeftImm(r6, r3, Operand(kPointerSizeLog2));
      __ LoadPX(r6, MemOperand(sp, r6));
      __ JumpIfSmi(r6, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ CompareObjectType(r6, r7, r7, FIRST_JS_RECEIVER_TYPE);
      __ bge(&done_convert);
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(r6, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(r6, Heap::kNullValueRootIndex, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(r6);
        }
        __ b(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
        __ SmiTag(r3);
        __ Push(r3, r4);
        __ mr(r3, r6);
        __ Push(cp);
        __ Call(masm->isolate()->builtins()->ToObject(),
                RelocInfo::CODE_TARGET);
        __ Pop(cp);
        __ mr(r6, r3);
        __ Pop(r3, r4);
        __ SmiUntag(r3);
      }
      __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
    __ StorePX(r6, MemOperand(sp, r7));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSFunction)
  //  -- r5 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r3, r6, r7, r8);
  }

  __ LoadWordArith(
      r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
#if !V8_TARGET_ARCH_PPC64
  __ SmiUntag(r5);
#endif
  ParameterCount actual(r3);
  ParameterCount expected(r5);
  __ InvokeFunctionCode(r4, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL);
    __ push(r4);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}

namespace {

void Generate_PushBoundArguments(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : target (checked to be a JSBoundFunction)
  //  -- r6 : new.target (only in case of [[Construct]])
  // -----------------------------------

  // Load [[BoundArguments]] into r5 and length of that into r7.
  Label no_bound_arguments;
  __ LoadP(r5, FieldMemOperand(r4, JSBoundFunction::kBoundArgumentsOffset));
  __ LoadP(r7, FieldMemOperand(r5, FixedArray::kLengthOffset));
  __ SmiUntag(r7, SetRC);
  __ beq(&no_bound_arguments, cr0);
  {
    // ----------- S t a t e -------------
    //  -- r3 : the number of arguments (not including the receiver)
    //  -- r4 : target (checked to be a JSBoundFunction)
    //  -- r5 : the [[BoundArguments]] (implemented as FixedArray)
    //  -- r6 : new.target (only in case of [[Construct]])
    //  -- r7 : the number of [[BoundArguments]]
    // -----------------------------------
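    // Illustration of the goal (assumed example): for a function created by
    // target.bind(t, a, b) and called with (c, d), the stack is rewritten so
    // the callee sees the bound arguments first, i.e. (deepest to shallowest)
    // receiver, a, b, c, d, with the last argument at sp[0]. The existing
    // arguments are slid down and the [[BoundArguments]] fill the gap.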

    // Reserve stack space for the [[BoundArguments]].
    {
      Label done;
      __ mr(r9, sp);  // preserve previous stack pointer
      __ ShiftLeftImm(r10, r7, Operand(kPointerSizeLog2));
      __ sub(sp, sp, r10);
      // Check the stack for overflow. We are not trying to catch interruptions
      // (i.e. debug break and preemption) here, so check the "real stack
      // limit".
      __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
      __ bgt(&done);  // Signed comparison.
      // Restore the stack pointer.
      __ mr(sp, r9);
      {
        FrameScope scope(masm, StackFrame::MANUAL);
        __ EnterFrame(StackFrame::INTERNAL);
        __ CallRuntime(Runtime::kThrowStackOverflow);
      }
      __ bind(&done);
    }

    // Relocate arguments down the stack.
    //  -- r3 : the number of arguments (not including the receiver)
    //  -- r9 : the previous stack pointer
    //  -- r10: the size of the [[BoundArguments]]
    {
      Label skip, loop;
      __ li(r8, Operand::Zero());
      __ cmpi(r3, Operand::Zero());
      __ beq(&skip);
      __ mtctr(r3);
      __ bind(&loop);
      __ LoadPX(r0, MemOperand(r9, r8));
      __ StorePX(r0, MemOperand(sp, r8));
      __ addi(r8, r8, Operand(kPointerSize));
      __ bdnz(&loop);
      __ bind(&skip);
    }

    // Copy [[BoundArguments]] to the stack (below the arguments).
    {
      Label loop;
      __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
      __ add(r5, r5, r10);
      __ mtctr(r7);
      __ bind(&loop);
      __ LoadPU(r0, MemOperand(r5, -kPointerSize));
      __ StorePX(r0, MemOperand(sp, r8));
      __ addi(r8, r8, Operand(kPointerSize));
      __ bdnz(&loop);
      __ add(r3, r3, r7);
    }
  }
  __ bind(&no_bound_arguments);
}

}  // namespace

// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                              TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(r4);

  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r3, r6, r7, r8);
  }

  // Patch the receiver to [[BoundThis]].
  __ LoadP(ip, FieldMemOperand(r4, JSBoundFunction::kBoundThisOffset));
  __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
  __ StorePX(ip, MemOperand(sp, r0));

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ LoadP(r4,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                       masm->isolate())));
  __ LoadP(ip, MemOperand(ip));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                             TailCallMode tail_call_mode) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the target to call (can be any Object).
  // -----------------------------------
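  // Dispatch performed below (illustrative summary): JSFunction via the
  // CallFunction builtin, JSBoundFunction via the CallBoundFunction builtin,
  // JSProxy via the runtime, any other callable object via its
  // CALL_AS_FUNCTION_DELEGATE, and everything else throws a TypeError.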

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(r4, &non_callable);
  __ bind(&non_smi);
  __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
          RelocInfo::CODE_TARGET, eq);
  __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Call]] internal method.
  __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r7, Map::kIsCallable, r0);
  __ beq(&non_callable, cr0);

  __ cmpi(r8, Operand(JS_PROXY_TYPE));
  __ bne(&non_function);

  // 0. Prepare for tail call if necessary.
  if (tail_call_mode == TailCallMode::kAllow) {
    PrepareForTailCall(masm, r3, r6, r7, r8);
  }

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(r4);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ addi(r3, r3, Operand(2));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Overwrite the original receiver with the (original) target.
  __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
  __ StorePX(r4, MemOperand(sp, r8));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r4);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r4);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}

// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the constructor to call (checked to be a JSFunction)
  //  -- r6 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(r4);
2727   // Calling convention for function specific ConstructStubs require
2728   // r5 to contain either an AllocationSite or undefined.
2729   __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
2730 
2731   // Tail call to the function-specific construct stub (still in the caller
2732   // context at this point).
2733   __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2734   __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset));
2735   __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag));
2736   __ JumpToJSEntry(ip);
2737 }
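
// Editorial note: at the JS level this is the plain-function construct path,
// e.g. (a sketch, not generated code):
//
//   function C(x) { this.x = x; }
//   new C(1);  // dispatches through the construct stub recorded on
//              // C's SharedFunctionInfo
//
// r5 carries undefined here because no AllocationSite feedback is being
// threaded through on this path.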

// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the function to call (checked to be a JSBoundFunction)
  //  -- r6 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(r4);

  // Push the [[BoundArguments]] onto the stack.
  Generate_PushBoundArguments(masm);

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  Label skip;
  __ cmp(r4, r6);
  __ bne(&skip);
  __ LoadP(r6,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
  __ bind(&skip);

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ LoadP(r4,
           FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
  __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ LoadP(ip, MemOperand(ip));
  __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}
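
// Rough JS-level sketch of the new.target patching above (illustration only;
// `target`, `thisArg` and `boundArgs` are hypothetical names):
//
//   const B = target.bind(thisArg, ...boundArgs);
//   new B(...args);
//   // behaves like:
//   //   Reflect.construct(target, [...boundArgs, ...args],
//   //                     newTarget === B ? target : newTarget)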

// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the constructor to call (checked to be a JSProxy)
  //  -- r6 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(r4, r6);
  // Include the pushed new_target, constructor and the receiver.
  __ addi(r3, r3, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}
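
// Editorial sketch: for `new proxy(...args)` the runtime performs roughly
//
//   //   handler.construct(target, argsList, newTarget)
//
// when a construct trap is installed (forwarding to the target otherwise),
// with the trap result checked to be an Object.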

// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : the number of arguments (not including the receiver)
  //  -- r4 : the constructor to call (can be any Object)
  //  -- r6 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(r4, &non_constructor);

  // Dispatch based on instance type.
  __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE);
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Check if target has a [[Construct]] internal method.
  __ lbz(r5, FieldMemOperand(r7, Map::kBitFieldOffset));
  __ TestBit(r5, Map::kIsConstructor, r0);
  __ beq(&non_constructor, cr0);

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq);

  // Only dispatch to proxies after checking whether they are constructors.
  __ cmpi(r8, Operand(JS_PROXY_TYPE));
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq);

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
    __ StorePX(r4, MemOperand(sp, r8));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r4);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
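
// Editorial summary of the dispatch above (illustration only, not generated
// code):
//
//   Smi or no [[Construct]]       -> ConstructedNonConstructable
//   JSFunction                    -> ConstructFunction builtin
//   constructable bound function  -> ConstructBoundFunction builtin
//   constructable proxy           -> ConstructProxy builtin
//   other constructable exotic    -> CALL_AS_CONSTRUCTOR_DELEGATE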

// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r4 : requested object size (untagged)
  //  -- lr : return address
  // -----------------------------------
  __ SmiTag(r4);
  __ Push(r4);
  __ LoadSmiLiteral(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}

// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r4 : requested object size (untagged)
  //  -- lr : return address
  // -----------------------------------
  __ SmiTag(r4);
  __ LoadSmiLiteral(r5, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ Push(r4, r5);
  __ LoadSmiLiteral(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
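
// Editorial note on the SmiTag above: the untagged byte count is converted
// into a Smi so it can be handed to the runtime as a tagged value without
// any boxing allocation. A sketch, assuming the usual Smi encoding (value
// shifted left by kSmiShift, tag bit 0 clear):
//
//   //   smiEncoded(size) == size << kSmiShift
//
// which is lossless here because requested object sizes are far below the
// Smi value range.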

// static
void Builtins::Generate_Abort(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r4 : message_id as Smi
  //  -- lr : return address
  // -----------------------------------
  __ push(r4);
  __ LoadSmiLiteral(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAbort);
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r3 : actual number of arguments
  //  -- r4 : function (passed through to callee)
  //  -- r5 : expected number of arguments
  //  -- r6 : new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset));
  __ cmp(r3, r5);
  __ blt(&too_few);
  __ cmpi(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  __ beq(&dont_adapt_arguments);

  {  // Enough parameters: actual >= expected
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, r5, r8, &stack_overflow);

    // Calculate copy start address into r3 and copy end address into r7.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);
    // Adjust for return address and receiver.
    __ addi(r3, r3, Operand(2 * kPointerSize));
    __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
    __ sub(r7, r3, r7);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // r7: copy end address
    // ip: code entry to call

    Label copy;
    __ bind(&copy);
    __ LoadP(r0, MemOperand(r3, 0));
    __ push(r0);
    __ cmp(r3, r7);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    __ b(&invoke);
  }

  {  // Too few parameters: actual < expected
    __ bind(&too_few);

    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, r5, r8, &stack_overflow);

    // Calculate copy start address into r3; the copy end address is fp.
    // r3: actual number of arguments as a smi
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    __ SmiToPtrArrayOffset(r3, r3);
    __ add(r3, r3, fp);

    // Copy the arguments (including the receiver) to the new stack frame.
    // r3: copy start address
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    Label copy;
    __ bind(&copy);
    // Adjust load for return address and receiver.
    __ LoadP(r0, MemOperand(r3, 2 * kPointerSize));
    __ push(r0);
    __ cmp(r3, fp);  // Compare before moving to next argument.
    __ subi(r3, r3, Operand(kPointerSize));
    __ bne(&copy);

    // Fill the remaining expected arguments with undefined.
    // r4: function
    // r5: expected number of arguments
    // r6: new target (passed through to callee)
    // ip: code entry to call
    __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
    __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
    __ sub(r7, fp, r7);
    // Adjust for frame.
    __ subi(r7, r7, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ push(r0);
    __ cmp(sp, r7);
    __ bne(&fill);
  }

  // Call the entry point.
  __ bind(&invoke);
  __ mr(r3, r5);
  // r3 : expected number of arguments
  // r4 : function (passed through to callee)
  // r6 : new target (passed through to callee)
  __ CallJSEntry(ip);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ blr();

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ JumpToJSEntry(ip);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bkpt(0);
  }
}
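
// Editorial sketch of what the adaptor achieves at the JS level
// (illustration only, not generated code):
//
//   function f(a, b, c) { return [a, b, c]; }
//   f(1);           // actual (1) < expected (3): b and c are filled with
//                   // undefined by the fill loop above
//   f(1, 2, 3, 4);  // actual > expected: the callee sees a, b, c; extra
//                   // arguments remain in the caller-side frame, still
//                   // reachable via the arguments object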

#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_PPC