// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS64

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
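// The `__` shorthand expands to ACCESS_MASM(masm), so each statement below
// emits one instruction through the MacroAssembler; this is the usual
// convention in V8's hand-written builtin generators.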

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : target
  //  -- a3                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument
  //  -- sp[8 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(a1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // JumpToExternalReference expects a0 to contain the number of arguments
  // including the receiver and the extra arguments.
  const int num_extra_args = 3;
  __ Daddu(a0, a0, num_extra_args + 1);

  // Insert extra arguments.
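  // (a0 is smi-tagged around the pushes so that the GC only ever sees tagged
  // values on the stack, and untagged again once the pushes are done.)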
  __ SmiTag(a0);
  __ Push(a0, a1, a3);
  __ SmiUntag(a0);

  __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
                             PROTECT, exit_frame_type == BUILTIN_EXIT);
}

// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}

// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}

void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, a4);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, a4,
              Operand(zero_reg));
    __ GetObjectType(a2, a3, a4);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction, a4,
              Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    __ SmiTst(a2, a4);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1, a4,
              Operand(zero_reg));
    __ GetObjectType(a2, a3, a4);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2, a4,
              Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub.
  __ mov(a3, a1);
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : function
  //  -- cp                     : context
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero-based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in t1 and the double value in f0.
  __ LoadRoot(t1, root_index);
  __ ldc1(f0, FieldMemOperand(t1, HeapNumber::kValueOffset));

  Label done_loop, loop;
  __ mov(a3, a0);
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ Dsubu(a3, a3, Operand(1));
    __ Branch(&done_loop, lt, a3, Operand(zero_reg));

    // Load the next parameter tagged value into a2.
    __ Dlsa(at, sp, a3, kPointerSizeLog2);
    __ ld(a2, MemOperand(at));

    // Load the double value of the parameter into f2, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(a2, &convert_smi);
    __ ld(a4, FieldMemOperand(a2, HeapObject::kMapOffset));
    __ JumpIfRoot(a4, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(a0);
      __ SmiTag(a3);
      __ EnterBuiltinFrame(cp, a1, a0);
      __ Push(t1, a3);
      __ mov(a0, a2);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ mov(a2, v0);
      __ Pop(t1, a3);
      __ LeaveBuiltinFrame(cp, a1, a0);
      __ SmiUntag(a3);
      __ SmiUntag(a0);
      {
        // Restore the double accumulator value (f0).
        Label restore_smi, done_restore;
        __ JumpIfSmi(t1, &restore_smi);
        __ ldc1(f0, FieldMemOperand(t1, HeapNumber::kValueOffset));
        __ jmp(&done_restore);
        __ bind(&restore_smi);
        __ SmiToDoubleFPURegister(t1, f0, a4);
        __ bind(&done_restore);
      }
    }
    __ jmp(&convert);
    __ bind(&convert_number);
    __ ldc1(f2, FieldMemOperand(a2, HeapNumber::kValueOffset));
    __ jmp(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDoubleFPURegister(a2, f2, a4);
    __ bind(&done_convert);

    // Perform the actual comparison using the Min/Max macro instructions,
    // with the accumulator value on the left-hand side (f0) and the next
    // parameter value on the right-hand side (f2). Then work out which
    // HeapNumber (or smi) the result came from.
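    // To do that, capture the raw bits of f0 before the operation: if they
    // are unchanged afterwards, the accumulator (t1) already names the
    // result; otherwise the freshly converted parameter (a2) becomes the new
    // tagged result.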
    Label compare_nan;
    __ BranchF(nullptr, &compare_nan, eq, f0, f2);
    __ Move(a4, f0);
    if (kind == MathMaxMinKind::kMin) {
      __ MinNaNCheck_d(f0, f0, f2);
    } else {
      DCHECK(kind == MathMaxMinKind::kMax);
      __ MaxNaNCheck_d(f0, f0, f2);
    }
    __ Move(at, f0);
    __ Branch(&loop, eq, a4, Operand(at));
    __ mov(t1, a2);
    __ jmp(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    __ bind(&compare_nan);
    __ LoadRoot(t1, Heap::kNanValueRootIndex);
    __ ldc1(f0, FieldMemOperand(t1, HeapNumber::kValueOffset));
    __ jmp(&loop);
  }

  __ bind(&done_loop);
  // Drop all slots, including the receiver.
  __ Daddu(a0, a0, Operand(1));
  __ Dlsa(sp, sp, a0, kPointerSizeLog2);
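  // On MIPS the instruction that follows a jump or branch executes in the
  // branch delay slot; USE_DELAY_SLOT tells the assembler that the next
  // emitted instruction (here, the move into v0) rides in that slot.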
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, t1);  // In delay slot.
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- cp                     : context
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------

  // 1. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Dsubu(t1, a0, Operand(1));  // In delay slot.
    __ mov(t0, a0);                // Store argc in t0.
    __ Dlsa(at, sp, t1, kPointerSizeLog2);
    __ ld(a0, MemOperand(at));
  }

  // 2a. Convert first argument to number.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(t0);
    __ EnterBuiltinFrame(cp, a1, t0);
    __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, a1, t0);
    __ SmiUntag(t0);
  }

  {
    // Drop all arguments including the receiver.
    __ Dlsa(sp, sp, t0, kPointerSizeLog2);
    __ DropAndRet(1);
  }

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ Move(v0, Smi::kZero);
  __ DropAndRet(1);
}

void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- a3                     : new target
  //  -- cp                     : context
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // 2. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ mov(t0, a0);  // Store argc in t0.
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Dsubu(a0, a0, Operand(1));  // In delay slot.
    __ Dlsa(at, sp, a0, kPointerSizeLog2);
    __ ld(a0, MemOperand(at));
    __ jmp(&done);
    __ bind(&no_arguments);
    __ Move(a0, Smi::kZero);
    __ bind(&done);
  }

  // 3. Make sure a0 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(a0, &done_convert);
    __ GetObjectType(a0, a2, a2);
    __ Branch(&done_convert, eq, a2, Operand(HEAP_NUMBER_TYPE));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(t0);
      __ EnterBuiltinFrame(cp, a1, t0);
      __ Push(a3);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ Move(a0, v0);
      __ Pop(a3);
      __ LeaveBuiltinFrame(cp, a1, t0);
      __ SmiUntag(t0);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ Branch(&new_object, ne, a1, Operand(a3));

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(v0, a1, a0, a2, t1, &new_object);
  __ jmp(&drop_frame_and_ret);

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    FastNewObjectStub stub(masm->isolate());
    __ SmiTag(t0);
    __ EnterBuiltinFrame(cp, a1, t0);
    __ Push(a0);
    __ CallStub(&stub);
    __ Pop(a0);
    __ LeaveBuiltinFrame(cp, a1, t0);
    __ SmiUntag(t0);
  }
  __ sd(a0, FieldMemOperand(v0, JSValue::kValueOffset));

  __ bind(&drop_frame_and_ret);
  {
    __ Dlsa(sp, sp, t0, kPointerSizeLog2);
    __ DropAndRet(1);
  }
}

// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- cp                     : context
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------

  // 1. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Dsubu(t1, a0, Operand(1));  // In delay slot.
    __ mov(t0, a0);                // Store argc in t0.
    __ Dlsa(at, sp, t1, kPointerSizeLog2);
    __ ld(a0, MemOperand(at));
  }

  // 2a. At least one argument, return a0 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label drop_frame_and_ret, to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(a0, &to_string);
    __ GetObjectType(a0, t1, t1);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
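    // After the subtraction below, a negative value in t1 means a0 is already
    // a string (it is returned unchanged), zero means it is a Symbol (produce
    // its descriptive string), and a positive value means any other object
    // (convert it with ToString).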
    __ Subu(t1, t1, Operand(FIRST_NONSTRING_TYPE));
    __ Branch(&symbol_descriptive_string, eq, t1, Operand(zero_reg));
    __ Branch(&to_string, gt, t1, Operand(zero_reg));
    __ mov(v0, a0);
    __ jmp(&drop_frame_and_ret);
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(v0, Heap::kempty_stringRootIndex);
    __ DropAndRet(1);
  }

  // 3a. Convert a0 to a string.
  __ bind(&to_string);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(t0);
    __ EnterBuiltinFrame(cp, a1, t0);
    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, a1, t0);
    __ SmiUntag(t0);
  }
  __ jmp(&drop_frame_and_ret);

  // 3b. Convert symbol in a0 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Dlsa(sp, sp, t0, kPointerSizeLog2);
    __ Drop(1);
    __ Push(a0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }

  __ bind(&drop_frame_and_ret);
  {
    __ Dlsa(sp, sp, t0, kPointerSizeLog2);
    __ DropAndRet(1);
  }
}

void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- a3                     : new target
  //  -- cp                     : context
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // 2. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ mov(t0, a0);  // Store argc in t0.
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Dsubu(a0, a0, Operand(1));  // In delay slot.
    __ Dlsa(at, sp, a0, kPointerSizeLog2);
    __ ld(a0, MemOperand(at));
    __ jmp(&done);
    __ bind(&no_arguments);
    __ LoadRoot(a0, Heap::kempty_stringRootIndex);
    __ bind(&done);
  }

  // 3. Make sure a0 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(a0, &convert);
    __ GetObjectType(a0, a2, a2);
    __ And(t1, a2, Operand(kIsNotStringMask));
    __ Branch(&done_convert, eq, t1, Operand(zero_reg));
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(t0);
      __ EnterBuiltinFrame(cp, a1, t0);
      __ Push(a3);
      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
      __ Move(a0, v0);
      __ Pop(a3);
      __ LeaveBuiltinFrame(cp, a1, t0);
      __ SmiUntag(t0);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ Branch(&new_object, ne, a1, Operand(a3));

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(v0, a1, a0, a2, t1, &new_object);
  __ jmp(&drop_frame_and_ret);

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    FastNewObjectStub stub(masm->isolate());
    __ SmiTag(t0);
    __ EnterBuiltinFrame(cp, a1, t0);
    __ Push(a0);
    __ CallStub(&stub);
    __ Pop(a0);
    __ LeaveBuiltinFrame(cp, a1, t0);
    __ SmiUntag(t0);
  }
  __ sd(a0, FieldMemOperand(v0, JSValue::kValueOffset));

  __ bind(&drop_frame_and_ret);
  {
    __ Dlsa(sp, sp, t0, kPointerSizeLog2);
    __ DropAndRet(1);
  }
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ ld(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
  __ Daddu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- a0 : argument count (preserved for callee)
  //  -- a1 : target function (preserved for callee)
  //  -- a3 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Push the argument count, the target function and the new target, plus
    // an extra copy of the target function as the argument for the runtime
    // call below.
    __ SmiTag(a0);
    __ Push(a0, a1, a3, a1);

    __ CallRuntime(function_id, 1);
    // Restore target function and new target.
    __ Pop(a0, a1, a3);
    __ SmiUntag(a0);
  }

  __ Daddu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(a4, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(a4));

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

namespace {

void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
                                    bool create_implicit_receiver,
                                    bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- a3     : new target
  //  -- cp     : context
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ SmiTag(a0);
    __ Push(cp, a0);

    if (create_implicit_receiver) {
      __ Push(a1, a3);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ mov(t0, v0);
      __ Pop(a1, a3);

      // ----------- S t a t e -------------
      // -- a1: constructor function
      // -- a3: new target
      // -- t0: newly allocated object
      // -----------------------------------
      __ ld(a0, MemOperand(sp));
    }
    __ SmiUntag(a0);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(t0, t0);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ Daddu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // a3: new target
    // t0: number of arguments (smi-tagged)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, entry;
    __ mov(t0, a0);
    __ jmp(&entry);
    __ bind(&loop);
    __ Dlsa(a4, a2, t0, kPointerSizeLog2);
    __ ld(a5, MemOperand(a4));
    __ push(a5);
    __ bind(&entry);
    __ Daddu(t0, t0, Operand(-1));
    __ Branch(&loop, greater_equal, t0, Operand(zero_reg));

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    // a3: new target
    ParameterCount actual(a0);
    __ InvokeFunction(a1, a3, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // v0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(v0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ GetObjectType(v0, a1, a3);
      __ Branch(&exit, greater_equal, a3, Operand(FIRST_JS_RECEIVER_TYPE));

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ ld(v0, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // v0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ ld(a1, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ ld(a1, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(v0, &dont_throw);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

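  // a1 holds the smi-tagged argument count saved in the frame; SmiScale
  // turns it into a byte offset, and the extra kPointerSize drops the
  // receiver slot.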
  __ SmiScale(a4, a1, kPointerSizeLog2);
  __ Daddu(sp, sp, a4);
  __ Daddu(sp, sp, kPointerSize);
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
  }
  __ Ret();
}

}  // namespace

void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}

void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : the value to pass to the generator
  //  -- a1 : the JSGeneratorObject to resume
  //  -- a2 : the resume mode (tagged)
  //  -- ra : return address
  // -----------------------------------
  __ AssertGeneratorObject(a1);

  // Store input value into generator object.
  __ sd(v0, FieldMemOperand(a1, JSGeneratorObject::kInputOrDebugPosOffset));
  __ RecordWriteField(a1, JSGeneratorObject::kInputOrDebugPosOffset, v0, a3,
                      kRAHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kResumeModeOffset));

  // Load suspended function and context.
  __ ld(cp, FieldMemOperand(a1, JSGeneratorObject::kContextOffset));
  __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(masm->isolate());
  STATIC_ASSERT(StepFrame > StepIn);
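  // Because StepFrame > StepIn, the single >= StepIn comparison below catches
  // both stepping modes.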
  __ li(a5, Operand(last_step_action));
  __ lb(a5, MemOperand(a5));
  __ Branch(&prepare_step_in_if_stepping, ge, a5, Operand(StepIn));

  // Flood function if we need to continue stepping in the suspended generator.
  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());
  __ li(a5, Operand(debug_suspended_generator));
  __ ld(a5, MemOperand(a5));
  __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(a5));
  __ bind(&stepping_prepared);

  // Push receiver.
  __ ld(a5, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
  __ Push(a5);

  // ----------- S t a t e -------------
  //  -- a1    : the JSGeneratorObject to resume
  //  -- a2    : the resume mode (tagged)
  //  -- a4    : generator function
  //  -- cp    : generator context
  //  -- ra    : return address
  //  -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
  __ lw(a3,
        FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    Label done_loop, loop;
    __ bind(&loop);
    __ Dsubu(a3, a3, Operand(1));
    __ Branch(&done_loop, lt, a3, Operand(zero_reg));
    __ PushRoot(Heap::kTheHoleValueRootIndex);
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Dispatch on the kind of generator object.
  Label old_generator;
  __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
  __ ld(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset));
  __ GetObjectType(a3, a3, a3);
  __ Branch(&old_generator, ne, a3, Operand(BYTECODE_ARRAY_TYPE));

  // New-style (ignition/turbofan) generator object.
  {
    __ ld(a0, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
    __ lw(a0,
          FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset));
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object.  In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ Move(a3, a1);
    __ Move(a1, a4);
    __ ld(a2, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
    __ Jump(a2);
  }

  // Old-style (full-codegen) generator object.
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ Push(ra, fp);
    __ Move(fp, sp);
    __ Push(cp, a4);

    // Restore the operand stack.
    __ ld(a0, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));
    __ ld(a3, FieldMemOperand(a0, FixedArray::kLengthOffset));
    __ SmiUntag(a3);
    __ Daddu(a0, a0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ Dlsa(a3, a0, a3, kPointerSizeLog2);
    {
      Label done_loop, loop;
      __ bind(&loop);
      __ Branch(&done_loop, eq, a0, Operand(a3));
      __ ld(a5, MemOperand(a0));
      __ Push(a5);
      __ Branch(USE_DELAY_SLOT, &loop);
      __ daddiu(a0, a0, kPointerSize);  // In delay slot.
      __ bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(a5, Heap::kEmptyFixedArrayRootIndex);
    __ sd(a5, FieldMemOperand(a1, JSGeneratorObject::kOperandStackOffset));

    // Resume the generator function at the continuation.
    __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
    __ ld(a3, FieldMemOperand(a3, SharedFunctionInfo::kCodeOffset));
    __ Daddu(a3, a3, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ ld(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(a2);
    __ Daddu(a3, a3, Operand(a2));
    __ li(a2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kContinuationOffset));
    __ Move(v0, a1);  // Continuation expects generator object in v0.
    __ Jump(a3);
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1, a2, a4);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(a1, a2);
  }
  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
  __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1, a2);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(a1, a2);
  }
  __ Branch(USE_DELAY_SLOT, &stepping_prepared);
  __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(a1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };

// Clobbers a2; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
  // Make a2 the space we have left. The stack might already be overflowed
  // here which will cause a2 to become negative.
  __ dsubu(a2, sp, a2);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ SmiScale(a7, v0, kPointerSizeLog2);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ dsll(a7, argc, kPointerSizeLog2);
  }
  __ Branch(&okay, gt, a2, Operand(a7));  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  //  -- a0: new.target
  //  -- a1: function
  //  -- a2: receiver_pointer
  //  -- a3: argc
  //  -- s0: argv
  // -----------------------------------
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ li(cp, Operand(context_address));
    __ ld(cp, MemOperand(cp));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Check if we have enough stack space to push all arguments.
    // Clobbers a2.
    Generate_CheckStackOverflow(masm, a3, kArgcIsUntaggedInt);

    // Remember new.target.
    __ mov(a5, a0);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
    Label loop, entry;
    __ Dlsa(a6, s0, a3, kPointerSizeLog2);
    __ b(&entry);
    __ nop();  // Branch delay slot nop.
    // a6 points past last arg.
    __ bind(&loop);
    __ ld(a4, MemOperand(s0));  // Read next parameter.
    __ daddiu(s0, s0, kPointerSize);
    __ ld(a4, MemOperand(a4));  // Dereference handle.
    __ push(a4);                // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(a6));

    // Setup new.target and argc.
    __ mov(a0, a3);
    __ mov(a3, a5);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(a4, Heap::kUndefinedValueRootIndex);
    __ mov(s1, a4);
    __ mov(s2, a4);
    __ mov(s3, a4);
    __ mov(s4, a4);
    __ mov(s5, a4);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }
  __ Jump(ra);
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ ld(args_count,
        MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ lw(args_count,
        FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments.
  __ Daddu(sp, sp, args_count);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right.  The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o a1: the JS function object being called.
//   o a3: the new target
//   o cp: our context
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o ra: return address
//
// The function builds an interpreter frame.  See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(a1);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ ld(a0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  Label load_debug_bytecode_array, bytecode_array_loaded;
  Register debug_info = kInterpreterBytecodeArrayRegister;
  DCHECK(!debug_info.is(a0));
  __ ld(debug_info, FieldMemOperand(a0, SharedFunctionInfo::kDebugInfoOffset));
  __ Branch(&load_debug_bytecode_array, ne, debug_info,
            Operand(DebugInfo::uninitialized()));
  __ ld(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset));
  __ bind(&bytecode_array_loaded);

  // Check whether we should continue to use the interpreter.
  Label switch_to_different_code_kind;
  __ ld(a0, FieldMemOperand(a0, SharedFunctionInfo::kCodeOffset));
  __ Branch(&switch_to_different_code_kind, ne, a0,
            Operand(masm->CodeObject()));  // Self-reference to this code.

  // Increment invocation count for the function.
  __ ld(a0, FieldMemOperand(a1, JSFunction::kLiteralsOffset));
  __ ld(a0, FieldMemOperand(a0, LiteralsArray::kFeedbackVectorOffset));
  __ ld(a4, FieldMemOperand(
                a0, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
                        TypeFeedbackVector::kHeaderSize));
  __ Daddu(a4, a4, Operand(Smi::FromInt(1)));
  __ sd(a4, FieldMemOperand(
                a0, TypeFeedbackVector::kInvocationCountIndex * kPointerSize +
                        TypeFeedbackVector::kHeaderSize));
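  // (The invocation count is a smi, so adding the tagged constant
  // Smi::FromInt(1) increments it without untagging.)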

  // Check function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ SmiTst(kInterpreterBytecodeArrayRegister, a4);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a4,
              Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, a4, a4);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a4,
              Operand(BYTECODE_ARRAY_TYPE));
  }

  // Load initial bytecode offset.
  __ li(kInterpreterBytecodeOffsetRegister,
        Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
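  // (Bytecode offsets are expressed relative to the untagged start of the
  // BytecodeArray object, hence the kHeaderSize - kHeapObjectTag bias.)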

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(a4, kInterpreterBytecodeOffsetRegister);
  __ Push(a3, kInterpreterBytecodeArrayRegister, a4);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size (word) from the BytecodeArray object.
    __ lw(a4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ Dsubu(a5, sp, Operand(a4));
    __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    __ Branch(&ok, hs, a5, Operand(a2));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(a5, Heap::kUndefinedValueRootIndex);
    __ Branch(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(a5);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ Dsubu(a4, a4, Operand(kPointerSize));
    __ Branch(&loop_header, ge, a4, Operand(zero_reg));
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ li(kInterpreterDispatchTableRegister,
        Operand(ExternalReference::interpreter_dispatch_table_address(
            masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ Daddu(a0, kInterpreterBytecodeArrayRegister,
           kInterpreterBytecodeOffsetRegister);
  __ lbu(a0, MemOperand(a0));
  __ Dlsa(at, kInterpreterDispatchTableRegister, a0, kPointerSizeLog2);
  __ ld(at, MemOperand(at));
  __ Call(at);
  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in v0.
  LeaveInterpreterFrame(masm, t0);
  __ Jump(ra);

  // Load debug copy of the bytecode array.
  __ bind(&load_debug_bytecode_array);
  __ ld(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(debug_info, DebugInfo::kDebugBytecodeArrayIndex));
  __ Branch(&bytecode_array_loaded);

  // If the shared code is no longer this entry trampoline, then the underlying
  // function has been switched to a different kind of code and we heal the
  // closure by switching the code entry field over to the new code as well.
  __ bind(&switch_to_different_code_kind);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kCodeOffset));
  __ Daddu(a4, a4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ sd(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ RecordWriteCodeEntryField(a1, a4, a5);
  __ Jump(a4);
}

static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
                                        Register scratch1, Register scratch2,
                                        Label* stack_overflow) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(scratch1, Heap::kRealStackLimitRootIndex);
  // Make scratch1 the space we have left. The stack might already be
  // overflowed here which will cause scratch1 to become negative.
  __ dsubu(scratch1, sp, scratch1);
  // Check if the arguments will overflow the stack.
  __ dsll(scratch2, num_args, kPointerSizeLog2);
  // Signed comparison.
  __ Branch(stack_overflow, le, scratch1, Operand(scratch2));
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args, Register index,
                                         Register scratch, Register scratch2,
                                         Label* stack_overflow) {
  Generate_StackOverflowCheck(masm, num_args, scratch, scratch2,
                              stack_overflow);

  // Find the address of the last argument.
  __ mov(scratch2, num_args);
  __ dsll(scratch2, scratch2, kPointerSizeLog2);
  __ Dsubu(scratch2, index, Operand(scratch2));

  // Push the arguments.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ ld(scratch, MemOperand(index));
  __ Daddu(index, index, Operand(-kPointerSize));
  __ push(scratch);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, index, Operand(scratch2));
}

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode,
    CallableType function_type) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- a1 : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;

  __ Daddu(a3, a0, Operand(1));  // Add one for receiver.

  // This function modifies a2, t0 and a4.
  Generate_InterpreterPushArgs(masm, a3, a2, a4, t0, &stack_overflow);

  // Call the target.
  if (function_type == CallableType::kJSFunction) {
    __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                                      tail_call_mode),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(function_type, CallableType::kAny);
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                              tail_call_mode),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ break_(0xCC);
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
    MacroAssembler* masm, CallableType construct_type) {
  // ----------- S t a t e -------------
  // -- a0 : argument count (not including receiver)
  // -- a3 : new target
  // -- a1 : constructor to call
  // -- a2 : allocation site feedback if available, undefined otherwise.
  // -- a4 : address of the first argument
  // -----------------------------------
  Label stack_overflow;

  // Push a slot for the receiver.
  __ push(zero_reg);

  // This function modifies t0, a4 and a5.
  Generate_InterpreterPushArgs(masm, a0, a4, a5, t0, &stack_overflow);

  __ AssertUndefinedOrAllocationSite(a2, t0);
  if (construct_type == CallableType::kJSFunction) {
    __ AssertFunction(a1);

    // Tail call to the function-specific construct stub (still in the caller
    // context at this point).
    __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset));
    __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ Jump(at);
  } else {
    DCHECK_EQ(construct_type, CallableType::kAny);
    // Call the constructor with a0, a1, and a3 unmodified.
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ break_(0xCC);
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstructArray(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the target to call checked to be Array function.
  //  -- a2 : allocation site feedback.
  //  -- a3 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  // -----------------------------------
  Label stack_overflow;

  __ Daddu(a4, a0, Operand(1));  // Add one for receiver.

  // This function modifies a3, a5 and a6.
  Generate_InterpreterPushArgs(masm, a4, a3, a5, a6, &stack_overflow);

  // ArrayConstructor stub expects constructor in a3. Set it here.
  __ mov(a3, a1);

  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable code.
    __ break_(0xCC);
  }
}

static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
  __ li(t0, Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline()));
  __ Daddu(ra, t0, Operand(interpreter_entry_return_pc_offset->value() +
                           Code::kHeaderSize - kHeapObjectTag));
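  // (ra now points into the middle of InterpreterEntryTrampoline, so when the
  // bytecode handler dispatched to below returns, execution resumes at the
  // trampoline's frame-teardown sequence.)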
1300 
1301   // Initialize the dispatch table register.
1302   __ li(kInterpreterDispatchTableRegister,
1303         Operand(ExternalReference::interpreter_dispatch_table_address(
1304             masm->isolate())));
1305 
1306   // Get the bytecode array pointer from the frame.
1307   __ ld(kInterpreterBytecodeArrayRegister,
1308         MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1309 
1310   if (FLAG_debug_code) {
1311     // Check function data field is actually a BytecodeArray object.
1312     __ SmiTst(kInterpreterBytecodeArrayRegister, at);
1313     __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at,
1314               Operand(zero_reg));
1315     __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1);
1316     __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1,
1317               Operand(BYTECODE_ARRAY_TYPE));
1318   }
1319 
1320   // Get the target bytecode offset from the frame.
1321   __ ld(kInterpreterBytecodeOffsetRegister,
1322         MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1323   __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1324 
1325   // Dispatch to the target bytecode.
1326   __ Daddu(a1, kInterpreterBytecodeArrayRegister,
1327            kInterpreterBytecodeOffsetRegister);
1328   __ lbu(a1, MemOperand(a1));
1329   __ Dlsa(a1, kInterpreterDispatchTableRegister, a1, kPointerSizeLog2);
1330   __ ld(a1, MemOperand(a1));
1331   __ Jump(a1);
1332 }
1333 
Generate_InterpreterEnterBytecodeAdvance(MacroAssembler * masm)1334 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
1335   // Advance the current bytecode offset stored within the given interpreter
1336   // stack frame. This simulates what all bytecode handlers do upon completion
1337   // of the underlying operation.
1338   __ ld(a1, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1339   __ ld(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1340   __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1341   {
1342     FrameScope scope(masm, StackFrame::INTERNAL);
1343     __ Push(kInterpreterAccumulatorRegister, a1, a2);
1344     __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset);
1345     __ mov(a2, v0);  // Result is the new bytecode offset.
1346     __ Pop(kInterpreterAccumulatorRegister);
1347   }
1348   __ sd(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1349 
1350   Generate_InterpreterEnterBytecode(masm);
1351 }
1352 
Generate_InterpreterEnterBytecodeDispatch(MacroAssembler * masm)1353 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1354   Generate_InterpreterEnterBytecode(masm);
1355 }
1356 
Generate_CompileLazy(MacroAssembler * masm)1357 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
1358   // ----------- S t a t e -------------
1359   //  -- a0 : argument count (preserved for callee)
1360   //  -- a3 : new target (preserved for callee)
1361   //  -- a1 : target function (preserved for callee)
1362   // -----------------------------------
1363   // First look up code; maybe we don't need to compile!
1364   Label gotta_call_runtime, gotta_call_runtime_no_stack;
1365   Label try_shared;
1366   Label loop_top, loop_bottom;
1367 
1368   Register argument_count = a0;
1369   Register closure = a1;
1370   Register new_target = a3;
1371   __ push(argument_count);
1372   __ push(new_target);
1373   __ push(closure);
1374 
1375   Register map = a0;
1376   Register index = a2;
1377   __ ld(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1378   __ ld(map, FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
1379   __ ld(index, FieldMemOperand(map, FixedArray::kLengthOffset));
1380   __ Branch(&gotta_call_runtime, lt, index, Operand(Smi::FromInt(2)));
1381 
1382   // Find literals.
1383   // a3  : native context
1384   // a2  : length / index
1385   // a0  : optimized code map
1386   // stack[0] : closure
1387   // stack[8] : new target
1388   Register native_context = a3;
1389   __ ld(native_context, NativeContextMemOperand());
1390 
1391   __ bind(&loop_top);
1392   Register temp = a1;
1393   Register array_pointer = a5;
1394 
1395   // Does the native context match?
1396   __ SmiScale(at, index, kPointerSizeLog2);
1397   __ Daddu(array_pointer, map, Operand(at));
1398   __ ld(temp, FieldMemOperand(array_pointer,
1399                               SharedFunctionInfo::kOffsetToPreviousContext));
1400   __ ld(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
1401   __ Branch(&loop_bottom, ne, temp, Operand(native_context));
1402   // OSR id set to none?
1403   __ ld(temp, FieldMemOperand(array_pointer,
1404                               SharedFunctionInfo::kOffsetToPreviousOsrAstId));
1405   const int bailout_id = BailoutId::None().ToInt();
1406   __ Branch(&loop_bottom, ne, temp, Operand(Smi::FromInt(bailout_id)));
1407   // Literals available?
1408   __ ld(temp, FieldMemOperand(array_pointer,
1409                               SharedFunctionInfo::kOffsetToPreviousLiterals));
1410   __ ld(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
1411   __ JumpIfSmi(temp, &gotta_call_runtime);
1412 
1413   // Save the literals in the closure.
1414   __ ld(a4, MemOperand(sp, 0));
1415   __ sd(temp, FieldMemOperand(a4, JSFunction::kLiteralsOffset));
1416   __ push(index);
1417   __ RecordWriteField(a4, JSFunction::kLiteralsOffset, temp, index,
1418                       kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1419                       OMIT_SMI_CHECK);
1420   __ pop(index);
1421 
1422   // Code available?
1423   Register entry = a4;
1424   __ ld(entry,
1425         FieldMemOperand(array_pointer,
1426                         SharedFunctionInfo::kOffsetToPreviousCachedCode));
1427   __ ld(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
1428   __ JumpIfSmi(entry, &try_shared);
1429 
1430   // Found literals and code. Get them into the closure and return.
1431   __ pop(closure);
1432   // Store code entry in the closure.
1433   __ Daddu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
1434   __ sd(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
1435   __ RecordWriteCodeEntryField(closure, entry, a5);
1436 
1437   // Link the closure into the optimized function list.
1438   // a4 : code entry
1439   // a3 : native context
1440   // a1 : closure
1441   __ ld(a5,
1442         ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
1443   __ sd(a5, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
1444   __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, a5, a0,
1445                       kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1446                       OMIT_SMI_CHECK);
1447   const int function_list_offset =
1448       Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
1449   __ sd(closure,
1450         ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
1451   // Save closure before the write barrier.
1452   __ mov(a5, closure);
1453   __ RecordWriteContextSlot(native_context, function_list_offset, closure, a0,
1454                             kRAHasNotBeenSaved, kDontSaveFPRegs);
1455   __ mov(closure, a5);
1456   __ pop(new_target);
1457   __ pop(argument_count);
1458   __ Jump(entry);
1459 
1460   __ bind(&loop_bottom);
1461   __ Dsubu(index, index,
1462            Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
1463   __ Branch(&loop_top, gt, index, Operand(Smi::FromInt(1)));
1464 
1465   // We found neither literals nor code.
1466   __ jmp(&gotta_call_runtime);
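  // Editor's sketch (illustrative only; Entry and the helpers are hypothetical
  // views of one optimized-code-map slot): the loop above is, in effect, a
  // back-to-front scan for a matching, non-OSR entry:
  //
  //   for (int i = count - 1; i >= 1; --i) {         // loop_top..loop_bottom
  //     Entry* e = &entries[i];
  //     if (e->context != native_context) continue;  // cleared or mismatched
  //     if (e->osr_ast_id != BailoutId::None()) continue;
  //     if (IsCleared(e->literals)) goto gotta_call_runtime;
  //     InstallLiterals(e->literals);
  //     if (!IsCleared(e->code)) { InstallCode(e->code); return; }
  //     goto try_shared;                             // literals but no code
  //   }
  //   goto gotta_call_runtime;                       // nothing found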
1467 
1468   __ bind(&try_shared);
1469   __ pop(closure);
1470   __ pop(new_target);
1471   __ pop(argument_count);
1472   __ ld(entry, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1473   // Is the shared function marked for tier up?
1474   __ lbu(a5, FieldMemOperand(entry,
1475                              SharedFunctionInfo::kMarkedForTierUpByteOffset));
1476   __ And(a5, a5,
1477          Operand(1 << SharedFunctionInfo::kMarkedForTierUpBitWithinByte));
1478   __ Branch(&gotta_call_runtime_no_stack, ne, a5, Operand(zero_reg));
1479   // Is the full code valid?
1480   __ ld(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
1481   __ lw(a5, FieldMemOperand(entry, Code::kFlagsOffset));
1482   __ And(a5, a5, Operand(Code::KindField::kMask));
1483   __ dsrl(a5, a5, Code::KindField::kShift);
1484   __ Branch(&gotta_call_runtime_no_stack, eq, a5, Operand(Code::BUILTIN));
1485   // Yes, install the full code.
1486   __ Daddu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
1487   __ sd(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
1488   __ RecordWriteCodeEntryField(closure, entry, a5);
1489   __ Jump(entry);
1490 
1491   __ bind(&gotta_call_runtime);
1492   __ pop(closure);
1493   __ pop(new_target);
1494   __ pop(argument_count);
1495   __ bind(&gotta_call_runtime_no_stack);
1496   GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1497 }
1498 
1499 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
1500   GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
1501 }
1502 
1503 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
1504   GenerateTailCallToReturnedCode(masm,
1505                                  Runtime::kCompileOptimized_NotConcurrent);
1506 }
1507 
1508 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
1509   GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
1510 }
1511 
1512 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1513   // ----------- S t a t e -------------
1514   //  -- a0 : argument count (preserved for callee)
1515   //  -- a1 : new target (preserved for callee)
1516   //  -- a3 : target function (preserved for callee)
1517   // -----------------------------------
1518   Label failed;
1519   {
1520     FrameScope scope(masm, StackFrame::INTERNAL);
1521     // Push a copy of the target function and the new target.
1522     // Push function as parameter to the runtime call.
1523     __ Move(t2, a0);
1524     __ SmiTag(a0);
1525     __ Push(a0, a1, a3, a1);
1526 
1527     // Copy arguments from caller (stdlib, foreign, heap).
1528     Label args_done;
1529     for (int j = 0; j < 4; ++j) {
1530       Label over;
1531       if (j < 3) {
1532         __ Branch(&over, ne, t2, Operand(j));
1533       }
1534       for (int i = j - 1; i >= 0; --i) {
1535         __ ld(t2, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
1536                                      i * kPointerSize));
1537         __ push(t2);
1538       }
1539       for (int i = 0; i < 3 - j; ++i) {
1540         __ PushRoot(Heap::kUndefinedValueRootIndex);
1541       }
1542       if (j < 3) {
1543         __ jmp(&args_done);
1544         __ bind(&over);
1545       }
1546     }
1547     __ bind(&args_done);
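    // Editor's sketch (illustrative only): the unrolled branches above amount
    // to pushing the available caller arguments (stdlib, foreign, heap) from
    // last to first, then padding with undefined until three were pushed:
    //
    //   int j = argc < 3 ? argc : 3;                 // which block runs
    //   for (int i = j - 1; i >= 0; --i) push(caller_args[i]);
    //   for (int i = 0; i < 3 - j; ++i) push(undefined);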
1548 
1549     // Call the runtime; on success, unwind this frame and the parent frame.
1550     __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
1551     // A smi 0 is returned on failure, an object on success.
1552     __ JumpIfSmi(v0, &failed);
1553 
1554     __ Drop(2);
1555     __ pop(t2);
1556     __ SmiUntag(t2);
1557     scope.GenerateLeaveFrame();
1558 
1559     __ Daddu(t2, t2, Operand(1));
1560     __ Dlsa(sp, sp, t2, kPointerSizeLog2);
1561     __ Ret();
1562 
1563     __ bind(&failed);
1564     // Restore target function and new target.
1565     __ Pop(a0, a1, a3);
1566     __ SmiUntag(a0);
1567   }
1568   // On failure, tail call back to regular js.
1569   GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1570 }
1571 
1572 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
1573   // For now, we are relying on the fact that make_code_young doesn't do any
1574   // garbage collection, which allows us to save/restore the registers without
1575   // worrying about which of them contain pointers. We also don't build an
1576   // internal frame, to keep the code fast, since we shouldn't have to do
1577   // stack crawls in MakeCodeYoung. This seems a bit fragile.
1578 
1579   // Set a0 to point to the head of the PlatformCodeAge sequence.
1580   __ Dsubu(a0, a0, Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
1581 
1582   // The following registers must be saved and restored when calling through to
1583   // the runtime:
1584   //   a0 - contains return address (beginning of patch sequence)
1585   //   a1 - isolate
1586   //   a3 - new target
1587   RegList saved_regs =
1588       (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
1589   FrameScope scope(masm, StackFrame::MANUAL);
1590   __ MultiPush(saved_regs);
1591   __ PrepareCallCFunction(2, 0, a2);
1592   __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
1593   __ CallCFunction(
1594       ExternalReference::get_make_code_young_function(masm->isolate()), 2);
1595   __ MultiPop(saved_regs);
1596   __ Jump(a0);
1597 }
1598 
1599 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
1600   void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
1601       MacroAssembler* masm) {                                 \
1602     GenerateMakeCodeYoungAgainCommon(masm);                   \
1603   }                                                           \
1604   void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
1605       MacroAssembler* masm) {                                 \
1606     GenerateMakeCodeYoungAgainCommon(masm);                   \
1607   }
1608 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
1609 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1610 
1611 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
1612   // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
1613   // that make_code_young doesn't do any garbage collection which allows us to
1614   // save/restore the registers without worrying about which of them contain
1615   // pointers.
1616 
1617   // Set a0 to point to the head of the PlatformCodeAge sequence.
1618   __ Dsubu(a0, a0, Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
1619 
1620   // The following registers must be saved and restored when calling through to
1621   // the runtime:
1622   //   a0 - contains return address (beginning of patch sequence)
1623   //   a1 - isolate
1624   //   a3 - new target
1625   RegList saved_regs =
1626       (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
1627   FrameScope scope(masm, StackFrame::MANUAL);
1628   __ MultiPush(saved_regs);
1629   __ PrepareCallCFunction(2, 0, a2);
1630   __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
1631   __ CallCFunction(
1632       ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
1633       2);
1634   __ MultiPop(saved_regs);
1635 
1636   // Perform prologue operations usually performed by the young code stub.
1637   __ PushStandardFrame(a1);
1638 
1639   // Jump to the point after the code-age stub.
1640   __ Daddu(a0, a0, Operand(kNoCodeAgeSequenceLength));
1641   __ Jump(a0);
1642 }
1643 
1644 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
1645   GenerateMakeCodeYoungAgainCommon(masm);
1646 }
1647 
1648 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
1649   Generate_MarkCodeAsExecutedOnce(masm);
1650 }
1651 
1652 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
1653                                              SaveFPRegsMode save_doubles) {
1654   {
1655     FrameScope scope(masm, StackFrame::INTERNAL);
1656 
1657     // Preserve registers across the notification; this is important for
1658     // compiled stubs that tail call the runtime on deopts, passing their
1659     // parameters in registers.
1660     __ MultiPush(kJSCallerSaved | kCalleeSaved);
1661     // Pass the function and deoptimization type to the runtime system.
1662     __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
1663     __ MultiPop(kJSCallerSaved | kCalleeSaved);
1664   }
1665 
1666   __ Daddu(sp, sp, Operand(kPointerSize));  // Ignore state
1667   __ Jump(ra);                              // Jump to miss handler
1668 }
1669 
1670 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
1671   Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
1672 }
1673 
1674 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
1675   Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
1676 }
1677 
1678 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1679                                              Deoptimizer::BailoutType type) {
1680   {
1681     FrameScope scope(masm, StackFrame::INTERNAL);
1682     // Pass the function and deoptimization type to the runtime system.
1683     __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
1684     __ push(a0);
1685     __ CallRuntime(Runtime::kNotifyDeoptimized);
1686   }
1687 
1688   // Get the full codegen state from the stack and untag it -> a6.
1689   __ ld(a6, MemOperand(sp, 0 * kPointerSize));
1690   __ SmiUntag(a6);
1691   // Switch on the state.
1692   Label with_tos_register, unknown_state;
1693   __ Branch(
1694       &with_tos_register, ne, a6,
1695       Operand(static_cast<int64_t>(Deoptimizer::BailoutState::NO_REGISTERS)));
1696   __ Ret(USE_DELAY_SLOT);
1697   // Safe to fill the delay slot: Daddu will emit a single instruction.
1698   __ Daddu(sp, sp, Operand(1 * kPointerSize));  // Remove state.
1699 
1700   __ bind(&with_tos_register);
1701   DCHECK_EQ(kInterpreterAccumulatorRegister.code(), v0.code());
1702   __ ld(v0, MemOperand(sp, 1 * kPointerSize));
1703   __ Branch(
1704       &unknown_state, ne, a6,
1705       Operand(static_cast<int64_t>(Deoptimizer::BailoutState::TOS_REGISTER)));
1706 
1707   __ Ret(USE_DELAY_SLOT);
1708   // Safe to fill the delay slot: Daddu will emit a single instruction.
1709   __ Daddu(sp, sp, Operand(2 * kPointerSize));  // Remove state.
1710 
1711   __ bind(&unknown_state);
1712   __ stop("no cases left");
1713 }
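// Editor's sketch (illustrative only; the state encodings are hypothetical
// stand-ins and <cstdint> types are assumed in scope): the two cases above
// either drop just the state slot (NO_REGISTERS) or reload the accumulator
// from the slot above it and drop both (TOS_REGISTER).
static inline intptr_t* ModelUnwindNotifyFrame(intptr_t* sp, int state,
                                               intptr_t* accumulator /* v0 */) {
  const int kNoRegisters = 0, kTosRegister = 1;  // stand-in encodings
  if (state == kNoRegisters) return sp + 1;      // drop the state slot
  if (state == kTosRegister) {
    *accumulator = sp[1];                        // reload top-of-stack value
    return sp + 2;                               // drop state and TOS slots
  }
  return nullptr;                                // unknown_state: stop()
}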
1714 
1715 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1716   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1717 }
1718 
1719 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1720   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1721 }
1722 
1723 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1724   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1725 }
1726 
1727 // Clobbers {t2, t3, a4, a5}.
1728 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
1729                                     Register function_template_info,
1730                                     Label* receiver_check_failed) {
1731   Register signature = t2;
1732   Register map = t3;
1733   Register constructor = a4;
1734   Register scratch = a5;
1735 
1736   // If there is no signature, return the holder.
1737   __ ld(signature, FieldMemOperand(function_template_info,
1738                                    FunctionTemplateInfo::kSignatureOffset));
1739   Label receiver_check_passed;
1740   __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
1741                 &receiver_check_passed);
1742 
1743   // Walk the prototype chain.
1744   __ ld(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1745   Label prototype_loop_start;
1746   __ bind(&prototype_loop_start);
1747 
1748   // Get the constructor, if any.
1749   __ GetMapConstructor(constructor, map, scratch, scratch);
1750   Label next_prototype;
1751   __ Branch(&next_prototype, ne, scratch, Operand(JS_FUNCTION_TYPE));
1752   Register type = constructor;
1753   __ ld(type,
1754         FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
1755   __ ld(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));
1756 
1757   // Loop through the chain of inheriting function templates.
1758   Label function_template_loop;
1759   __ bind(&function_template_loop);
1760 
1761   // If the signatures match, we have a compatible receiver.
1762   __ Branch(&receiver_check_passed, eq, signature, Operand(type),
1763             USE_DELAY_SLOT);
1764 
1765   // If the current type is not a FunctionTemplateInfo, load the next prototype
1766   // in the chain.
1767   __ JumpIfSmi(type, &next_prototype);
1768   __ GetObjectType(type, scratch, scratch);
1769   __ Branch(&next_prototype, ne, scratch, Operand(FUNCTION_TEMPLATE_INFO_TYPE));
1770 
1771   // Otherwise load the parent function template and iterate.
1772   __ ld(type,
1773         FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
1774   __ Branch(&function_template_loop);
1775 
1776   // Load the next prototype.
1777   __ bind(&next_prototype);
1778   __ lwu(scratch, FieldMemOperand(map, Map::kBitField3Offset));
1779   __ DecodeField<Map::HasHiddenPrototype>(scratch);
1780   __ Branch(receiver_check_failed, eq, scratch, Operand(zero_reg));
1781 
1782   __ ld(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
1783   __ ld(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1784   // Iterate.
1785   __ Branch(&prototype_loop_start);
1786 
1787   __ bind(&receiver_check_passed);
1788 }
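// Editor's sketch (illustrative only, hypothetical stand-in types): the walk
// above, expressed as two nested loops over the prototype chain and the chain
// of inheriting function templates.
struct ModelTemplateInfo { const ModelTemplateInfo* parent; };
struct ModelMap {
  const ModelTemplateInfo* constructor_data;  // SFI function data, if any
  bool has_hidden_prototype;
  const ModelMap* prototype_map;
};
static inline bool ModelCompatibleReceiver(const ModelMap* map,
                                           const ModelTemplateInfo* signature) {
  if (signature == nullptr) return true;  // undefined signature: pass
  for (;;) {  // prototype_loop_start
    for (const ModelTemplateInfo* t = map->constructor_data; t != nullptr;
         t = t->parent) {  // function_template_loop
      if (t == signature) return true;  // receiver_check_passed
    }
    if (!map->has_hidden_prototype) return false;  // receiver_check_failed
    map = map->prototype_map;  // next_prototype
  }
}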
1789 
1790 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
1791   // ----------- S t a t e -------------
1792   //  -- a0                 : number of arguments excluding receiver
1793   //  -- a1                 : callee
1794   //  -- ra                 : return address
1795   //  -- sp[0]              : last argument
1796   //  -- ...
1797   //  -- sp[8 * (argc - 1)] : first argument
1798   //  -- sp[8 * argc]       : receiver
1799   // -----------------------------------
1800 
1801   // Load the FunctionTemplateInfo.
1802   __ ld(t1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1803   __ ld(t1, FieldMemOperand(t1, SharedFunctionInfo::kFunctionDataOffset));
1804 
1805   // Do the compatible receiver check.
1806   Label receiver_check_failed;
1807   __ Dlsa(t8, sp, a0, kPointerSizeLog2);
1808   __ ld(t0, MemOperand(t8));
1809   CompatibleReceiverCheck(masm, t0, t1, &receiver_check_failed);
1810 
1811   // Get the callback offset from the FunctionTemplateInfo, and jump to the
1812   // beginning of the code.
1813   __ ld(t2, FieldMemOperand(t1, FunctionTemplateInfo::kCallCodeOffset));
1814   __ ld(t2, FieldMemOperand(t2, CallHandlerInfo::kFastHandlerOffset));
1815   __ Daddu(t2, t2, Operand(Code::kHeaderSize - kHeapObjectTag));
1816   __ Jump(t2);
1817 
1818   // Compatible receiver check failed: throw an Illegal Invocation exception.
1819   __ bind(&receiver_check_failed);
1820   // Drop the arguments (including the receiver).
1821   __ Daddu(t8, t8, Operand(kPointerSize));
1822   __ daddu(sp, t8, zero_reg);
1823   __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
1824 }
1825 
1826 static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
1827                                               bool has_handler_frame) {
1828   // Lookup the function in the JavaScript frame.
1829   if (has_handler_frame) {
1830     __ ld(a0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1831     __ ld(a0, MemOperand(a0, JavaScriptFrameConstants::kFunctionOffset));
1832   } else {
1833     __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1834   }
1835 
1836   {
1837     FrameScope scope(masm, StackFrame::INTERNAL);
1838     // Pass function as argument.
1839     __ push(a0);
1840     __ CallRuntime(Runtime::kCompileForOnStackReplacement);
1841   }
1842 
1843   // If the code object is null, just return to the caller.
1844   __ Ret(eq, v0, Operand(Smi::kZero));
1845 
1846   // Drop any potential handler frame that may be sitting on top of the actual
1847   // JavaScript frame. This is the case when OSR is triggered from bytecode.
1848   if (has_handler_frame) {
1849     __ LeaveFrame(StackFrame::STUB);
1850   }
1851 
1852   // Load deoptimization data from the code object.
1853   // <deopt_data> = <code>[#deoptimization_data_offset]
1854   __ ld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));
1855 
1856   // Load the OSR entrypoint offset from the deoptimization data.
1857   // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1858   __ ld(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
1859                                DeoptimizationInputData::kOsrPcOffsetIndex) -
1860                                kHeapObjectTag));
1861   __ SmiUntag(a1);
1862 
1863   // Compute the target address = code_obj + header_size + osr_offset
1864   // <entry_addr> = <code_obj> + #header_size + <osr_offset>
1865   __ daddu(v0, v0, a1);
1866   __ daddiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);
1867 
1868   // And "return" to the OSR entry point of the function.
1869   __ Ret();
1870 }
1871 
1872 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1873   Generate_OnStackReplacementHelper(masm, false);
1874 }
1875 
1876 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
1877   Generate_OnStackReplacementHelper(masm, true);
1878 }
1879 
1880 // static
1881 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1882   // ----------- S t a t e -------------
1883   //  -- a0    : argc
1884   //  -- sp[0] : argArray
1885   //  -- sp[8]  : thisArg
1886   //  -- sp[16] : receiver
1887   // -----------------------------------
1888 
1889   // 1. Load receiver into a1, argArray into a0 (if present), remove all
1890   // arguments from the stack (including the receiver), and push thisArg (if
1891   // present) instead.
1892   {
1893     Label no_arg;
1894     Register scratch = a4;
1895     __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
1896     __ mov(a3, a2);
1897     // Dlsa() cannot be used here, as the scratch value is used later.
1898     __ dsll(scratch, a0, kPointerSizeLog2);
1899     __ Daddu(a0, sp, Operand(scratch));
1900     __ ld(a1, MemOperand(a0));  // receiver
1901     __ Dsubu(a0, a0, Operand(kPointerSize));
1902     __ Branch(&no_arg, lt, a0, Operand(sp));
1903     __ ld(a2, MemOperand(a0));  // thisArg
1904     __ Dsubu(a0, a0, Operand(kPointerSize));
1905     __ Branch(&no_arg, lt, a0, Operand(sp));
1906     __ ld(a3, MemOperand(a0));  // argArray
1907     __ bind(&no_arg);
1908     __ Daddu(sp, sp, Operand(scratch));
1909     __ sd(a2, MemOperand(sp));
1910     __ mov(a0, a3);
1911   }
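  // Editor's sketch (illustrative only): the block above reads the optional
  // arguments with undefined defaults before unwinding the stack:
  //
  //   receiver = sp[argc];
  //   thisArg  = argc >= 1 ? sp[argc - 1] : undefined;
  //   argArray = argc >= 2 ? sp[argc - 2] : undefined;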
1912 
1913   // ----------- S t a t e -------------
1914   //  -- a0    : argArray
1915   //  -- a1    : receiver
1916   //  -- sp[0] : thisArg
1917   // -----------------------------------
1918 
1919   // 2. Make sure the receiver is actually callable.
1920   Label receiver_not_callable;
1921   __ JumpIfSmi(a1, &receiver_not_callable);
1922   __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
1923   __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
1924   __ And(a4, a4, Operand(1 << Map::kIsCallable));
1925   __ Branch(&receiver_not_callable, eq, a4, Operand(zero_reg));
1926 
1927   // 3. Tail call with no arguments if argArray is null or undefined.
1928   Label no_arguments;
1929   __ JumpIfRoot(a0, Heap::kNullValueRootIndex, &no_arguments);
1930   __ JumpIfRoot(a0, Heap::kUndefinedValueRootIndex, &no_arguments);
1931 
1932   // 4a. Apply the receiver to the given argArray (passing undefined for
1933   // new.target).
1934   __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
1935   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1936 
1937   // 4b. The argArray is either null or undefined, so we tail call without any
1938   // arguments to the receiver.
1939   __ bind(&no_arguments);
1940   {
1941     __ mov(a0, zero_reg);
1942     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1943   }
1944 
1945   // 4c. The receiver is not callable, throw an appropriate TypeError.
1946   __ bind(&receiver_not_callable);
1947   {
1948     __ sd(a1, MemOperand(sp));
1949     __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1950   }
1951 }
1952 
1953 // static
1954 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1955   // 1. Make sure we have at least one argument.
1956   // a0: actual number of arguments
1957   {
1958     Label done;
1959     __ Branch(&done, ne, a0, Operand(zero_reg));
1960     __ PushRoot(Heap::kUndefinedValueRootIndex);
1961     __ Daddu(a0, a0, Operand(1));
1962     __ bind(&done);
1963   }
1964 
1965   // 2. Get the function to call (passed as receiver) from the stack.
1966   // a0: actual number of arguments
1967   __ Dlsa(at, sp, a0, kPointerSizeLog2);
1968   __ ld(a1, MemOperand(at));
1969 
1970   // 3. Shift arguments and return address one slot down on the stack
1971   //    (overwriting the original receiver).  Adjust argument count to make
1972   //    the original first argument the new receiver.
1973   // a0: actual number of arguments
1974   // a1: function
1975   {
1976     Label loop;
1977     // Calculate the copy start address (destination). Copy end address is sp.
1978     __ Dlsa(a2, sp, a0, kPointerSizeLog2);
1979 
1980     __ bind(&loop);
1981     __ ld(at, MemOperand(a2, -kPointerSize));
1982     __ sd(at, MemOperand(a2));
1983     __ Dsubu(a2, a2, Operand(kPointerSize));
1984     __ Branch(&loop, ne, a2, Operand(sp));
1985     // Adjust the actual number of arguments and remove the top element
1986     // (which is a copy of the last argument).
1987     __ Dsubu(a0, a0, Operand(1));
1988     __ Pop();
1989   }
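  // Editor's sketch (illustrative only): the loop above is a simple
  // slot-by-slot shift toward higher addresses:
  //
  //   for (void** p = sp + argc; p != sp; --p) *p = *(p - 1);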
1990 
1991   // 4. Call the callable.
1992   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1993 }
1994 
1995 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1996   // ----------- S t a t e -------------
1997   //  -- a0     : argc
1998   //  -- sp[0]  : argumentsList
1999   //  -- sp[8]  : thisArgument
2000   //  -- sp[16] : target
2001   //  -- sp[24] : receiver
2002   // -----------------------------------
2003 
2004   // 1. Load target into a1 (if present), argumentsList into a0 (if present),
2005   // remove all arguments from the stack (including the receiver), and push
2006   // thisArgument (if present) instead.
2007   {
2008     Label no_arg;
2009     Register scratch = a4;
2010     __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2011     __ mov(a2, a1);
2012     __ mov(a3, a1);
2013     __ dsll(scratch, a0, kPointerSizeLog2);
2014     __ mov(a0, scratch);
2015     __ Dsubu(a0, a0, Operand(kPointerSize));
2016     __ Branch(&no_arg, lt, a0, Operand(zero_reg));
2017     __ Daddu(a0, sp, Operand(a0));
2018     __ ld(a1, MemOperand(a0));  // target
2019     __ Dsubu(a0, a0, Operand(kPointerSize));
2020     __ Branch(&no_arg, lt, a0, Operand(sp));
2021     __ ld(a2, MemOperand(a0));  // thisArgument
2022     __ Dsubu(a0, a0, Operand(kPointerSize));
2023     __ Branch(&no_arg, lt, a0, Operand(sp));
2024     __ ld(a3, MemOperand(a0));  // argumentsList
2025     __ bind(&no_arg);
2026     __ Daddu(sp, sp, Operand(scratch));
2027     __ sd(a2, MemOperand(sp));
2028     __ mov(a0, a3);
2029   }
2030 
2031   // ----------- S t a t e -------------
2032   //  -- a0    : argumentsList
2033   //  -- a1    : target
2034   //  -- sp[0] : thisArgument
2035   // -----------------------------------
2036 
2037   // 2. Make sure the target is actually callable.
2038   Label target_not_callable;
2039   __ JumpIfSmi(a1, &target_not_callable);
2040   __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
2041   __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
2042   __ And(a4, a4, Operand(1 << Map::kIsCallable));
2043   __ Branch(&target_not_callable, eq, a4, Operand(zero_reg));
2044 
2045   // 3a. Apply the target to the given argumentsList (passing undefined for
2046   // new.target).
2047   __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
2048   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
2049 
2050   // 3b. The target is not callable, throw an appropriate TypeError.
2051   __ bind(&target_not_callable);
2052   {
2053     __ sd(a1, MemOperand(sp));
2054     __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
2055   }
2056 }
2057 
2058 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
2059   // ----------- S t a t e -------------
2060   //  -- a0     : argc
2061   //  -- sp[0]  : new.target (optional)
2062   //  -- sp[8]  : argumentsList
2063   //  -- sp[16] : target
2064   //  -- sp[24] : receiver
2065   // -----------------------------------
2066 
2067   // 1. Load target into a1 (if present), argumentsList into a0 (if present),
2068   // new.target into a3 (if present, otherwise use target), remove all
2069   // arguments from the stack (including the receiver), and push undefined as
2070   // the receiver instead.
2071   {
2072     Label no_arg;
2073     Register scratch = a4;
2074     __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2075     __ mov(a2, a1);
2076     // Dlsa() cannot be used here, as the scratch value is used later.
2077     __ dsll(scratch, a0, kPointerSizeLog2);
2078     __ Daddu(a0, sp, Operand(scratch));
2079     __ sd(a2, MemOperand(a0));  // receiver
2080     __ Dsubu(a0, a0, Operand(kPointerSize));
2081     __ Branch(&no_arg, lt, a0, Operand(sp));
2082     __ ld(a1, MemOperand(a0));  // target
2083     __ mov(a3, a1);             // new.target defaults to target
2084     __ Dsubu(a0, a0, Operand(kPointerSize));
2085     __ Branch(&no_arg, lt, a0, Operand(sp));
2086     __ ld(a2, MemOperand(a0));  // argumentsList
2087     __ Dsubu(a0, a0, Operand(kPointerSize));
2088     __ Branch(&no_arg, lt, a0, Operand(sp));
2089     __ ld(a3, MemOperand(a0));  // new.target
2090     __ bind(&no_arg);
2091     __ Daddu(sp, sp, Operand(scratch));
2092     __ mov(a0, a2);
2093   }
2094 
2095   // ----------- S t a t e -------------
2096   //  -- a0    : argumentsList
2097   //  -- a3    : new.target
2098   //  -- a1    : target
2099   //  -- sp[0] : receiver (undefined)
2100   // -----------------------------------
2101 
2102   // 2. Make sure the target is actually a constructor.
2103   Label target_not_constructor;
2104   __ JumpIfSmi(a1, &target_not_constructor);
2105   __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
2106   __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
2107   __ And(a4, a4, Operand(1 << Map::kIsConstructor));
2108   __ Branch(&target_not_constructor, eq, a4, Operand(zero_reg));
2109 
2110   // 3. Make sure the new.target is actually a constructor.
2111   Label new_target_not_constructor;
2112   __ JumpIfSmi(a3, &new_target_not_constructor);
2113   __ ld(a4, FieldMemOperand(a3, HeapObject::kMapOffset));
2114   __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
2115   __ And(a4, a4, Operand(1 << Map::kIsConstructor));
2116   __ Branch(&new_target_not_constructor, eq, a4, Operand(zero_reg));
2117 
2118   // 4a. Construct the target with the given new.target and argumentsList.
2119   __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
2120 
2121   // 4b. The target is not a constructor, throw an appropriate TypeError.
2122   __ bind(&target_not_constructor);
2123   {
2124     __ sd(a1, MemOperand(sp));
2125     __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
2126   }
2127 
2128   // 4c. The new.target is not a constructor, throw an appropriate TypeError.
2129   __ bind(&new_target_not_constructor);
2130   {
2131     __ sd(a3, MemOperand(sp));
2132     __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
2133   }
2134 }
2135 
2136 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
2137   // Smi-tag the argument count; 64-bit Smis live in the upper 32 bits.
2138   __ dsll32(a0, a0, 0);
2139   __ li(a4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2140   __ MultiPush(a0.bit() | a1.bit() | a4.bit() | fp.bit() | ra.bit());
2141   __ Daddu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
2142                            kPointerSize));
2143 }
2144 
2145 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
2146   // ----------- S t a t e -------------
2147   //  -- v0 : result being passed through
2148   // -----------------------------------
2149   // Get the number of arguments passed (as a Smi), tear down the frame, and
2150   // then drop the parameters.
2151   __ ld(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
2152                              kPointerSize)));
2153   __ mov(sp, fp);
2154   __ MultiPop(fp.bit() | ra.bit());
2155   __ SmiScale(a4, a1, kPointerSizeLog2);
2156   __ Daddu(sp, sp, a4);
2157   // Adjust for the receiver.
2158   __ Daddu(sp, sp, Operand(kPointerSize));
2159 }
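// Editor's sketch (illustrative only; assumes <cstdint> types in scope): the
// teardown above restores sp from fp, pops fp/ra, then drops the Smi-encoded
// argument count worth of slots plus one more for the receiver.
static inline void** ModelLeaveAdaptorFrame(void** fp, int64_t smi_argc) {
  void** sp = fp;                  // mov sp, fp
  sp += 2;                         // MultiPop fp, ra
  int64_t argc = smi_argc >> 32;   // untag a 64-bit Smi
  return sp + argc + 1;            // skip the arguments and the receiver
}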
2160 
2161 // static
2162 void Builtins::Generate_Apply(MacroAssembler* masm) {
2163   // ----------- S t a t e -------------
2164   //  -- a0    : argumentsList
2165   //  -- a1    : target
2166   //  -- a3    : new.target (checked to be constructor or undefined)
2167   //  -- sp[0] : thisArgument
2168   // -----------------------------------
2169 
2170   // Create the list of arguments from the array-like argumentsList.
2171   {
2172     Label create_arguments, create_array, create_runtime, done_create;
2173     __ JumpIfSmi(a0, &create_runtime);
2174 
2175     // Load the map of argumentsList into a2.
2176     __ ld(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
2177 
2178     // Load native context into a4.
2179     __ ld(a4, NativeContextMemOperand());
2180 
2181     // Check if argumentsList is an (unmodified) arguments object.
2182     __ ld(at, ContextMemOperand(a4, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
2183     __ Branch(&create_arguments, eq, a2, Operand(at));
2184     __ ld(at, ContextMemOperand(a4, Context::STRICT_ARGUMENTS_MAP_INDEX));
2185     __ Branch(&create_arguments, eq, a2, Operand(at));
2186 
2187     // Check if argumentsList is a fast JSArray.
2188     __ ld(v0, FieldMemOperand(a2, HeapObject::kMapOffset));
2189     __ lbu(v0, FieldMemOperand(v0, Map::kInstanceTypeOffset));
2190     __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE));
2191 
2192     // Ask the runtime to create the list (actually a FixedArray).
2193     __ bind(&create_runtime);
2194     {
2195       FrameScope scope(masm, StackFrame::INTERNAL);
2196       __ Push(a1, a3, a0);
2197       __ CallRuntime(Runtime::kCreateListFromArrayLike);
2198       __ mov(a0, v0);
2199       __ Pop(a1, a3);
2200       __ ld(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
2201       __ SmiUntag(a2);
2202     }
2203     __ Branch(&done_create);
2204 
2205     // Try to create the list from an arguments object.
2206     __ bind(&create_arguments);
2207     __ ld(a2, FieldMemOperand(a0, JSArgumentsObject::kLengthOffset));
2208     __ ld(a4, FieldMemOperand(a0, JSObject::kElementsOffset));
2209     __ ld(at, FieldMemOperand(a4, FixedArray::kLengthOffset));
2210     __ Branch(&create_runtime, ne, a2, Operand(at));
2211     __ SmiUntag(a2);
2212     __ mov(a0, a4);
2213     __ Branch(&done_create);
2214 
2215     // Try to create the list from a JSArray object.
2216     __ bind(&create_array);
2217     __ ld(a2, FieldMemOperand(a2, Map::kBitField2Offset));
2218     __ DecodeField<Map::ElementsKindBits>(a2);
2219     STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2220     STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
2221     STATIC_ASSERT(FAST_ELEMENTS == 2);
2222     __ Branch(&create_runtime, hi, a2, Operand(FAST_ELEMENTS));
2223     __ Branch(&create_runtime, eq, a2, Operand(FAST_HOLEY_SMI_ELEMENTS));
2224     __ ld(a2, FieldMemOperand(a0, JSArray::kLengthOffset));
2225     __ ld(a0, FieldMemOperand(a0, JSArray::kElementsOffset));
2226     __ SmiUntag(a2);
2227 
2228     __ bind(&done_create);
2229   }
2230 
2231   // Check for stack overflow.
2232   {
2233     // Check the stack for overflow. We are not trying to catch interruptions
2234     // (i.e. debug break and preemption) here, so check the "real stack limit".
2235     Label done;
2236     __ LoadRoot(a4, Heap::kRealStackLimitRootIndex);
2237     // Make a4 the space we have left. The stack might already be overflowed
2238     // here, which will cause a4 to become negative.
2239     __ Dsubu(a4, sp, a4);
2240     // Check if the arguments will overflow the stack.
2241     __ dsll(at, a2, kPointerSizeLog2);
2242     __ Branch(&done, gt, a4, Operand(at));  // Signed comparison.
2243     __ TailCallRuntime(Runtime::kThrowStackOverflow);
2244     __ bind(&done);
2245   }
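  // Editor's sketch (illustrative only): the check above is a signed compare
  // of the space left against the space the len arguments will need:
  //
  //   bool overflow = (intptr_t)(sp - real_stack_limit) <= len * kPointerSize;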
2246 
2247   // ----------- S t a t e -------------
2248   //  -- a1    : target
2249   //  -- a0    : args (a FixedArray built from argumentsList)
2250   //  -- a2    : len (number of elements to push from args)
2251   //  -- a3    : new.target (checked to be constructor or undefined)
2252   //  -- sp[0] : thisArgument
2253   // -----------------------------------
2254 
2255   // Push arguments onto the stack (thisArgument is already on the stack).
2256   {
2257     __ mov(a4, zero_reg);
2258     Label done, loop;
2259     __ bind(&loop);
2260     __ Branch(&done, eq, a4, Operand(a2));
2261     __ Dlsa(at, a0, a4, kPointerSizeLog2);
2262     __ ld(at, FieldMemOperand(at, FixedArray::kHeaderSize));
2263     __ Push(at);
2264     __ Daddu(a4, a4, Operand(1));
2265     __ Branch(&loop);
2266     __ bind(&done);
2267     __ Move(a0, a4);
2268   }
2269 
2270   // Dispatch to Call or Construct depending on whether new.target is undefined.
2271   {
2272     Label construct;
2273     __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
2274     __ Branch(&construct, ne, a3, Operand(at));
2275     __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2276     __ bind(&construct);
2277     __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2278   }
2279 }
2280 
2281 namespace {
2282 
2283 // Drops the top JavaScript frame and an arguments adaptor frame below it (if
2284 // present), preserving all the arguments prepared for the current call.
2285 // Does nothing if tail call elimination is disabled, e.g. while debugging.
2286 // ES6 14.6.3. PrepareForTailCall
2287 //
2288 // Stack structure for the function g() tail calling f():
2289 //
2290 // ------- Caller frame: -------
2291 // |  ...
2292 // |  g()'s arg M
2293 // |  ...
2294 // |  g()'s arg 1
2295 // |  g()'s receiver arg
2296 // |  g()'s caller pc
2297 // ------- g()'s frame: -------
2298 // |  g()'s caller fp      <- fp
2299 // |  g()'s context
2300 // |  function pointer: g
2301 // |  -------------------------
2302 // |  ...
2303 // |  ...
2304 // |  f()'s arg N
2305 // |  ...
2306 // |  f()'s arg 1
2307 // |  f()'s receiver arg   <- sp (f()'s caller pc is not on the stack yet!)
2308 // ----------------------
2309 //
2310 void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
2311                         Register scratch1, Register scratch2,
2312                         Register scratch3) {
2313   DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
2314   Comment cmnt(masm, "[ PrepareForTailCall");
2315 
2316   // Prepare for tail call only if ES2015 tail call elimination is enabled.
2317   Label done;
2318   ExternalReference is_tail_call_elimination_enabled =
2319       ExternalReference::is_tail_call_elimination_enabled_address(
2320           masm->isolate());
2321   __ li(at, Operand(is_tail_call_elimination_enabled));
2322   __ lb(scratch1, MemOperand(at));
2323   __ Branch(&done, eq, scratch1, Operand(zero_reg));
2324 
2325   // Drop possible interpreter handler/stub frame.
2326   {
2327     Label no_interpreter_frame;
2328     __ ld(scratch3,
2329           MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
2330     __ Branch(&no_interpreter_frame, ne, scratch3,
2331               Operand(Smi::FromInt(StackFrame::STUB)));
2332     __ ld(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2333     __ bind(&no_interpreter_frame);
2334   }
2335 
2336   // Check if next frame is an arguments adaptor frame.
2337   Register caller_args_count_reg = scratch1;
2338   Label no_arguments_adaptor, formal_parameter_count_loaded;
2339   __ ld(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2340   __ ld(scratch3,
2341         MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
2342   __ Branch(&no_arguments_adaptor, ne, scratch3,
2343             Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2344 
2345   // Drop current frame and load arguments count from arguments adaptor frame.
2346   __ mov(fp, scratch2);
2347   __ ld(caller_args_count_reg,
2348         MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
2349   __ SmiUntag(caller_args_count_reg);
2350   __ Branch(&formal_parameter_count_loaded);
2351 
2352   __ bind(&no_arguments_adaptor);
2353   // Load caller's formal parameter count
2354   __ ld(scratch1,
2355         MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
2356   __ ld(scratch1,
2357         FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
2358   __ lw(caller_args_count_reg,
2359         FieldMemOperand(scratch1,
2360                         SharedFunctionInfo::kFormalParameterCountOffset));
2361 
2362   __ bind(&formal_parameter_count_loaded);
2363 
2364   ParameterCount callee_args_count(args_reg);
2365   __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
2366                         scratch3);
2367   __ bind(&done);
2368 }
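// Editor's sketch (illustrative only): the code above merely selects how many
// caller arguments the PrepareForTailCall macro (not shown in this file) will
// drop, preferring the adapted count when an adaptor frame is present.
static inline int ModelCallerArgsCount(bool has_adaptor_frame,
                                       int adapted_count,
                                       int formal_parameter_count) {
  return has_adaptor_frame ? adapted_count           // from the adaptor frame
                           : formal_parameter_count;  // from the callee's SFI
}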
2369 }  // namespace
2370 
2371 // static
2372 void Builtins::Generate_CallFunction(MacroAssembler* masm,
2373                                      ConvertReceiverMode mode,
2374                                      TailCallMode tail_call_mode) {
2375   // ----------- S t a t e -------------
2376   //  -- a0 : the number of arguments (not including the receiver)
2377   //  -- a1 : the function to call (checked to be a JSFunction)
2378   // -----------------------------------
2379   __ AssertFunction(a1);
2380 
2381   // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2382   // Check that function is not a "classConstructor".
2383   Label class_constructor;
2384   __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
2385   __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFunctionKindByteOffset));
2386   __ And(at, a3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte));
2387   __ Branch(&class_constructor, ne, at, Operand(zero_reg));
2388 
2389   // Enter the context of the function; ToObject has to run in the function
2390   // context, and we also need to take the global proxy from the function
2391   // context in case of conversion.
2392   STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
2393                 SharedFunctionInfo::kStrictModeByteOffset);
2394   __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
2395   // We need to convert the receiver for non-native sloppy mode functions.
2396   Label done_convert;
2397   __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset));
2398   __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
2399                          (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
2400   __ Branch(&done_convert, ne, at, Operand(zero_reg));
2401   {
2402     // ----------- S t a t e -------------
2403     //  -- a0 : the number of arguments (not including the receiver)
2404     //  -- a1 : the function to call (checked to be a JSFunction)
2405     //  -- a2 : the shared function info.
2406     //  -- cp : the function context.
2407     // -----------------------------------
2408 
2409     if (mode == ConvertReceiverMode::kNullOrUndefined) {
2410       // Patch receiver to global proxy.
2411       __ LoadGlobalProxy(a3);
2412     } else {
2413       Label convert_to_object, convert_receiver;
2414       __ Dlsa(at, sp, a0, kPointerSizeLog2);
2415       __ ld(a3, MemOperand(at));
2416       __ JumpIfSmi(a3, &convert_to_object);
2417       STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2418       __ GetObjectType(a3, a4, a4);
2419       __ Branch(&done_convert, hs, a4, Operand(FIRST_JS_RECEIVER_TYPE));
2420       if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2421         Label convert_global_proxy;
2422         __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex,
2423                       &convert_global_proxy);
2424         __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object);
2425         __ bind(&convert_global_proxy);
2426         {
2427           // Patch receiver to global proxy.
2428           __ LoadGlobalProxy(a3);
2429         }
2430         __ Branch(&convert_receiver);
2431       }
2432       __ bind(&convert_to_object);
2433       {
2434         // Convert receiver using ToObject.
2435         // TODO(bmeurer): Inline the allocation here to avoid building the frame
2436         // in the fast case? (fall back to AllocateInNewSpace?)
2437         FrameScope scope(masm, StackFrame::INTERNAL);
2438         __ SmiTag(a0);
2439         __ Push(a0, a1);
2440         __ mov(a0, a3);
2441         __ Push(cp);
2442         __ Call(masm->isolate()->builtins()->ToObject(),
2443                 RelocInfo::CODE_TARGET);
2444         __ Pop(cp);
2445         __ mov(a3, v0);
2446         __ Pop(a0, a1);
2447         __ SmiUntag(a0);
2448       }
2449       __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
2450       __ bind(&convert_receiver);
2451     }
2452     __ Dlsa(at, sp, a0, kPointerSizeLog2);
2453     __ sd(a3, MemOperand(at));
2454   }
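  // Editor's sketch (illustrative only): the receiver conversion above
  // implements, in effect,
  //
  //   if (mode == kNullOrUndefined)          receiver = global_proxy;
  //   else if (IsJSReceiver(receiver))       ;  // keep as-is
  //   else if (mode != kNotNullOrUndefined &&
  //            IsNullOrUndefined(receiver))  receiver = global_proxy;
  //   else                                   receiver = ToObject(receiver);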
2455   __ bind(&done_convert);
2456 
2457   // ----------- S t a t e -------------
2458   //  -- a0 : the number of arguments (not including the receiver)
2459   //  -- a1 : the function to call (checked to be a JSFunction)
2460   //  -- a2 : the shared function info.
2461   //  -- cp : the function context.
2462   // -----------------------------------
2463 
2464   if (tail_call_mode == TailCallMode::kAllow) {
2465     PrepareForTailCall(masm, a0, t0, t1, t2);
2466   }
2467 
2468   __ lw(a2,
2469         FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
2470   ParameterCount actual(a0);
2471   ParameterCount expected(a2);
2472   __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION,
2473                         CheckDebugStepCallWrapper());
2474 
2475   // The function is a "classConstructor", need to raise an exception.
2476   __ bind(&class_constructor);
2477   {
2478     FrameScope frame(masm, StackFrame::INTERNAL);
2479     __ Push(a1);
2480     __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2481   }
2482 }
2483 
2484 // static
2485 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
2486                                               TailCallMode tail_call_mode) {
2487   // ----------- S t a t e -------------
2488   //  -- a0 : the number of arguments (not including the receiver)
2489   //  -- a1 : the function to call (checked to be a JSBoundFunction)
2490   // -----------------------------------
2491   __ AssertBoundFunction(a1);
2492 
2493   if (tail_call_mode == TailCallMode::kAllow) {
2494     PrepareForTailCall(masm, a0, t0, t1, t2);
2495   }
2496 
2497   // Patch the receiver to [[BoundThis]].
2498   {
2499     __ ld(at, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset));
2500     __ Dlsa(a4, sp, a0, kPointerSizeLog2);
2501     __ sd(at, MemOperand(a4));
2502   }
2503 
2504   // Load [[BoundArguments]] into a2 and length of that into a4.
2505   __ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
2506   __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
2507   __ SmiUntag(a4);
2508 
2509   // ----------- S t a t e -------------
2510   //  -- a0 : the number of arguments (not including the receiver)
2511   //  -- a1 : the function to call (checked to be a JSBoundFunction)
2512   //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
2513   //  -- a4 : the number of [[BoundArguments]]
2514   // -----------------------------------
2515 
2516   // Reserve stack space for the [[BoundArguments]].
2517   {
2518     Label done;
2519     __ dsll(a5, a4, kPointerSizeLog2);
2520     __ Dsubu(sp, sp, Operand(a5));
2521     // Check the stack for overflow. We are not trying to catch interruptions
2522     // (i.e. debug break and preemption) here, so check the "real stack limit".
2523     __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
2524     __ Branch(&done, gt, sp, Operand(at));  // Signed comparison.
2525     // Restore the stack pointer.
2526     __ Daddu(sp, sp, Operand(a5));
2527     {
2528       FrameScope scope(masm, StackFrame::MANUAL);
2529       __ EnterFrame(StackFrame::INTERNAL);
2530       __ CallRuntime(Runtime::kThrowStackOverflow);
2531     }
2532     __ bind(&done);
2533   }
2534 
2535   // Relocate arguments down the stack.
2536   {
2537     Label loop, done_loop;
2538     __ mov(a5, zero_reg);
2539     __ bind(&loop);
2540     __ Branch(&done_loop, gt, a5, Operand(a0));
2541     __ Dlsa(a6, sp, a4, kPointerSizeLog2);
2542     __ ld(at, MemOperand(a6));
2543     __ Dlsa(a6, sp, a5, kPointerSizeLog2);
2544     __ sd(at, MemOperand(a6));
2545     __ Daddu(a4, a4, Operand(1));
2546     __ Daddu(a5, a5, Operand(1));
2547     __ Branch(&loop);
2548     __ bind(&done_loop);
2549   }
2550 
2551   // Copy [[BoundArguments]] to the stack (below the arguments).
2552   {
2553     Label loop, done_loop;
2554     __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
2555     __ SmiUntag(a4);
2556     __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2557     __ bind(&loop);
2558     __ Dsubu(a4, a4, Operand(1));
2559     __ Branch(&done_loop, lt, a4, Operand(zero_reg));
2560     __ Dlsa(a5, a2, a4, kPointerSizeLog2);
2561     __ ld(at, MemOperand(a5));
2562     __ Dlsa(a5, sp, a0, kPointerSizeLog2);
2563     __ sd(at, MemOperand(a5));
2564     __ Daddu(a0, a0, Operand(1));
2565     __ Branch(&loop);
2566     __ bind(&done_loop);
2567   }
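  // Editor's sketch (illustrative only): with sp already lowered by
  // bound_count slots, the two loops above amount to
  //
  //   for (int i = 0; i <= argc; ++i)             // args + receiver slot
  //     sp[i] = sp[i + bound_count];              // relocate down
  //   int slot = argc;                            // a0
  //   for (int i = bound_count - 1; i >= 0; --i)
  //     sp[slot++] = bound[i];                    // bound arg 1 ends deepest
  //   argc = slot;                                // now argc + bound_count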
2568 
2569   // Call the [[BoundTargetFunction]] via the Call builtin.
2570   __ ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
2571   __ li(at, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
2572                                       masm->isolate())));
2573   __ ld(at, MemOperand(at));
2574   __ Daddu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
2575   __ Jump(at);
2576 }
2577 
2578 // static
2579 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
2580                              TailCallMode tail_call_mode) {
2581   // ----------- S t a t e -------------
2582   //  -- a0 : the number of arguments (not including the receiver)
2583   //  -- a1 : the target to call (can be any Object).
2584   // -----------------------------------
2585 
2586   Label non_callable, non_function, non_smi;
2587   __ JumpIfSmi(a1, &non_callable);
2588   __ bind(&non_smi);
2589   __ GetObjectType(a1, t1, t2);
2590   __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
2591           RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));
2592   __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
2593           RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));
2594 
2595   // Check if target has a [[Call]] internal method.
2596   __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
2597   __ And(t1, t1, Operand(1 << Map::kIsCallable));
2598   __ Branch(&non_callable, eq, t1, Operand(zero_reg));
2599 
2600   __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE));
2601 
2602   // 0. Prepare for tail call if necessary.
2603   if (tail_call_mode == TailCallMode::kAllow) {
2604     PrepareForTailCall(masm, a0, t0, t1, t2);
2605   }
2606 
2607   // 1. Runtime fallback for Proxy [[Call]].
2608   __ Push(a1);
2609   // Increase the arguments size to include the pushed function and the
2610   // existing receiver on the stack.
2611   __ Daddu(a0, a0, 2);
2612   // Tail-call to the runtime.
2613   __ JumpToExternalReference(
2614       ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
2615 
2616   // 2. Call to something else, which might have a [[Call]] internal method (if
2617   // not we raise an exception).
2618   __ bind(&non_function);
2619   // Overwrite the original receiver with the (original) target.
2620   __ Dlsa(at, sp, a0, kPointerSizeLog2);
2621   __ sd(a1, MemOperand(at));
2622   // Let the "call_as_function_delegate" take care of the rest.
2623   __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1);
2624   __ Jump(masm->isolate()->builtins()->CallFunction(
2625               ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
2626           RelocInfo::CODE_TARGET);
2627 
2628   // 3. Call to something that is not callable.
2629   __ bind(&non_callable);
2630   {
2631     FrameScope scope(masm, StackFrame::INTERNAL);
2632     __ Push(a1);
2633     __ CallRuntime(Runtime::kThrowCalledNonCallable);
2634   }
2635 }
2636 
2637 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2638   // ----------- S t a t e -------------
2639   //  -- a0 : the number of arguments (not including the receiver)
2640   //  -- a1 : the constructor to call (checked to be a JSFunction)
2641   //  -- a3 : the new target (checked to be a constructor)
2642   // -----------------------------------
2643   __ AssertFunction(a1);
2644 
2645   // The calling convention for function-specific ConstructStubs requires
2646   // a2 to contain either an AllocationSite or undefined.
2647   __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2648 
2649   // Tail call to the function-specific construct stub (still in the caller
2650   // context at this point).
2651   __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
2652   __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset));
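  // The construct stub is a Code object: its first instruction lives
  // kHeaderSize bytes past the start of the object, and the pointer in a4 is
  // still tagged, hence the (kHeaderSize - kHeapObjectTag) adjustment below.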
  __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}

// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(a1);

  // Load [[BoundArguments]] into a2 and its length into a4.
  __ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
  __ SmiUntag(a4);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
  //  -- a3 : the new target (checked to be a constructor)
  //  -- a4 : the number of [[BoundArguments]]
  // -----------------------------------

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ dsll(a5, a4, kPointerSizeLog2);
    __ Dsubu(sp, sp, Operand(a5));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
    __ Branch(&done, gt, sp, Operand(at));  // Signed comparison.
    // Restore the stack pointer.
    __ Daddu(sp, sp, Operand(a5));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }
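  // The check works by optimistically lowering sp by the full
  // [[BoundArguments]] size (e.g. 32 bytes for 4 bound arguments) and
  // comparing against the real stack limit: only if the lowered sp is still
  // above the limit does execution proceed; otherwise sp is restored and a
  // stack overflow is thrown.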

  // Relocate arguments down the stack.
  {
    Label loop, done_loop;
    __ mov(a5, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, ge, a5, Operand(a0));
    __ Dlsa(a6, sp, a4, kPointerSizeLog2);
    __ ld(at, MemOperand(a6));
    __ Dlsa(a6, sp, a5, kPointerSizeLog2);
    __ sd(at, MemOperand(a6));
    __ Daddu(a4, a4, Operand(1));
    __ Daddu(a5, a5, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }
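  // The a0 existing arguments are copied from their old slots (which start at
  // sp plus the reserved size after the adjustment above) down to the new top
  // of the stack. This leaves a gap of a4 slots between the relocated
  // arguments and the receiver, which the loop below fills with the
  // [[BoundArguments]].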

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ SmiUntag(a4);
    __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    __ bind(&loop);
    __ Dsubu(a4, a4, Operand(1));
    __ Branch(&done_loop, lt, a4, Operand(zero_reg));
    __ Dlsa(a5, a2, a4, kPointerSizeLog2);
    __ ld(at, MemOperand(a5));
    __ Dlsa(a5, sp, a0, kPointerSizeLog2);
    __ sd(at, MemOperand(a5));
    __ Daddu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }
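  // The bound arguments are walked from the last index down while the target
  // slot (sp + a0 * kPointerSize) moves away from the stack top, so bound
  // argument 0 lands in the slot next to the receiver, i.e. it precedes the
  // call-site arguments, as bound function semantics require. a0 is bumped
  // along the way and now counts bound plus original arguments.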

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label skip_load;
    __ Branch(&skip_load, ne, a1, Operand(a3));
    __ ld(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&skip_load);
  }
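  // Rationale: when the bound function itself was the original constructor,
  // new.target must be forwarded as the bound target function so the new
  // object is created with the target's prototype; an unrelated new.target
  // (e.g. from further up a construct chain) is left untouched.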

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ li(at, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ ld(at, MemOperand(at));
  __ Daddu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}

// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (checked to be a JSProxy)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(a1, a3);
  // Include the pushed new_target, constructor and the receiver.
  __ Daddu(a0, a0, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
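  // As in the proxy [[Call]] path above, a0 is bumped so that it counts
  // everything on the argument stack: the receiver, the original arguments,
  // and the freshly pushed constructor and new.target.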
}

// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (can be any Object)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(a1, &non_constructor);

  // Dispatch based on instance type.
  __ ld(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset));
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));

  // Check if target has a [[Construct]] internal method.
  __ lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t3, t3, Operand(1 << Map::kIsConstructor));
  __ Branch(&non_constructor, eq, t3, Operand(zero_reg));

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Only dispatch to proxies after checking whether they are constructors.
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq, t2, Operand(JS_PROXY_TYPE));

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ Dlsa(at, sp, a0, kPointerSizeLog2);
    __ sd(a1, MemOperand(at));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : requested object size (untagged)
  //  -- ra : return address
  // -----------------------------------
  __ SmiTag(a0);
  __ Push(a0);
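  // This builtin runs without a JavaScript context; loading a zero Smi into
  // cp marks the context register as "no context" for the runtime call (the
  // same convention the old-space and Abort variants below follow).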
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}

// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : requested object size (untagged)
  //  -- ra : return address
  // -----------------------------------
  __ SmiTag(a0);
  __ Move(a1, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ Push(a0, a1);
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}

// static
void Builtins::Generate_Abort(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : message_id as Smi
  //  -- ra : return address
  // -----------------------------------
  __ Push(a0);
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAbort);
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  //  -- a3: new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ Branch(&dont_adapt_arguments, eq, a2,
            Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // Use an unsigned (Uless) comparison; the argument counts are never
  // negative.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments (tagged as a smi on frame entry)
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, a2, a5, at, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into a4.
    __ SmiScale(a0, a0, kPointerSizeLog2);
    __ Daddu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Daddu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ dsll(a4, a2, kPointerSizeLog2);
    __ dsubu(a4, a0, a4);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // a4: copy end address

    Label copy;
    __ bind(&copy);
    __ ld(a5, MemOperand(a0));
    __ push(a5);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a4));
    __ daddiu(a0, a0, -kPointerSize);  // In delay slot.
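    // The loop above copies expected + 1 slots (the receiver plus the
    // expected arguments) from the caller's frame, walking from the receiver
    // downwards. USE_DELAY_SLOT places the daddiu in the MIPS branch delay
    // slot, so the pointer decrement executes whether or not the branch is
    // taken.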

    __ jmp(&invoke);
  }

  {  // Too few parameters: actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, a2, a5, at, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into a7.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ SmiScale(a0, a0, kPointerSizeLog2);
    __ Daddu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Daddu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Daddu(a7, fp, kPointerSize);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // a7: copy end address
    Label copy;
    __ bind(&copy);
    __ ld(a4, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Dsubu(sp, sp, kPointerSize);
    __ Dsubu(a0, a0, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a7));
    __ sd(a4, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ LoadRoot(a5, Heap::kUndefinedValueRootIndex);
    __ dsll(a6, a2, kPointerSizeLog2);
    __ Dsubu(a4, fp, Operand(a6));
    // Adjust for frame.
    __ Dsubu(a4, a4, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                             2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ Dsubu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a4));
    __ sd(a5, MemOperand(sp));
  }
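  // In the too-few case the copy loop stops at fp + kPointerSize, i.e. once
  // the receiver and all actual arguments have been copied; the fill loop
  // then pushes undefined until sp reaches the slot computed for the last
  // expected argument, so the callee sees exactly `expected` arguments.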

  // Call the entry point.
  __ bind(&invoke);
  __ mov(a0, a2);
  // a0 : expected number of arguments
  // a1 : function (passed through to callee)
  // a3 : new target (passed through to callee)
  __ ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Call(a4);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Jump(a4);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ break_(0xCC);
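    // Unreachable: the runtime call above throws and does not return; the
    // break instruction documents (and traps on) any unexpected fall-through.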
  }
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS64