// NOTE: code-browser navigation chrome ("Home / Line# / Scopes / Navigate /
// Raw / Download") was accidentally captured here and has been removed.
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #if V8_TARGET_ARCH_MIPS64
6 
7 #include "src/codegen.h"
8 #include "src/debug/debug.h"
9 #include "src/deoptimizer.h"
10 #include "src/full-codegen/full-codegen.h"
11 #include "src/runtime/runtime.h"
12 
13 namespace v8 {
14 namespace internal {
15 
16 
17 #define __ ACCESS_MASM(masm)
18 
19 
// Adaptor from a JS calling convention into a C++ builtin: optionally pushes
// the target and/or new.target as extra arguments, fixes up the argument
// count in a0, and tail-calls the C++ function identified by |id|.
void Builtins::Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : target
  //  -- a3                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument
  //  -- sp[8 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(a1);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // Insert extra arguments.
  int num_extra_args = 0;
  switch (extra_args) {
    case BuiltinExtraArguments::kTarget:
      __ Push(a1);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kNewTarget:
      __ Push(a3);
      ++num_extra_args;
      break;
    case BuiltinExtraArguments::kTargetAndNewTarget:
      __ Push(a1, a3);
      num_extra_args += 2;
      break;
    case BuiltinExtraArguments::kNone:
      break;
  }

  // JumpToExternalReference expects a0 to contain the number of arguments
  // including the receiver and the extra arguments.
  __ Daddu(a0, a0, num_extra_args + 1);

  __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
}
65 
66 
// Load the built-in InternalArray function from the current context into
// |result|.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}
73 
74 
// Load the built-in Array function from the current context into |result|.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}
80 
81 
// Entry point used when the InternalArray function is invoked as a normal
// call; verifies the initial map in debug mode and tail-calls the
// InternalArrayConstructorStub.
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    // A smi in the initial-map slot means it is unset/unexpected.
    __ SmiTst(a2, a4);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
              a4, Operand(zero_reg));
    __ GetObjectType(a2, a3, a4);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
              a4, Operand(MAP_TYPE));
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // Tail call a stub.
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
110 
111 
// Entry point used when the Array function is invoked as a normal call;
// verifies the initial map in debug mode and tail-calls the
// ArrayConstructorStub.
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, a1);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
    // A smi in the initial-map slot means it is unset/unexpected.
    __ SmiTst(a2, a4);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
              a4, Operand(zero_reg));
    __ GetObjectType(a2, a3, a4);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
              a4, Operand(MAP_TYPE));
  }

  // Run the native code for the Array function called as a normal function.
  // Tail call a stub. The stub expects the new target in a3 (same as the
  // target for a normal call) and an undefined allocation site in a2.
  __ mov(a3, a1);
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}
141 
142 
// static
// Number(value) called without 'new': converts the first argument to a
// number (via ToNumberStub) or returns +0 when called with no arguments.
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------

  // 1. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Dsubu(a0, a0, Operand(1));  // In delay slot.
    __ dsll(a0, a0, kPointerSizeLog2);
    __ Daddu(sp, a0, sp);
    __ ld(a0, MemOperand(sp));
    // Drop the first argument and the receiver.
    __ Drop(2);
  }

  // 2a. Convert first argument to number.
  ToNumberStub stub(masm->isolate());
  __ TailCallStub(&stub);

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ Move(v0, Smi::FromInt(0));
  __ DropAndRet(1);
}
174 
175 
Generate_NumberConstructor_ConstructStub(MacroAssembler * masm)176 void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
177   // ----------- S t a t e -------------
178   //  -- a0                     : number of arguments
179   //  -- a1                     : constructor function
180   //  -- a3                     : new target
181   //  -- ra                     : return address
182   //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
183   //  -- sp[argc * 8]           : receiver
184   // -----------------------------------
185 
186   // 1. Make sure we operate in the context of the called function.
187   __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
188 
189   // 2. Load the first argument into a0 and get rid of the rest (including the
190   // receiver).
191   {
192     Label no_arguments, done;
193     __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
194     __ Dsubu(a0, a0, Operand(1));
195     __ dsll(a0, a0, kPointerSizeLog2);
196     __ Daddu(sp, a0, sp);
197     __ ld(a0, MemOperand(sp));
198     __ Drop(2);
199     __ jmp(&done);
200     __ bind(&no_arguments);
201     __ Move(a0, Smi::FromInt(0));
202     __ Drop(1);
203     __ bind(&done);
204   }
205 
206   // 3. Make sure a0 is a number.
207   {
208     Label done_convert;
209     __ JumpIfSmi(a0, &done_convert);
210     __ GetObjectType(a0, a2, a2);
211     __ Branch(&done_convert, eq, t0, Operand(HEAP_NUMBER_TYPE));
212     {
213       FrameScope scope(masm, StackFrame::INTERNAL);
214       __ Push(a1, a3);
215       ToNumberStub stub(masm->isolate());
216       __ CallStub(&stub);
217       __ Move(a0, v0);
218       __ Pop(a1, a3);
219     }
220     __ bind(&done_convert);
221   }
222 
223   // 4. Check if new target and constructor differ.
224   Label new_object;
225   __ Branch(&new_object, ne, a1, Operand(a3));
226 
227   // 5. Allocate a JSValue wrapper for the number.
228   __ AllocateJSValue(v0, a1, a0, a2, t0, &new_object);
229   __ Ret();
230 
231   // 6. Fallback to the runtime to create new object.
232   __ bind(&new_object);
233   {
234     FrameScope scope(masm, StackFrame::INTERNAL);
235     __ Push(a0, a1, a3);  // first argument, constructor, new target
236     __ CallRuntime(Runtime::kNewObject);
237     __ Pop(a0);
238   }
239   __ Ret(USE_DELAY_SLOT);
240   __ sd(a0, FieldMemOperand(v0, JSValue::kValueOffset));  // In delay slot.
241 }
242 
243 
// static
// String(value) called without 'new': returns the argument unchanged if it is
// already a string, the empty string for zero arguments, a descriptive string
// for symbols, and otherwise dispatches to ToString.
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------

  // 1. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  Label no_arguments;
  {
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Dsubu(a0, a0, Operand(1));  // In delay slot.
    __ dsll(a0, a0, kPointerSizeLog2);
    __ Daddu(sp, a0, sp);
    __ ld(a0, MemOperand(sp));
    // Drop the first argument and the receiver.
    __ Drop(2);
  }

  // 2a. At least one argument, return a0 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(a0, &to_string);
    __ GetObjectType(a0, a1, a1);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    // Instance types fit in 32 bits, so a 32-bit subtract suffices here.
    __ Subu(a1, a1, Operand(FIRST_NONSTRING_TYPE));
    // a1 == 0: symbol; a1 > 0: other non-string; a1 < 0: string.
    __ Branch(&symbol_descriptive_string, eq, a1, Operand(zero_reg));
    __ Branch(&to_string, gt, a1, Operand(zero_reg));
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a0);  // In delay slot: return the string unchanged.
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(v0, Heap::kempty_stringRootIndex);
    __ DropAndRet(1);
  }

  // 3a. Convert a0 to a string.
  __ bind(&to_string);
  {
    ToStringStub stub(masm->isolate());
    __ TailCallStub(&stub);
  }

  // 3b. Convert symbol in a0 to a string.
  __ bind(&symbol_descriptive_string);
  {
    __ Push(a0);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }
}
301 
302 
// new String(value): converts the first argument to a string if necessary
// and wraps it in a JSValue, falling back to %NewObject when the new target
// differs from the constructor or inline allocation fails.
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                     : number of arguments
  //  -- a1                     : constructor function
  //  -- a3                     : new target
  //  -- ra                     : return address
  //  -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
  //  -- sp[argc * 8]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));

  // 2. Load the first argument into a0 and get rid of the rest (including the
  // receiver).
  {
    Label no_arguments, done;
    __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
    __ Dsubu(a0, a0, Operand(1));  // In delay slot.
    __ dsll(a0, a0, kPointerSizeLog2);
    __ Daddu(sp, a0, sp);
    __ ld(a0, MemOperand(sp));
    __ Drop(2);
    __ jmp(&done);
    __ bind(&no_arguments);
    // No arguments: wrap the empty string.
    __ LoadRoot(a0, Heap::kempty_stringRootIndex);
    __ Drop(1);
    __ bind(&done);
  }

  // 3. Make sure a0 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(a0, &convert);
    __ GetObjectType(a0, a2, a2);
    // String instance types have the kIsNotStringMask bit clear.
    __ And(t0, a2, Operand(kIsNotStringMask));
    __ Branch(&done_convert, eq, t0, Operand(zero_reg));
    __ bind(&convert);
    {
      // Call ToString, preserving the constructor and new target across the
      // call.
      FrameScope scope(masm, StackFrame::INTERNAL);
      ToStringStub stub(masm->isolate());
      __ Push(a1, a3);
      __ CallStub(&stub);
      __ Move(a0, v0);
      __ Pop(a1, a3);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label new_object;
  __ Branch(&new_object, ne, a1, Operand(a3));

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(v0, a1, a0, a2, t0, &new_object);
  __ Ret();

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a0, a1, a3);  // first argument, constructor, new target
    __ CallRuntime(Runtime::kNewObject);
    __ Pop(a0);
  }
  __ Ret(USE_DELAY_SLOT);
  __ sd(a0, FieldMemOperand(v0, JSValue::kValueOffset));  // In delay slot.
}
371 
372 
// Call the runtime function |function_id| with the target function (a1) as
// its single argument, preserving a1 and the new target (a3) for the caller.
static void CallRuntimePassFunction(
    MacroAssembler* masm, Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- a1 : target function (preserved for callee)
  //  -- a3 : new target (preserved for callee)
  // -----------------------------------

  FrameScope scope(masm, StackFrame::INTERNAL);
  // Push a copy of the target function and the new target (to be restored
  // below), then the function again as the runtime call's argument.
  __ Push(a1, a3, a1);

  __ CallRuntime(function_id, 1);
  // Restore target function and new target.
  __ Pop(a1, a3);
}
389 
390 
// Tail-call the code attached to the SharedFunctionInfo of the function in
// a1. Clobbers a2 and at.
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ ld(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
  // Skip the Code object header to reach the first instruction.
  __ Daddu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}
397 
398 
// Tail-call the Code object that the preceding runtime call returned in v0.
// Clobbers at.
static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
  // Skip the Code object header to reach the first instruction.
  __ Daddu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}
403 
404 
// Entry point for a function whose optimized code is being compiled in the
// background: optionally tries to install the finished optimized code, else
// runs the unoptimized shared code.
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere.  However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive.  A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ LoadRoot(a4, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(a4));

  // Near the stack limit: try to install the optimized code and run it.
  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
  GenerateTailCallToReturnedCode(masm);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}
421 
422 
// Shared body of the [[Construct]] stubs: allocates the receiver (inline
// when possible, via the runtime otherwise), copies the arguments, invokes
// the constructor, and applies the ECMA rules for the construct result.
// |is_api_function| selects the API-call path; |create_implicit_receiver|
// is false for derived constructors, which receive the hole instead.
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                           bool is_api_function,
                                           bool create_implicit_receiver) {
  // ----------- S t a t e -------------
  //  -- a0     : number of arguments
  //  -- a1     : constructor function
  //  -- a2     : allocation site or undefined
  //  -- a3     : new target
  //  -- ra     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.
    __ AssertUndefinedOrAllocationSite(a2, t0);
    __ SmiTag(a0);
    __ Push(a2, a0);

    if (create_implicit_receiver) {
      // Try to allocate the object without transitioning into C code. If any of
      // the preconditions is not met, the code bails out to the runtime call.
      Label rt_call, allocated;
      if (FLAG_inline_new) {
        // Verify that the new target is a JSFunction.
        __ GetObjectType(a3, a5, a4);
        __ Branch(&rt_call, ne, a4, Operand(JS_FUNCTION_TYPE));

        // Load the initial map and verify that it is in fact a map.
        // a3: new target
        __ ld(a2,
              FieldMemOperand(a3, JSFunction::kPrototypeOrInitialMapOffset));
        __ JumpIfSmi(a2, &rt_call);
        __ GetObjectType(a2, t1, t0);
        __ Branch(&rt_call, ne, t0, Operand(MAP_TYPE));

        // Fall back to runtime if the expected base constructor and base
        // constructor differ.
        __ ld(a5, FieldMemOperand(a2, Map::kConstructorOrBackPointerOffset));
        __ Branch(&rt_call, ne, a1, Operand(a5));

        // Check that the constructor is not constructing a JSFunction (see
        // comments in Runtime_NewObject in runtime.cc). In which case the
        // initial map's instance type would be JS_FUNCTION_TYPE.
        // a1: constructor function
        // a2: initial map
        __ lbu(t1, FieldMemOperand(a2, Map::kInstanceTypeOffset));
        __ Branch(&rt_call, eq, t1, Operand(JS_FUNCTION_TYPE));

        // Now allocate the JSObject on the heap.
        // a1: constructor function
        // a2: initial map
        __ lbu(a4, FieldMemOperand(a2, Map::kInstanceSizeOffset));
        __ Allocate(a4, t0, a4, t2, &rt_call, SIZE_IN_WORDS);

        // Allocated the JSObject, now initialize the fields. Map is set to
        // initial map and properties and elements are set to empty fixed array.
        // a1: constructor function
        // a2: initial map
        // a3: new target
        // t0: JSObject (not HeapObject tagged - the actual address).
        // a4: start of next object
        __ LoadRoot(t2, Heap::kEmptyFixedArrayRootIndex);
        __ mov(t1, t0);
        STATIC_ASSERT(0 * kPointerSize == JSObject::kMapOffset);
        __ sd(a2, MemOperand(t1, JSObject::kMapOffset));
        STATIC_ASSERT(1 * kPointerSize == JSObject::kPropertiesOffset);
        __ sd(t2, MemOperand(t1, JSObject::kPropertiesOffset));
        STATIC_ASSERT(2 * kPointerSize == JSObject::kElementsOffset);
        __ sd(t2, MemOperand(t1, JSObject::kElementsOffset));
        STATIC_ASSERT(3 * kPointerSize == JSObject::kHeaderSize);
        __ Daddu(t1, t1, Operand(3 * kPointerSize));

        // Add the object tag to make the JSObject real, so that we can continue
        // and jump into the continuation code at any time from now on.
        __ Daddu(t0, t0, Operand(kHeapObjectTag));

        // Fill all the in-object properties with appropriate filler.
        // t0: JSObject (tagged)
        // t1: First in-object property of JSObject (not tagged)
        __ LoadRoot(t3, Heap::kUndefinedValueRootIndex);

        if (!is_api_function) {
          Label no_inobject_slack_tracking;

          MemOperand bit_field3 = FieldMemOperand(a2, Map::kBitField3Offset);
          // Check if slack tracking is enabled.
          __ lwu(t2, bit_field3);
          __ DecodeField<Map::ConstructionCounter>(a6, t2);
          // a6: slack tracking counter
          __ Branch(&no_inobject_slack_tracking, lt, a6,
                    Operand(Map::kSlackTrackingCounterEnd));
          // Decrease generous allocation count.
          __ Dsubu(t2, t2, Operand(1 << Map::ConstructionCounter::kShift));
          __ sw(t2, bit_field3);

          // Allocate object with a slack.
          __ lbu(a0, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
          __ dsll(a0, a0, kPointerSizeLog2);
          __ dsubu(a0, a4, a0);
          // a0: offset of first field after pre-allocated fields
          if (FLAG_debug_code) {
            __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields, t1,
                      Operand(a0));
          }
          // Initialize the pre-allocated fields with undefined.
          __ InitializeFieldsWithFiller(t1, a0, t3);

          // To allow truncation fill the remaining fields with one pointer
          // filler map.
          __ LoadRoot(t3, Heap::kOnePointerFillerMapRootIndex);
          __ InitializeFieldsWithFiller(t1, a4, t3);

          // a6: slack tracking counter value before decreasing.
          __ Branch(&allocated, ne, a6, Operand(Map::kSlackTrackingCounterEnd));

          // Push the constructor, new_target and the object to the stack,
          // and then the initial map as an argument to the runtime call.
          __ Push(a1, a3, t0, a2);
          __ CallRuntime(Runtime::kFinalizeInstanceSize);
          __ Pop(a1, a3, t0);

          // Continue with JSObject being successfully allocated.
          // a1: constructor function
          // a3: new target
          // t0: JSObject
          __ jmp(&allocated);

          __ bind(&no_inobject_slack_tracking);
        }

        // No slack tracking: fill every in-object property with undefined.
        __ InitializeFieldsWithFiller(t1, a4, t3);

        // Continue with JSObject being successfully allocated.
        // a1: constructor function
        // a3: new target
        // t0: JSObject
        __ jmp(&allocated);
      }

      // Allocate the new receiver object using the runtime call.
      // a1: constructor function
      // a3: new target
      __ bind(&rt_call);

      // Push the constructor and new_target twice, second pair as arguments
      // to the runtime call.
      __ Push(a1, a3, a1, a3);  // constructor function, new target
      __ CallRuntime(Runtime::kNewObject);
      __ mov(t0, v0);
      __ Pop(a1, a3);

      // Receiver for constructor call allocated.
      // a1: constructor function
      // a3: new target
      // t0: JSObject
      __ bind(&allocated);

      // Reload the smi-tagged argument count pushed on frame entry.
      __ ld(a0, MemOperand(sp));
    }
    __ SmiUntag(a0);

    if (create_implicit_receiver) {
      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(t0, t0);
    } else {
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    }

    // Set up pointer to last argument.
    __ Daddu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // a0: number of arguments
    // a1: constructor function
    // a2: address of last argument (caller sp)
    // a3: new target
    // t0: number of arguments (untagged copy, used as loop counter)
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, entry;
    __ mov(t0, a0);
    __ jmp(&entry);
    __ bind(&loop);
    __ dsll(a4, t0, kPointerSizeLog2);
    __ Daddu(a4, a2, Operand(a4));
    __ ld(a5, MemOperand(a4));
    __ push(a5);
    __ bind(&entry);
    __ Daddu(t0, t0, Operand(-1));
    __ Branch(&loop, greater_equal, t0, Operand(zero_reg));

    // Call the function.
    // a0: number of arguments
    // a1: constructor function
    // a3: new target
    if (is_api_function) {
      __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
      Handle<Code> code =
          masm->isolate()->builtins()->HandleApiCallConstruct();
      __ Call(code, RelocInfo::CODE_TARGET);
    } else {
      ParameterCount actual(a0);
      __ InvokeFunction(a1, a3, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    }

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // v0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ JumpIfSmi(v0, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ GetObjectType(v0, a1, a3);
      __ Branch(&exit, greater_equal, a3, Operand(FIRST_JS_RECEIVER_TYPE));

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ ld(v0, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // v0: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ ld(a1, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ ld(a1, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // Drop the arguments and the receiver from the caller's frame.
  // a1: smi-tagged number of arguments.
  __ SmiScale(a4, a1, kPointerSizeLog2);
  __ Daddu(sp, sp, a4);
  __ Daddu(sp, sp, kPointerSize);
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
  }
  __ Ret();
}
688 
689 
// Construct stub for ordinary JS constructors: allocates an implicit receiver.
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true);
}
693 
694 
// Construct stub for API functions: allocates an implicit receiver and
// dispatches through HandleApiCallConstruct.
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, true);
}
698 
699 
// Construct stub for derived/builtin constructors: no implicit receiver is
// allocated (the hole is pushed instead).
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false);
}
703 
704 
// Invoked when [[Construct]] is applied to a non-constructable target (a1):
// throws a TypeError via the runtime and does not return.
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameScope scope(masm, StackFrame::INTERNAL);
  __ Push(a1);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
710 
711 
712 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
713 
714 
// Clobbers a2 and a7; preserves all other registers.
// Emits a stack-overflow check: if pushing |argc| pointer-sized arguments
// would cross the real stack limit, calls Runtime::kThrowStackOverflow.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
  // Make a2 the space we have left. The stack might already be overflowed
  // here which will cause r2 to become negative.
  __ dsubu(a2, sp, a2);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    // NOTE(review): this scales v0, not the |argc| parameter, so the
    // smi-tagged path silently assumes the caller passes argc in v0 --
    // confirm against the call sites before relying on this helper.
    __ SmiScale(a7, v0, kPointerSizeLog2);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ dsll(a7, argc, kPointerSizeLog2);
  }
  __ Branch(&okay, gt, a2, Operand(a7));  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}
740 
741 
// Shared body of the JS entry trampolines: sets up the caller context,
// pushes the function and receiver, copies the C-side argv (an array of
// handles) onto the JS stack, clears callee-saved registers for the GC, and
// invokes either the Call or the Construct builtin.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from JSEntryStub::GenerateBody

  // ----------- S t a t e -------------
  //  -- a0: new.target
  //  -- a1: function
  //  -- a2: receiver_pointer
  //  -- a3: argc
  //  -- s0: argv
  // -----------------------------------
  ProfileEntryHookStub::MaybeCallEntryHook(masm);
  // Clear the context before we push it when entering the JS frame.
  __ mov(cp, zero_reg);

  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Setup the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ li(cp, Operand(context_address));
    __ ld(cp, MemOperand(cp));

    // Push the function and the receiver onto the stack.
    __ Push(a1, a2);

    // Check if we have enough stack space to push all arguments.
    // Clobbers a2.
    Generate_CheckStackOverflow(masm, a3, kArgcIsUntaggedInt);

    // Remember new.target.
    __ mov(a5, a0);

    // Copy arguments to the stack in a loop.
    // a3: argc
    // s0: argv, i.e. points to first arg
    Label loop, entry;
    __ dsll(a4, a3, kPointerSizeLog2);
    __ daddu(a6, s0, a4);
    __ b(&entry);
    __ nop();   // Branch delay slot nop.
    // a6 points past last arg.
    __ bind(&loop);
    __ ld(a4, MemOperand(s0));  // Read next parameter.
    __ daddiu(s0, s0, kPointerSize);
    __ ld(a4, MemOperand(a4));  // Dereference handle.
    __ push(a4);  // Push parameter.
    __ bind(&entry);
    __ Branch(&loop, ne, s0, Operand(a6));

    // Setup new.target and argc.
    __ mov(a0, a3);
    __ mov(a3, a5);

    // Initialize all JavaScript callee-saved registers, since they will be seen
    // by the garbage collector as part of handlers.
    __ LoadRoot(a4, Heap::kUndefinedValueRootIndex);
    __ mov(s1, a4);
    __ mov(s2, a4);
    __ mov(s3, a4);
    __ mov(s4, a4);
    __ mov(s5, a4);
    // s6 holds the root address. Do not clobber.
    // s7 is cp. Do not init.

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Leave internal frame.
  }
  __ Jump(ra);
}
819 
820 
// Entry trampoline for a plain JS call: delegates to the shared helper with
// is_construct == false.
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}
824 
825 
// Entry trampoline for a JS construct call: delegates to the shared helper
// with is_construct == true.
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}
829 
830 
831 // Generate code for entering a JS function with the interpreter.
832 // On entry to the function the receiver and arguments have been pushed on the
833 // stack left to right.  The actual argument count matches the formal parameter
834 // count expected by the function.
835 //
836 // The live registers are:
837 //   o a1: the JS function object being called.
838 //   o a3: the new target
839 //   o cp: our context
840 //   o fp: the caller's frame pointer
841 //   o sp: stack pointer
842 //   o ra: return address
843 //
844 // The function builds a JS frame. Please see JavaScriptFrameConstants in
845 // frames-mips.h for its layout.
846 // TODO(rmcilroy): We will need to include the current bytecode pointer in the
847 // frame.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);

  // Build the interpreter frame by hand: return address, caller fp, context,
  // the function being called, and the new target.
  __ Push(ra, fp, cp, a1);
  __ Daddu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
  __ Push(a3);

  // Push zero for bytecode array offset.
  __ Push(zero_reg);

  // Get the bytecode array from the function object and load the pointer to the
  // first entry into kInterpreterBytecodeRegister.
  __ ld(a0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ ld(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ SmiTst(kInterpreterBytecodeArrayRegister, a4);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a4,
              Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, a4, a4);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a4,
              Operand(BYTECODE_ARRAY_TYPE));
  }

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size (word) from the BytecodeArray object.
    __ lw(a4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                              BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ Dsubu(a5, sp, Operand(a4));
    __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
    __ Branch(&ok, hs, a5, Operand(a2));
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    // a4 counts down from the frame size (in bytes) one pointer at a time.
    Label loop_header;
    Label loop_check;
    __ LoadRoot(a5, Heap::kUndefinedValueRootIndex);
    __ Branch(&loop_check);
    __ bind(&loop_header);
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    __ push(a5);
    // Continue loop if not done.
    __ bind(&loop_check);
    __ Dsubu(a4, a4, Operand(kPointerSize));
    __ Branch(&loop_header, ge, a4, Operand(zero_reg));
  }

  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's prologue:
  //  - Support profiler (specifically profiling_counter).
  //  - Call ProfileEntryHookStub when isolate has a function_entry_hook.
  //  - Allow simulator stop operations if FLAG_stop_at is set.
  //  - Code aging of the BytecodeArray object.

  // Perform stack guard check.  The bytecode array register is caller-ish
  // state here, so preserve it across the runtime call.
  {
    Label ok;
    __ LoadRoot(at, Heap::kStackLimitRootIndex);
    __ Branch(&ok, hs, sp, Operand(at));
    __ push(kInterpreterBytecodeArrayRegister);
    __ CallRuntime(Runtime::kStackGuard);
    __ pop(kInterpreterBytecodeArrayRegister);
    __ bind(&ok);
  }

  // Load bytecode offset and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ Daddu(kInterpreterRegisterFileRegister, fp,
           Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ li(kInterpreterBytecodeOffsetRegister,
        Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
  __ LoadRoot(kInterpreterDispatchTableRegister,
              Heap::kInterpreterTableRootIndex);
  __ Daddu(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister,
           Operand(FixedArray::kHeaderSize - kHeapObjectTag));

  // Dispatch to the first bytecode handler for the function: read the first
  // bytecode, use it as a pointer-sized index into the dispatch table, and
  // call the handler's code entry.
  __ Daddu(a0, kInterpreterBytecodeArrayRegister,
           kInterpreterBytecodeOffsetRegister);
  __ lbu(a0, MemOperand(a0));
  __ dsll(at, a0, kPointerSizeLog2);
  __ Daddu(at, kInterpreterDispatchTableRegister, at);
  __ ld(at, MemOperand(at));
  // TODO(rmcilroy): Make dispatch table point to code entrys to avoid untagging
  // and header removal.
  __ Daddu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Call(at);
}
946 
947 
void Builtins::Generate_InterpreterExitTrampoline(MacroAssembler* masm) {
  // TODO(rmcilroy): List of things not currently dealt with here but done in
  // fullcodegen's EmitReturnSequence.
  //  - Supporting FLAG_trace for Runtime::TraceExit.
  //  - Support profiler (specifically decrementing profiling_counter
  //    appropriately and calling out to HandleInterrupts if necessary).

  // The return value is in accumulator, which is already in v0.

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  // Drop receiver + arguments and return.  The parameter size is a 32-bit
  // field and is added to sp unscaled, i.e. it is a byte count.
  __ lw(at, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                            BytecodeArray::kParameterSizeOffset));
  __ Daddu(sp, sp, at);
  __ Jump(ra);
}
966 
967 
968 // static
void Builtins::Generate_InterpreterPushArgsAndCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a2 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- a1 : the target to call (can be any Object).
  // -----------------------------------

  // Find the address of the last argument: a3 = a2 - (argc + 1) * pointer
  // size, i.e. one slot past the final value to copy (receiver included).
  __ Daddu(a3, a0, Operand(1));  // Add one for receiver.
  __ dsll(a3, a3, kPointerSizeLog2);
  __ Dsubu(a3, a2, Operand(a3));

  // Push the arguments: walk a2 downwards towards the end address a3,
  // pushing one slot per iteration.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ ld(t0, MemOperand(a2));
  __ Daddu(a2, a2, Operand(-kPointerSize));
  __ push(t0);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, a2, Operand(a3));

  // Call the target.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
996 
997 
998 // static
void Builtins::Generate_InterpreterPushArgsAndConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- a0 : argument count (not including receiver)
  // -- a3 : new target
  // -- a1 : constructor to call
  // -- a2 : address of the first argument
  // -----------------------------------

  // Find the address of the last argument: t0 = a2 - argc * pointer size
  // (no receiver slot here; it is pushed explicitly below).
  __ dsll(t0, a0, kPointerSizeLog2);
  __ Dsubu(t0, a2, Operand(t0));

  // Push a slot for the receiver.
  __ push(zero_reg);

  // Push the arguments: walk a2 downwards towards the end address t0,
  // pushing one slot per iteration.
  Label loop_header, loop_check;
  __ Branch(&loop_check);
  __ bind(&loop_header);
  __ ld(t1, MemOperand(a2));
  __ Daddu(a2, a2, Operand(-kPointerSize));
  __ push(t1);
  __ bind(&loop_check);
  __ Branch(&loop_header, gt, a2, Operand(t0));

  // Call the constructor with a0, a1, and a3 unmodified.
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
1027 
1028 
// Notifies the runtime of a deopt while in interpreted code, then re-enters
// the interpreter by re-dispatching to the bytecode recorded in the frame.
static void Generate_InterpreterNotifyDeoptimizedHelper(
    MacroAssembler* masm, Deoptimizer::BailoutType type) {
  // Enter an internal frame.
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(kInterpreterAccumulatorRegister);  // Save accumulator register.

    // Pass the deoptimization type to the runtime system.
    __ li(a1, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(a1);
    __ CallRuntime(Runtime::kNotifyDeoptimized);

    __ pop(kInterpreterAccumulatorRegister);  // Restore accumulator register.
    // Tear down internal frame.
  }

  // Drop state (we don't use this for interpreter deopts).
  __ Drop(1);

  // Initialize register file register and dispatch table register.
  __ Daddu(kInterpreterRegisterFileRegister, fp,
           Operand(InterpreterFrameConstants::kRegisterFilePointerFromFp));
  __ LoadRoot(kInterpreterDispatchTableRegister,
              Heap::kInterpreterTableRootIndex);
  __ Daddu(kInterpreterDispatchTableRegister, kInterpreterDispatchTableRegister,
           Operand(FixedArray::kHeaderSize - kHeapObjectTag));

  // Get the context from the frame.
  // TODO(rmcilroy): Update interpreter frame to expect current context at the
  // context slot instead of the function context.
  __ ld(kContextRegister,
        MemOperand(kInterpreterRegisterFileRegister,
                   InterpreterFrameConstants::kContextFromRegisterPointer));

  // Get the bytecode array pointer from the frame, via the function's
  // SharedFunctionInfo.
  __ ld(a1,
        MemOperand(kInterpreterRegisterFileRegister,
                   InterpreterFrameConstants::kFunctionFromRegisterPointer));
  __ ld(a1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ ld(kInterpreterBytecodeArrayRegister,
        FieldMemOperand(a1, SharedFunctionInfo::kFunctionDataOffset));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ SmiTst(kInterpreterBytecodeArrayRegister, at);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at,
              Operand(zero_reg));
    __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1,
              Operand(BYTECODE_ARRAY_TYPE));
  }

  // Get the target bytecode offset from the frame (stored as a Smi).
  __ ld(kInterpreterBytecodeOffsetRegister,
        MemOperand(
            kInterpreterRegisterFileRegister,
            InterpreterFrameConstants::kBytecodeOffsetFromRegisterPointer));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode: load the bytecode at the recorded
  // offset, index into the dispatch table and jump to the handler's entry.
  __ Daddu(a1, kInterpreterBytecodeArrayRegister,
           kInterpreterBytecodeOffsetRegister);
  __ lbu(a1, MemOperand(a1));
  __ dsll(a1, a1, kPointerSizeLog2);
  __ Daddu(a1, kInterpreterDispatchTableRegister, a1);
  __ ld(a1, MemOperand(a1));
  __ Daddu(a1, a1, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(a1);
}
1098 
1099 
// Eager-deopt notification entry point for interpreted code.
void Builtins::Generate_InterpreterNotifyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
1103 
1104 
// Soft-deopt notification entry point for interpreted code.
void Builtins::Generate_InterpreterNotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
1108 
1109 
// Lazy-deopt notification entry point for interpreted code.
void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
1113 
1114 
// Compiles the function via Runtime::kCompileLazy, then tail-calls the code
// object the runtime returned.
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}
1119 
1120 
// Triggers non-concurrent optimized compilation, then tail-calls the code
// object the runtime returned.
void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileOptimized_NotConcurrent);
  GenerateTailCallToReturnedCode(masm);
}
1125 
1126 
// Triggers concurrent optimized compilation, then tail-calls the code object
// the runtime returned.
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileOptimized_Concurrent);
  GenerateTailCallToReturnedCode(masm);
}
1131 
1132 
// Shared body of the code-age builtins: calls out to the C function that
// rejuvenates the calling code object, then resumes at the patch sequence.
static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
  // For now, we are relying on the fact that make_code_young doesn't do any
  // garbage collection which allows us to save/restore the registers without
  // worrying about which of them contain pointers. We also don't build an
  // internal frame to make the code faster, since we shouldn't have to do stack
  // crawls in MakeCodeYoung. This seems a bit fragile.

  // Set a0 to point to the head of the PlatformCodeAge sequence.
  __ Dsubu(a0, a0,
      Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));

  // The following registers must be saved and restored when calling through to
  // the runtime:
  //   a0 - contains return address (beginning of patch sequence)
  //   a1 - isolate
  //   a3 - new target
  RegList saved_regs =
      (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
  FrameScope scope(masm, StackFrame::MANUAL);
  __ MultiPush(saved_regs);
  __ PrepareCallCFunction(2, 0, a2);
  __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::get_make_code_young_function(masm->isolate()), 2);
  __ MultiPop(saved_regs);
  // Return into the (now young) code at the patch sequence head.
  __ Jump(a0);
}
1160 
// Stamp out one builtin per code age (even and odd marking variants); each
// simply delegates to GenerateMakeCodeYoungAgainCommon above.
#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}                                                            \
void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
    MacroAssembler* masm) {                                  \
  GenerateMakeCodeYoungAgainCommon(masm);                    \
}
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1172 
1173 
1174 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
1175   // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
1176   // that make_code_young doesn't do any garbage collection which allows us to
1177   // save/restore the registers without worrying about which of them contain
1178   // pointers.
1179 
1180   // Set a0 to point to the head of the PlatformCodeAge sequence.
1181   __ Dsubu(a0, a0,
1182       Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
1183 
1184   // The following registers must be saved and restored when calling through to
1185   // the runtime:
1186   //   a0 - contains return address (beginning of patch sequence)
1187   //   a1 - isolate
1188   //   a3 - new target
1189   RegList saved_regs =
1190       (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
1191   FrameScope scope(masm, StackFrame::MANUAL);
1192   __ MultiPush(saved_regs);
1193   __ PrepareCallCFunction(2, 0, a2);
1194   __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
1195   __ CallCFunction(
1196       ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
1197       2);
1198   __ MultiPop(saved_regs);
1199 
1200   // Perform prologue operations usually performed by the young code stub.
1201   __ Push(ra, fp, cp, a1);
1202   __ Daddu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
1203 
1204   // Jump to point after the code-age stub.
1205   __ Daddu(a0, a0, Operand((kNoCodeAgeSequenceLength)));
1206   __ Jump(a0);
1207 }
1208 
1209 
// Marking as executed twice makes the code young again; reuse the common
// rejuvenation path.
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
  GenerateMakeCodeYoungAgainCommon(masm);
}
1213 
1214 
// "To be executed once" shares the same sequence as "executed once".
void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
  Generate_MarkCodeAsExecutedOnce(masm);
}
1218 
1219 
// Notifies the runtime of a stub failure while preserving all JS registers,
// then drops the state slot and returns to the miss handler.
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                             SaveFPRegsMode save_doubles) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Preserve registers across notification, this is important for compiled
    // stubs that tail call the runtime on deopts passing their parameters in
    // registers.
    __ MultiPush(kJSCallerSaved | kCalleeSaved);
    // Pass the function and deoptimization type to the runtime system.
    __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
    __ MultiPop(kJSCallerSaved | kCalleeSaved);
  }

  __ Daddu(sp, sp, Operand(kPointerSize));  // Ignore state
  __ Jump(ra);  // Jump to miss handler
}
1237 
1238 
// Stub-failure notification without saving FP registers.
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}
1242 
1243 
// Stub-failure notification that also saves FP registers.
void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
  Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
1247 
1248 
// Notifies the runtime of a deopt in full-codegen code, then returns after
// removing the saved state (and restoring the TOS register if present).
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                             Deoptimizer::BailoutType type) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass the function and deoptimization type to the runtime system.
    __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
    __ push(a0);
    __ CallRuntime(Runtime::kNotifyDeoptimized);
  }

  // Get the full codegen state from the stack and untag it -> a6.
  __ ld(a6, MemOperand(sp, 0 * kPointerSize));
  __ SmiUntag(a6);
  // Switch on the state.
  Label with_tos_register, unknown_state;
  __ Branch(&with_tos_register,
            ne, a6, Operand(FullCodeGenerator::NO_REGISTERS));
  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot Addu will emit one instruction.
  __ Daddu(sp, sp, Operand(1 * kPointerSize));  // Remove state.

  __ bind(&with_tos_register);
  // Restore the top-of-stack value into v0 before dropping both slots.
  __ ld(v0, MemOperand(sp, 1 * kPointerSize));
  __ Branch(&unknown_state, ne, a6, Operand(FullCodeGenerator::TOS_REG));

  __ Ret(USE_DELAY_SLOT);
  // Safe to fill delay slot Addu will emit one instruction.
  __ Daddu(sp, sp, Operand(2 * kPointerSize));  // Remove state.

  __ bind(&unknown_state);
  __ stop("no cases left");
}
1281 
1282 
// Eager-deopt notification entry point for full-codegen code.
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
1286 
1287 
// Soft-deopt notification entry point for full-codegen code.
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
1291 
1292 
// Lazy-deopt notification entry point for full-codegen code.
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
1296 
1297 
1298 // Clobbers {t2, t3, a4, a5}.
// Walks the receiver's (hidden-)prototype chain looking for a constructor
// whose FunctionTemplateInfo chain contains the given signature; falls
// through on success, branches to receiver_check_failed otherwise.
static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                    Register function_template_info,
                                    Label* receiver_check_failed) {
  Register signature = t2;
  Register map = t3;
  Register constructor = a4;
  Register scratch = a5;

  // If there is no signature, return the holder.
  __ ld(signature, FieldMemOperand(function_template_info,
                                   FunctionTemplateInfo::kSignatureOffset));
  Label receiver_check_passed;
  __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
                &receiver_check_passed);

  // Walk the prototype chain.
  __ ld(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  Label prototype_loop_start;
  __ bind(&prototype_loop_start);

  // Get the constructor, if any.  If it is not a JSFunction there is no
  // template chain to inspect; move on to the next prototype.
  __ GetMapConstructor(constructor, map, scratch, scratch);
  Label next_prototype;
  __ Branch(&next_prototype, ne, scratch, Operand(JS_FUNCTION_TYPE));
  Register type = constructor;
  __ ld(type,
        FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
  __ ld(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));

  // Loop through the chain of inheriting function templates.
  Label function_template_loop;
  __ bind(&function_template_loop);

  // If the signatures match, we have a compatible receiver.
  // NOTE(review): USE_DELAY_SLOT fills the branch delay slot with the first
  // instruction emitted by the following JumpIfSmi — confirm that instruction
  // is safe to execute unconditionally.
  __ Branch(&receiver_check_passed, eq, signature, Operand(type),
            USE_DELAY_SLOT);

  // If the current type is not a FunctionTemplateInfo, load the next prototype
  // in the chain.
  __ JumpIfSmi(type, &next_prototype);
  __ GetObjectType(type, scratch, scratch);
  __ Branch(&next_prototype, ne, scratch, Operand(FUNCTION_TEMPLATE_INFO_TYPE));

  // Otherwise load the parent function template and iterate.
  __ ld(type,
        FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
  __ Branch(&function_template_loop);

  // Load the next prototype.
  __ bind(&next_prototype);
  __ ld(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
  // End if the prototype is null or not hidden.
  __ JumpIfRoot(receiver, Heap::kNullValueRootIndex, receiver_check_failed);
  __ ld(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lwu(scratch, FieldMemOperand(map, Map::kBitField3Offset));
  __ DecodeField<Map::IsHiddenPrototype>(scratch);
  __ Branch(receiver_check_failed, eq, scratch, Operand(zero_reg));
  // Iterate.
  __ Branch(&prototype_loop_start);

  __ bind(&receiver_check_passed);
}
1361 
1362 
void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0                 : number of arguments excluding receiver
  //  -- a1                 : callee
  //  -- ra                 : return address
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[8 * (argc - 1)] : first argument
  //  -- sp[8 * argc]       : receiver
  // -----------------------------------

  // Load the FunctionTemplateInfo.
  __ ld(t1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ ld(t1, FieldMemOperand(t1, SharedFunctionInfo::kFunctionDataOffset));

  // Do the compatible receiver check.  t8 ends up pointing at the receiver
  // slot (sp + argc * kPointerSize) and is reused below to drop arguments.
  Label receiver_check_failed;
  __ sll(at, a0, kPointerSizeLog2);
  __ Daddu(t8, sp, at);
  __ ld(t0, MemOperand(t8));
  CompatibleReceiverCheck(masm, t0, t1, &receiver_check_failed);

  // Get the callback offset from the FunctionTemplateInfo, and jump to the
  // beginning of the code.
  __ ld(t2, FieldMemOperand(t1, FunctionTemplateInfo::kCallCodeOffset));
  __ ld(t2, FieldMemOperand(t2, CallHandlerInfo::kFastHandlerOffset));
  __ Daddu(t2, t2, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(t2);

  // Compatible receiver check failed: throw an Illegal Invocation exception.
  __ bind(&receiver_check_failed);
  // Drop the arguments (including the receiver);
  __ Daddu(t8, t8, Operand(kPointerSize));
  __ daddu(sp, t8, zero_reg);
  __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}
1399 
1400 
// Requests OSR-compiled code from the runtime and, if available, "returns"
// into it at the OSR entry point recorded in the deoptimization data.
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
  // Lookup the function in the JavaScript frame.
  __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    // Pass function as argument.
    __ push(a0);
    __ CallRuntime(Runtime::kCompileForOnStackReplacement);
  }

  // If the code object is null, just return to the unoptimized code.
  __ Ret(eq, v0, Operand(Smi::FromInt(0)));

  // Load deoptimization data from the code object.
  // <deopt_data> = <code>[#deoptimization_data_offset]
  __ ld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));

  // Load the OSR entrypoint offset from the deoptimization data.
  // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
  __ ld(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
  __ SmiUntag(a1);

  // Compute the target address = code_obj + header_size + osr_offset
  // <entry_addr> = <code_obj> + #header_size + <osr_offset>
  __ daddu(v0, v0, a1);
  __ daddiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);

  // And "return" to the OSR entry point of the function.
  __ Ret();
}
1432 
1433 
// Runs a stack guard check first; only if an interrupt is pending does it
// call the runtime and then proceed to on-stack replacement.
void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
  // We check the stack limit as indicator that recompilation might be done.
  Label ok;
  __ LoadRoot(at, Heap::kStackLimitRootIndex);
  __ Branch(&ok, hs, sp, Operand(at));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kStackGuard);
  }
  __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
          RelocInfo::CODE_TARGET);

  __ bind(&ok);
  __ Ret();
}
1449 
1450 
1451 // static
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                               int field_index) {
  // ----------- S t a t e -------------
  //  -- sp[0] : receiver
  // -----------------------------------

  // 1. Pop receiver into a0 and check that it's actually a JSDate object.
  Label receiver_not_date;
  {
    __ Pop(a0);
    __ JumpIfSmi(a0, &receiver_not_date);
    __ GetObjectType(a0, t0, t0);
    __ Branch(&receiver_not_date, ne, t0, Operand(JS_DATE_TYPE));
  }

  // 2. Load the specified date field, falling back to the runtime as necessary.
  if (field_index == JSDate::kDateValue) {
    __ Ret(USE_DELAY_SLOT);
    __ ld(v0, FieldMemOperand(a0, JSDate::kValueOffset));  // In delay slot.
  } else {
    if (field_index < JSDate::kFirstUncachedField) {
      // The cached field is only valid while the date cache stamp matches;
      // otherwise fall through to the C function below.
      Label stamp_mismatch;
      __ li(a1, Operand(ExternalReference::date_cache_stamp(masm->isolate())));
      __ ld(a1, MemOperand(a1));
      __ ld(t0, FieldMemOperand(a0, JSDate::kCacheStampOffset));
      __ Branch(&stamp_mismatch, ne, t0, Operand(a1));
      __ Ret(USE_DELAY_SLOT);
      __ ld(v0, FieldMemOperand(
                    a0, JSDate::kValueOffset +
                            field_index * kPointerSize));  // In delay slot.
      __ bind(&stamp_mismatch);
    }
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ PrepareCallCFunction(2, t0);
    __ li(a1, Operand(Smi::FromInt(field_index)));
    __ CallCFunction(
        ExternalReference::get_date_field_function(masm->isolate()), 2);
  }
  __ Ret();

  // 3. Raise a TypeError if the receiver is not a date.
  __ bind(&receiver_not_date);
  __ TailCallRuntime(Runtime::kThrowNotDateError);
}
1496 
1497 
1498 // static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0    : argc
  //  -- sp[0] : argArray
  //  -- sp[4] : thisArg
  //  -- sp[8] : receiver
  // -----------------------------------

  // 1. Load receiver into a1, argArray into a0 (if present), remove all
  // arguments from the stack (including the receiver), and push thisArg (if
  // present) instead.  a2/a3 start out as undefined so missing thisArg /
  // argArray default correctly when we jump to no_arg early.
  {
    Label no_arg;
    Register scratch = a4;
    __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
    __ mov(a3, a2);
    __ dsll(scratch, a0, kPointerSizeLog2);
    __ Daddu(a0, sp, Operand(scratch));
    __ ld(a1, MemOperand(a0));  // receiver
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ ld(a2, MemOperand(a0));  // thisArg
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));
    __ ld(a3, MemOperand(a0));  // argArray
    __ bind(&no_arg);
    __ Daddu(sp, sp, Operand(scratch));
    __ sd(a2, MemOperand(sp));
    __ mov(a0, a3);
  }

  // ----------- S t a t e -------------
  //  -- a0    : argArray
  //  -- a1    : receiver
  //  -- sp[0] : thisArg
  // -----------------------------------

  // 2. Make sure the receiver is actually callable (map bit field check).
  Label receiver_not_callable;
  __ JumpIfSmi(a1, &receiver_not_callable);
  __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
  __ And(a4, a4, Operand(1 << Map::kIsCallable));
  __ Branch(&receiver_not_callable, eq, a4, Operand(zero_reg));

  // 3. Tail call with no arguments if argArray is null or undefined.
  Label no_arguments;
  __ JumpIfRoot(a0, Heap::kNullValueRootIndex, &no_arguments);
  __ JumpIfRoot(a0, Heap::kUndefinedValueRootIndex, &no_arguments);

  // 4a. Apply the receiver to the given argArray (passing undefined for
  // new.target).
  __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The argArray is either null or undefined, so we tail call without any
  // arguments to the receiver.
  __ bind(&no_arguments);
  {
    __ mov(a0, zero_reg);
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
  }

  // 4c. The receiver is not callable, throw an appropriate TypeError.
  __ bind(&receiver_not_callable);
  {
    __ sd(a1, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1569 
1570 
1571 // static
// Builtin for Function.prototype.call: ensures at least one stack slot,
// extracts the function from the receiver slot, shifts the remaining
// arguments down one slot (so the first argument becomes the receiver),
// and tail-calls the generic Call builtin.
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
  // 1. Make sure we have at least one argument.
  // a0: actual number of arguments
  {
    Label done;
    __ Branch(&done, ne, a0, Operand(zero_reg));
    // No arguments: push undefined as a placeholder so the shift loop
    // below always has something to consume.
    __ PushRoot(Heap::kUndefinedValueRootIndex);
    __ Daddu(a0, a0, Operand(1));
    __ bind(&done);
  }

  // 2. Get the function to call (passed as receiver) from the stack.
  // a0: actual number of arguments
  __ dsll(at, a0, kPointerSizeLog2);
  __ daddu(at, sp, at);
  __ ld(a1, MemOperand(at));

  // 3. Shift arguments and return address one slot down on the stack
  //    (overwriting the original receiver).  Adjust argument count to make
  //    the original first argument the new receiver.
  // a0: actual number of arguments
  // a1: function
  {
    Label loop;
    // Calculate the copy start address (destination). Copy end address is sp.
    __ dsll(at, a0, kPointerSizeLog2);
    __ daddu(a2, sp, at);

    // Copy each slot from one position below (a2 - 8) up into a2, walking
    // a2 down until it reaches sp.
    __ bind(&loop);
    __ ld(at, MemOperand(a2, -kPointerSize));
    __ sd(at, MemOperand(a2));
    __ Dsubu(a2, a2, Operand(kPointerSize));
    __ Branch(&loop, ne, a2, Operand(sp));
    // Adjust the actual number of arguments and remove the top element
    // (which is a copy of the last argument).
    __ Dsubu(a0, a0, Operand(1));
    __ Pop();
  }

  // 4. Call the callable.
  __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
1614 
1615 
// Builtin for Reflect.apply(target, thisArgument, argumentsList): collapses
// the argument area down to just thisArgument, checks that target is
// callable, and tail-calls the Apply builtin with undefined new.target.
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : argc
  //  -- sp[0]  : argumentsList
  //  -- sp[8]  : thisArgument
  //  -- sp[16] : target
  //  -- sp[24] : receiver
  // -----------------------------------

  // 1. Load target into a1 (if present), argumentsList into a0 (if present),
  // remove all arguments from the stack (including the receiver), and push
  // thisArgument (if present) instead.
  {
    Label no_arg;
    Register scratch = a4;
    // Default target (a1), thisArgument (a2) and argumentsList (a3) to
    // undefined; each is overwritten only if actually passed.
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    __ mov(a2, a1);
    __ mov(a3, a1);
    // scratch = argc * kPointerSize, preserved for the final stack drop.
    __ dsll(scratch, a0, kPointerSizeLog2);
    __ mov(a0, scratch);
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(zero_reg));  // fewer than 1 argument
    __ Daddu(a0, sp, Operand(a0));
    __ ld(a1, MemOperand(a0));  // target
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));  // fewer than 2 arguments
    __ ld(a2, MemOperand(a0));  // thisArgument
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));  // fewer than 3 arguments
    __ ld(a3, MemOperand(a0));  // argumentsList
    __ bind(&no_arg);
    // Drop all arguments and the receiver, then store thisArgument in the
    // remaining (receiver) slot.
    __ Daddu(sp, sp, Operand(scratch));
    __ sd(a2, MemOperand(sp));
    __ mov(a0, a3);
  }

  // ----------- S t a t e -------------
  //  -- a0    : argumentsList
  //  -- a1    : target
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // 2. Make sure the target is actually callable.
  Label target_not_callable;
  __ JumpIfSmi(a1, &target_not_callable);
  __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
  __ And(a4, a4, Operand(1 << Map::kIsCallable));
  __ Branch(&target_not_callable, eq, a4, Operand(zero_reg));

  // 3a. Apply the target to the given argumentsList (passing undefined for
  // new.target).
  __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 3b. The target is not callable, throw an appropriate TypeError.
  __ bind(&target_not_callable);
  {
    __ sd(a1, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
  }
}
1678 
1679 
// Builtin for Reflect.construct(target, argumentsList [, newTarget]):
// unpacks the arguments, verifies that both target and new.target are
// constructors, and tail-calls the Apply builtin (which dispatches to
// Construct because new.target is not undefined).
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0     : argc
  //  -- sp[0]  : new.target (optional)
  //  -- sp[8]  : argumentsList
  //  -- sp[16] : target
  //  -- sp[24] : receiver
  // -----------------------------------

  // 1. Load target into a1 (if present), argumentsList into a0 (if present),
  // new.target into a3 (if present, otherwise use target), remove all
  // arguments from the stack (including the receiver), and push thisArgument
  // (if present) instead.
  {
    Label no_arg;
    Register scratch = a4;
    // Default target (a1) and argumentsList (a2) to undefined.
    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    __ mov(a2, a1);
    // scratch = argc * kPointerSize; a0 walks down from the receiver slot.
    __ dsll(scratch, a0, kPointerSizeLog2);
    __ Daddu(a0, sp, Operand(scratch));
    // Overwrite the receiver slot with undefined now; after the stack drop
    // below this slot becomes sp[0].
    __ sd(a2, MemOperand(a0));  // receiver
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));  // fewer than 1 argument
    __ ld(a1, MemOperand(a0));  // target
    __ mov(a3, a1);             // new.target defaults to target
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));  // fewer than 2 arguments
    __ ld(a2, MemOperand(a0));  // argumentsList
    __ Dsubu(a0, a0, Operand(kPointerSize));
    __ Branch(&no_arg, lt, a0, Operand(sp));  // fewer than 3 arguments
    __ ld(a3, MemOperand(a0));  // new.target
    __ bind(&no_arg);
    __ Daddu(sp, sp, Operand(scratch));
    __ mov(a0, a2);
  }

  // ----------- S t a t e -------------
  //  -- a0    : argumentsList
  //  -- a3    : new.target
  //  -- a1    : target
  //  -- sp[0] : receiver (undefined)
  // -----------------------------------

  // 2. Make sure the target is actually a constructor.
  Label target_not_constructor;
  __ JumpIfSmi(a1, &target_not_constructor);
  __ ld(a4, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
  __ And(a4, a4, Operand(1 << Map::kIsConstructor));
  __ Branch(&target_not_constructor, eq, a4, Operand(zero_reg));

  // 3. Make sure the new.target is actually a constructor.
  Label new_target_not_constructor;
  __ JumpIfSmi(a3, &new_target_not_constructor);
  __ ld(a4, FieldMemOperand(a3, HeapObject::kMapOffset));
  __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset));
  __ And(a4, a4, Operand(1 << Map::kIsConstructor));
  __ Branch(&new_target_not_constructor, eq, a4, Operand(zero_reg));

  // 4a. Construct the target with the given new.target and argumentsList.
  __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);

  // 4b. The target is not a constructor, throw an appropriate TypeError.
  __ bind(&target_not_constructor);
  {
    __ sd(a1, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }

  // 4c. The new.target is not a constructor, throw an appropriate TypeError.
  __ bind(&new_target_not_constructor);
  {
    __ sd(a3, MemOperand(sp));
    __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
1756 
1757 
// Emits a stack-limit check for the arguments adaptor: branches to
// |stack_overflow| if there is not enough room below sp for the expected
// number of arguments (a2). Clobbers a5 and at.
static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                      Label* stack_overflow) {
  // ----------- S t a t e -------------
  //  -- a0 : actual number of arguments
  //  -- a1 : function (passed through to callee)
  //  -- a2 : expected number of arguments
  //  -- a3 : new target (passed through to callee)
  // -----------------------------------
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(a5, Heap::kRealStackLimitRootIndex);
  // Make a5 the space we have left. The stack might already be overflowed
  // here which will cause a5 to become negative.
  __ dsubu(a5, sp, a5);
  // Check if the arguments will overflow the stack.
  __ dsll(at, a2, kPointerSizeLog2);
  // Signed comparison.
  __ Branch(stack_overflow, le, a5, Operand(at));
}
1778 
1779 
// Builds an ARGUMENTS_ADAPTOR stack frame: pushes ra, fp, the frame-type
// marker, the function (a1) and the smi-tagged argument count (a0), then
// points fp at the fixed part of the new frame.
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
  // Smi-tag the argument count; on 64-bit targets the smi payload lives in
  // the upper 32 bits, hence the shift by 32 (mips32 used kSmiTagSize):
  // __ sll(a0, a0, kSmiTagSize);
  __ dsll32(a0, a0, 0);
  __ li(a4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ MultiPush(a0.bit() | a1.bit() | a4.bit() | fp.bit() | ra.bit());
  __ Daddu(fp, sp,
      Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
}
1788 
1789 
// Tears down an ARGUMENTS_ADAPTOR frame built by EnterArgumentsAdaptorFrame
// and removes the adapted arguments (plus receiver) from the stack,
// preserving the result value in v0.
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- v0 : result being passed through
  // -----------------------------------
  // Get the number of arguments passed (as a smi), tear down the frame and
  // then tear down the parameters.
  __ ld(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
                             kPointerSize)));
  __ mov(sp, fp);
  __ MultiPop(fp.bit() | ra.bit());
  // a1 holds a smi; SmiScale converts it to a byte count of argument slots.
  __ SmiScale(a4, a1, kPointerSizeLog2);
  __ Daddu(sp, sp, a4);
  // Adjust for the receiver.
  __ Daddu(sp, sp, Operand(kPointerSize));
}
1805 
1806 
1807 // static
// The Apply builtin: flattens an array-like argumentsList into a FixedArray,
// checks stack space, pushes the elements as real arguments, and dispatches
// to Call or Construct depending on whether new.target is undefined.
void Builtins::Generate_Apply(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0    : argumentsList
  //  -- a1    : target
  //  -- a3    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Create the list of arguments from the array-like argumentsList.
  {
    Label create_arguments, create_array, create_runtime, done_create;
    __ JumpIfSmi(a0, &create_runtime);

    // Load the map of argumentsList into a2.
    __ ld(a2, FieldMemOperand(a0, HeapObject::kMapOffset));

    // Load native context into a4.
    __ ld(a4, NativeContextMemOperand());

    // Check if argumentsList is an (unmodified) arguments object.
    __ ld(at, ContextMemOperand(a4, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
    __ Branch(&create_arguments, eq, a2, Operand(at));
    __ ld(at, ContextMemOperand(a4, Context::STRICT_ARGUMENTS_MAP_INDEX));
    __ Branch(&create_arguments, eq, a2, Operand(at));

    // Check if argumentsList is a fast JSArray.
    __ ld(v0, FieldMemOperand(a2, HeapObject::kMapOffset));
    __ lbu(v0, FieldMemOperand(v0, Map::kInstanceTypeOffset));
    __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE));

    // Ask the runtime to create the list (actually a FixedArray).
    __ bind(&create_runtime);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      // Save target and new.target across the runtime call; a0 (the
      // argumentsList) is consumed as the runtime argument.
      __ Push(a1, a3, a0);
      __ CallRuntime(Runtime::kCreateListFromArrayLike);
      __ mov(a0, v0);
      __ Pop(a1, a3);
      __ ld(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
      __ SmiUntag(a2);
    }
    __ Branch(&done_create);

    // Try to create the list from an arguments object.
    __ bind(&create_arguments);
    __ ld(a2,
          FieldMemOperand(a0, JSObject::kHeaderSize +
                                  Heap::kArgumentsLengthIndex * kPointerSize));
    __ ld(a4, FieldMemOperand(a0, JSObject::kElementsOffset));
    __ ld(at, FieldMemOperand(a4, FixedArray::kLengthOffset));
    // If the arguments length does not match the backing store length the
    // object was modified; fall back to the runtime.
    __ Branch(&create_runtime, ne, a2, Operand(at));
    __ SmiUntag(a2);
    __ mov(a0, a4);
    __ Branch(&done_create);

    // Try to create the list from a JSArray object.
    __ bind(&create_array);
    __ ld(a2, FieldMemOperand(a2, Map::kBitField2Offset));
    __ DecodeField<Map::ElementsKindBits>(a2);
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    // Only FAST_SMI_ELEMENTS and FAST_ELEMENTS arrays are handled inline;
    // anything above FAST_ELEMENTS or the holey-smi kind goes to the runtime.
    __ Branch(&create_runtime, hi, a2, Operand(FAST_ELEMENTS));
    __ Branch(&create_runtime, eq, a2, Operand(FAST_HOLEY_SMI_ELEMENTS));
    __ ld(a2, FieldMemOperand(a0, JSArray::kLengthOffset));
    __ ld(a0, FieldMemOperand(a0, JSArray::kElementsOffset));
    __ SmiUntag(a2);

    __ bind(&done_create);
  }

  // Check for stack overflow.
  {
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    Label done;
    __ LoadRoot(a4, Heap::kRealStackLimitRootIndex);
    // Make a4 the space we have left. The stack might already be overflowed
    // here which will cause a4 to become negative.
    __ Dsubu(a4, sp, a4);
    // Check if the arguments will overflow the stack.
    __ dsll(at, a2, kPointerSizeLog2);
    __ Branch(&done, gt, a4, Operand(at));  // Signed comparison.
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&done);
  }

  // ----------- S t a t e -------------
  //  -- a1    : target
  //  -- a0    : args (a FixedArray built from argumentsList)
  //  -- a2    : len (number of elements to push from args)
  //  -- a3    : new.target (checked to be constructor or undefined)
  //  -- sp[0] : thisArgument
  // -----------------------------------

  // Push arguments onto the stack (thisArgument is already on the stack).
  {
    // a4 counts the elements pushed so far; when the loop exits it equals
    // len and becomes the new argument count in a0.
    __ mov(a4, zero_reg);
    Label done, loop;
    __ bind(&loop);
    __ Branch(&done, eq, a4, Operand(a2));
    __ dsll(at, a4, kPointerSizeLog2);
    __ Daddu(at, a0, at);
    __ ld(at, FieldMemOperand(at, FixedArray::kHeaderSize));
    __ Push(at);
    __ Daddu(a4, a4, Operand(1));
    __ Branch(&loop);
    __ bind(&done);
    __ Move(a0, a4);
  }

  // Dispatch to Call or Construct depending on whether new.target is undefined.
  {
    Label construct;
    __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
    __ Branch(&construct, ne, a3, Operand(at));
    __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
    __ bind(&construct);
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }
}
1929 
1930 
1931 // static
// The CallFunction builtin: implements [[Call]] for ordinary JSFunctions.
// Rejects class constructors, converts the receiver for non-native sloppy
// mode functions (per |mode|), then invokes the function through
// InvokeFunctionCode with the expected/actual argument counts.
void Builtins::Generate_CallFunction(MacroAssembler* masm,
                                     ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSFunction)
  // -----------------------------------
  __ AssertFunction(a1);

  // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
  // Check that function is not a "classConstructor".
  Label class_constructor;
  __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFunctionKindByteOffset));
  __ And(at, a3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte));
  __ Branch(&class_constructor, ne, at, Operand(zero_reg));

  // Enter the context of the function; ToObject has to run in the function
  // context, and we also need to take the global proxy from the function
  // context in case of conversion.
  STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
                SharedFunctionInfo::kStrictModeByteOffset);
  __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
  // We need to convert the receiver for non-native sloppy mode functions.
  Label done_convert;
  // The native and strict-mode bits share a byte (asserted above), so one
  // load and mask tests both: skip conversion if either bit is set.
  __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset));
  __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
                         (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
  __ Branch(&done_convert, ne, at, Operand(zero_reg));
  {
    // ----------- S t a t e -------------
    //  -- a0 : the number of arguments (not including the receiver)
    //  -- a1 : the function to call (checked to be a JSFunction)
    //  -- a2 : the shared function info.
    //  -- cp : the function context.
    // -----------------------------------

    if (mode == ConvertReceiverMode::kNullOrUndefined) {
      // Patch receiver to global proxy.
      __ LoadGlobalProxy(a3);
    } else {
      Label convert_to_object, convert_receiver;
      // Load the receiver from the stack into a3.
      __ dsll(at, a0, kPointerSizeLog2);
      __ daddu(at, sp, at);
      __ ld(a3, MemOperand(at));
      __ JumpIfSmi(a3, &convert_to_object);
      STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
      __ GetObjectType(a3, a4, a4);
      // JSReceivers need no conversion.
      __ Branch(&done_convert, hs, a4, Operand(FIRST_JS_RECEIVER_TYPE));
      if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
        Label convert_global_proxy;
        __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex,
                      &convert_global_proxy);
        __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object);
        __ bind(&convert_global_proxy);
        {
          // Patch receiver to global proxy.
          __ LoadGlobalProxy(a3);
        }
        __ Branch(&convert_receiver);
      }
      __ bind(&convert_to_object);
      {
        // Convert receiver using ToObject.
        // TODO(bmeurer): Inline the allocation here to avoid building the frame
        // in the fast case? (fall back to AllocateInNewSpace?)
        FrameScope scope(masm, StackFrame::INTERNAL);
        // Preserve the argument count (as a smi) and the function across
        // the ToObject call.
        __ SmiTag(a0);
        __ Push(a0, a1);
        __ mov(a0, a3);
        ToObjectStub stub(masm->isolate());
        __ CallStub(&stub);
        __ mov(a3, v0);
        __ Pop(a0, a1);
        __ SmiUntag(a0);
      }
      // Reload the shared function info; a2 was clobbered above.
      __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
      __ bind(&convert_receiver);
    }
    // Store the converted receiver back into its stack slot.
    __ dsll(at, a0, kPointerSizeLog2);
    __ daddu(at, sp, at);
    __ sd(a3, MemOperand(at));
  }
  __ bind(&done_convert);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSFunction)
  //  -- a2 : the shared function info.
  //  -- cp : the function context.
  // -----------------------------------

  __ lw(a2,
        FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
  ParameterCount actual(a0);
  ParameterCount expected(a2);
  __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION,
                        CheckDebugStepCallWrapper());

  // The function is a "classConstructor", need to raise an exception.
  __ bind(&class_constructor);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
  }
}
2038 
2039 
2040 // static
// The CallBoundFunction builtin: implements [[Call]] for JSBoundFunctions.
// Patches the receiver to [[BoundThis]], splices the [[BoundArguments]]
// in front of the existing arguments, and tail-calls the generic Call
// builtin on the [[BoundTargetFunction]].
void Builtins::Generate_CallBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  // -----------------------------------
  __ AssertBoundFunction(a1);

  // Patch the receiver to [[BoundThis]].
  {
    __ ld(at, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset));
    __ dsll(a4, a0, kPointerSizeLog2);
    __ daddu(a4, a4, sp);
    __ sd(at, MemOperand(a4));
  }

  // Load [[BoundArguments]] into a2 and length of that into a4.
  __ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
  __ SmiUntag(a4);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
  //  -- a4 : the number of [[BoundArguments]]
  // -----------------------------------

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ dsll(a5, a4, kPointerSizeLog2);
    __ Dsubu(sp, sp, Operand(a5));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
    __ Branch(&done, gt, sp, Operand(at));  // Signed comparison.
    // Restore the stack pointer.
    __ Daddu(sp, sp, Operand(a5));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

  // Relocate arguments down the stack.
  // Moves a0 + 1 slots (the arguments plus the receiver) from their old
  // position (offset by the reserved space, index a4) down to the new
  // slots starting at sp (index a5).
  {
    Label loop, done_loop;
    __ mov(a5, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, gt, a5, Operand(a0));
    __ dsll(a6, a4, kPointerSizeLog2);
    __ daddu(a6, a6, sp);
    __ ld(at, MemOperand(a6));
    __ dsll(a6, a5, kPointerSizeLog2);
    __ daddu(a6, a6, sp);
    __ sd(at, MemOperand(a6));
    __ Daddu(a4, a4, Operand(1));
    __ Daddu(a5, a5, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ SmiUntag(a4);
    // Point a2 at the (untagged) start of the FixedArray payload.
    __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    // Copy elements highest-index first; a0 is bumped per element so it
    // ends up as the combined argument count.
    __ bind(&loop);
    __ Dsubu(a4, a4, Operand(1));
    __ Branch(&done_loop, lt, a4, Operand(zero_reg));
    __ dsll(a5, a4, kPointerSizeLog2);
    __ daddu(a5, a5, a2);
    __ ld(at, MemOperand(a5));
    __ dsll(a5, a0, kPointerSizeLog2);
    __ daddu(a5, a5, sp);
    __ sd(at, MemOperand(a5));
    __ Daddu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Call the [[BoundTargetFunction]] via the Call builtin.
  __ ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ li(at, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
                                      masm->isolate())));
  __ ld(at, MemOperand(at));
  __ Daddu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}
2133 
2134 
2135 // static
// The generic Call builtin: dispatches [[Call]] on an arbitrary target.
// JSFunctions go to CallFunction, bound functions to CallBoundFunction,
// proxies to the runtime, other callables through the call-as-function
// delegate; non-callables throw a TypeError.
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the target to call (can be any Object).
  // -----------------------------------

  Label non_callable, non_function, non_smi;
  __ JumpIfSmi(a1, &non_callable);
  __ bind(&non_smi);
  // t1 = map of target, t2 = instance type.
  __ GetObjectType(a1, t1, t2);
  __ Jump(masm->isolate()->builtins()->CallFunction(mode),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));
  __ Jump(masm->isolate()->builtins()->CallBoundFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));
  __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE));

  // 1. Runtime fallback for Proxy [[Call]].
  __ Push(a1);
  // Increase the arguments size to include the pushed function and the
  // existing receiver on the stack.
  __ Daddu(a0, a0, 2);
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyCall, masm->isolate()));

  // 2. Call to something else, which might have a [[Call]] internal method (if
  // not we raise an exception).
  __ bind(&non_function);
  // Check if target has a [[Call]] internal method.
  __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t1, t1, Operand(1 << Map::kIsCallable));
  __ Branch(&non_callable, eq, t1, Operand(zero_reg));
  // Overwrite the original receiver with the (original) target.
  __ dsll(at, a0, kPointerSizeLog2);
  __ daddu(at, sp, at);
  __ sd(a1, MemOperand(at));
  // Let the "call_as_function_delegate" take care of the rest.
  __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1);
  __ Jump(masm->isolate()->builtins()->CallFunction(
              ConvertReceiverMode::kNotNullOrUndefined),
          RelocInfo::CODE_TARGET);

  // 3. Call to something that is not callable.
  __ bind(&non_callable);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(a1);
    __ CallRuntime(Runtime::kThrowCalledNonCallable);
  }
}
2186 
2187 
// The ConstructFunction builtin: implements [[Construct]] for ordinary
// JSFunctions by tail-calling the function-specific construct stub found
// on the SharedFunctionInfo.
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (checked to be a JSFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertFunction(a1);

  // Calling convention for function specific ConstructStubs require
  // a2 to contain either an AllocationSite or undefined.
  __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
  __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset));
  // Skip the Code object header to reach the first instruction.
  __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}
2207 
2208 
2209 // static
// The ConstructBoundFunction builtin: implements [[Construct]] for
// JSBoundFunctions. Splices the [[BoundArguments]] in front of the existing
// arguments, patches new.target if it was the bound function itself, and
// tail-calls the generic Construct builtin on the [[BoundTargetFunction]].
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a3 : the new target (checked to be a constructor)
  // -----------------------------------
  __ AssertBoundFunction(a1);

  // Load [[BoundArguments]] into a2 and length of that into a4.
  __ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
  __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
  __ SmiUntag(a4);

  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the function to call (checked to be a JSBoundFunction)
  //  -- a2 : the [[BoundArguments]] (implemented as FixedArray)
  //  -- a3 : the new target (checked to be a constructor)
  //  -- a4 : the number of [[BoundArguments]]
  // -----------------------------------

  // Reserve stack space for the [[BoundArguments]].
  {
    Label done;
    __ dsll(a5, a4, kPointerSizeLog2);
    __ Dsubu(sp, sp, Operand(a5));
    // Check the stack for overflow. We are not trying to catch interruptions
    // (i.e. debug break and preemption) here, so check the "real stack limit".
    __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
    __ Branch(&done, gt, sp, Operand(at));  // Signed comparison.
    // Restore the stack pointer.
    __ Daddu(sp, sp, Operand(a5));
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ EnterFrame(StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowStackOverflow);
    }
    __ bind(&done);
  }

  // Relocate arguments down the stack.
  // Moves a0 slots from their old position (offset by the reserved space,
  // index a4) down to the new slots starting at sp (index a5). NOTE: the
  // loop bound here is `ge` (a0 iterations) rather than the `gt` (a0 + 1)
  // used in Generate_CallBoundFunction — no extra receiver slot is copied.
  {
    Label loop, done_loop;
    __ mov(a5, zero_reg);
    __ bind(&loop);
    __ Branch(&done_loop, ge, a5, Operand(a0));
    __ dsll(a6, a4, kPointerSizeLog2);
    __ daddu(a6, a6, sp);
    __ ld(at, MemOperand(a6));
    __ dsll(a6, a5, kPointerSizeLog2);
    __ daddu(a6, a6, sp);
    __ sd(at, MemOperand(a6));
    __ Daddu(a4, a4, Operand(1));
    __ Daddu(a5, a5, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Copy [[BoundArguments]] to the stack (below the arguments).
  {
    Label loop, done_loop;
    __ ld(a4, FieldMemOperand(a2, FixedArray::kLengthOffset));
    __ SmiUntag(a4);
    // Point a2 at the (untagged) start of the FixedArray payload.
    __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
    // Copy elements highest-index first; a0 is bumped per element so it
    // ends up as the combined argument count.
    __ bind(&loop);
    __ Dsubu(a4, a4, Operand(1));
    __ Branch(&done_loop, lt, a4, Operand(zero_reg));
    __ dsll(a5, a4, kPointerSizeLog2);
    __ daddu(a5, a5, a2);
    __ ld(at, MemOperand(a5));
    __ dsll(a5, a0, kPointerSizeLog2);
    __ daddu(a5, a5, sp);
    __ sd(at, MemOperand(a5));
    __ Daddu(a0, a0, Operand(1));
    __ Branch(&loop);
    __ bind(&done_loop);
  }

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label skip_load;
    __ Branch(&skip_load, ne, a1, Operand(a3));
    __ ld(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&skip_load);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ li(at, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ ld(at, MemOperand(at));
  __ Daddu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}
2303 
2304 
2305 // static
// static
// Construct-path entry for JSProxy targets: defers the whole [[Construct]]
// operation to the runtime, since proxy construction traps must run in C++.
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (checked to be a JSProxy)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  // Push constructor and new.target so the runtime receives them as the
  // trailing arguments on the stack.
  __ Push(a1, a3);
  // Include the pushed new_target, constructor and the receiver in the
  // argument count handed to the runtime.
  __ Daddu(a0, a0, Operand(3));
  // Tail-call to the runtime; it never returns here.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}
2322 
2323 
2324 // static
// static
// Generic [[Construct]] dispatcher: inspects the target's instance type and
// tail-calls the specialized construct builtin (function, bound function,
// proxy), falls back to the call-as-constructor delegate for other objects
// with a [[Construct]] internal method, or throws for non-constructables.
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0 : the number of arguments (not including the receiver)
  //  -- a1 : the constructor to call (can be any Object)
  //  -- a3 : the new target (either the same as the constructor or
  //          the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi. Smis have no map and can never be constructed.
  Label non_constructor;
  __ JumpIfSmi(a1, &non_constructor);

  // Dispatch based on instance type.
  // t1 <- target's map, t2 <- instance type from the map.
  __ ld(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset));
  // Ordinary JSFunctions take the fast path without a constructor-bit check;
  // the ConstructFunction builtin handles that case itself.
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));

  // Check if target has a [[Construct]] internal method.
  // t3 <- map bit field; bail out if the kIsConstructor bit is clear.
  __ lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t3, t3, Operand(1 << Map::kIsConstructor));
  __ Branch(&non_constructor, eq, t3, Operand(zero_reg));

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Only dispatch to proxies after checking whether they are constructors.
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq, t2, Operand(JS_PROXY_TYPE));

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    // The receiver slot sits a0 words above sp (above all pushed arguments).
    __ dsll(at, a0, kPointerSizeLog2);
    __ daddu(at, sp, at);
    __ sd(a1, MemOperand(at));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}
2375 
2376 
// Bridges a call whose actual argument count differs from the callee's
// expected (formal) count: builds an adaptor frame, copies the actual
// arguments into it, pads missing arguments with undefined (or drops
// extras by copying only the expected count), then calls the function's
// code entry. If the callee doesn't adapt arguments, jumps straight through.
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  //  -- a0: actual arguments count
  //  -- a1: function (passed through to callee)
  //  -- a2: expected arguments count
  //  -- a3: new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  // Functions marked with the sentinel accept any argument count unchanged.
  __ Branch(&dont_adapt_arguments, eq,
      a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // We use Uless as the number of arguments should always be greater than 0.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into a4.
    // a0 is a smi, so SmiScale both untags and scales it to a byte offset.
    __ SmiScale(a0, a0, kPointerSizeLog2);
    __ Daddu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Daddu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Only the expected count is copied, so any
    // surplus actual arguments are silently dropped.
    __ dsll(a4, a2, kPointerSizeLog2);
    __ dsubu(a4, a0, a4);

    // Copy the arguments (including the receiver) to the new stack frame.
    // The copy walks downward from the highest slot (receiver) toward a4.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // a4: copy end address

    Label copy;
    __ bind(&copy);
    __ ld(a5, MemOperand(a0));
    __ push(a5);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a4));
    __ daddiu(a0, a0, -kPointerSize);  // In delay slot.

    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);

    // If the function is strong we need to throw an error.
    Label no_strong_error;
    __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
    __ lbu(a5, FieldMemOperand(a4, SharedFunctionInfo::kStrongModeByteOffset));
    __ And(a5, a5, Operand(1 << SharedFunctionInfo::kStrongModeBitWithinByte));
    __ Branch(&no_strong_error, eq, a5, Operand(zero_reg));

    // What we really care about is the required number of arguments.
    // The length field is read as a 32-bit smi, hence the lw + srl untag.
    DCHECK_EQ(kPointerSize, kInt64Size);
    __ lw(a5, FieldMemOperand(a4, SharedFunctionInfo::kLengthOffset));
    __ srl(a5, a5, 1);
    __ Branch(&no_strong_error, ge, a0, Operand(a5));

    {
      FrameScope frame(masm, StackFrame::MANUAL);
      EnterArgumentsAdaptorFrame(masm);
      __ CallRuntime(Runtime::kThrowStrongModeTooFewArguments);
    }

    __ bind(&no_strong_error);
    EnterArgumentsAdaptorFrame(masm);
    ArgumentAdaptorStackCheck(masm, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into a7.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ SmiScale(a0, a0, kPointerSizeLog2);
    __ Daddu(a0, fp, a0);
    // Adjust for return address and receiver.
    __ Daddu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Daddu(a7, fp, kPointerSize);

    // Copy the arguments (including the receiver) to the new stack frame.
    // All actual arguments are copied; the shortfall is filled below.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // a7: copy end address
    Label copy;
    __ bind(&copy);
    __ ld(a4, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Dsubu(sp, sp, kPointerSize);
    __ Dsubu(a0, a0, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a7));
    __ sd(a4, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ LoadRoot(a5, Heap::kUndefinedValueRootIndex);
    // a4 <- target sp once all expected slots exist: fp minus the expected
    // argument area minus the adaptor frame's fixed part.
    __ dsll(a6, a2, kPointerSizeLog2);
    __ Dsubu(a4, fp, Operand(a6));
    // Adjust for frame.
    __ Dsubu(a4, a4, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                             2 * kPointerSize));

    Label fill;
    __ bind(&fill);
    __ Dsubu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a4));
    __ sd(a5, MemOperand(sp));  // In the delay slot.
  }

  // Call the entry point.
  __ bind(&invoke);
  // The callee sees exactly the expected count of arguments.
  __ mov(a0, a2);
  // a0 : expected number of arguments
  // a1 : function (passed through to callee)
  // a3: new target (passed through to callee)
  __ ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Call(a4);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();


  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Jump(a4);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    // The runtime call above must not return; trap if it does.
    __ break_(0xCC);
  }
}
2530 
2531 
2532 #undef __
2533 
2534 }  // namespace internal
2535 }  // namespace v8
2536 
2537 #endif  // V8_TARGET_ARCH_MIPS64
2538