1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #if V8_TARGET_ARCH_MIPS
6
7 #include "src/codegen.h"
8 #include "src/debug/debug.h"
9 #include "src/deoptimizer.h"
10 #include "src/full-codegen/full-codegen.h"
11 #include "src/runtime/runtime.h"
12
13 namespace v8 {
14 namespace internal {
15
16 #define __ ACCESS_MASM(masm)
17
18 void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
19 ExitFrameType exit_frame_type) {
20 // ----------- S t a t e -------------
21 // -- a0 : number of arguments excluding receiver
22 // -- a1 : target
23 // -- a3 : new.target
24 // -- sp[0] : last argument
25 // -- ...
26 // -- sp[4 * (argc - 1)] : first argument
27 //  -- sp[4 * argc]         : receiver
28 // -----------------------------------
29 __ AssertFunction(a1);
30
31 // Make sure we operate in the context of the called function (for example
32 // ConstructStubs implemented in C++ will be run in the context of the caller
33 // instead of the callee, due to the way that [[Construct]] is defined for
34 // ordinary functions).
35 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
36
37 // JumpToExternalReference expects a0 to contain the number of arguments
38 // including the receiver and the extra arguments.
39 const int num_extra_args = 3;
40 __ Addu(a0, a0, num_extra_args + 1);
41
42 // Insert extra arguments.
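  // The argument count is pushed as a Smi so the extra-argument slot holds a
  // properly tagged value while it lives on the stack.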
43 __ SmiTag(a0);
44 __ Push(a0, a1, a3);
45 __ SmiUntag(a0);
46
47 __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
48 PROTECT, exit_frame_type == BUILTIN_EXIT);
49 }
50
51 // Load the built-in InternalArray function from the current context.
52 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
53 Register result) {
54 // Load the InternalArray function from the native context.
55 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
56 }
57
58 // Load the built-in Array function from the current context.
59 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
60 // Load the Array function from the native context.
61 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
62 }
63
64 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
65 // ----------- S t a t e -------------
66 // -- a0 : number of arguments
67 // -- ra : return address
68 // -- sp[...]: constructor arguments
69 // -----------------------------------
70 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
71
72 // Get the InternalArray function.
73 GenerateLoadInternalArrayFunction(masm, a1);
74
75 if (FLAG_debug_code) {
76 // Initial map for the builtin InternalArray functions should be maps.
77 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
78 __ SmiTst(a2, t0);
79 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, t0,
80 Operand(zero_reg));
81 __ GetObjectType(a2, a3, t0);
82 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction, t0,
83 Operand(MAP_TYPE));
84 }
85
86 // Run the native code for the InternalArray function called as a normal
87 // function.
88 // Tail call a stub.
89 InternalArrayConstructorStub stub(masm->isolate());
90 __ TailCallStub(&stub);
91 }
92
93 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
94 // ----------- S t a t e -------------
95 // -- a0 : number of arguments
96 // -- ra : return address
97 // -- sp[...]: constructor arguments
98 // -----------------------------------
99 Label generic_array_code;
100
101 // Get the Array function.
102 GenerateLoadArrayFunction(masm, a1);
103
104 if (FLAG_debug_code) {
105 // Initial map for the builtin Array functions should be maps.
106 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
107 __ SmiTst(a2, t0);
108 __ Assert(ne, kUnexpectedInitialMapForArrayFunction1, t0,
109 Operand(zero_reg));
110 __ GetObjectType(a2, a3, t0);
111 __ Assert(eq, kUnexpectedInitialMapForArrayFunction2, t0,
112 Operand(MAP_TYPE));
113 }
114
115 // Run the native code for the Array function called as a normal function.
116 // Tail call a stub.
117 __ mov(a3, a1);
118 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
119 ArrayConstructorStub stub(masm->isolate());
120 __ TailCallStub(&stub);
121 }
122
123 // static
124 void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
125 // ----------- S t a t e -------------
126 // -- a0 : number of arguments
127 // -- a1 : function
128 // -- cp : context
129 // -- ra : return address
130 // -- sp[(argc - n - 1) * 4] : arg[n] (zero-based)
131 // -- sp[argc * 4] : receiver
132 // -----------------------------------
133 Heap::RootListIndex const root_index =
134 (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
135 : Heap::kMinusInfinityValueRootIndex;
136
137 // Load the accumulator with the default return value (either -Infinity or
138 // +Infinity), with the tagged value in t2 and the double value in f0.
139 __ LoadRoot(t2, root_index);
140 __ ldc1(f0, FieldMemOperand(t2, HeapNumber::kValueOffset));
141
142 Label done_loop, loop, done;
143 __ mov(a3, a0);
144 __ bind(&loop);
145 {
146 // Check if all parameters done.
147 __ Subu(a3, a3, Operand(1));
148 __ Branch(&done_loop, lt, a3, Operand(zero_reg));
149
150 // Load the next parameter tagged value into a2.
151 __ Lsa(at, sp, a3, kPointerSizeLog2);
152 __ lw(a2, MemOperand(at));
153
154 // Load the double value of the parameter into f2, maybe converting the
155 // parameter to a number first using the ToNumber builtin if necessary.
156 Label convert, convert_smi, convert_number, done_convert;
157 __ bind(&convert);
158 __ JumpIfSmi(a2, &convert_smi);
159 __ lw(t0, FieldMemOperand(a2, HeapObject::kMapOffset));
160 __ JumpIfRoot(t0, Heap::kHeapNumberMapRootIndex, &convert_number);
161 {
162 // Parameter is not a Number, use the ToNumber builtin to convert it.
163 FrameScope scope(masm, StackFrame::MANUAL);
164 __ SmiTag(a0);
165 __ SmiTag(a3);
166 __ EnterBuiltinFrame(cp, a1, a0);
167 __ Push(t2, a3);
168 __ mov(a0, a2);
169 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
170 __ mov(a2, v0);
171 __ Pop(t2, a3);
172 __ LeaveBuiltinFrame(cp, a1, a0);
173 __ SmiUntag(a3);
174 __ SmiUntag(a0);
175 {
176 // Restore the double accumulator value (f0).
177 Label restore_smi, done_restore;
178 __ JumpIfSmi(t2, &restore_smi);
179 __ ldc1(f0, FieldMemOperand(t2, HeapNumber::kValueOffset));
180 __ jmp(&done_restore);
181 __ bind(&restore_smi);
182 __ SmiToDoubleFPURegister(t2, f0, t0);
183 __ bind(&done_restore);
184 }
185 }
186 __ jmp(&convert);
187 __ bind(&convert_number);
188 __ ldc1(f2, FieldMemOperand(a2, HeapNumber::kValueOffset));
189 __ jmp(&done_convert);
190 __ bind(&convert_smi);
191 __ SmiToDoubleFPURegister(a2, f2, t0);
192 __ bind(&done_convert);
193
194     // Perform the actual comparison using the Min/Max macro instructions, with
195     // the accumulator value on the left hand side (f0) and the next parameter
196     // value on the right hand side (f2).
197 // We need to work out which HeapNumber (or smi) the result came from.
198 Label compare_nan, set_value, ool_min, ool_max;
199 __ BranchF(nullptr, &compare_nan, eq, f0, f2);
200 __ Move(t0, t1, f0);
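    // Save the raw bits of the accumulator (f0) in t0/t1 so that, after the
    // Min/Max below, we can tell whether the result still came from the
    // accumulator or from the new parameter.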
201 if (kind == MathMaxMinKind::kMin) {
202 __ Float64Min(f0, f0, f2, &ool_min);
203 } else {
204 DCHECK(kind == MathMaxMinKind::kMax);
205 __ Float64Max(f0, f0, f2, &ool_max);
206 }
207 __ jmp(&done);
208
209 __ bind(&ool_min);
210 __ Float64MinOutOfLine(f0, f0, f2);
211 __ jmp(&done);
212
213 __ bind(&ool_max);
214 __ Float64MaxOutOfLine(f0, f0, f2);
215
216 __ bind(&done);
217 __ Move(at, t8, f0);
218 __ Branch(&set_value, ne, t0, Operand(at));
219 __ Branch(&set_value, ne, t1, Operand(t8));
220 __ jmp(&loop);
221 __ bind(&set_value);
222 __ mov(t2, a2);
223 __ jmp(&loop);
224
225 // At least one side is NaN, which means that the result will be NaN too.
226 __ bind(&compare_nan);
227 __ LoadRoot(t2, Heap::kNanValueRootIndex);
228 __ ldc1(f0, FieldMemOperand(t2, HeapNumber::kValueOffset));
229 __ jmp(&loop);
230 }
231
232 __ bind(&done_loop);
233 // Drop all slots, including the receiver.
234 __ Addu(a0, a0, Operand(1));
235 __ Lsa(sp, sp, a0, kPointerSizeLog2);
236 __ Ret(USE_DELAY_SLOT);
237 __ mov(v0, t2); // In delay slot.
238 }
239
240 // static
241 void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
242 // ----------- S t a t e -------------
243 // -- a0 : number of arguments
244 // -- a1 : constructor function
245 // -- cp : context
246 // -- ra : return address
247 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
248 // -- sp[argc * 4] : receiver
249 // -----------------------------------
250
251 // 1. Load the first argument into a0.
252 Label no_arguments;
253 {
254 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
255 __ Subu(t1, a0, Operand(1)); // In delay slot.
256 __ mov(t0, a0); // Store argc in t0.
257 __ Lsa(at, sp, t1, kPointerSizeLog2);
258 __ lw(a0, MemOperand(at));
259 }
260
261 // 2a. Convert first argument to number.
262 {
263 FrameScope scope(masm, StackFrame::MANUAL);
264 __ SmiTag(t0);
265 __ EnterBuiltinFrame(cp, a1, t0);
266 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
267 __ LeaveBuiltinFrame(cp, a1, t0);
268 __ SmiUntag(t0);
269 }
270
271 {
272 // Drop all arguments including the receiver.
273 __ Lsa(sp, sp, t0, kPointerSizeLog2);
274 __ DropAndRet(1);
275 }
276
277 // 2b. No arguments, return +0.
278 __ bind(&no_arguments);
279 __ Move(v0, Smi::kZero);
280 __ DropAndRet(1);
281 }
282
283 // static
284 void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
285 // ----------- S t a t e -------------
286 // -- a0 : number of arguments
287 // -- a1 : constructor function
288 // -- a3 : new target
289 // -- cp : context
290 // -- ra : return address
291 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
292 // -- sp[argc * 4] : receiver
293 // -----------------------------------
294
295 // 1. Make sure we operate in the context of the called function.
296 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
297
298 // 2. Load the first argument into a0.
299 {
300 Label no_arguments, done;
301 __ mov(t0, a0); // Store argc in t0.
302 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
303 __ Subu(t1, a0, Operand(1)); // In delay slot.
304 __ Lsa(at, sp, t1, kPointerSizeLog2);
305 __ lw(a0, MemOperand(at));
306 __ jmp(&done);
307 __ bind(&no_arguments);
308 __ Move(a0, Smi::kZero);
309 __ bind(&done);
310 }
311
312 // 3. Make sure a0 is a number.
313 {
314 Label done_convert;
315 __ JumpIfSmi(a0, &done_convert);
316 __ GetObjectType(a0, a2, a2);
317 __ Branch(&done_convert, eq, a2, Operand(HEAP_NUMBER_TYPE));
318 {
319 FrameScope scope(masm, StackFrame::MANUAL);
320 __ SmiTag(t0);
321 __ EnterBuiltinFrame(cp, a1, t0);
322 __ Push(a3);
323 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
324 __ Move(a0, v0);
325 __ Pop(a3);
326 __ LeaveBuiltinFrame(cp, a1, t0);
327 __ SmiUntag(t0);
328 }
329 __ bind(&done_convert);
330 }
331
332 // 4. Check if new target and constructor differ.
333 Label drop_frame_and_ret, new_object;
334 __ Branch(&new_object, ne, a1, Operand(a3));
335
336 // 5. Allocate a JSValue wrapper for the number.
337 __ AllocateJSValue(v0, a1, a0, a2, t1, &new_object);
338 __ jmp(&drop_frame_and_ret);
339
340 // 6. Fallback to the runtime to create new object.
341 __ bind(&new_object);
342 {
343 FrameScope scope(masm, StackFrame::MANUAL);
344 __ SmiTag(t0);
345 __ EnterBuiltinFrame(cp, a1, t0);
346 __ Push(a0); // first argument
347 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
348 RelocInfo::CODE_TARGET);
349 __ Pop(a0);
350 __ LeaveBuiltinFrame(cp, a1, t0);
351 __ SmiUntag(t0);
352 }
353 __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset));
354
355 __ bind(&drop_frame_and_ret);
356 {
357 __ Lsa(sp, sp, t0, kPointerSizeLog2);
358 __ DropAndRet(1);
359 }
360 }
361
362 // static
363 void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
364 // ----------- S t a t e -------------
365 // -- a0 : number of arguments
366 // -- a1 : constructor function
367 // -- cp : context
368 // -- ra : return address
369 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
370 // -- sp[argc * 4] : receiver
371 // -----------------------------------
372
373 // 1. Load the first argument into a0.
374 Label no_arguments;
375 {
376 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
377 __ Subu(t1, a0, Operand(1));
378 __ mov(t0, a0); // Store argc in t0.
379 __ Lsa(at, sp, t1, kPointerSizeLog2);
380 __ lw(a0, MemOperand(at));
381 }
382
383 // 2a. At least one argument, return a0 if it's a string, otherwise
384 // dispatch to appropriate conversion.
385 Label drop_frame_and_ret, to_string, symbol_descriptive_string;
386 {
387 __ JumpIfSmi(a0, &to_string);
388 __ GetObjectType(a0, t1, t1);
389 STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
390 __ Subu(t1, t1, Operand(FIRST_NONSTRING_TYPE));
391 __ Branch(&symbol_descriptive_string, eq, t1, Operand(zero_reg));
392 __ Branch(&to_string, gt, t1, Operand(zero_reg));
393 __ mov(v0, a0);
394 __ jmp(&drop_frame_and_ret);
395 }
396
397 // 2b. No arguments, return the empty string (and pop the receiver).
398 __ bind(&no_arguments);
399 {
400 __ LoadRoot(v0, Heap::kempty_stringRootIndex);
401 __ DropAndRet(1);
402 }
403
404 // 3a. Convert a0 to a string.
405 __ bind(&to_string);
406 {
407 FrameScope scope(masm, StackFrame::MANUAL);
408 __ SmiTag(t0);
409 __ EnterBuiltinFrame(cp, a1, t0);
410 __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
411 __ LeaveBuiltinFrame(cp, a1, t0);
412 __ SmiUntag(t0);
413 }
414 __ jmp(&drop_frame_and_ret);
415
416 // 3b. Convert symbol in a0 to a string.
417 __ bind(&symbol_descriptive_string);
418 {
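    // Drop all arguments and the receiver, then pass the symbol to the runtime.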
419 __ Lsa(sp, sp, t0, kPointerSizeLog2);
420 __ Drop(1);
421 __ Push(a0);
422 __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
423 }
424
425 __ bind(&drop_frame_and_ret);
426 {
427 __ Lsa(sp, sp, t0, kPointerSizeLog2);
428 __ DropAndRet(1);
429 }
430 }
431
432 // static
433 void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
434 // ----------- S t a t e -------------
435 // -- a0 : number of arguments
436 // -- a1 : constructor function
437 // -- a3 : new target
438 // -- cp : context
439 // -- ra : return address
440 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
441 // -- sp[argc * 4] : receiver
442 // -----------------------------------
443
444 // 1. Make sure we operate in the context of the called function.
445 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
446
447 // 2. Load the first argument into a0.
448 {
449 Label no_arguments, done;
450 __ mov(t0, a0); // Store argc in t0.
451 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg));
452 __ Subu(t1, a0, Operand(1));
453 __ Lsa(at, sp, t1, kPointerSizeLog2);
454 __ lw(a0, MemOperand(at));
455 __ jmp(&done);
456 __ bind(&no_arguments);
457 __ LoadRoot(a0, Heap::kempty_stringRootIndex);
458 __ bind(&done);
459 }
460
461 // 3. Make sure a0 is a string.
462 {
463 Label convert, done_convert;
464 __ JumpIfSmi(a0, &convert);
465 __ GetObjectType(a0, a2, a2);
466 __ And(t1, a2, Operand(kIsNotStringMask));
467 __ Branch(&done_convert, eq, t1, Operand(zero_reg));
468 __ bind(&convert);
469 {
470 FrameScope scope(masm, StackFrame::MANUAL);
471 __ SmiTag(t0);
472 __ EnterBuiltinFrame(cp, a1, t0);
473 __ Push(a3);
474 __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
475 __ Move(a0, v0);
476 __ Pop(a3);
477 __ LeaveBuiltinFrame(cp, a1, t0);
478 __ SmiUntag(t0);
479 }
480 __ bind(&done_convert);
481 }
482
483 // 4. Check if new target and constructor differ.
484 Label drop_frame_and_ret, new_object;
485 __ Branch(&new_object, ne, a1, Operand(a3));
486
487 // 5. Allocate a JSValue wrapper for the string.
488 __ AllocateJSValue(v0, a1, a0, a2, t1, &new_object);
489 __ jmp(&drop_frame_and_ret);
490
491 // 6. Fallback to the runtime to create new object.
492 __ bind(&new_object);
493 {
494 FrameScope scope(masm, StackFrame::MANUAL);
495 __ SmiTag(t0);
496 __ EnterBuiltinFrame(cp, a1, t0);
497 __ Push(a0); // first argument
498 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
499 RelocInfo::CODE_TARGET);
500 __ Pop(a0);
501 __ LeaveBuiltinFrame(cp, a1, t0);
502 __ SmiUntag(t0);
503 }
504 __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset));
505
506 __ bind(&drop_frame_and_ret);
507 {
508 __ Lsa(sp, sp, t0, kPointerSizeLog2);
509 __ DropAndRet(1);
510 }
511 }
512
513 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
514 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
515 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
516 __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
517 __ Jump(at);
518 }
519
520 static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
521 Runtime::FunctionId function_id) {
522 // ----------- S t a t e -------------
523 // -- a0 : argument count (preserved for callee)
524 // -- a1 : target function (preserved for callee)
525 // -- a3 : new target (preserved for callee)
526 // -----------------------------------
527 {
528 FrameScope scope(masm, StackFrame::INTERNAL);
529 // Push a copy of the target function and the new target.
530 // Push function as parameter to the runtime call.
531 __ SmiTag(a0);
532 __ Push(a0, a1, a3, a1);
533
534 __ CallRuntime(function_id, 1);
535
536 // Restore target function and new target.
537 __ Pop(a0, a1, a3);
538 __ SmiUntag(a0);
539 }
540
541 __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
542 __ Jump(at);
543 }
544
545 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
546 // Checking whether the queued function is ready for install is optional,
547 // since we come across interrupts and stack checks elsewhere. However,
548 // not checking may delay installing ready functions, and always checking
549 // would be quite expensive. A good compromise is to first check against
550 // stack limit as a cue for an interrupt signal.
551 Label ok;
552 __ LoadRoot(t0, Heap::kStackLimitRootIndex);
553 __ Branch(&ok, hs, sp, Operand(t0));
554
555 GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
556
557 __ bind(&ok);
558 GenerateTailCallToSharedCode(masm);
559 }
560
561 namespace {
562
563 void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
564 bool create_implicit_receiver,
565 bool check_derived_construct) {
566 Label post_instantiation_deopt_entry;
567
568 // ----------- S t a t e -------------
569 // -- a0 : number of arguments
570 // -- a1 : constructor function
571 // -- a3 : new target
572 // -- cp : context
573 // -- ra : return address
574 // -- sp[...]: constructor arguments
575 // -----------------------------------
576
577 Isolate* isolate = masm->isolate();
578
579 // Enter a construct frame.
580 {
581 FrameScope scope(masm, StackFrame::CONSTRUCT);
582
583 // Preserve the incoming parameters on the stack.
584 __ SmiTag(a0);
585 __ Push(cp, a0);
586
587 if (create_implicit_receiver) {
588 // Allocate the new receiver object.
589 __ Push(a1, a3);
590 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
591 RelocInfo::CODE_TARGET);
592 __ mov(t4, v0);
593 __ Pop(a1, a3);
594
595 // ----------- S t a t e -------------
596 // -- a1: constructor function
597 // -- a3: new target
598       //  -- t4: newly allocated object
599 // -----------------------------------
600
601 // Retrieve smi-tagged arguments count from the stack.
602 __ lw(a0, MemOperand(sp));
603 }
604
605 __ SmiUntag(a0);
606
607 if (create_implicit_receiver) {
608 // Push the allocated receiver to the stack. We need two copies
609 // because we may have to return the original one and the calling
610 // conventions dictate that the called function pops the receiver.
611 __ Push(t4, t4);
612 } else {
613 __ PushRoot(Heap::kTheHoleValueRootIndex);
614 }
615
616 // Deoptimizer re-enters stub code here.
617 __ bind(&post_instantiation_deopt_entry);
618
619 // Set up pointer to last argument.
620 __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
621
622 // Copy arguments and receiver to the expression stack.
623 // a0: number of arguments
624 // a1: constructor function
625 // a2: address of last argument (caller sp)
626 // a3: new target
627 // t4: number of arguments (smi-tagged)
628 // sp[0]: receiver
629 // sp[1]: receiver
630 // sp[2]: number of arguments (smi-tagged)
631 Label loop, entry;
632 __ SmiTag(t4, a0);
633 __ jmp(&entry);
634 __ bind(&loop);
635 __ Lsa(t0, a2, t4, kPointerSizeLog2 - kSmiTagSize);
636 __ lw(t1, MemOperand(t0));
637 __ push(t1);
638 __ bind(&entry);
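    // t4 holds the smi-tagged count, so stepping by -2 decrements it by one.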
639 __ Addu(t4, t4, Operand(-2));
640 __ Branch(&loop, greater_equal, t4, Operand(zero_reg));
641
642 // Call the function.
643 // a0: number of arguments
644 // a1: constructor function
645 // a3: new target
646 ParameterCount actual(a0);
647 __ InvokeFunction(a1, a3, actual, CALL_FUNCTION,
648 CheckDebugStepCallWrapper());
649
650 // Store offset of return address for deoptimizer.
651 if (create_implicit_receiver && !is_api_function) {
652 masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
653 masm->pc_offset());
654 }
655
656 // Restore context from the frame.
657 __ lw(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
658
659 if (create_implicit_receiver) {
660 // If the result is an object (in the ECMA sense), we should get rid
661 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
662 // on page 74.
663 Label use_receiver, exit;
664
665 // If the result is a smi, it is *not* an object in the ECMA sense.
666 // v0: result
667 // sp[0]: receiver (newly allocated object)
668 // sp[1]: number of arguments (smi-tagged)
669 __ JumpIfSmi(v0, &use_receiver);
670
671 // If the type of the result (stored in its map) is less than
672 // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
673 __ GetObjectType(v0, a1, a3);
674 __ Branch(&exit, greater_equal, a3, Operand(FIRST_JS_RECEIVER_TYPE));
675
676 // Throw away the result of the constructor invocation and use the
677 // on-stack receiver as the result.
678 __ bind(&use_receiver);
679 __ lw(v0, MemOperand(sp));
680
681 // Remove receiver from the stack, remove caller arguments, and
682 // return.
683 __ bind(&exit);
684 // v0: result
685 // sp[0]: receiver (newly allocated object)
686 // sp[1]: number of arguments (smi-tagged)
687 __ lw(a1, MemOperand(sp, 1 * kPointerSize));
688 } else {
689 __ lw(a1, MemOperand(sp));
690 }
691
692 // Leave construct frame.
693 }
694
695 // ES6 9.2.2. Step 13+
696 // Check that the result is not a Smi, indicating that the constructor result
697 // from a derived class is neither undefined nor an Object.
698 if (check_derived_construct) {
699 Label dont_throw;
700 __ JumpIfNotSmi(v0, &dont_throw);
701 {
702 FrameScope scope(masm, StackFrame::INTERNAL);
703 __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
704 }
705 __ bind(&dont_throw);
706 }
707
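  // a1 holds the smi-tagged argument count; shifting by kPointerSizeLog2 - 1
  // (i.e. kPointerSizeLog2 - kSmiTagSize) scales it to bytes, and the extra
  // kPointerSize drops the receiver.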
708 __ Lsa(sp, sp, a1, kPointerSizeLog2 - 1);
709 __ Addu(sp, sp, kPointerSize);
710 if (create_implicit_receiver) {
711 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
712 }
713 __ Ret();
714
715 // Store offset of trampoline address for deoptimizer. This is the bailout
716 // point after the receiver instantiation but before the function invocation.
717 // We need to restore some registers in order to continue the above code.
718 if (create_implicit_receiver && !is_api_function) {
719 masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
720 masm->pc_offset());
721
722 // ----------- S t a t e -------------
723 // -- a0 : newly allocated object
724 // -- sp[0] : constructor function
725 // -----------------------------------
726
727 __ Pop(a1);
728 __ Push(a0, a0);
729
730 // Retrieve smi-tagged arguments count from the stack.
731 __ lw(a0, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
732 __ SmiUntag(a0);
733
734 // Retrieve the new target value from the stack. This was placed into the
735 // frame description in place of the receiver by the optimizing compiler.
736 __ Addu(a3, fp, Operand(StandardFrameConstants::kCallerSPOffset));
737 __ Lsa(a3, a3, a0, kPointerSizeLog2);
738 __ lw(a3, MemOperand(a3));
739
740 // Continue with constructor function invocation.
741 __ jmp(&post_instantiation_deopt_entry);
742 }
743 }
744
745 } // namespace
746
747 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
748 Generate_JSConstructStubHelper(masm, false, true, false);
749 }
750
751 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
752 Generate_JSConstructStubHelper(masm, true, false, false);
753 }
754
755 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
756 Generate_JSConstructStubHelper(masm, false, false, false);
757 }
758
759 void Builtins::Generate_JSBuiltinsConstructStubForDerived(
760 MacroAssembler* masm) {
761 Generate_JSConstructStubHelper(masm, false, false, true);
762 }
763
764 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
765 FrameScope scope(masm, StackFrame::INTERNAL);
766 __ Push(a1);
767 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
768 }
769
770 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
771
772 // Clobbers a2; preserves all other registers.
773 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
774 IsTagged argc_is_tagged) {
775 // Check the stack for overflow. We are not trying to catch
776 // interruptions (e.g. debug break and preemption) here, so the "real stack
777 // limit" is checked.
778 Label okay;
779 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
780 // Make a2 the space we have left. The stack might already be overflowed
781 // here which will cause a2 to become negative.
782 __ Subu(a2, sp, a2);
783 // Check if the arguments will overflow the stack.
784 if (argc_is_tagged == kArgcIsSmiTagged) {
785 __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize);
786 } else {
787 DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
788 __ sll(t3, argc, kPointerSizeLog2);
789 }
790 // Signed comparison.
791 __ Branch(&okay, gt, a2, Operand(t3));
792
793 // Out of stack space.
794 __ CallRuntime(Runtime::kThrowStackOverflow);
795
796 __ bind(&okay);
797 }
798
799 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
800 bool is_construct) {
801 // Called from JSEntryStub::GenerateBody
802
803 // ----------- S t a t e -------------
804 // -- a0: new.target
805 // -- a1: function
806 // -- a2: receiver_pointer
807 // -- a3: argc
808 // -- s0: argv
809 // -----------------------------------
810 ProfileEntryHookStub::MaybeCallEntryHook(masm);
811
812 // Enter an internal frame.
813 {
814 FrameScope scope(masm, StackFrame::INTERNAL);
815
816 // Setup the context (we need to use the caller context from the isolate).
817 ExternalReference context_address(Isolate::kContextAddress,
818 masm->isolate());
819 __ li(cp, Operand(context_address));
820 __ lw(cp, MemOperand(cp));
821
822 // Push the function and the receiver onto the stack.
823 __ Push(a1, a2);
824
825 // Check if we have enough stack space to push all arguments.
826 // Clobbers a2.
827 Generate_CheckStackOverflow(masm, a3, kArgcIsUntaggedInt);
828
829 // Remember new.target.
830 __ mov(t1, a0);
831
832 // Copy arguments to the stack in a loop.
833 // a3: argc
834 // s0: argv, i.e. points to first arg
835 Label loop, entry;
836 __ Lsa(t2, s0, a3, kPointerSizeLog2);
837 __ b(&entry);
838 __ nop(); // Branch delay slot nop.
839 // t2 points past last arg.
840 __ bind(&loop);
841 __ lw(t0, MemOperand(s0)); // Read next parameter.
842 __ addiu(s0, s0, kPointerSize);
843 __ lw(t0, MemOperand(t0)); // Dereference handle.
844 __ push(t0); // Push parameter.
845 __ bind(&entry);
846 __ Branch(&loop, ne, s0, Operand(t2));
847
848 // Setup new.target and argc.
849 __ mov(a0, a3);
850 __ mov(a3, t1);
851
852 // Initialize all JavaScript callee-saved registers, since they will be seen
853 // by the garbage collector as part of handlers.
854 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
855 __ mov(s1, t0);
856 __ mov(s2, t0);
857 __ mov(s3, t0);
858 __ mov(s4, t0);
859 __ mov(s5, t0);
860 // s6 holds the root address. Do not clobber.
861 // s7 is cp. Do not init.
862
863 // Invoke the code.
864 Handle<Code> builtin = is_construct
865 ? masm->isolate()->builtins()->Construct()
866 : masm->isolate()->builtins()->Call();
867 __ Call(builtin, RelocInfo::CODE_TARGET);
868
869 // Leave internal frame.
870 }
871
872 __ Jump(ra);
873 }
874
875 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
876 Generate_JSEntryTrampolineHelper(masm, false);
877 }
878
879 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
880 Generate_JSEntryTrampolineHelper(masm, true);
881 }
882
883 // static
884 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
885 // ----------- S t a t e -------------
886 // -- v0 : the value to pass to the generator
887 // -- a1 : the JSGeneratorObject to resume
888 // -- a2 : the resume mode (tagged)
889 // -- ra : return address
890 // -----------------------------------
891 __ AssertGeneratorObject(a1);
892
893 // Store input value into generator object.
894 __ sw(v0, FieldMemOperand(a1, JSGeneratorObject::kInputOrDebugPosOffset));
895 __ RecordWriteField(a1, JSGeneratorObject::kInputOrDebugPosOffset, v0, a3,
896 kRAHasNotBeenSaved, kDontSaveFPRegs);
897
898 // Store resume mode into generator object.
899 __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kResumeModeOffset));
900
901 // Load suspended function and context.
902 __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
903 __ lw(cp, FieldMemOperand(t0, JSFunction::kContextOffset));
904
905 // Flood function if we are stepping.
906 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
907 Label stepping_prepared;
908 ExternalReference debug_hook =
909 ExternalReference::debug_hook_on_function_call_address(masm->isolate());
910 __ li(t1, Operand(debug_hook));
911 __ lb(t1, MemOperand(t1));
912 __ Branch(&prepare_step_in_if_stepping, ne, t1, Operand(zero_reg));
913
914 // Flood function if we need to continue stepping in the suspended generator.
915 ExternalReference debug_suspended_generator =
916 ExternalReference::debug_suspended_generator_address(masm->isolate());
917 __ li(t1, Operand(debug_suspended_generator));
918 __ lw(t1, MemOperand(t1));
919 __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(t1));
920 __ bind(&stepping_prepared);
921
922 // Push receiver.
923 __ lw(t1, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset));
924 __ Push(t1);
925
926 // ----------- S t a t e -------------
927 // -- a1 : the JSGeneratorObject to resume
928 // -- a2 : the resume mode (tagged)
929 // -- t0 : generator function
930 // -- cp : generator context
931 // -- ra : return address
932 // -- sp[0] : generator receiver
933 // -----------------------------------
934
935 // Push holes for arguments to generator function. Since the parser forced
936 // context allocation for any variables in generators, the actual argument
937 // values have already been copied into the context and these dummy values
938 // will never be used.
939 __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
940 __ lw(a3,
941 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
942 {
943 Label done_loop, loop;
944 __ bind(&loop);
945 __ Subu(a3, a3, Operand(Smi::FromInt(1)));
946 __ Branch(&done_loop, lt, a3, Operand(zero_reg));
947 __ PushRoot(Heap::kTheHoleValueRootIndex);
948 __ Branch(&loop);
949 __ bind(&done_loop);
950 }
951
952 // Underlying function needs to have bytecode available.
953 if (FLAG_debug_code) {
954 __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
955 __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset));
956 __ GetObjectType(a3, a3, a3);
957 __ Assert(eq, kMissingBytecodeArray, a3, Operand(BYTECODE_ARRAY_TYPE));
958 }
959
960 // Resume (Ignition/TurboFan) generator object.
961 {
962 __ lw(a0, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
963 __ lw(a0,
964 FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset));
965 __ SmiUntag(a0);
966 // We abuse new.target both to indicate that this is a resume call and to
967 // pass in the generator object. In ordinary calls, new.target is always
968 // undefined because generator functions are non-constructable.
969 __ Move(a3, a1);
970 __ Move(a1, t0);
971 __ lw(a2, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
972 __ Jump(a2);
973 }
974
975 __ bind(&prepare_step_in_if_stepping);
976 {
977 FrameScope scope(masm, StackFrame::INTERNAL);
978 __ Push(a1, a2, t0);
979 __ CallRuntime(Runtime::kDebugOnFunctionCall);
980 __ Pop(a1, a2);
981 }
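  // Reload the suspended function; t0 is not preserved across the runtime
  // call. The load executes in the branch delay slot.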
982 __ Branch(USE_DELAY_SLOT, &stepping_prepared);
983 __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
984
985 __ bind(&prepare_step_in_suspended_generator);
986 {
987 FrameScope scope(masm, StackFrame::INTERNAL);
988 __ Push(a1, a2);
989 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
990 __ Pop(a1, a2);
991 }
992 __ Branch(USE_DELAY_SLOT, &stepping_prepared);
993 __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset));
994 }
995
996 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
997 Register args_count = scratch;
998
999 // Get the arguments + receiver count.
1000 __ lw(args_count,
1001 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1002 __ lw(args_count,
1003 FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));
1004
1005 // Leave the frame (also dropping the register file).
1006 __ LeaveFrame(StackFrame::JAVA_SCRIPT);
1007
1008 // Drop receiver + arguments.
1009 __ Addu(sp, sp, args_count);
1010 }
1011
1012 // Generate code for entering a JS function with the interpreter.
1013 // On entry to the function the receiver and arguments have been pushed on the
1014 // stack left to right. The actual argument count matches the formal parameter
1015 // count expected by the function.
1016 //
1017 // The live registers are:
1018 // o a1: the JS function object being called.
1019 // o a3: the new target
1020 // o cp: our context
1021 // o fp: the caller's frame pointer
1022 // o sp: stack pointer
1023 // o ra: return address
1024 //
1025 // The function builds an interpreter frame. See InterpreterFrameConstants in
1026 // frames.h for its layout.
1027 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
1028 ProfileEntryHookStub::MaybeCallEntryHook(masm);
1029
1030 // Open a frame scope to indicate that there is a frame on the stack. The
1031 // MANUAL indicates that the scope shouldn't actually generate code to set up
1032 // the frame (that is done below).
1033 FrameScope frame_scope(masm, StackFrame::MANUAL);
1034 __ PushStandardFrame(a1);
1035
1036 // Get the bytecode array from the function object (or from the DebugInfo if
1037 // it is present) and load it into kInterpreterBytecodeArrayRegister.
1038 __ lw(a0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1039 Label load_debug_bytecode_array, bytecode_array_loaded;
1040 Register debug_info = kInterpreterBytecodeArrayRegister;
1041 DCHECK(!debug_info.is(a0));
1042 __ lw(debug_info, FieldMemOperand(a0, SharedFunctionInfo::kDebugInfoOffset));
1043 __ JumpIfNotSmi(debug_info, &load_debug_bytecode_array);
1044 __ lw(kInterpreterBytecodeArrayRegister,
1045 FieldMemOperand(a0, SharedFunctionInfo::kFunctionDataOffset));
1046 __ bind(&bytecode_array_loaded);
1047
1048 // Check whether we should continue to use the interpreter.
1049 Label switch_to_different_code_kind;
1050 __ lw(a0, FieldMemOperand(a0, SharedFunctionInfo::kCodeOffset));
1051 __ Branch(&switch_to_different_code_kind, ne, a0,
1052 Operand(masm->CodeObject())); // Self-reference to this code.
1053
1054 // Increment invocation count for the function.
1055 __ lw(a0, FieldMemOperand(a1, JSFunction::kFeedbackVectorOffset));
1056 __ lw(a0, FieldMemOperand(a0, Cell::kValueOffset));
1057 __ lw(t0, FieldMemOperand(
1058 a0, FeedbackVector::kInvocationCountIndex * kPointerSize +
1059 FeedbackVector::kHeaderSize));
1060 __ Addu(t0, t0, Operand(Smi::FromInt(1)));
1061 __ sw(t0, FieldMemOperand(
1062 a0, FeedbackVector::kInvocationCountIndex * kPointerSize +
1063 FeedbackVector::kHeaderSize));
1064
1065 // Check function data field is actually a BytecodeArray object.
1066 if (FLAG_debug_code) {
1067 __ SmiTst(kInterpreterBytecodeArrayRegister, t0);
1068 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0,
1069 Operand(zero_reg));
1070 __ GetObjectType(kInterpreterBytecodeArrayRegister, t0, t0);
1071 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0,
1072 Operand(BYTECODE_ARRAY_TYPE));
1073 }
1074
1075 // Reset code age.
1076 DCHECK_EQ(0, BytecodeArray::kNoAgeBytecodeAge);
1077 __ sb(zero_reg, FieldMemOperand(kInterpreterBytecodeArrayRegister,
1078 BytecodeArray::kBytecodeAgeOffset));
1079
1080 // Load initial bytecode offset.
1081 __ li(kInterpreterBytecodeOffsetRegister,
1082 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
1083
1084 // Push new.target, bytecode array and Smi tagged bytecode array offset.
1085 __ SmiTag(t0, kInterpreterBytecodeOffsetRegister);
1086 __ Push(a3, kInterpreterBytecodeArrayRegister, t0);
1087
1088 // Allocate the local and temporary register file on the stack.
1089 {
1090 // Load frame size from the BytecodeArray object.
1091 __ lw(t0, FieldMemOperand(kInterpreterBytecodeArrayRegister,
1092 BytecodeArray::kFrameSizeOffset));
1093
1094 // Do a stack check to ensure we don't go over the limit.
1095 Label ok;
1096 __ Subu(t1, sp, Operand(t0));
1097 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
1098 __ Branch(&ok, hs, t1, Operand(a2));
1099 __ CallRuntime(Runtime::kThrowStackOverflow);
1100 __ bind(&ok);
1101
1102 // If ok, push undefined as the initial value for all register file entries.
1103 Label loop_header;
1104 Label loop_check;
1105 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
1106 __ Branch(&loop_check);
1107 __ bind(&loop_header);
1108 // TODO(rmcilroy): Consider doing more than one push per loop iteration.
1109 __ push(t1);
1110 // Continue loop if not done.
1111 __ bind(&loop_check);
1112 __ Subu(t0, t0, Operand(kPointerSize));
1113 __ Branch(&loop_header, ge, t0, Operand(zero_reg));
1114 }
1115
1116 // Load accumulator and dispatch table into registers.
1117 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
1118 __ li(kInterpreterDispatchTableRegister,
1119 Operand(ExternalReference::interpreter_dispatch_table_address(
1120 masm->isolate())));
1121
1122 // Dispatch to the first bytecode handler for the function.
1123 __ Addu(a0, kInterpreterBytecodeArrayRegister,
1124 kInterpreterBytecodeOffsetRegister);
1125 __ lbu(a0, MemOperand(a0));
1126 __ Lsa(at, kInterpreterDispatchTableRegister, a0, kPointerSizeLog2);
1127 __ lw(at, MemOperand(at));
1128 __ Call(at);
1129 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
1130
1131 // The return value is in v0.
1132 LeaveInterpreterFrame(masm, t0);
1133 __ Jump(ra);
1134
1135 // Load debug copy of the bytecode array.
1136 __ bind(&load_debug_bytecode_array);
1137 __ lw(kInterpreterBytecodeArrayRegister,
1138 FieldMemOperand(debug_info, DebugInfo::kDebugBytecodeArrayIndex));
1139 __ Branch(&bytecode_array_loaded);
1140
1141 // If the shared code is no longer this entry trampoline, then the underlying
1142 // function has been switched to a different kind of code and we heal the
1143 // closure by switching the code entry field over to the new code as well.
1144 __ bind(&switch_to_different_code_kind);
1145 __ LeaveFrame(StackFrame::JAVA_SCRIPT);
1146 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1147 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kCodeOffset));
1148 __ Addu(t0, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
1149 __ sw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
1150 __ RecordWriteCodeEntryField(a1, t0, t1);
1151 __ Jump(t0);
1152 }
1153
1154 static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
1155 Register scratch1, Register scratch2,
1156 Label* stack_overflow) {
1157 // Check the stack for overflow. We are not trying to catch
1158 // interruptions (e.g. debug break and preemption) here, so the "real stack
1159 // limit" is checked.
1160 __ LoadRoot(scratch1, Heap::kRealStackLimitRootIndex);
1161 // Make scratch1 the space we have left. The stack might already be overflowed
1162 // here which will cause scratch1 to become negative.
1163 __ subu(scratch1, sp, scratch1);
1164 // Check if the arguments will overflow the stack.
1165 __ sll(scratch2, num_args, kPointerSizeLog2);
1166 // Signed comparison.
1167 __ Branch(stack_overflow, le, scratch1, Operand(scratch2));
1168 }
1169
1170 static void Generate_InterpreterPushArgs(MacroAssembler* masm,
1171 Register num_args, Register index,
1172 Register scratch, Register scratch2,
1173 Label* stack_overflow) {
1174 Generate_StackOverflowCheck(masm, num_args, scratch, scratch2,
1175 stack_overflow);
1176
1177 // Find the address of the last argument.
1178 __ mov(scratch2, num_args);
1179 __ sll(scratch2, scratch2, kPointerSizeLog2);
1180 __ Subu(scratch2, index, Operand(scratch2));
1181
1182 // Push the arguments.
1183 Label loop_header, loop_check;
1184 __ Branch(&loop_check);
1185 __ bind(&loop_header);
1186 __ lw(scratch, MemOperand(index));
1187 __ Addu(index, index, Operand(-kPointerSize));
1188 __ push(scratch);
1189 __ bind(&loop_check);
1190 __ Branch(&loop_header, gt, index, Operand(scratch2));
1191 }
1192
1193 // static
1194 void Builtins::Generate_InterpreterPushArgsAndCallImpl(
1195 MacroAssembler* masm, TailCallMode tail_call_mode,
1196 InterpreterPushArgsMode mode) {
1197 // ----------- S t a t e -------------
1198 // -- a0 : the number of arguments (not including the receiver)
1199 // -- a2 : the address of the first argument to be pushed. Subsequent
1200 // arguments should be consecutive above this, in the same order as
1201 // they are to be pushed onto the stack.
1202 // -- a1 : the target to call (can be any Object).
1203 // -----------------------------------
1204 Label stack_overflow;
1205
1206 __ Addu(t0, a0, Operand(1)); // Add one for receiver.
1207
1208 // This function modifies a2, t4 and t1.
1209 Generate_InterpreterPushArgs(masm, t0, a2, t4, t1, &stack_overflow);
1210
1211 // Call the target.
1212 if (mode == InterpreterPushArgsMode::kJSFunction) {
1213 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
1214 tail_call_mode),
1215 RelocInfo::CODE_TARGET);
1216 } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1217 __ Jump(masm->isolate()->builtins()->CallWithSpread(),
1218 RelocInfo::CODE_TARGET);
1219 } else {
1220 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
1221 tail_call_mode),
1222 RelocInfo::CODE_TARGET);
1223 }
1224
1225 __ bind(&stack_overflow);
1226 {
1227 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1228 // Unreachable code.
1229 __ break_(0xCC);
1230 }
1231 }
1232
1233 // static
1234 void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
1235 MacroAssembler* masm, InterpreterPushArgsMode mode) {
1236 // ----------- S t a t e -------------
1237 // -- a0 : argument count (not including receiver)
1238 // -- a3 : new target
1239 // -- a1 : constructor to call
1240 // -- a2 : allocation site feedback if available, undefined otherwise.
1241 // -- t4 : address of the first argument
1242 // -----------------------------------
1243 Label stack_overflow;
1244
1245 // Push a slot for the receiver.
1246 __ push(zero_reg);
1247
1248   // This function modifies t4, t1 and t0.
1249 Generate_InterpreterPushArgs(masm, a0, t4, t1, t0, &stack_overflow);
1250
1251 __ AssertUndefinedOrAllocationSite(a2, t0);
1252 if (mode == InterpreterPushArgsMode::kJSFunction) {
1253 __ AssertFunction(a1);
1254
1255 // Tail call to the function-specific construct stub (still in the caller
1256 // context at this point).
1257 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1258 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset));
1259 __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
1260 __ Jump(at);
1261 } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1262 // Call the constructor with a0, a1, and a3 unmodified.
1263 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(),
1264 RelocInfo::CODE_TARGET);
1265 } else {
1266 DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
1267 // Call the constructor with a0, a1, and a3 unmodified.
1268 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
1269 }
1270
1271 __ bind(&stack_overflow);
1272 {
1273 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1274 // Unreachable code.
1275 __ break_(0xCC);
1276 }
1277 }
1278
1279 // static
1280 void Builtins::Generate_InterpreterPushArgsAndConstructArray(
1281 MacroAssembler* masm) {
1282 // ----------- S t a t e -------------
1283 // -- a0 : the number of arguments (not including the receiver)
1284 // -- a1 : the target to call checked to be Array function.
1285 // -- a2 : allocation site feedback.
1286 // -- a3 : the address of the first argument to be pushed. Subsequent
1287 // arguments should be consecutive above this, in the same order as
1288 // they are to be pushed onto the stack.
1289 // -----------------------------------
1290 Label stack_overflow;
1291
1292 __ Addu(t0, a0, Operand(1)); // Add one for receiver.
1293
1294 // This function modifies a3, t4, and t1.
1295 Generate_InterpreterPushArgs(masm, t0, a3, t1, t4, &stack_overflow);
1296
1297 // ArrayConstructor stub expects constructor in a3. Set it here.
1298 __ mov(a3, a1);
1299
1300 ArrayConstructorStub stub(masm->isolate());
1301 __ TailCallStub(&stub);
1302
1303 __ bind(&stack_overflow);
1304 {
1305 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1306 // Unreachable code.
1307 __ break_(0xCC);
1308 }
1309 }
1310
1311 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1312 // Set the return address to the correct point in the interpreter entry
1313 // trampoline.
1314 Smi* interpreter_entry_return_pc_offset(
1315 masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1316 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
1317 __ li(t0, Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline()));
1318 __ Addu(ra, t0, Operand(interpreter_entry_return_pc_offset->value() +
1319 Code::kHeaderSize - kHeapObjectTag));
1320
1321 // Initialize the dispatch table register.
1322 __ li(kInterpreterDispatchTableRegister,
1323 Operand(ExternalReference::interpreter_dispatch_table_address(
1324 masm->isolate())));
1325
1326 // Get the bytecode array pointer from the frame.
1327 __ lw(kInterpreterBytecodeArrayRegister,
1328 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1329
1330 if (FLAG_debug_code) {
1331 // Check function data field is actually a BytecodeArray object.
1332 __ SmiTst(kInterpreterBytecodeArrayRegister, at);
1333 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at,
1334 Operand(zero_reg));
1335 __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1);
1336 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1,
1337 Operand(BYTECODE_ARRAY_TYPE));
1338 }
1339
1340 // Get the target bytecode offset from the frame.
1341 __ lw(kInterpreterBytecodeOffsetRegister,
1342 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1343 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1344
1345 // Dispatch to the target bytecode.
1346 __ Addu(a1, kInterpreterBytecodeArrayRegister,
1347 kInterpreterBytecodeOffsetRegister);
1348 __ lbu(a1, MemOperand(a1));
1349 __ Lsa(a1, kInterpreterDispatchTableRegister, a1, kPointerSizeLog2);
1350 __ lw(a1, MemOperand(a1));
1351 __ Jump(a1);
1352 }
1353
1354 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
1355 // Advance the current bytecode offset stored within the given interpreter
1356 // stack frame. This simulates what all bytecode handlers do upon completion
1357 // of the underlying operation.
1358 __ lw(a1, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1359 __ lw(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1360 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
1361 {
1362 FrameScope scope(masm, StackFrame::INTERNAL);
1363 __ Push(kInterpreterAccumulatorRegister, a1, a2);
1364 __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset);
1365 __ mov(a2, v0); // Result is the new bytecode offset.
1366 __ Pop(kInterpreterAccumulatorRegister);
1367 }
1368 __ sw(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1369
1370 Generate_InterpreterEnterBytecode(masm);
1371 }
1372
1373 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1374 Generate_InterpreterEnterBytecode(masm);
1375 }
1376
1377 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
1378 // ----------- S t a t e -------------
1379 // -- a0 : argument count (preserved for callee)
1380 // -- a3 : new target (preserved for callee)
1381 // -- a1 : target function (preserved for callee)
1382 // -----------------------------------
1383 // First lookup code, maybe we don't need to compile!
1384 Label gotta_call_runtime, gotta_call_runtime_no_stack;
1385 Label try_shared;
1386 Label loop_top, loop_bottom;
1387
1388 Register argument_count = a0;
1389 Register closure = a1;
1390 Register new_target = a3;
1391 Register map = a0;
1392 Register index = a2;
1393
1394 // Do we have a valid feedback vector?
1395 __ lw(index, FieldMemOperand(closure, JSFunction::kFeedbackVectorOffset));
1396 __ lw(index, FieldMemOperand(index, Cell::kValueOffset));
1397 __ JumpIfRoot(index, Heap::kUndefinedValueRootIndex,
1398 &gotta_call_runtime_no_stack);
1399
1400 __ push(argument_count);
1401 __ push(new_target);
1402 __ push(closure);
1403
1404 __ lw(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1405 __ lw(map, FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
1406 __ lw(index, FieldMemOperand(map, FixedArray::kLengthOffset));
1407 __ Branch(&try_shared, lt, index, Operand(Smi::FromInt(2)));
1408
1409 // a3 : native context
1410 // a2 : length / index
1411 // a0 : optimized code map
1412 // stack[0] : new target
1413 // stack[4] : closure
1414 Register native_context = a3;
1415 __ lw(native_context, NativeContextMemOperand());
1416
1417 __ bind(&loop_top);
1418 Register temp = a1;
1419 Register array_pointer = t1;
1420
1421 // Does the native context match?
1422 __ sll(at, index, kPointerSizeLog2 - kSmiTagSize);
1423 __ Addu(array_pointer, map, Operand(at));
1424 __ lw(temp, FieldMemOperand(array_pointer,
1425 SharedFunctionInfo::kOffsetToPreviousContext));
1426 __ lw(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
1427 __ Branch(&loop_bottom, ne, temp, Operand(native_context));
1428
1429 // Code available?
1430 Register entry = t0;
1431 __ lw(entry,
1432 FieldMemOperand(array_pointer,
1433 SharedFunctionInfo::kOffsetToPreviousCachedCode));
1434 __ lw(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
1435 __ JumpIfSmi(entry, &try_shared);
1436
1437 // Found code. Get it into the closure and return.
1438 __ pop(closure);
1439 // Store code entry in the closure.
1440 __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
1441 __ sw(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
1442 __ RecordWriteCodeEntryField(closure, entry, t1);
1443
1444 // Link the closure into the optimized function list.
1445 // t0 : code entry
1446 // a3 : native context
1447 // a1 : closure
1448 __ lw(t1,
1449 ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
1450 __ sw(t1, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
1451 __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, t1, a0,
1452 kRAHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
1453 OMIT_SMI_CHECK);
1454 const int function_list_offset =
1455 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
1456 __ sw(closure,
1457 ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
1458 // Save closure before the write barrier.
1459 __ mov(t1, closure);
1460 __ RecordWriteContextSlot(native_context, function_list_offset, closure, a0,
1461 kRAHasNotBeenSaved, kDontSaveFPRegs);
1462 __ mov(closure, t1);
1463 __ pop(new_target);
1464 __ pop(argument_count);
1465 __ Jump(entry);
1466
1467 __ bind(&loop_bottom);
1468 __ Subu(index, index,
1469 Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
1470 __ Branch(&loop_top, gt, index, Operand(Smi::FromInt(1)));
1471
1472 // We found no code.
1473 __ bind(&try_shared);
1474 __ pop(closure);
1475 __ pop(new_target);
1476 __ pop(argument_count);
1477 __ lw(entry, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1478 // Is the shared function marked for tier up?
1479 __ lbu(t1, FieldMemOperand(entry,
1480 SharedFunctionInfo::kMarkedForTierUpByteOffset));
1481 __ And(t1, t1,
1482 Operand(1 << SharedFunctionInfo::kMarkedForTierUpBitWithinByte));
1483 __ Branch(&gotta_call_runtime_no_stack, ne, t1, Operand(zero_reg));
1484
1485 // If SFI points to anything other than CompileLazy, install that.
1486 __ lw(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
1487 __ Move(t1, masm->CodeObject());
1488 __ Branch(&gotta_call_runtime_no_stack, eq, entry, Operand(t1));
1489
1490 // Install the SFI's code entry.
1491 __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
1492 __ sw(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
1493 __ RecordWriteCodeEntryField(closure, entry, t1);
1494 __ Jump(entry);
1495
1496 __ bind(&gotta_call_runtime);
1497 __ pop(closure);
1498 __ pop(new_target);
1499 __ pop(argument_count);
1500 __ bind(&gotta_call_runtime_no_stack);
1501 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1502 }
1503
1504 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
1505 GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
1506 }
1507
1508 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
1509 GenerateTailCallToReturnedCode(masm,
1510 Runtime::kCompileOptimized_NotConcurrent);
1511 }
1512
1513 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
1514 GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
1515 }
1516
1517 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1518 // ----------- S t a t e -------------
1519 // -- a0 : argument count (preserved for callee)
1520 // -- a1 : new target (preserved for callee)
1521 // -- a3 : target function (preserved for callee)
1522 // -----------------------------------
1523 Label failed;
1524 {
1525 FrameScope scope(masm, StackFrame::INTERNAL);
1526 // Preserve argument count for later compare.
1527 __ Move(t4, a0);
1528 // Push a copy of the target function and the new target.
1529 // Push function as parameter to the runtime call.
1530 __ SmiTag(a0);
1531 __ Push(a0, a1, a3, a1);
1532
1533 // Copy arguments from caller (stdlib, foreign, heap).
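 // The unrolled switch below handles argc == 0..3: for each case it copies
 // the caller-provided stdlib/foreign/heap arguments from the caller's frame
 // and pads the remainder with undefined, so the runtime call always sees
 // three argument slots.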
1534 Label args_done;
1535 for (int j = 0; j < 4; ++j) {
1536 Label over;
1537 if (j < 3) {
1538 __ Branch(&over, ne, t4, Operand(j));
1539 }
1540 for (int i = j - 1; i >= 0; --i) {
1541 __ lw(t4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
1542 i * kPointerSize));
1543 __ push(t4);
1544 }
1545 for (int i = 0; i < 3 - j; ++i) {
1546 __ PushRoot(Heap::kUndefinedValueRootIndex);
1547 }
1548 if (j < 3) {
1549 __ jmp(&args_done);
1550 __ bind(&over);
1551 }
1552 }
1553 __ bind(&args_done);
1554
1555     // Call the runtime; on success, unwind this frame and then the parent frame.
1556 __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
1557 // A smi 0 is returned on failure, an object on success.
1558 __ JumpIfSmi(v0, &failed);
1559
1560 __ Drop(2);
1561 __ pop(t4);
1562 __ SmiUntag(t4);
1563 scope.GenerateLeaveFrame();
1564
1565 __ Addu(t4, t4, Operand(1));
1566 __ Lsa(sp, sp, t4, kPointerSizeLog2);
1567 __ Ret();
1568
1569 __ bind(&failed);
1570 // Restore target function and new target.
1571 __ Pop(a0, a1, a3);
1572 __ SmiUntag(a0);
1573 }
1574 // On failure, tail call back to regular js.
1575 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1576 }
1577
1578 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
1579 // For now, we are relying on the fact that make_code_young doesn't do any
1580 // garbage collection which allows us to save/restore the registers without
1581 // worrying about which of them contain pointers. We also don't build an
1582 // internal frame to make the code faster, since we shouldn't have to do stack
1583 // crawls in MakeCodeYoung. This seems a bit fragile.
1584
1585 // Set a0 to point to the head of the PlatformCodeAge sequence.
1586 __ Subu(a0, a0, Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
1587
1588 // The following registers must be saved and restored when calling through to
1589 // the runtime:
1590 // a0 - contains return address (beginning of patch sequence)
1591 // a1 - isolate
1592 // a3 - new target
1593 RegList saved_regs =
1594 (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
1595 FrameScope scope(masm, StackFrame::MANUAL);
1596 __ MultiPush(saved_regs);
1597 __ PrepareCallCFunction(2, 0, a2);
1598 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
1599 __ CallCFunction(
1600 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
1601 __ MultiPop(saved_regs);
1602 __ Jump(a0);
1603 }
1604
1605 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
1606 void Builtins::Generate_Make##C##CodeYoungAgain(MacroAssembler* masm) { \
1607 GenerateMakeCodeYoungAgainCommon(masm); \
1608 }
1609 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
1610 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1611
1612 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
1613 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
1614 // that make_code_young doesn't do any garbage collection which allows us to
1615 // save/restore the registers without worrying about which of them contain
1616 // pointers.
1617
1618 // Set a0 to point to the head of the PlatformCodeAge sequence.
1619 __ Subu(a0, a0, Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
1620
1621 // The following registers must be saved and restored when calling through to
1622 // the runtime:
1623 // a0 - contains return address (beginning of patch sequence)
1624 // a1 - isolate
1625 // a3 - new target
1626 RegList saved_regs =
1627 (a0.bit() | a1.bit() | a3.bit() | ra.bit() | fp.bit()) & ~sp.bit();
1628 FrameScope scope(masm, StackFrame::MANUAL);
1629 __ MultiPush(saved_regs);
1630 __ PrepareCallCFunction(2, 0, a2);
1631 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
1632 __ CallCFunction(
1633 ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
1634 2);
1635 __ MultiPop(saved_regs);
1636
1637 // Perform prologue operations usually performed by the young code stub.
1638 __ PushStandardFrame(a1);
1639
1640 // Jump to point after the code-age stub.
1641 __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength));
1642 __ Jump(a0);
1643 }
1644
1645 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
1646 GenerateMakeCodeYoungAgainCommon(masm);
1647 }
1648
1649 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
1650 Generate_MarkCodeAsExecutedOnce(masm);
1651 }
1652
1653 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
1654 SaveFPRegsMode save_doubles) {
1655 {
1656 FrameScope scope(masm, StackFrame::INTERNAL);
1657
1658 // Preserve registers across notification, this is important for compiled
1659 // stubs that tail call the runtime on deopts passing their parameters in
1660 // registers.
1661 __ MultiPush(kJSCallerSaved | kCalleeSaved);
1662 // Pass the function and deoptimization type to the runtime system.
1663 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
1664 __ MultiPop(kJSCallerSaved | kCalleeSaved);
1665 }
1666
1667 __ Addu(sp, sp, Operand(kPointerSize)); // Ignore state
1668 __ Jump(ra); // Jump to miss handler
1669 }
1670
1671 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
1672 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
1673 }
1674
1675 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
1676 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
1677 }
1678
1679 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1680 Deoptimizer::BailoutType type) {
1681 {
1682 FrameScope scope(masm, StackFrame::INTERNAL);
1683 // Pass the function and deoptimization type to the runtime system.
1684 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
1685 __ push(a0);
1686 __ CallRuntime(Runtime::kNotifyDeoptimized);
1687 }
1688
1689 // Get the full codegen state from the stack and untag it -> t2.
1690 __ lw(t2, MemOperand(sp, 0 * kPointerSize));
1691 __ SmiUntag(t2);
1692 // Switch on the state.
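 // Only two bailout states are expected here:
 //   NO_REGISTERS - only the state was pushed; remove that single slot.
 //   TOS_REGISTER - the accumulator (v0) was pushed as well; restore it and
 //                  remove both slots. Anything else hits the unknown_state stop.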
1693 Label with_tos_register, unknown_state;
1694 __ Branch(&with_tos_register, ne, t2,
1695 Operand(static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS)));
1696 __ Ret(USE_DELAY_SLOT);
1697   // Safe to fill the delay slot: Addu will emit a single instruction.
1698 __ Addu(sp, sp, Operand(1 * kPointerSize)); // Remove state.
1699
1700 __ bind(&with_tos_register);
1701 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), v0.code());
1702 __ lw(v0, MemOperand(sp, 1 * kPointerSize));
1703 __ Branch(&unknown_state, ne, t2,
1704 Operand(static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER)));
1705
1706 __ Ret(USE_DELAY_SLOT);
1707   // Safe to fill the delay slot: Addu will emit a single instruction.
1708 __ Addu(sp, sp, Operand(2 * kPointerSize)); // Remove state.
1709
1710 __ bind(&unknown_state);
1711 __ stop("no cases left");
1712 }
1713
1714 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1715 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1716 }
1717
1718 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1719 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1720 }
1721
1722 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1723 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1724 }
1725
1726 // Clobbers {t2, t3, t4, t5}.
1727 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
1728 Register function_template_info,
1729 Label* receiver_check_failed) {
1730 Register signature = t2;
1731 Register map = t3;
1732 Register constructor = t4;
1733 Register scratch = t5;
1734
1735 // If there is no signature, return the holder.
1736 __ lw(signature, FieldMemOperand(function_template_info,
1737 FunctionTemplateInfo::kSignatureOffset));
1738 Label receiver_check_passed;
1739 __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
1740 &receiver_check_passed);
1741
1742 // Walk the prototype chain.
1743 __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1744 Label prototype_loop_start;
1745 __ bind(&prototype_loop_start);
1746
1747 // Get the constructor, if any.
1748 __ GetMapConstructor(constructor, map, scratch, scratch);
1749 Label next_prototype;
1750 __ Branch(&next_prototype, ne, scratch, Operand(JS_FUNCTION_TYPE));
1751 Register type = constructor;
1752 __ lw(type,
1753 FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
1754 __ lw(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));
1755
1756 // Loop through the chain of inheriting function templates.
1757 Label function_template_loop;
1758 __ bind(&function_template_loop);
1759
1760 // If the signatures match, we have a compatible receiver.
1761 __ Branch(&receiver_check_passed, eq, signature, Operand(type),
1762 USE_DELAY_SLOT);
1763
1764 // If the current type is not a FunctionTemplateInfo, load the next prototype
1765 // in the chain.
1766 __ JumpIfSmi(type, &next_prototype);
1767 __ GetObjectType(type, scratch, scratch);
1768 __ Branch(&next_prototype, ne, scratch, Operand(FUNCTION_TEMPLATE_INFO_TYPE));
1769
1770 // Otherwise load the parent function template and iterate.
1771 __ lw(type,
1772 FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
1773 __ Branch(&function_template_loop);
1774
1775 // Load the next prototype and iterate.
1776 __ bind(&next_prototype);
1777 __ lw(scratch, FieldMemOperand(map, Map::kBitField3Offset));
1778 __ DecodeField<Map::HasHiddenPrototype>(scratch);
1779 __ Branch(receiver_check_failed, eq, scratch, Operand(zero_reg));
1780 __ lw(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
1781 __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1782
1783 __ Branch(&prototype_loop_start);
1784
1785 __ bind(&receiver_check_passed);
1786 }
1787
1788 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
1789 // ----------- S t a t e -------------
1790 // -- a0 : number of arguments excluding receiver
1791 // -- a1 : callee
1792 // -- ra : return address
1793 // -- sp[0] : last argument
1794 // -- ...
1795 // -- sp[4 * (argc - 1)] : first argument
1796 // -- sp[4 * argc] : receiver
1797 // -----------------------------------
1798
1799 // Load the FunctionTemplateInfo.
1800 __ lw(t1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1801 __ lw(t1, FieldMemOperand(t1, SharedFunctionInfo::kFunctionDataOffset));
1802
1803 // Do the compatible receiver check.
1804 Label receiver_check_failed;
1805 __ Lsa(t8, sp, a0, kPointerSizeLog2);
1806 __ lw(t0, MemOperand(t8));
1807 CompatibleReceiverCheck(masm, t0, t1, &receiver_check_failed);
1808
1809 // Get the callback offset from the FunctionTemplateInfo, and jump to the
1810 // beginning of the code.
1811 __ lw(t2, FieldMemOperand(t1, FunctionTemplateInfo::kCallCodeOffset));
1812 __ lw(t2, FieldMemOperand(t2, CallHandlerInfo::kFastHandlerOffset));
1813 __ Addu(t2, t2, Operand(Code::kHeaderSize - kHeapObjectTag));
1814 __ Jump(t2);
1815
1816 // Compatible receiver check failed: throw an Illegal Invocation exception.
1817 __ bind(&receiver_check_failed);
1818   // Drop the arguments (including the receiver).
1819 __ Addu(t8, t8, Operand(kPointerSize));
1820 __ addu(sp, t8, zero_reg);
1821 __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
1822 }
1823
1824 static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
1825 bool has_handler_frame) {
1826 // Lookup the function in the JavaScript frame.
1827 if (has_handler_frame) {
1828 __ lw(a0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1829 __ lw(a0, MemOperand(a0, JavaScriptFrameConstants::kFunctionOffset));
1830 } else {
1831 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1832 }
1833
1834 {
1835 FrameScope scope(masm, StackFrame::INTERNAL);
1836 // Pass function as argument.
1837 __ push(a0);
1838 __ CallRuntime(Runtime::kCompileForOnStackReplacement);
1839 }
1840
1841 // If the code object is null, just return to the caller.
1842 __ Ret(eq, v0, Operand(Smi::kZero));
1843
1844   // Drop any potential handler frame that may be sitting on top of the actual
1845   // JavaScript frame. This is the case when OSR is triggered from bytecode.
1846 if (has_handler_frame) {
1847 __ LeaveFrame(StackFrame::STUB);
1848 }
1849
1850 // Load deoptimization data from the code object.
1851 // <deopt_data> = <code>[#deoptimization_data_offset]
1852 __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));
1853
1854 // Load the OSR entrypoint offset from the deoptimization data.
1855 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1856 __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
1857 DeoptimizationInputData::kOsrPcOffsetIndex) -
1858 kHeapObjectTag));
1859 __ SmiUntag(a1);
1860
1861 // Compute the target address = code_obj + header_size + osr_offset
1862 // <entry_addr> = <code_obj> + #header_size + <osr_offset>
1863 __ addu(v0, v0, a1);
1864 __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);
1865
1866 // And "return" to the OSR entry point of the function.
1867 __ Ret();
1868 }
1869
1870 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1871 Generate_OnStackReplacementHelper(masm, false);
1872 }
1873
1874 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
1875 Generate_OnStackReplacementHelper(masm, true);
1876 }
1877
1878 // static
1879 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1880 // ----------- S t a t e -------------
1881 // -- a0 : argc
1882 // -- sp[0] : argArray
1883 // -- sp[4] : thisArg
1884 // -- sp[8] : receiver
1885 // -----------------------------------
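 // This builtin implements calls of the form func.apply(thisArg, argArray),
 // where func is the receiver on the stack; a missing thisArg or argArray is
 // treated as undefined.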
1886
1887 // 1. Load receiver into a1, argArray into a0 (if present), remove all
1888 // arguments from the stack (including the receiver), and push thisArg (if
1889 // present) instead.
1890 {
1891 Label no_arg;
1892 Register scratch = t0;
1893 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
1894 __ mov(a3, a2);
1895     // Lsa() cannot be used here because the scratch value is needed later.
1896 __ sll(scratch, a0, kPointerSizeLog2);
1897 __ Addu(a0, sp, Operand(scratch));
1898 __ lw(a1, MemOperand(a0)); // receiver
1899 __ Subu(a0, a0, Operand(kPointerSize));
1900 __ Branch(&no_arg, lt, a0, Operand(sp));
1901 __ lw(a2, MemOperand(a0)); // thisArg
1902 __ Subu(a0, a0, Operand(kPointerSize));
1903 __ Branch(&no_arg, lt, a0, Operand(sp));
1904 __ lw(a3, MemOperand(a0)); // argArray
1905 __ bind(&no_arg);
1906 __ Addu(sp, sp, Operand(scratch));
1907 __ sw(a2, MemOperand(sp));
1908 __ mov(a0, a3);
1909 }
1910
1911 // ----------- S t a t e -------------
1912 // -- a0 : argArray
1913 // -- a1 : receiver
1914 // -- sp[0] : thisArg
1915 // -----------------------------------
1916
1917 // 2. Make sure the receiver is actually callable.
1918 Label receiver_not_callable;
1919 __ JumpIfSmi(a1, &receiver_not_callable);
1920 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
1921 __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
1922 __ And(t0, t0, Operand(1 << Map::kIsCallable));
1923 __ Branch(&receiver_not_callable, eq, t0, Operand(zero_reg));
1924
1925 // 3. Tail call with no arguments if argArray is null or undefined.
1926 Label no_arguments;
1927 __ JumpIfRoot(a0, Heap::kNullValueRootIndex, &no_arguments);
1928 __ JumpIfRoot(a0, Heap::kUndefinedValueRootIndex, &no_arguments);
1929
1930 // 4a. Apply the receiver to the given argArray (passing undefined for
1931 // new.target).
1932 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
1933 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1934
1935 // 4b. The argArray is either null or undefined, so we tail call without any
1936 // arguments to the receiver.
1937 __ bind(&no_arguments);
1938 {
1939 __ mov(a0, zero_reg);
1940 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1941 }
1942
1943 // 4c. The receiver is not callable, throw an appropriate TypeError.
1944 __ bind(&receiver_not_callable);
1945 {
1946 __ sw(a1, MemOperand(sp));
1947 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1948 }
1949 }
1950
1951 // static
1952 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1953 // 1. Make sure we have at least one argument.
1954 // a0: actual number of arguments
1955 {
1956 Label done;
1957 __ Branch(&done, ne, a0, Operand(zero_reg));
1958 __ PushRoot(Heap::kUndefinedValueRootIndex);
1959 __ Addu(a0, a0, Operand(1));
1960 __ bind(&done);
1961 }
1962
1963 // 2. Get the function to call (passed as receiver) from the stack.
1964 // a0: actual number of arguments
1965 __ Lsa(at, sp, a0, kPointerSizeLog2);
1966 __ lw(a1, MemOperand(at));
1967
1968 // 3. Shift arguments and return address one slot down on the stack
1969 // (overwriting the original receiver). Adjust argument count to make
1970 // the original first argument the new receiver.
1971 // a0: actual number of arguments
1972 // a1: function
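 // For example, for f.call(thisArg, x, y) the stack holds [y, x, thisArg, f];
 // shifting every slot down by one discards f and leaves thisArg as the new
 // receiver with arguments (x, y).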
1973 {
1974 Label loop;
1975 // Calculate the copy start address (destination). Copy end address is sp.
1976 __ Lsa(a2, sp, a0, kPointerSizeLog2);
1977
1978 __ bind(&loop);
1979 __ lw(at, MemOperand(a2, -kPointerSize));
1980 __ sw(at, MemOperand(a2));
1981 __ Subu(a2, a2, Operand(kPointerSize));
1982 __ Branch(&loop, ne, a2, Operand(sp));
1983 // Adjust the actual number of arguments and remove the top element
1984 // (which is a copy of the last argument).
1985 __ Subu(a0, a0, Operand(1));
1986 __ Pop();
1987 }
1988
1989 // 4. Call the callable.
1990 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1991 }
1992
1993 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1994 // ----------- S t a t e -------------
1995 // -- a0 : argc
1996 // -- sp[0] : argumentsList
1997 // -- sp[4] : thisArgument
1998 // -- sp[8] : target
1999 // -- sp[12] : receiver
2000 // -----------------------------------
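 // This builtin implements Reflect.apply(target, thisArgument, argumentsList);
 // any missing argument defaults to undefined before dispatching to Apply.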
2001
2002 // 1. Load target into a1 (if present), argumentsList into a0 (if present),
2003 // remove all arguments from the stack (including the receiver), and push
2004 // thisArgument (if present) instead.
2005 {
2006 Label no_arg;
2007 Register scratch = t0;
2008 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2009 __ mov(a2, a1);
2010 __ mov(a3, a1);
2011 __ sll(scratch, a0, kPointerSizeLog2);
2012 __ mov(a0, scratch);
2013 __ Subu(a0, a0, Operand(kPointerSize));
2014 __ Branch(&no_arg, lt, a0, Operand(zero_reg));
2015 __ Addu(a0, sp, Operand(a0));
2016 __ lw(a1, MemOperand(a0)); // target
2017 __ Subu(a0, a0, Operand(kPointerSize));
2018 __ Branch(&no_arg, lt, a0, Operand(sp));
2019 __ lw(a2, MemOperand(a0)); // thisArgument
2020 __ Subu(a0, a0, Operand(kPointerSize));
2021 __ Branch(&no_arg, lt, a0, Operand(sp));
2022 __ lw(a3, MemOperand(a0)); // argumentsList
2023 __ bind(&no_arg);
2024 __ Addu(sp, sp, Operand(scratch));
2025 __ sw(a2, MemOperand(sp));
2026 __ mov(a0, a3);
2027 }
2028
2029 // ----------- S t a t e -------------
2030 // -- a0 : argumentsList
2031 // -- a1 : target
2032 // -- sp[0] : thisArgument
2033 // -----------------------------------
2034
2035 // 2. Make sure the target is actually callable.
2036 Label target_not_callable;
2037 __ JumpIfSmi(a1, &target_not_callable);
2038 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
2039 __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
2040 __ And(t0, t0, Operand(1 << Map::kIsCallable));
2041 __ Branch(&target_not_callable, eq, t0, Operand(zero_reg));
2042
2043 // 3a. Apply the target to the given argumentsList (passing undefined for
2044 // new.target).
2045 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
2046 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
2047
2048 // 3b. The target is not callable, throw an appropriate TypeError.
2049 __ bind(&target_not_callable);
2050 {
2051 __ sw(a1, MemOperand(sp));
2052 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
2053 }
2054 }
2055
2056 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
2057 // ----------- S t a t e -------------
2058 // -- a0 : argc
2059 // -- sp[0] : new.target (optional)
2060 // -- sp[4] : argumentsList
2061 // -- sp[8] : target
2062 // -- sp[12] : receiver
2063 // -----------------------------------
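 // This builtin implements Reflect.construct(target, argumentsList, newTarget);
 // when newTarget is not supplied it defaults to target.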
2064
2065 // 1. Load target into a1 (if present), argumentsList into a0 (if present),
2066 // new.target into a3 (if present, otherwise use target), remove all
2067 // arguments from the stack (including the receiver), and push thisArgument
2068 // (if present) instead.
2069 {
2070 Label no_arg;
2071 Register scratch = t0;
2072 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
2073 __ mov(a2, a1);
2074     // Lsa() cannot be used here because the scratch value is needed later.
2075 __ sll(scratch, a0, kPointerSizeLog2);
2076 __ Addu(a0, sp, Operand(scratch));
2077 __ sw(a2, MemOperand(a0)); // receiver
2078 __ Subu(a0, a0, Operand(kPointerSize));
2079 __ Branch(&no_arg, lt, a0, Operand(sp));
2080 __ lw(a1, MemOperand(a0)); // target
2081 __ mov(a3, a1); // new.target defaults to target
2082 __ Subu(a0, a0, Operand(kPointerSize));
2083 __ Branch(&no_arg, lt, a0, Operand(sp));
2084 __ lw(a2, MemOperand(a0)); // argumentsList
2085 __ Subu(a0, a0, Operand(kPointerSize));
2086 __ Branch(&no_arg, lt, a0, Operand(sp));
2087 __ lw(a3, MemOperand(a0)); // new.target
2088 __ bind(&no_arg);
2089 __ Addu(sp, sp, Operand(scratch));
2090 __ mov(a0, a2);
2091 }
2092
2093 // ----------- S t a t e -------------
2094 // -- a0 : argumentsList
2095 // -- a3 : new.target
2096 // -- a1 : target
2097 // -- sp[0] : receiver (undefined)
2098 // -----------------------------------
2099
2100 // 2. Make sure the target is actually a constructor.
2101 Label target_not_constructor;
2102 __ JumpIfSmi(a1, &target_not_constructor);
2103 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset));
2104 __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
2105 __ And(t0, t0, Operand(1 << Map::kIsConstructor));
2106 __ Branch(&target_not_constructor, eq, t0, Operand(zero_reg));
2107
2108   // 3. Make sure the new.target is actually a constructor.
2109 Label new_target_not_constructor;
2110 __ JumpIfSmi(a3, &new_target_not_constructor);
2111 __ lw(t0, FieldMemOperand(a3, HeapObject::kMapOffset));
2112 __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset));
2113 __ And(t0, t0, Operand(1 << Map::kIsConstructor));
2114 __ Branch(&new_target_not_constructor, eq, t0, Operand(zero_reg));
2115
2116 // 4a. Construct the target with the given new.target and argumentsList.
2117 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
2118
2119 // 4b. The target is not a constructor, throw an appropriate TypeError.
2120 __ bind(&target_not_constructor);
2121 {
2122 __ sw(a1, MemOperand(sp));
2123 __ TailCallRuntime(Runtime::kThrowNotConstructor);
2124 }
2125
2126 // 4c. The new.target is not a constructor, throw an appropriate TypeError.
2127 __ bind(&new_target_not_constructor);
2128 {
2129 __ sw(a3, MemOperand(sp));
2130 __ TailCallRuntime(Runtime::kThrowNotConstructor);
2131 }
2132 }
2133
2134 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
2135 __ sll(a0, a0, kSmiTagSize);
2136 __ li(t0, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
2137 __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
2138 __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
2139 kPointerSize));
2140 }
2141
2142 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
2143 // ----------- S t a t e -------------
2144 // -- v0 : result being passed through
2145 // -----------------------------------
2146   // Get the number of arguments passed (as a smi), tear down the frame and
2147   // then drop the parameters from the stack.
2148 __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
2149 kPointerSize)));
2150 __ mov(sp, fp);
2151 __ MultiPop(fp.bit() | ra.bit());
2152 __ Lsa(sp, sp, a1, kPointerSizeLog2 - kSmiTagSize);
2153 // Adjust for the receiver.
2154 __ Addu(sp, sp, Operand(kPointerSize));
2155 }
2156
2157 // static
2158 void Builtins::Generate_Apply(MacroAssembler* masm) {
2159 // ----------- S t a t e -------------
2160 // -- a0 : argumentsList
2161 // -- a1 : target
2162 // -- a3 : new.target (checked to be constructor or undefined)
2163 // -- sp[0] : thisArgument
2164 // -----------------------------------
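 // This builtin is shared by Function.prototype.apply, Reflect.apply and
 // Reflect.construct: argumentsList is flattened into a FixedArray, its
 // elements are pushed as stack arguments, and the call is then dispatched to
 // either Call or Construct depending on new.target.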
2165
2166 // Create the list of arguments from the array-like argumentsList.
2167 {
2168 Label create_arguments, create_array, create_holey_array, create_runtime,
2169 done_create;
2170 __ JumpIfSmi(a0, &create_runtime);
2171
2172 // Load the map of argumentsList into a2.
2173 __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
2174
2175 // Load native context into t0.
2176 __ lw(t0, NativeContextMemOperand());
2177
2178 // Check if argumentsList is an (unmodified) arguments object.
2179 __ lw(at, ContextMemOperand(t0, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
2180 __ Branch(&create_arguments, eq, a2, Operand(at));
2181 __ lw(at, ContextMemOperand(t0, Context::STRICT_ARGUMENTS_MAP_INDEX));
2182 __ Branch(&create_arguments, eq, a2, Operand(at));
2183
2184 // Check if argumentsList is a fast JSArray.
2185 __ lbu(v0, FieldMemOperand(a2, Map::kInstanceTypeOffset));
2186 __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE));
2187
2188 // Ask the runtime to create the list (actually a FixedArray).
2189 __ bind(&create_runtime);
2190 {
2191 FrameScope scope(masm, StackFrame::INTERNAL);
2192 __ Push(a1, a3, a0);
2193 __ CallRuntime(Runtime::kCreateListFromArrayLike);
2194 __ mov(a0, v0);
2195 __ Pop(a1, a3);
2196 __ lw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
2197 __ SmiUntag(a2);
2198 }
2199 __ Branch(&done_create);
2200
2201 // Try to create the list from an arguments object.
2202 __ bind(&create_arguments);
2203 __ lw(a2, FieldMemOperand(a0, JSArgumentsObject::kLengthOffset));
2204 __ lw(t0, FieldMemOperand(a0, JSObject::kElementsOffset));
2205 __ lw(at, FieldMemOperand(t0, FixedArray::kLengthOffset));
2206 __ Branch(&create_runtime, ne, a2, Operand(at));
2207 __ SmiUntag(a2);
2208 __ mov(a0, t0);
2209 __ Branch(&done_create);
2210
2211     // For holey JSArrays we need to check that the array prototype chain
2212     // protector is intact and that the prototype is actually Array.prototype.
2213 __ bind(&create_holey_array);
2214 __ lw(a2, FieldMemOperand(a2, Map::kPrototypeOffset));
2215 __ lw(at, ContextMemOperand(t0, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
2216 __ Branch(&create_runtime, ne, a2, Operand(at));
2217 __ LoadRoot(at, Heap::kArrayProtectorRootIndex);
2218 __ lw(a2, FieldMemOperand(at, PropertyCell::kValueOffset));
2219 __ Branch(&create_runtime, ne, a2,
2220 Operand(Smi::FromInt(Isolate::kProtectorValid)));
2221 __ lw(a2, FieldMemOperand(a0, JSArray::kLengthOffset));
2222 __ lw(a0, FieldMemOperand(a0, JSArray::kElementsOffset));
2223 __ SmiUntag(a2);
2224 __ Branch(&done_create);
2225
2226 // Try to create the list from a JSArray object.
2227 __ bind(&create_array);
2228 __ lbu(t1, FieldMemOperand(a2, Map::kBitField2Offset));
2229 __ DecodeField<Map::ElementsKindBits>(t1);
2230 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2231 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
2232 STATIC_ASSERT(FAST_ELEMENTS == 2);
2233 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
2234 __ Branch(&create_holey_array, eq, t1, Operand(FAST_HOLEY_SMI_ELEMENTS));
2235 __ Branch(&create_holey_array, eq, t1, Operand(FAST_HOLEY_ELEMENTS));
2236 __ Branch(&create_runtime, hi, t1, Operand(FAST_ELEMENTS));
2237 __ lw(a2, FieldMemOperand(a0, JSArray::kLengthOffset));
2238 __ lw(a0, FieldMemOperand(a0, JSArray::kElementsOffset));
2239 __ SmiUntag(a2);
2240
2241 __ bind(&done_create);
2242 }
2243
2244 // Check for stack overflow.
2245 {
2246 // Check the stack for overflow. We are not trying to catch interruptions
2247 // (i.e. debug break and preemption) here, so check the "real stack limit".
2248 Label done;
2249 __ LoadRoot(t0, Heap::kRealStackLimitRootIndex);
2250     // Make t0 the space we have left. The stack might already be overflowed
2251     // here, which will cause t0 to become negative.
2252 __ Subu(t0, sp, t0);
2253 // Check if the arguments will overflow the stack.
2254 __ sll(at, a2, kPointerSizeLog2);
2255 __ Branch(&done, gt, t0, Operand(at)); // Signed comparison.
2256 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2257 __ bind(&done);
2258 }
2259
2260 // ----------- S t a t e -------------
2261 // -- a1 : target
2262 // -- a0 : args (a FixedArray built from argumentsList)
2263 // -- a2 : len (number of elements to push from args)
2264 // -- a3 : new.target (checked to be constructor or undefined)
2265 // -- sp[0] : thisArgument
2266 // -----------------------------------
2267
2268 // Push arguments onto the stack (thisArgument is already on the stack).
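 // Elements of a holey array may be the_hole; those are pushed as undefined
 // so the callee never observes the hole sentinel.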
2269 {
2270 __ mov(t0, zero_reg);
2271 Label done, push, loop;
2272 __ LoadRoot(t1, Heap::kTheHoleValueRootIndex);
2273 __ bind(&loop);
2274 __ Branch(&done, eq, t0, Operand(a2));
2275 __ Lsa(at, a0, t0, kPointerSizeLog2);
2276 __ lw(at, FieldMemOperand(at, FixedArray::kHeaderSize));
2277 __ Branch(&push, ne, t1, Operand(at));
2278 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
2279 __ bind(&push);
2280 __ Push(at);
2281 __ Addu(t0, t0, Operand(1));
2282 __ Branch(&loop);
2283 __ bind(&done);
2284 __ Move(a0, t0);
2285 }
2286
2287 // Dispatch to Call or Construct depending on whether new.target is undefined.
2288 {
2289 Label construct;
2290 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
2291 __ Branch(&construct, ne, a3, Operand(at));
2292 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2293 __ bind(&construct);
2294 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2295 }
2296 }
2297
2298 // static
2299 void Builtins::Generate_CallForwardVarargs(MacroAssembler* masm,
2300 Handle<Code> code) {
2301 // ----------- S t a t e -------------
2302 // -- a1 : the target to call (can be any Object)
2303 // -- a2 : start index (to support rest parameters)
2304 // -- ra : return address.
2305 // -- sp[0] : thisArgument
2306 // -----------------------------------
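 // This forwards the caller's actual arguments, starting at the given index,
 // to the target without materializing an arguments array (e.g. to support
 // forwarding rest parameters).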
2307
2308 // Check if we have an arguments adaptor frame below the function frame.
2309 Label arguments_adaptor, arguments_done;
2310 __ lw(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2311 __ lw(a0, MemOperand(a3, CommonFrameConstants::kContextOrFrameTypeOffset));
2312 __ Branch(&arguments_adaptor, eq, a0,
2313 Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
2314 {
2315 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
2316 __ lw(a0, FieldMemOperand(a0, JSFunction::kSharedFunctionInfoOffset));
2317 __ lw(a0,
2318 FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset));
2319 __ mov(a3, fp);
2320 }
2321 __ Branch(&arguments_done);
2322 __ bind(&arguments_adaptor);
2323 {
2324 // Just get the length from the ArgumentsAdaptorFrame.
2325 __ lw(a0, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset));
2326 }
2327 __ bind(&arguments_done);
2328
2329 Label stack_empty, stack_done, stack_overflow;
2330 __ SmiUntag(a0);
2331 __ Subu(a0, a0, a2);
2332 __ Branch(&stack_empty, le, a0, Operand(zero_reg));
2333 {
2334 // Check for stack overflow.
2335 Generate_StackOverflowCheck(masm, a0, t0, t1, &stack_overflow);
2336
2337 // Forward the arguments from the caller frame.
2338 {
2339 Label loop;
2340 __ mov(a2, a0);
2341 __ bind(&loop);
2342 {
2343 __ Lsa(at, a3, a2, kPointerSizeLog2);
2344 __ lw(at, MemOperand(at, 1 * kPointerSize));
2345 __ push(at);
2346 __ Subu(a2, a2, Operand(1));
2347 __ Branch(&loop, ne, a2, Operand(zero_reg));
2348 }
2349 }
2350 }
2351 __ Branch(&stack_done);
2352 __ bind(&stack_overflow);
2353 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2354 __ bind(&stack_empty);
2355 {
2356 // We just pass the receiver, which is already on the stack.
2357 __ li(a0, Operand(0));
2358 }
2359 __ bind(&stack_done);
2360
2361 __ Jump(code, RelocInfo::CODE_TARGET);
2362 }
2363
2364 namespace {
2365
2366 // Drops top JavaScript frame and an arguments adaptor frame below it (if
2367 // present) preserving all the arguments prepared for current call.
2368 // Does nothing if debugger is currently active.
2369 // ES6 14.6.3. PrepareForTailCall
2370 //
2371 // Stack structure for the function g() tail calling f():
2372 //
2373 // ------- Caller frame: -------
2374 // | ...
2375 // | g()'s arg M
2376 // | ...
2377 // | g()'s arg 1
2378 // | g()'s receiver arg
2379 // | g()'s caller pc
2380 // ------- g()'s frame: -------
2381 // | g()'s caller fp <- fp
2382 // | g()'s context
2383 // | function pointer: g
2384 // | -------------------------
2385 // | ...
2386 // | ...
2387 // | f()'s arg N
2388 // | ...
2389 // | f()'s arg 1
2390 // | f()'s receiver arg <- sp (f()'s caller pc is not on the stack yet!)
2391 // ----------------------
2392 //
2393 void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
2394 Register scratch1, Register scratch2,
2395 Register scratch3) {
2396 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
2397 Comment cmnt(masm, "[ PrepareForTailCall");
2398
2399 // Prepare for tail call only if ES2015 tail call elimination is enabled.
2400 Label done;
2401 ExternalReference is_tail_call_elimination_enabled =
2402 ExternalReference::is_tail_call_elimination_enabled_address(
2403 masm->isolate());
2404 __ li(at, Operand(is_tail_call_elimination_enabled));
2405 __ lb(scratch1, MemOperand(at));
2406 __ Branch(&done, eq, scratch1, Operand(zero_reg));
2407
2408 // Drop possible interpreter handler/stub frame.
2409 {
2410 Label no_interpreter_frame;
2411 __ lw(scratch3,
2412 MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
2413 __ Branch(&no_interpreter_frame, ne, scratch3,
2414 Operand(StackFrame::TypeToMarker(StackFrame::STUB)));
2415 __ lw(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2416 __ bind(&no_interpreter_frame);
2417 }
2418
2419 // Check if next frame is an arguments adaptor frame.
2420 Register caller_args_count_reg = scratch1;
2421 Label no_arguments_adaptor, formal_parameter_count_loaded;
2422 __ lw(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2423 __ lw(scratch3,
2424 MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
2425 __ Branch(&no_arguments_adaptor, ne, scratch3,
2426 Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
2427
2428 // Drop current frame and load arguments count from arguments adaptor frame.
2429 __ mov(fp, scratch2);
2430 __ lw(caller_args_count_reg,
2431 MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
2432 __ SmiUntag(caller_args_count_reg);
2433 __ Branch(&formal_parameter_count_loaded);
2434
2435 __ bind(&no_arguments_adaptor);
2436 // Load caller's formal parameter count
2437 __ lw(scratch1,
2438 MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
2439 __ lw(scratch1,
2440 FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
2441 __ lw(caller_args_count_reg,
2442 FieldMemOperand(scratch1,
2443 SharedFunctionInfo::kFormalParameterCountOffset));
2444 __ SmiUntag(caller_args_count_reg);
2445
2446 __ bind(&formal_parameter_count_loaded);
2447
2448 ParameterCount callee_args_count(args_reg);
2449 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
2450 scratch3);
2451 __ bind(&done);
2452 }
2453 } // namespace
2454
2455 // static
2456 void Builtins::Generate_CallFunction(MacroAssembler* masm,
2457 ConvertReceiverMode mode,
2458 TailCallMode tail_call_mode) {
2459 // ----------- S t a t e -------------
2460 // -- a0 : the number of arguments (not including the receiver)
2461 // -- a1 : the function to call (checked to be a JSFunction)
2462 // -----------------------------------
2463 __ AssertFunction(a1);
2464
2465 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2466 // Check that the function is not a "classConstructor".
2467 Label class_constructor;
2468 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
2469 __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFunctionKindByteOffset));
2470 __ And(at, a3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte));
2471 __ Branch(&class_constructor, ne, at, Operand(zero_reg));
2472
2473 // Enter the context of the function; ToObject has to run in the function
2474 // context, and we also need to take the global proxy from the function
2475 // context in case of conversion.
2476 STATIC_ASSERT(SharedFunctionInfo::kNativeByteOffset ==
2477 SharedFunctionInfo::kStrictModeByteOffset);
2478 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
2479 // We need to convert the receiver for non-native sloppy mode functions.
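 // For example, a sloppy-mode function called with a primitive receiver gets
 // that receiver wrapped via ToObject (and null/undefined become the global
 // proxy), while strict-mode and native functions see the receiver unchanged.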
2480 Label done_convert;
2481 __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset));
2482 __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) |
2483 (1 << SharedFunctionInfo::kStrictModeBitWithinByte)));
2484 __ Branch(&done_convert, ne, at, Operand(zero_reg));
2485 {
2486 // ----------- S t a t e -------------
2487 // -- a0 : the number of arguments (not including the receiver)
2488 // -- a1 : the function to call (checked to be a JSFunction)
2489 // -- a2 : the shared function info.
2490 // -- cp : the function context.
2491 // -----------------------------------
2492
2493 if (mode == ConvertReceiverMode::kNullOrUndefined) {
2494 // Patch receiver to global proxy.
2495 __ LoadGlobalProxy(a3);
2496 } else {
2497 Label convert_to_object, convert_receiver;
2498 __ Lsa(at, sp, a0, kPointerSizeLog2);
2499 __ lw(a3, MemOperand(at));
2500 __ JumpIfSmi(a3, &convert_to_object);
2501 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2502 __ GetObjectType(a3, t0, t0);
2503 __ Branch(&done_convert, hs, t0, Operand(FIRST_JS_RECEIVER_TYPE));
2504 if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2505 Label convert_global_proxy;
2506 __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex,
2507 &convert_global_proxy);
2508 __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object);
2509 __ bind(&convert_global_proxy);
2510 {
2511 // Patch receiver to global proxy.
2512 __ LoadGlobalProxy(a3);
2513 }
2514 __ Branch(&convert_receiver);
2515 }
2516 __ bind(&convert_to_object);
2517 {
2518 // Convert receiver using ToObject.
2519 // TODO(bmeurer): Inline the allocation here to avoid building the frame
2520 // in the fast case? (fall back to AllocateInNewSpace?)
2521 FrameScope scope(masm, StackFrame::INTERNAL);
2522 __ sll(a0, a0, kSmiTagSize); // Smi tagged.
2523 __ Push(a0, a1);
2524 __ mov(a0, a3);
2525 __ Push(cp);
2526 __ Call(masm->isolate()->builtins()->ToObject(),
2527 RelocInfo::CODE_TARGET);
2528 __ Pop(cp);
2529 __ mov(a3, v0);
2530 __ Pop(a0, a1);
2531 __ sra(a0, a0, kSmiTagSize); // Un-tag.
2532 }
2533 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
2534 __ bind(&convert_receiver);
2535 }
2536 __ Lsa(at, sp, a0, kPointerSizeLog2);
2537 __ sw(a3, MemOperand(at));
2538 }
2539 __ bind(&done_convert);
2540
2541 // ----------- S t a t e -------------
2542 // -- a0 : the number of arguments (not including the receiver)
2543 // -- a1 : the function to call (checked to be a JSFunction)
2544 // -- a2 : the shared function info.
2545 // -- cp : the function context.
2546 // -----------------------------------
2547
2548 if (tail_call_mode == TailCallMode::kAllow) {
2549 PrepareForTailCall(masm, a0, t0, t1, t2);
2550 }
2551
2552 __ lw(a2,
2553 FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
2554 __ sra(a2, a2, kSmiTagSize); // Un-tag.
2555 ParameterCount actual(a0);
2556 ParameterCount expected(a2);
2557 __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION,
2558 CheckDebugStepCallWrapper());
2559
2560 // The function is a "classConstructor", need to raise an exception.
2561 __ bind(&class_constructor);
2562 {
2563 FrameScope frame(masm, StackFrame::INTERNAL);
2564 __ Push(a1);
2565 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2566 }
2567 }
2568
2569 // static
2570 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
2571 TailCallMode tail_call_mode) {
2572 // ----------- S t a t e -------------
2573 // -- a0 : the number of arguments (not including the receiver)
2574 // -- a1 : the function to call (checked to be a JSBoundFunction)
2575 // -----------------------------------
2576 __ AssertBoundFunction(a1);
2577
2578 if (tail_call_mode == TailCallMode::kAllow) {
2579 PrepareForTailCall(masm, a0, t0, t1, t2);
2580 }
2581
2582 // Patch the receiver to [[BoundThis]].
2583 {
2584 __ lw(at, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset));
2585 __ Lsa(t0, sp, a0, kPointerSizeLog2);
2586 __ sw(at, MemOperand(t0));
2587 }
2588
2589 // Load [[BoundArguments]] into a2 and length of that into t0.
2590 __ lw(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
2591 __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
2592 __ SmiUntag(t0);
2593
2594 // ----------- S t a t e -------------
2595 // -- a0 : the number of arguments (not including the receiver)
2596 // -- a1 : the function to call (checked to be a JSBoundFunction)
2597 // -- a2 : the [[BoundArguments]] (implemented as FixedArray)
2598 // -- t0 : the number of [[BoundArguments]]
2599 // -----------------------------------
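 // The existing arguments are shifted down to make room so that the
 // [[BoundArguments]] end up in front of the call-site arguments; e.g. for
 // f.bind(b, x)(y) the target f is ultimately called with receiver b and
 // arguments (x, y).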
2600
2601 // Reserve stack space for the [[BoundArguments]].
2602 {
2603 Label done;
2604 __ sll(t1, t0, kPointerSizeLog2);
2605 __ Subu(sp, sp, Operand(t1));
2606 // Check the stack for overflow. We are not trying to catch interruptions
2607 // (i.e. debug break and preemption) here, so check the "real stack limit".
2608 __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
2609 __ Branch(&done, gt, sp, Operand(at)); // Signed comparison.
2610 // Restore the stack pointer.
2611 __ Addu(sp, sp, Operand(t1));
2612 {
2613 FrameScope scope(masm, StackFrame::MANUAL);
2614 __ EnterFrame(StackFrame::INTERNAL);
2615 __ CallRuntime(Runtime::kThrowStackOverflow);
2616 }
2617 __ bind(&done);
2618 }
2619
2620 // Relocate arguments down the stack.
2621 {
2622 Label loop, done_loop;
2623 __ mov(t1, zero_reg);
2624 __ bind(&loop);
2625 __ Branch(&done_loop, gt, t1, Operand(a0));
2626 __ Lsa(t2, sp, t0, kPointerSizeLog2);
2627 __ lw(at, MemOperand(t2));
2628 __ Lsa(t2, sp, t1, kPointerSizeLog2);
2629 __ sw(at, MemOperand(t2));
2630 __ Addu(t0, t0, Operand(1));
2631 __ Addu(t1, t1, Operand(1));
2632 __ Branch(&loop);
2633 __ bind(&done_loop);
2634 }
2635
2636 // Copy [[BoundArguments]] to the stack (below the arguments).
2637 {
2638 Label loop, done_loop;
2639 __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
2640 __ SmiUntag(t0);
2641 __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2642 __ bind(&loop);
2643 __ Subu(t0, t0, Operand(1));
2644 __ Branch(&done_loop, lt, t0, Operand(zero_reg));
2645 __ Lsa(t1, a2, t0, kPointerSizeLog2);
2646 __ lw(at, MemOperand(t1));
2647 __ Lsa(t1, sp, a0, kPointerSizeLog2);
2648 __ sw(at, MemOperand(t1));
2649 __ Addu(a0, a0, Operand(1));
2650 __ Branch(&loop);
2651 __ bind(&done_loop);
2652 }
2653
2654 // Call the [[BoundTargetFunction]] via the Call builtin.
2655 __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
2656 __ li(at, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
2657 masm->isolate())));
2658 __ lw(at, MemOperand(at));
2659 __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
2660 __ Jump(at);
2661 }
2662
2663 // static
2664 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
2665 TailCallMode tail_call_mode) {
2666 // ----------- S t a t e -------------
2667 // -- a0 : the number of arguments (not including the receiver)
2668 // -- a1 : the target to call (can be any Object).
2669 // -----------------------------------
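 // Dispatch on the target's instance type: JSFunctions go to CallFunction,
 // bound functions to CallBoundFunction, proxies to the runtime's [[Call]]
 // handling, any other callable object through the CALL_AS_FUNCTION_DELEGATE,
 // and everything else throws a TypeError.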
2670
2671 Label non_callable, non_function, non_smi;
2672 __ JumpIfSmi(a1, &non_callable);
2673 __ bind(&non_smi);
2674 __ GetObjectType(a1, t1, t2);
2675 __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
2676 RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));
2677 __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
2678 RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));
2679
2680 // Check if target has a [[Call]] internal method.
2681 __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset));
2682 __ And(t1, t1, Operand(1 << Map::kIsCallable));
2683 __ Branch(&non_callable, eq, t1, Operand(zero_reg));
2684
2685 __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE));
2686
2687 // 0. Prepare for tail call if necessary.
2688 if (tail_call_mode == TailCallMode::kAllow) {
2689 PrepareForTailCall(masm, a0, t0, t1, t2);
2690 }
2691
2692 // 1. Runtime fallback for Proxy [[Call]].
2693 __ Push(a1);
2694 // Increase the arguments size to include the pushed function and the
2695 // existing receiver on the stack.
2696 __ Addu(a0, a0, 2);
2697 // Tail-call to the runtime.
2698 __ JumpToExternalReference(
2699 ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
2700
2701 // 2. Call to something else, which might have a [[Call]] internal method (if
2702 // not we raise an exception).
2703 __ bind(&non_function);
2704 // Overwrite the original receiver with the (original) target.
2705 __ Lsa(at, sp, a0, kPointerSizeLog2);
2706 __ sw(a1, MemOperand(at));
2707 // Let the "call_as_function_delegate" take care of the rest.
2708 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1);
2709 __ Jump(masm->isolate()->builtins()->CallFunction(
2710 ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
2711 RelocInfo::CODE_TARGET);
2712
2713 // 3. Call to something that is not callable.
2714 __ bind(&non_callable);
2715 {
2716 FrameScope scope(masm, StackFrame::INTERNAL);
2717 __ Push(a1);
2718 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2719 }
2720 }
2721
2722 static void CheckSpreadAndPushToStack(MacroAssembler* masm) {
2723 Register argc = a0;
2724 Register constructor = a1;
2725 Register new_target = a3;
2726
2727 Register scratch = t0;
2728 Register scratch2 = t1;
2729
2730 Register spread = a2;
2731 Register spread_map = t3;
2732
2733 Register spread_len = t3;
2734
2735 Register native_context = t4;
2736
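 // Fast path: if the spread is a plain fast JSArray whose prototype is the
 // unmodified initial Array.prototype and the relevant protector cells are
 // intact, its elements can be pushed directly; otherwise fall back to the
 // runtime to build a FixedArray from the spread.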
2737 Label runtime_call, push_args;
2738 __ lw(spread, MemOperand(sp, 0));
2739 __ JumpIfSmi(spread, &runtime_call);
2740 __ lw(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset));
2741 __ lw(native_context, NativeContextMemOperand());
2742
2743 // Check that the spread is an array.
2744 __ lbu(scratch, FieldMemOperand(spread_map, Map::kInstanceTypeOffset));
2745 __ Branch(&runtime_call, ne, scratch, Operand(JS_ARRAY_TYPE));
2746
2747 // Check that we have the original ArrayPrototype.
2748 __ lw(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset));
2749 __ lw(scratch2, ContextMemOperand(native_context,
2750 Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
2751 __ Branch(&runtime_call, ne, scratch, Operand(scratch2));
2752
2753 // Check that the ArrayPrototype hasn't been modified in a way that would
2754 // affect iteration.
2755 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex);
2756 __ lw(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
2757 __ Branch(&runtime_call, ne, scratch,
2758 Operand(Smi::FromInt(Isolate::kProtectorValid)));
2759
2760 // Check that the map of the initial array iterator hasn't changed.
2761 __ lw(scratch,
2762 ContextMemOperand(native_context,
2763 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
2764 __ lw(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
2765 __ lw(scratch2,
2766 ContextMemOperand(native_context,
2767 Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
2768 __ Branch(&runtime_call, ne, scratch, Operand(scratch2));
2769
2770 // For FastPacked kinds, iteration will have the same effect as simply
2771 // accessing each property in order.
2772 Label no_protector_check;
2773 __ lbu(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset));
2774 __ DecodeField<Map::ElementsKindBits>(scratch);
2775 __ Branch(&runtime_call, hi, scratch, Operand(FAST_HOLEY_ELEMENTS));
2776 // For non-FastHoley kinds, we can skip the protector check.
2777 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_SMI_ELEMENTS));
2778 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_ELEMENTS));
2779 // Check the ArrayProtector cell.
2780 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex);
2781 __ lw(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
2782 __ Branch(&runtime_call, ne, scratch,
2783 Operand(Smi::FromInt(Isolate::kProtectorValid)));
2784
2785 __ bind(&no_protector_check);
2786 // Load the FixedArray backing store, but use the length from the array.
2787 __ lw(spread_len, FieldMemOperand(spread, JSArray::kLengthOffset));
2788 __ SmiUntag(spread_len);
2789 __ lw(spread, FieldMemOperand(spread, JSArray::kElementsOffset));
2790 __ Branch(&push_args);
2791
2792 __ bind(&runtime_call);
2793 {
2794 // Call the builtin for the result of the spread.
2795 FrameScope scope(masm, StackFrame::INTERNAL);
2796 __ SmiTag(argc);
2797 __ Push(constructor, new_target, argc, spread);
2798 __ CallRuntime(Runtime::kSpreadIterableFixed);
2799 __ mov(spread, v0);
2800 __ Pop(constructor, new_target, argc);
2801 __ SmiUntag(argc);
2802 }
2803
2804 {
2805 // Calculate the new nargs including the result of the spread.
2806 __ lw(spread_len, FieldMemOperand(spread, FixedArray::kLengthOffset));
2807 __ SmiUntag(spread_len);
2808
2809 __ bind(&push_args);
2810 // argc += spread_len - 1. Subtract 1 for the spread itself.
2811 __ Addu(argc, argc, spread_len);
2812 __ Subu(argc, argc, Operand(1));
2813
2814 // Pop the spread argument off the stack.
2815 __ Pop(scratch);
2816 }
2817
2818 // Check for stack overflow.
2819 {
2820 // Check the stack for overflow. We are not trying to catch interruptions
2821 // (i.e. debug break and preemption) here, so check the "real stack limit".
2822 Label done;
2823 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
2824     // Make scratch the space we have left. The stack might already be
2825     // overflowed here, which will cause scratch to become negative.
2826 __ Subu(scratch, sp, scratch);
2827 // Check if the arguments will overflow the stack.
2828 __ sll(at, spread_len, kPointerSizeLog2);
2829 __ Branch(&done, gt, scratch, Operand(at)); // Signed comparison.
2830 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2831 __ bind(&done);
2832 }
2833
2834 // Put the evaluated spread onto the stack as additional arguments.
2835 {
2836 __ mov(scratch, zero_reg);
2837 Label done, push, loop;
2838 __ bind(&loop);
2839 __ Branch(&done, eq, scratch, Operand(spread_len));
2840 __ Lsa(scratch2, spread, scratch, kPointerSizeLog2);
2841 __ lw(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize));
2842 __ JumpIfNotRoot(scratch2, Heap::kTheHoleValueRootIndex, &push);
2843 __ LoadRoot(scratch2, Heap::kUndefinedValueRootIndex);
2844 __ bind(&push);
2845 __ Push(scratch2);
2846 __ Addu(scratch, scratch, Operand(1));
2847 __ Branch(&loop);
2848 __ bind(&done);
2849 }
2850 }
2851
2852 // static
2853 void Builtins::Generate_CallWithSpread(MacroAssembler* masm) {
2854 // ----------- S t a t e -------------
2855 // -- a0 : the number of arguments (not including the receiver)
2856 // -- a1 : the target to call (can be any Object).
2857 // -----------------------------------
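 // This handles calls where the spread is the last argument, e.g. f(a, ...xs):
 // the spread sits on top of the stack and is expanded in place before
 // dispatching to the regular Call builtin.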
2858
2859 // CheckSpreadAndPushToStack will push a3 to save it.
2860 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
2861 CheckSpreadAndPushToStack(masm);
2862 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
2863 TailCallMode::kDisallow),
2864 RelocInfo::CODE_TARGET);
2865 }
2866
2867 // static
2868 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2869 // ----------- S t a t e -------------
2870 // -- a0 : the number of arguments (not including the receiver)
2871 // -- a1 : the constructor to call (checked to be a JSFunction)
2872 // -- a3 : the new target (checked to be a constructor)
2873 // -----------------------------------
2874 __ AssertFunction(a1);
2875
2876   // The calling convention for function-specific ConstructStubs requires
2877   // a2 to contain either an AllocationSite or undefined.
2878 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
2879
2880 // Tail call to the function-specific construct stub (still in the caller
2881 // context at this point).
2882 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
2883 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset));
2884 __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
2885 __ Jump(at);
2886 }
2887
2888 // static
2889 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2890 // ----------- S t a t e -------------
2891 // -- a0 : the number of arguments (not including the receiver)
2892 // -- a1 : the function to call (checked to be a JSBoundFunction)
2893 // -- a3 : the new target (checked to be a constructor)
2894 // -----------------------------------
2895 __ AssertBoundFunction(a1);
2896
2897 // Load [[BoundArguments]] into a2 and length of that into t0.
2898 __ lw(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset));
2899 __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
2900 __ SmiUntag(t0);
2901
2902 // ----------- S t a t e -------------
2903 // -- a0 : the number of arguments (not including the receiver)
2904 // -- a1 : the function to call (checked to be a JSBoundFunction)
2905 // -- a2 : the [[BoundArguments]] (implemented as FixedArray)
2906 // -- a3 : the new target (checked to be a constructor)
2907 // -- t0 : the number of [[BoundArguments]]
2908 // -----------------------------------
2909
2910 // Reserve stack space for the [[BoundArguments]].
2911 {
2912 Label done;
2913 __ sll(t1, t0, kPointerSizeLog2);
2914 __ Subu(sp, sp, Operand(t1));
2915 // Check the stack for overflow. We are not trying to catch interruptions
2916 // (i.e. debug break and preemption) here, so check the "real stack limit".
2917 __ LoadRoot(at, Heap::kRealStackLimitRootIndex);
2918 __ Branch(&done, gt, sp, Operand(at)); // Signed comparison.
2919 // Restore the stack pointer.
2920 __ Addu(sp, sp, Operand(t1));
2921 {
2922 FrameScope scope(masm, StackFrame::MANUAL);
2923 __ EnterFrame(StackFrame::INTERNAL);
2924 __ CallRuntime(Runtime::kThrowStackOverflow);
2925 }
2926 __ bind(&done);
2927 }
2928
2929 // Relocate arguments down the stack.
2930 {
2931 Label loop, done_loop;
2932 __ mov(t1, zero_reg);
2933 __ bind(&loop);
2934 __ Branch(&done_loop, ge, t1, Operand(a0));
2935 __ Lsa(t2, sp, t0, kPointerSizeLog2);
2936 __ lw(at, MemOperand(t2));
2937 __ Lsa(t2, sp, t1, kPointerSizeLog2);
2938 __ sw(at, MemOperand(t2));
2939 __ Addu(t0, t0, Operand(1));
2940 __ Addu(t1, t1, Operand(1));
2941 __ Branch(&loop);
2942 __ bind(&done_loop);
2943 }
2944
2945 // Copy [[BoundArguments]] to the stack (below the arguments).
2946 {
2947 Label loop, done_loop;
2948 __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset));
2949 __ SmiUntag(t0);
2950 __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2951 __ bind(&loop);
2952 __ Subu(t0, t0, Operand(1));
2953 __ Branch(&done_loop, lt, t0, Operand(zero_reg));
2954 __ Lsa(t1, a2, t0, kPointerSizeLog2);
2955 __ lw(at, MemOperand(t1));
2956 __ Lsa(t1, sp, a0, kPointerSizeLog2);
2957 __ sw(at, MemOperand(t1));
2958 __ Addu(a0, a0, Operand(1));
2959 __ Branch(&loop);
2960 __ bind(&done_loop);
2961 }
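
  // At this point a0 holds the combined argument count and the stack, from sp
  // upwards, contains: the original arguments, the [[BoundArguments]], and the
  // receiver. Since arguments are stored last-to-first, the bound arguments
  // precede the explicit ones in call order.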

  // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
  {
    Label skip_load;
    __ Branch(&skip_load, ne, a1, Operand(a3));
    __ lw(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
    __ bind(&skip_load);
  }

  // Construct the [[BoundTargetFunction]] via the Construct builtin.
  __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset));
  __ li(at, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
  __ lw(at, MemOperand(at));
  __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ Jump(at);
}

// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- a0 : the number of arguments (not including the receiver)
  // -- a1 : the constructor to call (checked to be a JSProxy)
  // -- a3 : the new target (either the same as the constructor or
  //         the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Call into the Runtime for Proxy [[Construct]].
  __ Push(a1, a3);
  // Include the pushed new_target, constructor and the receiver.
  __ Addu(a0, a0, Operand(3));
  // Tail-call to the runtime.
  __ JumpToExternalReference(
      ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}

// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- a0 : the number of arguments (not including the receiver)
  // -- a1 : the constructor to call (can be any Object)
  // -- a3 : the new target (either the same as the constructor or
  //         the JSFunction on which new was invoked initially)
  // -----------------------------------

  // Check if target is a Smi.
  Label non_constructor;
  __ JumpIfSmi(a1, &non_constructor);

  // Dispatch based on instance type.
  __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset));
  __ Jump(masm->isolate()->builtins()->ConstructFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_FUNCTION_TYPE));

  // Check if target has a [[Construct]] internal method.
  __ lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset));
  __ And(t3, t3, Operand(1 << Map::kIsConstructor));
  __ Branch(&non_constructor, eq, t3, Operand(zero_reg));

  // Only dispatch to bound functions after checking whether they are
  // constructors.
  __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
          RelocInfo::CODE_TARGET, eq, t2, Operand(JS_BOUND_FUNCTION_TYPE));

  // Only dispatch to proxies after checking whether they are constructors.
  __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
          eq, t2, Operand(JS_PROXY_TYPE));

  // Called Construct on an exotic Object with a [[Construct]] internal method.
  {
    // Overwrite the original receiver with the (original) target.
    __ Lsa(at, sp, a0, kPointerSizeLog2);
    __ sw(a1, MemOperand(at));
    // Let the "call_as_constructor_delegate" take care of the rest.
    __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1);
    __ Jump(masm->isolate()->builtins()->CallFunction(),
            RelocInfo::CODE_TARGET);
  }

  // Called Construct on an Object that doesn't have a [[Construct]] internal
  // method.
  __ bind(&non_constructor);
  __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
          RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- a0 : the number of arguments (not including the receiver)
  // -- a1 : the constructor to call (can be any Object)
  // -- a3 : the new target (either the same as the constructor or
  //         the JSFunction on which new was invoked initially)
  // -----------------------------------

  CheckSpreadAndPushToStack(masm);
  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- a0 : requested object size (untagged)
  // -- ra : return address
  // -----------------------------------
  __ SmiTag(a0);
  __ Push(a0);
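  // Note: this builtin does not run in a JS context of its own, so a dummy
  // (Smi zero) value is installed in cp before tail-calling into the runtime.
  // The same applies to the allocation and abort builtins below.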
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInNewSpace);
}

// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- a0 : requested object size (untagged)
  // -- ra : return address
  // -----------------------------------
  __ SmiTag(a0);
  __ Move(a1, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
  __ Push(a0, a1);
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}

// static
void Builtins::Generate_Abort(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  // -- a0 : message_id as Smi
  // -- ra : return address
  // -----------------------------------
  __ Push(a0);
  __ Move(cp, Smi::kZero);
  __ TailCallRuntime(Runtime::kAbort);
}

void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
  // State setup as expected by MacroAssembler::InvokePrologue.
  // ----------- S t a t e -------------
  // -- a0: actual arguments count
  // -- a1: function (passed through to callee)
  // -- a2: expected arguments count
  // -- a3: new target (passed through to callee)
  // -----------------------------------

  Label invoke, dont_adapt_arguments, stack_overflow;

  Label enough, too_few;
  __ Branch(&dont_adapt_arguments, eq, a2,
            Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
  // We use Uless as the number of arguments should always be greater than 0.
  __ Branch(&too_few, Uless, a0, Operand(a2));

  {  // Enough parameters: actual >= expected.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ bind(&enough);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, a2, t1, at, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into t1.
    __ Lsa(a0, fp, a0, kPointerSizeLog2 - kSmiTagSize);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address.
    __ sll(t1, a2, kPointerSizeLog2);
    __ subu(t1, a0, t1);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // t1: copy end address
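    //
    // Note: only the receiver and the first a2 (expected) arguments are
    // copied; any surplus actual arguments are simply left behind in the
    // caller's frame.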

    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));
    __ push(t0);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t1));
    __ addiu(a0, a0, -kPointerSize);  // In delay slot.

    __ jmp(&invoke);
  }

  {  // Too few parameters: Actual < expected.
    __ bind(&too_few);
    EnterArgumentsAdaptorFrame(masm);
    Generate_StackOverflowCheck(masm, a2, t1, at, &stack_overflow);

    // Calculate copy start address into a0 and copy end address into t3.
    // a0: actual number of arguments as a smi
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ Lsa(a0, fp, a0, kPointerSizeLog2 - kSmiTagSize);
    // Adjust for return address and receiver.
    __ Addu(a0, a0, Operand(2 * kPointerSize));
    // Compute copy end address. Also adjust for return address.
    __ Addu(t3, fp, kPointerSize);

    // Copy the arguments (including the receiver) to the new stack frame.
    // a0: copy start address
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    // t3: copy end address
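    //
    // The loop below copies the receiver and all a0 actual arguments; the
    // remaining (a2 - a0) expected slots are filled with undefined afterwards.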
    Label copy;
    __ bind(&copy);
    __ lw(t0, MemOperand(a0));  // Adjusted above for return addr and receiver.
    __ Subu(sp, sp, kPointerSize);
    __ Subu(a0, a0, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
    __ sw(t0, MemOperand(sp));  // In the delay slot.

    // Fill the remaining expected arguments with undefined.
    // a1: function
    // a2: expected number of arguments
    // a3: new target (passed through to callee)
    __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
    __ sll(t2, a2, kPointerSizeLog2);
    __ Subu(t1, fp, Operand(t2));
    // Adjust for frame.
    __ Subu(t1, t1, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
                            2 * kPointerSize));
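    // t1 now holds the value sp must reach once all expected argument slots
    // are in place; the fill loop below pushes undefined until sp equals it.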

    Label fill;
    __ bind(&fill);
    __ Subu(sp, sp, kPointerSize);
    __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(t1));
    __ sw(t0, MemOperand(sp));
  }

  // Call the entry point.
  __ bind(&invoke);
  __ mov(a0, a2);
  // a0 : expected number of arguments
  // a1 : function (passed through to callee)
  // a3 : new target (passed through to callee)
  __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Call(t0);

  // Store offset of return address for deoptimizer.
  masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());

  // Exit frame and return.
  LeaveArgumentsAdaptorFrame(masm);
  __ Ret();

  // -------------------------------------------
  // Don't adapt arguments.
  // -------------------------------------------
  __ bind(&dont_adapt_arguments);
  __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
  __ Jump(t0);

  __ bind(&stack_overflow);
  {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ break_(0xCC);
  }
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS