// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_S390

#include "src/codegen.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)
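// Shorthand: ACCESS_MASM expands to "masm->", so "__ Instr(...)" below emits
// code through the MacroAssembler passed to each generator.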

void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
                                ExitFrameType exit_frame_type) {
  // ----------- S t a t e -------------
  //  -- r2                 : number of arguments excluding receiver
  //  -- r3                 : target
  //  -- r5                 : new.target
  //  -- sp[0]              : last argument
  //  -- ...
  //  -- sp[4 * (argc - 1)] : first argument
  //  -- sp[4 * argc]       : receiver
  // -----------------------------------
  __ AssertFunction(r3);

  // Make sure we operate in the context of the called function (for example
  // ConstructStubs implemented in C++ will be run in the context of the caller
  // instead of the callee, due to the way that [[Construct]] is defined for
  // ordinary functions).
  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));

  // JumpToExternalReference expects r2 to contain the number of arguments
  // including the receiver and the extra arguments.
  const int num_extra_args = 3;
  __ AddP(r2, r2, Operand(num_extra_args + 1));

  // Insert extra arguments.
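  // The count is smi-tagged while it sits on the stack so the GC only ever
  // sees properly tagged values; it is untagged again right after.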
  __ SmiTag(r2);
  __ Push(r2, r3, r5);
  __ SmiUntag(r2);

  __ JumpToExternalReference(ExternalReference(address, masm->isolate()),
                             exit_frame_type == BUILTIN_EXIT);
}

// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                              Register result) {
  // Load the InternalArray function from the current native context.
  __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}

// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
  // Load the Array function from the current native context.
  __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}

void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the InternalArray function.
  GenerateLoadInternalArrayFunction(masm, r3);

  if (FLAG_debug_code) {
    // Initial map for the builtin InternalArray functions should be maps.
    __ LoadP(r4, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r4);
    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0);
    __ CompareObjectType(r4, r5, r6, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
  }

  // Run the native code for the InternalArray function called as a normal
  // function.
  // tail call a stub
  InternalArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2     : number of arguments
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------
  Label generic_array_code, one_or_more_arguments, two_or_more_arguments;

  // Get the Array function.
  GenerateLoadArrayFunction(masm, r3);

  if (FLAG_debug_code) {
    // Initial map for the builtin Array functions should be maps.
    __ LoadP(r4, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
    __ TestIfSmi(r4);
    __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0);
    __ CompareObjectType(r4, r5, r6, MAP_TYPE);
    __ Assert(eq, kUnexpectedInitialMapForArrayFunction);
  }

  __ LoadRR(r5, r3);
  // Run the native code for the Array function called as a normal function.
  // tail call a stub
  __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
  // ----------- S t a t e -------------
  //  -- r2                     : number of arguments
  //  -- r3                     : function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  Condition const cond_done = (kind == MathMaxMinKind::kMin) ? lt : gt;
  Heap::RootListIndex const root_index =
      (kind == MathMaxMinKind::kMin) ? Heap::kInfinityValueRootIndex
                                     : Heap::kMinusInfinityValueRootIndex;
  DoubleRegister const reg = (kind == MathMaxMinKind::kMin) ? d2 : d1;
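  // cond_done holds when the accumulator (d1) already wins the comparison
  // against the next argument (d2): "lt" for Math.min, "gt" for Math.max.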

  // Load the accumulator with the default return value (either -Infinity or
  // +Infinity), with the tagged value in r7 and the double value in d1.
  __ LoadRoot(r7, root_index);
  __ LoadDouble(d1, FieldMemOperand(r7, HeapNumber::kValueOffset));

  // Set up state for the loop.
  // r6: loop counter, counting down from argc to 0.
  __ AddP(r6, r2, Operand(1));

  Label done_loop, loop;
  __ LoadRR(r6, r2);
  __ bind(&loop);
  {
    // Check if all parameters done.
    __ SubP(r6, Operand(1));
    __ blt(&done_loop);

    // Load the next parameter tagged value into r4.
    __ ShiftLeftP(r1, r6, Operand(kPointerSizeLog2));
    __ LoadP(r4, MemOperand(sp, r1));

    // Load the double value of the parameter into d2, maybe converting the
    // parameter to a number first using the ToNumber builtin if necessary.
    Label convert, convert_smi, convert_number, done_convert;
    __ bind(&convert);
    __ JumpIfSmi(r4, &convert_smi);
    __ LoadP(r5, FieldMemOperand(r4, HeapObject::kMapOffset));
    __ JumpIfRoot(r5, Heap::kHeapNumberMapRootIndex, &convert_number);
    {
      // Parameter is not a Number, use the ToNumber builtin to convert it.
      DCHECK(!FLAG_enable_embedded_constant_pool);
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(r2);
      __ SmiTag(r6);
      __ EnterBuiltinFrame(cp, r3, r2);
      __ Push(r6, r7);
      __ LoadRR(r2, r4);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ LoadRR(r4, r2);
      __ Pop(r6, r7);
      __ LeaveBuiltinFrame(cp, r3, r2);
      __ SmiUntag(r6);
      __ SmiUntag(r2);
      {
        // Restore the double accumulator value (d1).
        Label done_restore;
        __ SmiToDouble(d1, r7);
        __ JumpIfSmi(r7, &done_restore);
        __ LoadDouble(d1, FieldMemOperand(r7, HeapNumber::kValueOffset));
        __ bind(&done_restore);
      }
    }
    __ b(&convert);
    __ bind(&convert_number);
    __ LoadDouble(d2, FieldMemOperand(r4, HeapNumber::kValueOffset));
    __ b(&done_convert);
    __ bind(&convert_smi);
    __ SmiToDouble(d2, r4);
    __ bind(&done_convert);

    // Perform the actual comparison with the accumulator value on the left
    // hand side (d1) and the next parameter value on the right hand side (d2).
    Label compare_nan, compare_swap;
    __ cdbr(d1, d2);
    __ bunordered(&compare_nan);
    __ b(cond_done, &loop);
    __ b(CommuteCondition(cond_done), &compare_swap);

    // Left and right hand side are equal, check for -0 vs. +0.
    __ TestDoubleIsMinusZero(reg, r1, r0);
    __ bne(&loop);

    // Update accumulator. Result is on the right hand side.
    __ bind(&compare_swap);
    __ ldr(d1, d2);
    __ LoadRR(r7, r4);
    __ b(&loop);

    // At least one side is NaN, which means that the result will be NaN too.
    // We still need to visit the rest of the arguments.
    __ bind(&compare_nan);
    __ LoadRoot(r7, Heap::kNanValueRootIndex);
    __ LoadDouble(d1, FieldMemOperand(r7, HeapNumber::kValueOffset));
    __ b(&loop);
  }

  __ bind(&done_loop);
  // Drop all slots, including the receiver.
  __ AddP(r2, Operand(1));
  __ Drop(r2);
  __ LoadRR(r2, r7);
  __ Ret();
}

// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2                     : number of arguments
  //  -- r3                     : constructor function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Load the first argument into r2.
  Label no_arguments;
  {
    __ LoadRR(r4, r2);  // Store argc in r4.
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_arguments);
    __ SubP(r2, r2, Operand(1));
    __ ShiftLeftP(r2, r2, Operand(kPointerSizeLog2));
    __ LoadP(r2, MemOperand(sp, r2));
  }

  // 2a. Convert the first argument to a number.
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(r4);
    __ EnterBuiltinFrame(cp, r3, r4);
    __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, r3, r4);
    __ SmiUntag(r4);
  }

  {
    // Drop all arguments including the receiver.
    __ Drop(r4);
    __ Ret(1);
  }

  // 2b. No arguments, return +0.
  __ bind(&no_arguments);
  __ LoadSmiLiteral(r2, Smi::kZero);
  __ Ret(1);
}

// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2                     : number of arguments
  //  -- r3                     : constructor function
  //  -- r5                     : new target
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));

  // 2. Load the first argument into r4.
  {
    Label no_arguments, done;
    __ LoadRR(r8, r2);  // Store argc in r8.
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_arguments);
    __ SubP(r2, r2, Operand(1));
    __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2));
    __ LoadP(r4, MemOperand(sp, r4));
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadSmiLiteral(r4, Smi::kZero);
    __ bind(&done);
  }

  // 3. Make sure r4 is a number.
  {
    Label done_convert;
    __ JumpIfSmi(r4, &done_convert);
    __ CompareObjectType(r4, r6, r6, HEAP_NUMBER_TYPE);
    __ beq(&done_convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(r8);
      __ EnterBuiltinFrame(cp, r3, r8);
      __ Push(r5);
      __ LoadRR(r2, r4);
      __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET);
      __ LoadRR(r4, r2);
      __ Pop(r5);
      __ LeaveBuiltinFrame(cp, r3, r8);
      __ SmiUntag(r8);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ CmpP(r3, r5);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the number.
  __ AllocateJSValue(r2, r3, r4, r6, r7, &new_object);
  __ b(&drop_frame_and_ret);

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    FastNewObjectStub stub(masm->isolate());
    __ SmiTag(r8);
    __ EnterBuiltinFrame(cp, r3, r8);
    __ Push(r4);  // first argument
    __ CallStub(&stub);
    __ Pop(r4);
    __ LeaveBuiltinFrame(cp, r3, r8);
    __ SmiUntag(r8);
  }
  __ StoreP(r4, FieldMemOperand(r2, JSValue::kValueOffset), r0);

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r8);
    __ Ret(1);
  }
}

// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2                     : number of arguments
  //  -- r3                     : constructor function
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------
  // 1. Load the first argument into r2.
  Label no_arguments;
  {
    __ LoadRR(r4, r2);  // Store argc in r4.
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_arguments);
    __ SubP(r2, r2, Operand(1));
    __ ShiftLeftP(r2, r2, Operand(kPointerSizeLog2));
    __ LoadP(r2, MemOperand(sp, r2));
  }

  // 2a. At least one argument, return r2 if it's a string, otherwise
  // dispatch to appropriate conversion.
  Label drop_frame_and_ret, to_string, symbol_descriptive_string;
  {
    __ JumpIfSmi(r2, &to_string);
    STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
    __ CompareObjectType(r2, r5, r5, FIRST_NONSTRING_TYPE);
    __ bgt(&to_string);
    __ beq(&symbol_descriptive_string);
    __ b(&drop_frame_and_ret);
  }

  // 2b. No arguments, return the empty string (and pop the receiver).
  __ bind(&no_arguments);
  {
    __ LoadRoot(r2, Heap::kempty_stringRootIndex);
    __ Ret(1);
  }

  // 3a. Convert r2 to a string.
  __ bind(&to_string);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ SmiTag(r4);
    __ EnterBuiltinFrame(cp, r3, r4);
    __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
    __ LeaveBuiltinFrame(cp, r3, r4);
    __ SmiUntag(r4);
  }
  __ b(&drop_frame_and_ret);

  // 3b. Convert symbol in r2 to a string.
  __ bind(&symbol_descriptive_string);
  {
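    // Drop the arguments and the receiver, then tail-call into the runtime,
    // which produces the symbol's descriptive string (e.g. "Symbol(foo)").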
    __ Drop(r4);
    __ Drop(1);
    __ Push(r2);
    __ TailCallRuntime(Runtime::kSymbolDescriptiveString);
  }

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r4);
    __ Ret(1);
  }
}

// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2                     : number of arguments
  //  -- r3                     : constructor function
  //  -- r5                     : new target
  //  -- cp                     : context
  //  -- lr                     : return address
  //  -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
  //  -- sp[argc * 4]           : receiver
  // -----------------------------------

  // 1. Make sure we operate in the context of the called function.
  __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));

  // 2. Load the first argument into r4.
  {
    Label no_arguments, done;
    __ LoadRR(r8, r2);  // Store argc in r8.
    __ CmpP(r2, Operand::Zero());
    __ beq(&no_arguments);
    __ SubP(r2, r2, Operand(1));
    __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2));
    __ LoadP(r4, MemOperand(sp, r4));
    __ b(&done);
    __ bind(&no_arguments);
    __ LoadRoot(r4, Heap::kempty_stringRootIndex);
    __ bind(&done);
  }

  // 3. Make sure r4 is a string.
  {
    Label convert, done_convert;
    __ JumpIfSmi(r4, &convert);
    __ CompareObjectType(r4, r6, r6, FIRST_NONSTRING_TYPE);
    __ blt(&done_convert);
    __ bind(&convert);
    {
      FrameScope scope(masm, StackFrame::MANUAL);
      __ SmiTag(r8);
      __ EnterBuiltinFrame(cp, r3, r8);
      __ Push(r5);
      __ LoadRR(r2, r4);
      __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET);
      __ LoadRR(r4, r2);
      __ Pop(r5);
      __ LeaveBuiltinFrame(cp, r3, r8);
      __ SmiUntag(r8);
    }
    __ bind(&done_convert);
  }

  // 4. Check if new target and constructor differ.
  Label drop_frame_and_ret, new_object;
  __ CmpP(r3, r5);
  __ bne(&new_object);

  // 5. Allocate a JSValue wrapper for the string.
  __ AllocateJSValue(r2, r3, r4, r6, r7, &new_object);
  __ b(&drop_frame_and_ret);

  // 6. Fallback to the runtime to create new object.
  __ bind(&new_object);
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    FastNewObjectStub stub(masm->isolate());
    __ SmiTag(r8);
    __ EnterBuiltinFrame(cp, r3, r8);
    __ Push(r4);  // first argument
    __ CallStub(&stub);
    __ Pop(r4);
    __ LeaveBuiltinFrame(cp, r3, r8);
    __ SmiUntag(r8);
  }
  __ StoreP(r4, FieldMemOperand(r2, JSValue::kValueOffset), r0);

  __ bind(&drop_frame_and_ret);
  {
    __ Drop(r8);
    __ Ret(1);
  }
}

static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
  __ LoadP(ip, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset));
  __ AddP(ip, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
                                           Runtime::FunctionId function_id) {
  // ----------- S t a t e -------------
  //  -- r2 : argument count (preserved for callee)
  //  -- r3 : target function (preserved for callee)
  //  -- r5 : new target (preserved for callee)
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    // Push the number of arguments to the callee.
    // Push a copy of the target function and the new target.
    // Push function as parameter to the runtime call.
    __ SmiTag(r2);
    __ Push(r2, r3, r5, r3);

    __ CallRuntime(function_id, 1);
    __ LoadRR(r4, r2);

    // Restore target function and new target.
    __ Pop(r2, r3, r5);
    __ SmiUntag(r2);
  }
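  // r4 now holds the Code object returned by the runtime call; enter it just
  // past the Code object header.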
  __ AddP(ip, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ JumpToJSEntry(ip);
}

void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
  // Checking whether the queued function is ready for install is optional,
  // since we come across interrupts and stack checks elsewhere. However,
  // not checking may delay installing ready functions, and always checking
  // would be quite expensive. A good compromise is to first check against
  // stack limit as a cue for an interrupt signal.
  Label ok;
  __ CmpLogicalP(sp, RootMemOperand(Heap::kStackLimitRootIndex));
  __ bge(&ok, Label::kNear);

  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);

  __ bind(&ok);
  GenerateTailCallToSharedCode(masm);
}

namespace {

void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
                                    bool create_implicit_receiver,
                                    bool check_derived_construct) {
  // ----------- S t a t e -------------
  //  -- r2     : number of arguments
  //  -- r3     : constructor function
  //  -- r5     : new target
  //  -- cp     : context
  //  -- lr     : return address
  //  -- sp[...]: constructor arguments
  // -----------------------------------

  Isolate* isolate = masm->isolate();

  // Enter a construct frame.
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);

    // Preserve the incoming parameters on the stack.

    if (!create_implicit_receiver) {
      __ SmiTag(r6, r2);
      __ LoadAndTestP(r6, r6);
      __ Push(cp, r6);
      __ PushRoot(Heap::kTheHoleValueRootIndex);
    } else {
      __ SmiTag(r2);
      __ Push(cp, r2);

      // Allocate the new receiver object.
      __ Push(r3, r5);
      FastNewObjectStub stub(masm->isolate());
      __ CallStub(&stub);
      __ LoadRR(r6, r2);
      __ Pop(r3, r5);

      // ----------- S t a t e -------------
      //  -- r3: constructor function
      //  -- r5: new target
      //  -- r6: newly allocated object
      // -----------------------------------

      // Retrieve smi-tagged arguments count from the stack.
      __ LoadP(r2, MemOperand(sp));
      __ SmiUntag(r2);
      __ LoadAndTestP(r2, r2);

      // Push the allocated receiver to the stack. We need two copies
      // because we may have to return the original one and the calling
      // conventions dictate that the called function pops the receiver.
      __ Push(r6, r6);
    }

    // Set up pointer to last argument.
    __ la(r4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset));

    // Copy arguments and receiver to the expression stack.
    // r2: number of arguments
    // r3: constructor function
    // r4: address of last argument (caller sp)
    // r5: new target
    // cr0: condition indicating whether r2 is zero
    // sp[0]: receiver
    // sp[1]: receiver
    // sp[2]: number of arguments (smi-tagged)
    Label loop, no_args;
    __ beq(&no_args);
    __ ShiftLeftP(ip, r2, Operand(kPointerSizeLog2));
    __ SubP(sp, sp, ip);
    __ LoadRR(r1, r2);
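    // ip counts down from argc * kPointerSize to 0; each iteration copies one
    // argument from the caller's frame (r4 + ip) into the freshly reserved
    // stack slot (sp + ip), with r1 as the loop counter.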
    __ bind(&loop);
    __ lay(ip, MemOperand(ip, -kPointerSize));
    __ LoadP(r0, MemOperand(ip, r4));
    __ StoreP(r0, MemOperand(ip, sp));
    __ BranchOnCount(r1, &loop);
    __ bind(&no_args);

    // Call the function.
    // r2: number of arguments
    // r3: constructor function
    // r5: new target

    ParameterCount actual(r2);
    __ InvokeFunction(r3, r5, actual, CALL_FUNCTION,
                      CheckDebugStepCallWrapper());

    // Store offset of return address for deoptimizer.
    if (create_implicit_receiver && !is_api_function) {
      masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context from the frame.
    // r2: result
    // sp[0]: receiver
    // sp[1]: number of arguments (smi-tagged)
    __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));

    if (create_implicit_receiver) {
      // If the result is an object (in the ECMA sense), we should get rid
      // of the receiver and use the result; see ECMA-262 section 13.2.2-7
      // on page 74.
      Label use_receiver, exit;

      // If the result is a smi, it is *not* an object in the ECMA sense.
      // r2: result
      // sp[0]: receiver
      // sp[1]: new.target
      // sp[2]: number of arguments (smi-tagged)
      __ JumpIfSmi(r2, &use_receiver);

      // If the type of the result (stored in its map) is less than
      // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
      __ CompareObjectType(r2, r3, r5, FIRST_JS_RECEIVER_TYPE);
      __ bge(&exit);

      // Throw away the result of the constructor invocation and use the
      // on-stack receiver as the result.
      __ bind(&use_receiver);
      __ LoadP(r2, MemOperand(sp));

      // Remove receiver from the stack, remove caller arguments, and
      // return.
      __ bind(&exit);
      // r2: result
      // sp[0]: receiver (newly allocated object)
      // sp[1]: number of arguments (smi-tagged)
      __ LoadP(r3, MemOperand(sp, 1 * kPointerSize));
    } else {
      __ LoadP(r3, MemOperand(sp));
    }

    // Leave construct frame.
  }

  // ES6 9.2.2. Step 13+
  // Check that the result is not a Smi, indicating that the constructor result
  // from a derived class is neither undefined nor an Object.
  if (check_derived_construct) {
    Label dont_throw;
    __ JumpIfNotSmi(r2, &dont_throw);
    {
      FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
      __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject);
    }
    __ bind(&dont_throw);
  }

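  // r3 holds the smi-tagged argument count; drop the arguments and the
  // receiver before returning.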
  __ SmiToPtrArrayOffset(r3, r3);
  __ AddP(sp, sp, r3);
  __ AddP(sp, sp, Operand(kPointerSize));
  if (create_implicit_receiver) {
    __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r3, r4);
  }
  __ Ret();
}

}  // namespace

void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, true, false);
}

void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, true, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, false);
}

void Builtins::Generate_JSBuiltinsConstructStubForDerived(
    MacroAssembler* masm) {
  Generate_JSConstructStubHelper(masm, false, false, true);
}

// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : the value to pass to the generator
  //  -- r3 : the JSGeneratorObject to resume
  //  -- r4 : the resume mode (tagged)
  //  -- lr : return address
  // -----------------------------------
  __ AssertGeneratorObject(r3);

  // Store input value into generator object.
  __ StoreP(r2, FieldMemOperand(r3, JSGeneratorObject::kInputOrDebugPosOffset),
            r0);
  __ RecordWriteField(r3, JSGeneratorObject::kInputOrDebugPosOffset, r2, r5,
                      kLRHasNotBeenSaved, kDontSaveFPRegs);

  // Store resume mode into generator object.
  __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kResumeModeOffset));

  // Load suspended function and context.
  __ LoadP(cp, FieldMemOperand(r3, JSGeneratorObject::kContextOffset));
  __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));

  // Flood function if we are stepping.
  Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
  Label stepping_prepared;
  ExternalReference last_step_action =
      ExternalReference::debug_last_step_action_address(masm->isolate());
  STATIC_ASSERT(StepFrame > StepIn);
  __ mov(ip, Operand(last_step_action));
  __ LoadB(ip, MemOperand(ip));
  __ CmpP(ip, Operand(StepIn));
  __ bge(&prepare_step_in_if_stepping);

  // Flood function if we need to continue stepping in the suspended generator.

  ExternalReference debug_suspended_generator =
      ExternalReference::debug_suspended_generator_address(masm->isolate());

  __ mov(ip, Operand(debug_suspended_generator));
  __ LoadP(ip, MemOperand(ip));
  __ CmpP(ip, r3);
  __ beq(&prepare_step_in_suspended_generator);
  __ bind(&stepping_prepared);

  // Push receiver.
  __ LoadP(ip, FieldMemOperand(r3, JSGeneratorObject::kReceiverOffset));
  __ Push(ip);

  // ----------- S t a t e -------------
  //  -- r3    : the JSGeneratorObject to resume
  //  -- r4    : the resume mode (tagged)
  //  -- r6    : generator function
  //  -- cp    : generator context
  //  -- lr    : return address
  //  -- sp[0] : generator receiver
  // -----------------------------------

  // Push holes for arguments to generator function. Since the parser forced
  // context allocation for any variables in generators, the actual argument
  // values have already been copied into the context and these dummy values
  // will never be used.
  __ LoadP(r5, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset));
  __ LoadW(
      r2, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
  {
    Label loop, done_loop;
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
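    // On 64-bit the formal parameter count is stored as a raw int32, while on
    // 32-bit it is a smi that must be untagged before use.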
#if V8_TARGET_ARCH_S390X
    __ CmpP(r2, Operand::Zero());
    __ beq(&done_loop);
#else
    __ SmiUntag(r2);
    __ LoadAndTestP(r2, r2);
    __ beq(&done_loop);
#endif
    __ LoadRR(r1, r2);
    __ bind(&loop);
    __ push(ip);
    __ BranchOnCount(r1, &loop);
    __ bind(&done_loop);
  }

  // Dispatch on the kind of generator object.
  Label old_generator;
  __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kFunctionDataOffset));
  __ CompareObjectType(r5, r5, r5, BYTECODE_ARRAY_TYPE);
  __ bne(&old_generator, Label::kNear);

  // New-style (ignition/turbofan) generator object
  {
    // We abuse new.target both to indicate that this is a resume call and to
    // pass in the generator object. In ordinary calls, new.target is always
    // undefined because generator functions are non-constructable.
    __ LoadRR(r5, r3);
    __ LoadRR(r3, r6);
    __ LoadP(ip, FieldMemOperand(r3, JSFunction::kCodeEntryOffset));
    __ JumpToJSEntry(ip);
  }

  // Old-style (full-codegen) generator object
  __ bind(&old_generator);
  {
    // Enter a new JavaScript frame, and initialize its slots as they were when
    // the generator was suspended.
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PushStandardFrame(r6);

    // Restore the operand stack.
    __ LoadP(r2, FieldMemOperand(r3, JSGeneratorObject::kOperandStackOffset));
    __ LoadP(r5, FieldMemOperand(r2, FixedArray::kLengthOffset));
    __ AddP(r2, r2,
            Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
    {
      Label loop, done_loop;
      __ SmiUntag(r5);
      __ LoadAndTestP(r5, r5);
      __ beq(&done_loop);
      __ LoadRR(r1, r5);
      __ bind(&loop);
      __ LoadP(ip, MemOperand(r2, kPointerSize));
      __ la(r2, MemOperand(r2, kPointerSize));
      __ Push(ip);
      __ BranchOnCount(r1, &loop);
      __ bind(&done_loop);
    }

    // Reset operand stack so we don't leak.
    __ LoadRoot(ip, Heap::kEmptyFixedArrayRootIndex);
    __ StoreP(ip, FieldMemOperand(r3, JSGeneratorObject::kOperandStackOffset),
              r0);

    // Resume the generator function at the continuation.
    __ LoadP(r5, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset));
    __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kCodeOffset));
    __ AddP(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
    {
      ConstantPoolUnavailableScope constant_pool_unavailable(masm);
      __ LoadP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset));
      __ SmiUntag(r4);
      __ AddP(r5, r5, r4);
      __ LoadSmiLiteral(r4,
                        Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
      __ StoreP(r4, FieldMemOperand(r3, JSGeneratorObject::kContinuationOffset),
                r0);
      __ LoadRR(r2, r3);  // Continuation expects generator object in r2.
      __ Jump(r5);
    }
  }

  __ bind(&prepare_step_in_if_stepping);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r3, r4, r6);
    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
    __ Pop(r3, r4);
    __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);

  __ bind(&prepare_step_in_suspended_generator);
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
    __ Push(r3, r4);
    __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
    __ Pop(r3, r4);
    __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
  }
  __ b(&stepping_prepared);
}

void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
  FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
  __ push(r3);
  __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}

enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };

// Clobbers r4; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                        IsTagged argc_is_tagged) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  Label okay;
  __ LoadRoot(r4, Heap::kRealStackLimitRootIndex);
  // Make r4 the space we have left. The stack might already be overflowed
  // here which will cause r4 to become negative.
  __ SubP(r4, sp, r4);
  // Check if the arguments will overflow the stack.
  if (argc_is_tagged == kArgcIsSmiTagged) {
    __ SmiToPtrArrayOffset(r0, argc);
  } else {
    DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
    __ ShiftLeftP(r0, argc, Operand(kPointerSizeLog2));
  }
  __ CmpP(r4, r0);
  __ bgt(&okay);  // Signed comparison.

  // Out of stack space.
  __ CallRuntime(Runtime::kThrowStackOverflow);

  __ bind(&okay);
}

static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                             bool is_construct) {
  // Called from Generate_JS_Entry
  // r2: new.target
  // r3: function
  // r4: receiver
  // r5: argc
  // r6: argv
  // r0, r7-r9, cp may be clobbered
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Enter an internal frame.
  {
    // FrameScope ends up calling MacroAssembler::EnterFrame here
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Set up the context (we need to use the caller context from the isolate).
    ExternalReference context_address(Isolate::kContextAddress,
                                      masm->isolate());
    __ mov(cp, Operand(context_address));
    __ LoadP(cp, MemOperand(cp));

    __ InitializeRootRegister();

    // Push the function and the receiver onto the stack.
    __ Push(r3, r4);

    // Check if we have enough stack space to push all arguments.
    // Clobbers r4.
    Generate_CheckStackOverflow(masm, r5, kArgcIsUntaggedInt);

    // Copy arguments to the stack in a loop from argv to sp.
    // The arguments are actually placed in reverse order on sp
    // compared to argv (i.e. arg1 is highest memory in sp).
    // r3: function
    // r5: argc
    // r6: argv, i.e. points to first arg
    // r7: scratch reg to hold scaled argc
    // r8: scratch reg to hold arg handle
    // r9: scratch reg to hold index into argv
    Label argLoop, argExit;
    intptr_t zero = 0;
    __ ShiftLeftP(r7, r5, Operand(kPointerSizeLog2));
    __ SubRR(sp, r7);                // Buy the stack frame to fit args
    __ LoadImmP(r9, Operand(zero));  // Initialize argv index
    __ bind(&argLoop);
    __ CmpPH(r7, Operand(zero));
    __ beq(&argExit, Label::kNear);
    __ lay(r7, MemOperand(r7, -kPointerSize));
    __ LoadP(r8, MemOperand(r9, r6));         // read next parameter
    __ la(r9, MemOperand(r9, kPointerSize));  // r9++;
    __ LoadP(r0, MemOperand(r8));             // dereference handle
    __ StoreP(r0, MemOperand(r7, sp));        // push parameter
    __ b(&argLoop);
    __ bind(&argExit);

    // Set up new.target and argc.
    __ LoadRR(r6, r2);
    __ LoadRR(r2, r5);
    __ LoadRR(r5, r6);

    // Initialize all JavaScript callee-saved registers, since they will be
    // seen by the garbage collector as part of handlers.
    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
    __ LoadRR(r7, r6);
    __ LoadRR(r8, r6);
    __ LoadRR(r9, r6);

    // Invoke the code.
    Handle<Code> builtin = is_construct
                               ? masm->isolate()->builtins()->Construct()
                               : masm->isolate()->builtins()->Call();
    __ Call(builtin, RelocInfo::CODE_TARGET);

    // Exit the JS frame and remove the parameters (except function), and
    // return.
  }
  __ b(r14);

  // r2: result
}

void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, false);
}

void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
  Generate_JSEntryTrampolineHelper(masm, true);
}

static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
  Register args_count = scratch;

  // Get the arguments + receiver count.
  __ LoadP(args_count,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ LoadlW(args_count,
            FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));

  // Leave the frame (also dropping the register file).
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);

  __ AddP(sp, sp, args_count);
}

// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
//   o r3: the JS function object being called.
//   o r5: the new target
//   o cp: our context
//   o pp: the caller's constant pool pointer (if enabled)
//   o fp: the caller's frame pointer
//   o sp: stack pointer
//   o lr: return address
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ PushStandardFrame(r3);

  // Get the bytecode array from the function object (or from the DebugInfo if
  // it is present) and load it into kInterpreterBytecodeArrayRegister.
  __ LoadP(r2, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  Label array_done;
  Register debug_info = r4;
  DCHECK(!debug_info.is(r2));
  __ LoadP(debug_info,
           FieldMemOperand(r2, SharedFunctionInfo::kDebugInfoOffset));
  // Load original bytecode array or the debug copy.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(r2, SharedFunctionInfo::kFunctionDataOffset));
  __ CmpSmiLiteral(debug_info, DebugInfo::uninitialized(), r0);
  __ beq(&array_done);
  __ LoadP(kInterpreterBytecodeArrayRegister,
           FieldMemOperand(debug_info, DebugInfo::kDebugBytecodeArrayIndex));
  __ bind(&array_done);

  // Check whether we should continue to use the interpreter.
  Label switch_to_different_code_kind;
  __ LoadP(r2, FieldMemOperand(r2, SharedFunctionInfo::kCodeOffset));
  __ CmpP(r2, Operand(masm->CodeObject()));  // Self-reference to this code.
  __ bne(&switch_to_different_code_kind);

  // Increment invocation count for the function.
  __ LoadP(r6, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
  __ LoadP(r6, FieldMemOperand(r6, LiteralsArray::kFeedbackVectorOffset));
  __ LoadP(r1, FieldMemOperand(r6, TypeFeedbackVector::kInvocationCountIndex *
                                           kPointerSize +
                                       TypeFeedbackVector::kHeaderSize));
  __ AddSmiLiteral(r1, r1, Smi::FromInt(1), r0);
  __ StoreP(r1, FieldMemOperand(r6, TypeFeedbackVector::kInvocationCountIndex *
                                            kPointerSize +
                                        TypeFeedbackVector::kHeaderSize));

  // Check function data field is actually a BytecodeArray object.
  if (FLAG_debug_code) {
    __ TestIfSmi(kInterpreterBytecodeArrayRegister);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r2, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Load the initial bytecode offset.
  __ mov(kInterpreterBytecodeOffsetRegister,
         Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
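  // The offset is relative to the tagged BytecodeArray pointer, so it bakes
  // in the header size minus the heap object tag and points at the first
  // bytecode of the function.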

  // Push new.target, bytecode array and Smi tagged bytecode array offset.
  __ SmiTag(r4, kInterpreterBytecodeOffsetRegister);
  __ Push(r5, kInterpreterBytecodeArrayRegister, r4);

  // Allocate the local and temporary register file on the stack.
  {
    // Load frame size (word) from the BytecodeArray object.
    __ LoadlW(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
                                  BytecodeArray::kFrameSizeOffset));

    // Do a stack check to ensure we don't go over the limit.
    Label ok;
    __ SubP(r5, sp, r4);
    __ LoadRoot(r0, Heap::kRealStackLimitRootIndex);
    __ CmpLogicalP(r5, r0);
    __ bge(&ok);
    __ CallRuntime(Runtime::kThrowStackOverflow);
    __ bind(&ok);

    // If ok, push undefined as the initial value for all register file entries.
    // TODO(rmcilroy): Consider doing more than one push per loop iteration.
    Label loop, no_args;
    __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
    __ ShiftRightP(r4, r4, Operand(kPointerSizeLog2));
    __ LoadAndTestP(r4, r4);
    __ beq(&no_args);
    __ LoadRR(r1, r4);
    __ bind(&loop);
    __ push(r5);
    __ SubP(r1, Operand(1));
    __ bne(&loop);
    __ bind(&no_args);
  }

  // Load accumulator and dispatch table into registers.
  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Dispatch to the first bytecode handler for the function.
  __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister,
                           kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftP(ip, r3, Operand(kPointerSizeLog2));
  __ LoadP(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ Call(ip);

  masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());

  // The return value is in r2.
  LeaveInterpreterFrame(masm, r4);
  __ Ret();

  // If the shared code is no longer this entry trampoline, then the underlying
  // function has been switched to a different kind of code and we heal the
  // closure by switching the code entry field over to the new code as well.
  __ bind(&switch_to_different_code_kind);
  __ LeaveFrame(StackFrame::JAVA_SCRIPT);
  __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kCodeOffset));
  __ AddP(r6, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ StoreP(r6, FieldMemOperand(r3, JSFunction::kCodeEntryOffset), r0);
  __ RecordWriteCodeEntryField(r3, r6, r7);
  __ JumpToJSEntry(r6);
}

static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
                                        Register scratch,
                                        Label* stack_overflow) {
  // Check the stack for overflow. We are not trying to catch
  // interruptions (e.g. debug break and preemption) here, so the "real stack
  // limit" is checked.
  __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
  // Make scratch the space we have left. The stack might already be overflowed
  // here which will cause scratch to become negative.
  __ SubP(scratch, sp, scratch);
  // Check if the arguments will overflow the stack.
  __ ShiftLeftP(r0, num_args, Operand(kPointerSizeLog2));
  __ CmpP(scratch, r0);
  __ ble(stack_overflow);  // Signed comparison.
}

static void Generate_InterpreterPushArgs(MacroAssembler* masm,
                                         Register num_args, Register index,
                                         Register count, Register scratch,
                                         Label* stack_overflow) {
  // Add a stack check before pushing arguments.
  Generate_StackOverflowCheck(masm, num_args, scratch, stack_overflow);

  Label loop;
  __ AddP(index, index, Operand(kPointerSize));  // Bias up for LoadPU
  __ LoadRR(r0, count);
  __ bind(&loop);
  __ LoadP(scratch, MemOperand(index, -kPointerSize));
  __ lay(index, MemOperand(index, -kPointerSize));
  __ push(scratch);
  __ SubP(r0, Operand(1));
  __ bne(&loop);
}

// static
void Builtins::Generate_InterpreterPushArgsAndCallImpl(
    MacroAssembler* masm, TailCallMode tail_call_mode,
    CallableType function_type) {
  // ----------- S t a t e -------------
  //  -- r2 : the number of arguments (not including the receiver)
  //  -- r4 : the address of the first argument to be pushed. Subsequent
  //          arguments should be consecutive above this, in the same order as
  //          they are to be pushed onto the stack.
  //  -- r3 : the target to call (can be any Object).
  // -----------------------------------
  Label stack_overflow;

  // Calculate number of arguments (AddP one for receiver).
  __ AddP(r5, r2, Operand(1));

  // Push the arguments.
  Generate_InterpreterPushArgs(masm, r5, r4, r5, r6, &stack_overflow);

  // Call the target.
  if (function_type == CallableType::kJSFunction) {
    __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                                      tail_call_mode),
            RelocInfo::CODE_TARGET);
  } else {
    DCHECK_EQ(function_type, CallableType::kAny);
    __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                              tail_call_mode),
            RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable Code.
    __ bkpt(0);
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
    MacroAssembler* masm, CallableType construct_type) {
  // ----------- S t a t e -------------
  //  -- r2 : argument count (not including receiver)
  //  -- r5 : new target
  //  -- r3 : constructor to call
  //  -- r4 : allocation site feedback if available, undefined otherwise.
  //  -- r6 : address of the first argument
  // -----------------------------------
  Label stack_overflow;

  // Push a slot for the receiver to be constructed.
  __ LoadImmP(r0, Operand::Zero());
  __ push(r0);

  // Push the arguments (skip if none).
  Label skip;
  __ CmpP(r2, Operand::Zero());
  __ beq(&skip);
  Generate_InterpreterPushArgs(masm, r2, r6, r2, r7, &stack_overflow);
  __ bind(&skip);

  __ AssertUndefinedOrAllocationSite(r4, r7);
  if (construct_type == CallableType::kJSFunction) {
    __ AssertFunction(r3);

    // Tail call to the function-specific construct stub (still in the caller
    // context at this point).
    __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
    __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kConstructStubOffset));
    // Jump to the construct function.
    __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
    __ Jump(ip);

  } else {
    DCHECK_EQ(construct_type, CallableType::kAny);
    // Call the constructor with r2, r3, and r5 unmodified.
    __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
  }

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable Code.
    __ bkpt(0);
  }
}

// static
void Builtins::Generate_InterpreterPushArgsAndConstructArray(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : argument count (not including receiver)
  //  -- r3 : target to call verified to be Array function
  //  -- r4 : allocation site feedback if available, undefined otherwise.
  //  -- r5 : address of the first argument
  // -----------------------------------
  Label stack_overflow;

  __ AddP(r6, r2, Operand(1));  // Add one for receiver.

  // Push the arguments. r5, r6 and r7 will be modified.
  Generate_InterpreterPushArgs(masm, r6, r5, r6, r7, &stack_overflow);

  // Array constructor expects constructor in r5. It is same as r3 here.
  __ LoadRR(r5, r3);

  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);

  __ bind(&stack_overflow);
  {
    __ TailCallRuntime(Runtime::kThrowStackOverflow);
    // Unreachable Code.
    __ bkpt(0);
  }
}

static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
  // Set the return address to the correct point in the interpreter entry
  // trampoline.
  Smi* interpreter_entry_return_pc_offset(
      masm->isolate()->heap()->interpreter_entry_return_pc_offset());
  DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
  __ Move(r4, masm->isolate()->builtins()->InterpreterEntryTrampoline());
  __ AddP(r14, r4, Operand(interpreter_entry_return_pc_offset->value() +
                           Code::kHeaderSize - kHeapObjectTag));
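  // With r14 (the return address) pointing back into the entry trampoline,
  // returning from the dispatched handler resumes the interpreter's dispatch
  // loop there.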

  // Initialize the dispatch table register.
  __ mov(kInterpreterDispatchTableRegister,
         Operand(ExternalReference::interpreter_dispatch_table_address(
             masm->isolate())));

  // Get the bytecode array pointer from the frame.
  __ LoadP(kInterpreterBytecodeArrayRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));

  if (FLAG_debug_code) {
    // Check function data field is actually a BytecodeArray object.
    __ TestIfSmi(kInterpreterBytecodeArrayRegister);
    __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
    __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
                         BYTECODE_ARRAY_TYPE);
    __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
  }

  // Get the target bytecode offset from the frame.
  __ LoadP(kInterpreterBytecodeOffsetRegister,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ SmiUntag(kInterpreterBytecodeOffsetRegister);

  // Dispatch to the target bytecode.
  __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister,
                           kInterpreterBytecodeOffsetRegister));
  __ ShiftLeftP(ip, r3, Operand(kPointerSizeLog2));
  __ LoadP(ip, MemOperand(kInterpreterDispatchTableRegister, ip));
  __ Jump(ip);
}

void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
  // Advance the current bytecode offset stored within the given interpreter
  // stack frame. This simulates what all bytecode handlers do upon completion
  // of the underlying operation.
  __ LoadP(r3, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
  __ LoadP(r4,
           MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
  __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(kInterpreterAccumulatorRegister, r3, r4);
    __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset);
    __ Move(r4, r2);  // Result is the new bytecode offset.
    __ Pop(kInterpreterAccumulatorRegister);
  }
  __ StoreP(r4,
            MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));

  Generate_InterpreterEnterBytecode(masm);
}

void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
  Generate_InterpreterEnterBytecode(masm);
}

void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2 : argument count (preserved for callee)
  //  -- r5 : new target (preserved for callee)
  //  -- r3 : target function (preserved for callee)
  // -----------------------------------
  // First lookup code, maybe we don't need to compile!
  Label gotta_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register closure = r3;
  Register map = r8;
  Register index = r4;
  __ LoadP(map,
           FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ LoadP(map,
           FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ LoadP(index, FieldMemOperand(map, FixedArray::kLengthOffset));
  __ CmpSmiLiteral(index, Smi::FromInt(2), r0);
  __ blt(&gotta_call_runtime);

  // Find literals.
  // r9 : native context
  // r4 : length / index
  // r8 : optimized code map
  // r5 : new target
  // r3 : closure
  Register native_context = r9;
  __ LoadP(native_context, NativeContextMemOperand());

  __ bind(&loop_top);
  Register temp = r1;
  Register array_pointer = r7;

  // Does the native context match?
  __ SmiToPtrArrayOffset(array_pointer, index);
  __ AddP(array_pointer, map, array_pointer);
  __ LoadP(temp, FieldMemOperand(array_pointer,
                                 SharedFunctionInfo::kOffsetToPreviousContext));
  __ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ CmpP(temp, native_context);
  __ bne(&loop_bottom, Label::kNear);
  // OSR id set to none?
  __ LoadP(temp,
           FieldMemOperand(array_pointer,
                           SharedFunctionInfo::kOffsetToPreviousOsrAstId));
  const int bailout_id = BailoutId::None().ToInt();
  __ CmpSmiLiteral(temp, Smi::FromInt(bailout_id), r0);
  __ bne(&loop_bottom, Label::kNear);
  // Literals available?
  __ LoadP(temp,
           FieldMemOperand(array_pointer,
                           SharedFunctionInfo::kOffsetToPreviousLiterals));
  __ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
  __ JumpIfSmi(temp, &gotta_call_runtime);

  // Save the literals in the closure.
  __ StoreP(temp, FieldMemOperand(closure, JSFunction::kLiteralsOffset), r0);
  __ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, r6,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  // Code available?
  Register entry = r6;
  __ LoadP(entry,
           FieldMemOperand(array_pointer,
                           SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Found literals and code. Get them into the closure and return.
  // Store code entry in the closure.
  __ AddP(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
  __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
  __ RecordWriteCodeEntryField(closure, entry, r7);

  // Link the closure into the optimized function list.
  // r6 : code entry
  // r9 : native context
  // r3 : closure
  __ LoadP(
      r7, ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ StoreP(r7, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset),
            r0);
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, r7, temp,
                      kLRHasNotBeenSaved, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ StoreP(
      closure,
      ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST), r0);
  // Save closure before the write barrier.
  __ LoadRR(r7, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, r7, temp,
                            kLRHasNotBeenSaved, kDontSaveFPRegs);
  __ JumpToJSEntry(entry);

  __ bind(&loop_bottom);
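  // Step back one entry in the optimized code map (each entry spans
  // SharedFunctionInfo::kEntryLength slots) until only the header remains.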
1497 __ SubSmiLiteral(index, index, Smi::FromInt(SharedFunctionInfo::kEntryLength),
1498 r0);
1499 __ CmpSmiLiteral(index, Smi::FromInt(1), r0);
1500 __ bgt(&loop_top);
1501
1502 // We found neither literals nor code.
1503 __ b(&gotta_call_runtime);
1504
1505 __ bind(&try_shared);
1506 __ LoadP(entry,
1507 FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
1508 // Is the shared function marked for tier up?
1509 __ LoadlB(temp, FieldMemOperand(
1510 entry, SharedFunctionInfo::kMarkedForTierUpByteOffset));
1511 __ TestBit(temp, SharedFunctionInfo::kMarkedForTierUpBitWithinByte, r0);
1512 __ bne(&gotta_call_runtime);
1513 // Is the full code valid?
1514 __ LoadP(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
1515 __ LoadlW(r7, FieldMemOperand(entry, Code::kFlagsOffset));
1516 __ DecodeField<Code::KindField>(r7);
1517 __ CmpP(r7, Operand(Code::BUILTIN));
1518 __ beq(&gotta_call_runtime);
1519 // Yes, install the full code.
1520 __ AddP(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
1521 __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0);
1522 __ RecordWriteCodeEntryField(closure, entry, r7);
1523 __ JumpToJSEntry(entry);
1524
1525 __ bind(&gotta_call_runtime);
1526 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1527 }
1528
1529 void Builtins::Generate_CompileBaseline(MacroAssembler* masm) {
1530 GenerateTailCallToReturnedCode(masm, Runtime::kCompileBaseline);
1531 }
1532
1533 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
1534 GenerateTailCallToReturnedCode(masm,
1535 Runtime::kCompileOptimized_NotConcurrent);
1536 }
1537
1538 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
1539 GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
1540 }
1541
1542 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1543 // ----------- S t a t e -------------
1544 // -- r2 : argument count (preserved for callee)
1545 // -- r3 : new target (preserved for callee)
1546 // -- r5 : target function (preserved for callee)
1547 // -----------------------------------
1548 Label failed;
1549 {
1550 FrameScope scope(masm, StackFrame::INTERNAL);
1551 // Preserve argument count for later compare.
1552 __ Move(r6, r2);
1553 // Push a copy of the target function and the new target.
1554 __ SmiTag(r2);
1555 // Push another copy as a parameter to the runtime call.
1556 __ Push(r2, r3, r5, r3);
1557
1558 // Copy arguments from caller (stdlib, foreign, heap).
1559 Label args_done;
1560 for (int j = 0; j < 4; ++j) {
1561 Label over;
1562 if (j < 3) {
1563 __ CmpP(r6, Operand(j));
1564 __ b(ne, &over);
1565 }
1566 for (int i = j - 1; i >= 0; --i) {
1567 __ LoadP(r6, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
1568 i * kPointerSize));
1569 __ push(r6);
1570 }
1571 for (int i = 0; i < 3 - j; ++i) {
1572 __ PushRoot(Heap::kUndefinedValueRootIndex);
1573 }
1574 if (j < 3) {
1575 __ jmp(&args_done);
1576 __ bind(&over);
1577 }
1578 }
1579 __ bind(&args_done);
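// The unrolled copy above behaves like this sketch (argc was saved untagged
// in r6 before the copying clobbers it); illustrative only:
//
//   for (int i = argc - 1; i >= 0; --i) push(caller_sp[i]);  // stdlib etc.
//   for (int i = argc; i < 3; ++i) push(undefined);          // pad to 3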
1580
1581 // Call the runtime; on success, unwind this frame and the parent frame.
1582 __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
1583 // A smi 0 is returned on failure, an object on success.
1584 __ JumpIfSmi(r2, &failed);
1585
1586 __ Drop(2);
1587 __ pop(r6);
1588 __ SmiUntag(r6);
1589 scope.GenerateLeaveFrame();
1590
1591 __ AddP(r6, r6, Operand(1));
1592 __ Drop(r6);
1593 __ Ret();
1594
1595 __ bind(&failed);
1596 // Restore target function and new target.
1597 __ Pop(r2, r3, r5);
1598 __ SmiUntag(r2);
1599 }
1600 // On failure, tail call back to regular JS code.
1601 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
1602 }
1603
1604 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
1605 // For now, we are relying on the fact that make_code_young doesn't do any
1606 // garbage collection which allows us to save/restore the registers without
1607 // worrying about which of them contain pointers. We also don't build an
1608 // internal frame to make the code faster, since we shouldn't have to do stack
1609 // crawls in MakeCodeYoung. This seems a bit fragile.
1610
1611 // Point r2 at the start of the PlatformCodeAge sequence.
1612 __ CleanseP(r14);
1613 __ SubP(r14, Operand(kCodeAgingSequenceLength));
1614 __ LoadRR(r2, r14);
1615
1616 __ pop(r14);
1617
1618 // The following registers must be saved and restored when calling through to
1619 // the runtime:
1620 // r2 - contains return address (beginning of patch sequence)
1621 // r3 - isolate
1622 // r5 - new target
1623 // lr - return address
1624 FrameScope scope(masm, StackFrame::MANUAL);
1625 __ MultiPush(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
1626 __ PrepareCallCFunction(2, 0, r4);
1627 __ mov(r3, Operand(ExternalReference::isolate_address(masm->isolate())));
1628 __ CallCFunction(
1629 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
1630 __ MultiPop(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
1631 __ LoadRR(ip, r2);
1632 __ Jump(ip);
1633 }
1634
1635 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
1636 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
1637 MacroAssembler* masm) { \
1638 GenerateMakeCodeYoungAgainCommon(masm); \
1639 } \
1640 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
1641 MacroAssembler* masm) { \
1642 GenerateMakeCodeYoungAgainCommon(masm); \
1643 }
1644 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
1645 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
1646
1647 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
1648 // For now, we are relying on the fact that make_code_young doesn't do any
1649 // garbage collection which allows us to save/restore the registers without
1650 // worrying about which of them contain pointers. We also don't build an
1651 // internal frame to make the code faster, since we shouldn't have to do stack
1652 // crawls in MakeCodeYoung. This seems a bit fragile.
1653
1654 // Point r2 at the start of the PlatformCodeAge sequence.
1655 __ CleanseP(r14);
1656 __ SubP(r14, Operand(kCodeAgingSequenceLength));
1657 __ LoadRR(r2, r14);
1658
1659 __ pop(r14);
1660
1661 // The following registers must be saved and restored when calling through to
1662 // the runtime:
1663 // r2 - contains return address (beginning of patch sequence)
1664 // r3 - isolate
1665 // r5 - new target
1666 // lr - return address
1667 FrameScope scope(masm, StackFrame::MANUAL);
1668 __ MultiPush(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
1669 __ PrepareCallCFunction(2, 0, r4);
1670 __ mov(r3, Operand(ExternalReference::isolate_address(masm->isolate())));
1671 __ CallCFunction(
1672 ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
1673 2);
1674 __ MultiPop(r14.bit() | r2.bit() | r3.bit() | r5.bit() | fp.bit());
1675 __ LoadRR(ip, r2);
1676
1677 // Perform prologue operations usually performed by the young code stub.
1678 __ PushStandardFrame(r3);
1679
1680 // Jump to point after the code-age stub.
1681 __ AddP(r2, ip, Operand(kNoCodeAgeSequenceLength));
1682 __ Jump(r2);
1683 }
1684
1685 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
1686 GenerateMakeCodeYoungAgainCommon(masm);
1687 }
1688
1689 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
1690 Generate_MarkCodeAsExecutedOnce(masm);
1691 }
1692
1693 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
1694 SaveFPRegsMode save_doubles) {
1695 {
1696 FrameScope scope(masm, StackFrame::INTERNAL);
1697
1698 // Preserve registers across the notification; this is important for
1699 // compiled stubs that tail call the runtime on deopts, passing their
1700 // parameters in registers.
1701 __ MultiPush(kJSCallerSaved | kCalleeSaved);
1702 // Pass the function and deoptimization type to the runtime system.
1703 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles);
1704 __ MultiPop(kJSCallerSaved | kCalleeSaved);
1705 }
1706
1707 __ la(sp, MemOperand(sp, kPointerSize)); // Ignore state
1708 __ Ret(); // Jump to miss handler
1709 }
1710
1711 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
1712 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
1713 }
1714
1715 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
1716 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
1717 }
1718
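// Hedged sketch of the helper below: once Runtime::kNotifyDeoptimized
// returns, the full-codegen BailoutState smi on top of the stack selects
// how much of it to pop:
//
//   switch (state) {
//     case BailoutState::NO_REGISTERS: sp += 1 * kPointerSize; return;
//     case BailoutState::TOS_REGISTER:
//       r2 = sp[1]; sp += 2 * kPointerSize; return;
//     default: UNREACHABLE();
//   }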
1719 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
1720 Deoptimizer::BailoutType type) {
1721 {
1722 FrameScope scope(masm, StackFrame::INTERNAL);
1723 // Pass the function and deoptimization type to the runtime system.
1724 __ LoadSmiLiteral(r2, Smi::FromInt(static_cast<int>(type)));
1725 __ push(r2);
1726 __ CallRuntime(Runtime::kNotifyDeoptimized);
1727 }
1728
1729 // Get the full codegen state from the stack and untag it -> r8.
1730 __ LoadP(r8, MemOperand(sp, 0 * kPointerSize));
1731 __ SmiUntag(r8);
1732 // Switch on the state.
1733 Label with_tos_register, unknown_state;
1734 __ CmpP(
1735 r8,
1736 Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::NO_REGISTERS)));
1737 __ bne(&with_tos_register);
1738 __ la(sp, MemOperand(sp, 1 * kPointerSize)); // Remove state.
1739 __ Ret();
1740
1741 __ bind(&with_tos_register);
1742 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r2.code());
1743 __ LoadP(r2, MemOperand(sp, 1 * kPointerSize));
1744 __ CmpP(
1745 r8,
1746 Operand(static_cast<intptr_t>(Deoptimizer::BailoutState::TOS_REGISTER)));
1747 __ bne(&unknown_state);
1748 __ la(sp, MemOperand(sp, 2 * kPointerSize)); // Remove state.
1749 __ Ret();
1750
1751 __ bind(&unknown_state);
1752 __ stop("no cases left");
1753 }
1754
1755 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1756 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1757 }
1758
1759 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1760 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1761 }
1762
1763 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1764 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1765 }
1766
1767 // Clobbers registers {r6, r7, r8, r9}.
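// Hedged sketch of the walk below; helper names are hypothetical:
//
//   if (info->signature() == undefined) return;         // any receiver works
//   for (Map map = receiver->map();;) {
//     Object type = function_data_of_constructor(map);  // if a JSFunction
//     for (;;) {
//       if (type == info->signature()) return;          // compatible
//       if (!type->IsFunctionTemplateInfo()) break;
//       type = FunctionTemplateInfo::cast(type)->parent_template();
//     }
//     if (!map->has_hidden_prototype()) goto fail;      // incompatible
//     receiver = map->prototype();
//     map = receiver->map();
//   }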
1768 void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
1769 Register function_template_info,
1770 Label* receiver_check_failed) {
1771 Register signature = r6;
1772 Register map = r7;
1773 Register constructor = r8;
1774 Register scratch = r9;
1775
1776 // If there is no signature, return the holder.
1777 __ LoadP(signature, FieldMemOperand(function_template_info,
1778 FunctionTemplateInfo::kSignatureOffset));
1779 Label receiver_check_passed;
1780 __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex,
1781 &receiver_check_passed);
1782
1783 // Walk the prototype chain.
1784 __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1785 Label prototype_loop_start;
1786 __ bind(&prototype_loop_start);
1787
1788 // Get the constructor, if any.
1789 __ GetMapConstructor(constructor, map, scratch, scratch);
1790 __ CmpP(scratch, Operand(JS_FUNCTION_TYPE));
1791 Label next_prototype;
1792 __ bne(&next_prototype);
1793 Register type = constructor;
1794 __ LoadP(type,
1795 FieldMemOperand(constructor, JSFunction::kSharedFunctionInfoOffset));
1796 __ LoadP(type,
1797 FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset));
1798
1799 // Loop through the chain of inheriting function templates.
1800 Label function_template_loop;
1801 __ bind(&function_template_loop);
1802
1803 // If the signatures match, we have a compatible receiver.
1804 __ CmpP(signature, type);
1805 __ beq(&receiver_check_passed);
1806
1807 // If the current type is not a FunctionTemplateInfo, load the next prototype
1808 // in the chain.
1809 __ JumpIfSmi(type, &next_prototype);
1810 __ CompareObjectType(type, scratch, scratch, FUNCTION_TEMPLATE_INFO_TYPE);
1811 __ bne(&next_prototype);
1812
1813 // Otherwise load the parent function template and iterate.
1814 __ LoadP(type,
1815 FieldMemOperand(type, FunctionTemplateInfo::kParentTemplateOffset));
1816 __ b(&function_template_loop);
1817
1818 // Load the next prototype.
1819 __ bind(&next_prototype);
1820 __ LoadlW(scratch, FieldMemOperand(map, Map::kBitField3Offset));
1821 __ DecodeField<Map::HasHiddenPrototype>(scratch);
1822 __ beq(receiver_check_failed);
1823
1824 __ LoadP(receiver, FieldMemOperand(map, Map::kPrototypeOffset));
1825 __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
1826 // Iterate.
1827 __ b(&prototype_loop_start);
1828
1829 __ bind(&receiver_check_passed);
1830 }
1831
1832 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
1833 // ----------- S t a t e -------------
1834 // -- r2 : number of arguments excluding receiver
1835 // -- r3 : callee
1836 // -- lr : return address
1837 // -- sp[0] : last argument
1838 // -- ...
1839 // -- sp[4 * (argc - 1)] : first argument
1840 // -- sp[4 * argc] : receiver
1841 // -----------------------------------
1842
1843 // Load the FunctionTemplateInfo.
1844 __ LoadP(r5, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
1845 __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kFunctionDataOffset));
1846
1847 // Do the compatible receiver check.
1848 Label receiver_check_failed;
1849 __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2));
1850 __ LoadP(r4, MemOperand(sp, r1));
1851 CompatibleReceiverCheck(masm, r4, r5, &receiver_check_failed);
1852
1853 // Get the callback offset from the FunctionTemplateInfo, and jump to the
1854 // beginning of the code.
1855 __ LoadP(r6, FieldMemOperand(r5, FunctionTemplateInfo::kCallCodeOffset));
1856 __ LoadP(r6, FieldMemOperand(r6, CallHandlerInfo::kFastHandlerOffset));
1857 __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
1858 __ JumpToJSEntry(ip);
1859
1860 // Compatible receiver check failed: throw an Illegal Invocation exception.
1861 __ bind(&receiver_check_failed);
1862 // Drop the arguments (including the receiver).
1863 __ AddP(r1, r1, Operand(kPointerSize));
1864 __ AddP(sp, sp, r1);
1865 __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
1866 }
1867
1868 static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
1869 bool has_handler_frame) {
1870 // Lookup the function in the JavaScript frame.
1871 if (has_handler_frame) {
1872 __ LoadP(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1873 __ LoadP(r2, MemOperand(r2, JavaScriptFrameConstants::kFunctionOffset));
1874 } else {
1875 __ LoadP(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1876 }
1877
1878 {
1879 FrameScope scope(masm, StackFrame::INTERNAL);
1880 // Pass function as argument.
1881 __ push(r2);
1882 __ CallRuntime(Runtime::kCompileForOnStackReplacement);
1883 }
1884
1885 // If the code object is null, just return to the caller.
1886 Label skip;
1887 __ CmpSmiLiteral(r2, Smi::kZero, r0);
1888 __ bne(&skip);
1889 __ Ret();
1890
1891 __ bind(&skip);
1892
1893 // Drop any potential handler frame that may be sitting on top of the actual
1894 // JavaScript frame. This is the case when OSR is triggered from bytecode.
1895 if (has_handler_frame) {
1896 __ LeaveFrame(StackFrame::STUB);
1897 }
1898
1899 // Load deoptimization data from the code object.
1900 // <deopt_data> = <code>[#deoptimization_data_offset]
1901 __ LoadP(r3, FieldMemOperand(r2, Code::kDeoptimizationDataOffset));
1902
1903 // Load the OSR entrypoint offset from the deoptimization data.
1904 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1905 __ LoadP(
1906 r3, FieldMemOperand(r3, FixedArray::OffsetOfElementAt(
1907 DeoptimizationInputData::kOsrPcOffsetIndex)));
1908 __ SmiUntag(r3);
1909
1910 // Compute the target address = code_obj + header_size + osr_offset
1911 // <entry_addr> = <code_obj> + #header_size + <osr_offset>
1912 __ AddP(r2, r3);
1913 __ AddP(r0, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
1914 __ LoadRR(r14, r0);
1915
1916 // And "return" to the OSR entry point of the function.
1917 __ Ret();
1918 }
1919
1920 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1921 Generate_OnStackReplacementHelper(masm, false);
1922 }
1923
1924 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
1925 Generate_OnStackReplacementHelper(masm, true);
1926 }
1927
1928 // static
1929 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1930 // ----------- S t a t e -------------
1931 // -- r2 : argc
1932 // -- sp[0] : argArray
1933 // -- sp[4] : thisArg
1934 // -- sp[8] : receiver
1935 // -----------------------------------
1936
1937 // 1. Load receiver into r3, argArray into r2 (if present), remove all
1938 // arguments from the stack (including the receiver), and push thisArg (if
1939 // present) instead.
1940 {
1941 Label skip;
1942 Register arg_size = r4;
1943 Register new_sp = r5;
1944 Register scratch = r6;
1945 __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
1946 __ AddP(new_sp, sp, arg_size);
1947 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
1948 __ LoadRR(scratch, r2);
1949 __ LoadP(r3, MemOperand(new_sp, 0)); // receiver
1950 __ CmpP(arg_size, Operand(kPointerSize));
1951 __ blt(&skip);
1952 __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize)); // thisArg
1953 __ beq(&skip);
1954 __ LoadP(r2, MemOperand(new_sp, 2 * -kPointerSize)); // argArray
1955 __ bind(&skip);
1956 __ LoadRR(sp, new_sp);
1957 __ StoreP(scratch, MemOperand(sp, 0));
1958 }
1959
1960 // ----------- S t a t e -------------
1961 // -- r2 : argArray
1962 // -- r3 : receiver
1963 // -- sp[0] : thisArg
1964 // -----------------------------------
1965
1966 // 2. Make sure the receiver is actually callable.
1967 Label receiver_not_callable;
1968 __ JumpIfSmi(r3, &receiver_not_callable);
1969 __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
1970 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
1971 __ TestBit(r6, Map::kIsCallable);
1972 __ beq(&receiver_not_callable);
1973
1974 // 3. Tail call with no arguments if argArray is null or undefined.
1975 Label no_arguments;
1976 __ JumpIfRoot(r2, Heap::kNullValueRootIndex, &no_arguments);
1977 __ JumpIfRoot(r2, Heap::kUndefinedValueRootIndex, &no_arguments);
1978
1979 // 4a. Apply the receiver to the given argArray (passing undefined for
1980 // new.target).
1981 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
1982 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
1983
1984 // 4b. The argArray is either null or undefined, so we tail call without any
1985 // arguments to the receiver.
1986 __ bind(&no_arguments);
1987 {
1988 __ LoadImmP(r2, Operand::Zero());
1989 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1990 }
1991
1992 // 4c. The receiver is not callable, throw an appropriate TypeError.
1993 __ bind(&receiver_not_callable);
1994 {
1995 __ StoreP(r3, MemOperand(sp, 0));
1996 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
1997 }
1998 }
1999
2000 // static
2001 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
2002 // 1. Make sure we have at least one argument.
2003 // r2: actual number of arguments
2004 {
2005 Label done;
2006 __ CmpP(r2, Operand::Zero());
2007 __ bne(&done, Label::kNear);
2008 __ PushRoot(Heap::kUndefinedValueRootIndex);
2009 __ AddP(r2, Operand(1));
2010 __ bind(&done);
2011 }
2012
2013 // r2: actual number of arguments
2014 // 2. Get the callable to call (passed as receiver) from the stack.
2015 __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2));
2016 __ LoadP(r3, MemOperand(sp, r4));
2017
2018 // 3. Shift arguments and return address one slot down on the stack
2019 // (overwriting the original receiver). Adjust argument count to make
2020 // the original first argument the new receiver.
2021 // r2: actual number of arguments
2022 // r3: callable
2023 {
2024 Label loop;
2025 // Calculate the copy start address (destination). Copy end address is sp.
2026 __ AddP(r4, sp, r4);
2027
2028 __ bind(&loop);
2029 __ LoadP(ip, MemOperand(r4, -kPointerSize));
2030 __ StoreP(ip, MemOperand(r4));
2031 __ SubP(r4, Operand(kPointerSize));
2032 __ CmpP(r4, sp);
2033 __ bne(&loop);
2034 // Adjust the actual number of arguments and remove the top element
2035 // (which is a copy of the last argument).
2036 __ SubP(r2, Operand(1));
2037 __ pop();
2038 }
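// Illustrative recap of the shift above, for argc == n before adjustment
// (sp[0] is the last argument, sp[n] the receiver):
//   sp[n] = sp[n-1]; ...; sp[1] = sp[0];  // receiver slot gets argument 1
//   pop();                                // drop stale copy of the last arg
//   --argc;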
2039
2040 // 4. Call the callable.
2041 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
2042 }
2043
2044 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
2045 // ----------- S t a t e -------------
2046 // -- r2 : argc
2047 // -- sp[0] : argumentsList
2048 // -- sp[4] : thisArgument
2049 // -- sp[8] : target
2050 // -- sp[12] : receiver
2051 // -----------------------------------
2052
2053 // 1. Load target into r3 (if present), argumentsList into r2 (if present),
2054 // remove all arguments from the stack (including the receiver), and push
2055 // thisArgument (if present) instead.
2056 {
2057 Label skip;
2058 Register arg_size = r4;
2059 Register new_sp = r5;
2060 Register scratch = r6;
2061 __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
2062 __ AddP(new_sp, sp, arg_size);
2063 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
2064 __ LoadRR(scratch, r3);
2065 __ LoadRR(r2, r3);
2066 __ CmpP(arg_size, Operand(kPointerSize));
2067 __ blt(&skip);
2068 __ LoadP(r3, MemOperand(new_sp, 1 * -kPointerSize)); // target
2069 __ beq(&skip);
2070 __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize)); // thisArgument
2071 __ CmpP(arg_size, Operand(2 * kPointerSize));
2072 __ beq(&skip);
2073 __ LoadP(r2, MemOperand(new_sp, 3 * -kPointerSize)); // argumentsList
2074 __ bind(&skip);
2075 __ LoadRR(sp, new_sp);
2076 __ StoreP(scratch, MemOperand(sp, 0));
2077 }
2078
2079 // ----------- S t a t e -------------
2080 // -- r2 : argumentsList
2081 // -- r3 : target
2082 // -- sp[0] : thisArgument
2083 // -----------------------------------
2084
2085 // 2. Make sure the target is actually callable.
2086 Label target_not_callable;
2087 __ JumpIfSmi(r3, &target_not_callable);
2088 __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
2089 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
2090 __ TestBit(r6, Map::kIsCallable);
2091 __ beq(&target_not_callable);
2092
2093 // 3a. Apply the target to the given argumentsList (passing undefined for
2094 // new.target).
2095 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
2096 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
2097
2098 // 3b. The target is not callable, throw an appropriate TypeError.
2099 __ bind(&target_not_callable);
2100 {
2101 __ StoreP(r3, MemOperand(sp, 0));
2102 __ TailCallRuntime(Runtime::kThrowApplyNonFunction);
2103 }
2104 }
2105
2106 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
2107 // ----------- S t a t e -------------
2108 // -- r2 : argc
2109 // -- sp[0] : new.target (optional)
2110 // -- sp[4] : argumentsList
2111 // -- sp[8] : target
2112 // -- sp[12] : receiver
2113 // -----------------------------------
2114
2115 // 1. Load target into r3 (if present), argumentsList into r2 (if present),
2116 // new.target into r5 (if present, otherwise use target), remove all
2117 // arguments from the stack (including the receiver), and leave undefined
2118 // on the stack as the receiver.
2119 {
2120 Label skip;
2121 Register arg_size = r4;
2122 Register new_sp = r6;
2123 __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
2124 __ AddP(new_sp, sp, arg_size);
2125 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
2126 __ LoadRR(r2, r3);
2127 __ LoadRR(r5, r3);
2128 __ StoreP(r3, MemOperand(new_sp, 0)); // receiver (undefined)
2129 __ CmpP(arg_size, Operand(kPointerSize));
2130 __ blt(&skip);
2131 __ LoadP(r3, MemOperand(new_sp, 1 * -kPointerSize)); // target
2132 __ LoadRR(r5, r3); // new.target defaults to target
2133 __ beq(&skip);
2134 __ LoadP(r2, MemOperand(new_sp, 2 * -kPointerSize)); // argumentsList
2135 __ CmpP(arg_size, Operand(2 * kPointerSize));
2136 __ beq(&skip);
2137 __ LoadP(r5, MemOperand(new_sp, 3 * -kPointerSize)); // new.target
2138 __ bind(&skip);
2139 __ LoadRR(sp, new_sp);
2140 }
2141
2142 // ----------- S t a t e -------------
2143 // -- r2 : argumentsList
2144 // -- r5 : new.target
2145 // -- r3 : target
2146 // -- sp[0] : receiver (undefined)
2147 // -----------------------------------
2148
2149 // 2. Make sure the target is actually a constructor.
2150 Label target_not_constructor;
2151 __ JumpIfSmi(r3, &target_not_constructor);
2152 __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
2153 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
2154 __ TestBit(r6, Map::kIsConstructor);
2155 __ beq(&target_not_constructor);
2156
2157 // 3. Make sure the new.target is actually a constructor.
2158 Label new_target_not_constructor;
2159 __ JumpIfSmi(r5, &new_target_not_constructor);
2160 __ LoadP(r6, FieldMemOperand(r5, HeapObject::kMapOffset));
2161 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
2162 __ TestBit(r6, Map::kIsConstructor);
2163 __ beq(&new_target_not_constructor);
2164
2165 // 4a. Construct the target with the given new.target and argumentsList.
2166 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET);
2167
2168 // 4b. The target is not a constructor, throw an appropriate TypeError.
2169 __ bind(&target_not_constructor);
2170 {
2171 __ StoreP(r3, MemOperand(sp, 0));
2172 __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
2173 }
2174
2175 // 4c. The new.target is not a constructor, throw an appropriate TypeError.
2176 __ bind(&new_target_not_constructor);
2177 {
2178 __ StoreP(r5, MemOperand(sp, 0));
2179 __ TailCallRuntime(Runtime::kThrowCalledNonCallable);
2180 }
2181 }
2182
2183 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
2184 __ SmiTag(r2);
2185 __ LoadSmiLiteral(r6, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2186 // The stack is updated as follows:
2187 // old SP --->
2188 // R14 Return Addr
2189 // Old FP <--- New FP
2190 // Argument Adapter SMI
2191 // Function
2192 // ArgC as SMI <--- New SP
2193 __ lay(sp, MemOperand(sp, -5 * kPointerSize));
2194
2195 // Cleanse the top nibble of 31-bit pointers.
2196 __ CleanseP(r14);
2197 __ StoreP(r14, MemOperand(sp, 4 * kPointerSize));
2198 __ StoreP(fp, MemOperand(sp, 3 * kPointerSize));
2199 __ StoreP(r6, MemOperand(sp, 2 * kPointerSize));
2200 __ StoreP(r3, MemOperand(sp, 1 * kPointerSize));
2201 __ StoreP(r2, MemOperand(sp, 0 * kPointerSize));
2202 __ la(fp, MemOperand(sp, StandardFrameConstants::kFixedFrameSizeFromFp +
2203 kPointerSize));
2204 }
2205
2206 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
2207 // ----------- S t a t e -------------
2208 // -- r2 : result being passed through
2209 // -----------------------------------
2210 // Get the number of arguments passed (as a smi), tear down the frame and
2211 // then tear down the parameters.
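// Hedged sketch of the net effect: beyond the kPointerSize receiver
// adjustment applied in LeaveFrame, sp is advanced by argc slots, i.e.
//   sp += (argc + 1) * kPointerSize;  // receiver + all actual arguments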
2212 __ LoadP(r3, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
2213 kPointerSize)));
2214 int stack_adjustment = kPointerSize; // adjust for receiver
2215 __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
2216 __ SmiToPtrArrayOffset(r3, r3);
2217 __ lay(sp, MemOperand(sp, r3));
2218 }
2219
2220 // static
2221 void Builtins::Generate_Apply(MacroAssembler* masm) {
2222 // ----------- S t a t e -------------
2223 // -- r2 : argumentsList
2224 // -- r3 : target
2225 // -- r5 : new.target (checked to be constructor or undefined)
2226 // -- sp[0] : thisArgument
2227 // -----------------------------------
2228
2229 // Create the list of arguments from the array-like argumentsList.
2230 {
2231 Label create_arguments, create_array, create_runtime, done_create;
2232 __ JumpIfSmi(r2, &create_runtime);
2233
2234 // Load the map of argumentsList into r4.
2235 __ LoadP(r4, FieldMemOperand(r2, HeapObject::kMapOffset));
2236
2237 // Load native context into r6.
2238 __ LoadP(r6, NativeContextMemOperand());
2239
2240 // Check if argumentsList is an (unmodified) arguments object.
2241 __ LoadP(ip, ContextMemOperand(r6, Context::SLOPPY_ARGUMENTS_MAP_INDEX));
2242 __ CmpP(ip, r4);
2243 __ beq(&create_arguments);
2244 __ LoadP(ip, ContextMemOperand(r6, Context::STRICT_ARGUMENTS_MAP_INDEX));
2245 __ CmpP(ip, r4);
2246 __ beq(&create_arguments);
2247
2248 // Check if argumentsList is a fast JSArray.
2249 __ CompareInstanceType(r4, ip, JS_ARRAY_TYPE);
2250 __ beq(&create_array);
2251
2252 // Ask the runtime to create the list (actually a FixedArray).
2253 __ bind(&create_runtime);
2254 {
2255 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2256 __ Push(r3, r5, r2);
2257 __ CallRuntime(Runtime::kCreateListFromArrayLike);
2258 __ Pop(r3, r5);
2259 __ LoadP(r4, FieldMemOperand(r2, FixedArray::kLengthOffset));
2260 __ SmiUntag(r4);
2261 }
2262 __ b(&done_create);
2263
2264 // Try to create the list from an arguments object.
2265 __ bind(&create_arguments);
2266 __ LoadP(r4, FieldMemOperand(r2, JSArgumentsObject::kLengthOffset));
2267 __ LoadP(r6, FieldMemOperand(r2, JSObject::kElementsOffset));
2268 __ LoadP(ip, FieldMemOperand(r6, FixedArray::kLengthOffset));
2269 __ CmpP(r4, ip);
2270 __ bne(&create_runtime);
2271 __ SmiUntag(r4);
2272 __ LoadRR(r2, r6);
2273 __ b(&done_create);
2274
2275 // Try to create the list from a JSArray object.
2276 __ bind(&create_array);
2277 __ LoadlB(r4, FieldMemOperand(r4, Map::kBitField2Offset));
2278 __ DecodeField<Map::ElementsKindBits>(r4);
2279 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
2280 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
2281 STATIC_ASSERT(FAST_ELEMENTS == 2);
2282 __ CmpP(r4, Operand(FAST_ELEMENTS));
2283 __ bgt(&create_runtime);
2284 __ CmpP(r4, Operand(FAST_HOLEY_SMI_ELEMENTS));
2285 __ beq(&create_runtime);
2286 __ LoadP(r4, FieldMemOperand(r2, JSArray::kLengthOffset));
2287 __ LoadP(r2, FieldMemOperand(r2, JSArray::kElementsOffset));
2288 __ SmiUntag(r4);
2289
2290 __ bind(&done_create);
2291 }
2292
2293 // Check for stack overflow.
2294 {
2295 // Check the stack for overflow. We are not trying to catch interruptions
2296 // (i.e. debug break and preemption) here, so check the "real stack limit".
2297 Label done;
2298 __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
2299 // Make ip the space we have left. The stack might already be overflowed
2300 // here, which will cause ip to become negative.
2301 __ SubP(ip, sp, ip);
2302 // Check if the arguments will overflow the stack.
2303 __ ShiftLeftP(r0, r4, Operand(kPointerSizeLog2));
2304 __ CmpP(ip, r0); // Signed comparison.
2305 __ bgt(&done);
2306 __ TailCallRuntime(Runtime::kThrowStackOverflow);
2307 __ bind(&done);
2308 }
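// The check above amounts to this sketch, done as one signed comparison so
// an already-overflowed sp also fails:
//   if ((intptr_t)(sp - real_stack_limit) <= len * kPointerSize)
//     ThrowStackOverflow();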
2309
2310 // ----------- S t a t e -------------
2311 // -- r3 : target
2312 // -- r2 : args (a FixedArray built from argumentsList)
2313 // -- r4 : len (number of elements to push from args)
2314 // -- r5 : new.target (checked to be constructor or undefined)
2315 // -- sp[0] : thisArgument
2316 // -----------------------------------
2317
2318 // Push arguments onto the stack (thisArgument is already on the stack).
2319 {
2320 Label loop, no_args;
2321 __ CmpP(r4, Operand::Zero());
2322 __ beq(&no_args);
2323 __ AddP(r2, r2,
2324 Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
2325 __ LoadRR(r1, r4);
2326 __ bind(&loop);
2327 __ LoadP(r0, MemOperand(r2, kPointerSize));
2328 __ la(r2, MemOperand(r2, kPointerSize));
2329 __ push(r0);
2330 __ BranchOnCount(r1, &loop);
2331 __ bind(&no_args);
2332 __ LoadRR(r2, r4);
2333 }
2334
2335 // Dispatch to Call or Construct depending on whether new.target is undefined.
2336 {
2337 __ CompareRoot(r5, Heap::kUndefinedValueRootIndex);
2338 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq);
2339 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2340 }
2341 }
2342
2343 namespace {
2344
2345 // Drops the top JavaScript frame and an arguments adaptor frame below it
2346 // (if present), preserving all the arguments prepared for the current call.
2347 // Does nothing if the debugger is currently active.
2348 // ES6 14.6.3. PrepareForTailCall
2349 //
2350 // Stack structure for the function g() tail calling f():
2351 //
2352 // ------- Caller frame: -------
2353 // | ...
2354 // | g()'s arg M
2355 // | ...
2356 // | g()'s arg 1
2357 // | g()'s receiver arg
2358 // | g()'s caller pc
2359 // ------- g()'s frame: -------
2360 // | g()'s caller fp <- fp
2361 // | g()'s context
2362 // | function pointer: g
2363 // | -------------------------
2364 // | ...
2365 // | ...
2366 // | f()'s arg N
2367 // | ...
2368 // | f()'s arg 1
2369 // | f()'s receiver arg <- sp (f()'s caller pc is not on the stack yet!)
2370 // ----------------------
2371 //
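// Hedged summary of the code below; helper names are illustrative:
//
//   if (!is_tail_call_elimination_enabled) return;
//   if (frame_type(fp) == STUB) fp = caller_fp(fp);    // drop stub frame
//   int n = frame_type(caller_fp(fp)) == ARGUMENTS_ADAPTOR
//               ? adaptor_frame_argc(caller_fp(fp))    // adaptor dropped too
//               : current_function(fp).shared().formal_parameter_count();
//   masm->PrepareForTailCall(actual_args, n, ...);     // relocates frame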
2372 void PrepareForTailCall(MacroAssembler* masm, Register args_reg,
2373 Register scratch1, Register scratch2,
2374 Register scratch3) {
2375 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
2376 Comment cmnt(masm, "[ PrepareForTailCall");
2377
2378 // Prepare for tail call only if ES2015 tail call elimination is active.
2379 Label done;
2380 ExternalReference is_tail_call_elimination_enabled =
2381 ExternalReference::is_tail_call_elimination_enabled_address(
2382 masm->isolate());
2383 __ mov(scratch1, Operand(is_tail_call_elimination_enabled));
2384 __ LoadlB(scratch1, MemOperand(scratch1));
2385 __ CmpP(scratch1, Operand::Zero());
2386 __ beq(&done);
2387
2388 // Drop possible interpreter handler/stub frame.
2389 {
2390 Label no_interpreter_frame;
2391 __ LoadP(scratch3,
2392 MemOperand(fp, CommonFrameConstants::kContextOrFrameTypeOffset));
2393 __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::STUB), r0);
2394 __ bne(&no_interpreter_frame);
2395 __ LoadP(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2396 __ bind(&no_interpreter_frame);
2397 }
2398
2399 // Check if next frame is an arguments adaptor frame.
2400 Register caller_args_count_reg = scratch1;
2401 Label no_arguments_adaptor, formal_parameter_count_loaded;
2402 __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
2403 __ LoadP(
2404 scratch3,
2405 MemOperand(scratch2, CommonFrameConstants::kContextOrFrameTypeOffset));
2406 __ CmpSmiLiteral(scratch3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
2407 __ bne(&no_arguments_adaptor);
2408
2409 // Drop current frame and load arguments count from arguments adaptor frame.
2410 __ LoadRR(fp, scratch2);
2411 __ LoadP(caller_args_count_reg,
2412 MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
2413 __ SmiUntag(caller_args_count_reg);
2414 __ b(&formal_parameter_count_loaded);
2415
2416 __ bind(&no_arguments_adaptor);
2417 // Load caller's formal parameter count
2418 __ LoadP(scratch1,
2419 MemOperand(fp, ArgumentsAdaptorFrameConstants::kFunctionOffset));
2420 __ LoadP(scratch1,
2421 FieldMemOperand(scratch1, JSFunction::kSharedFunctionInfoOffset));
2422 __ LoadW(caller_args_count_reg,
2423 FieldMemOperand(scratch1,
2424 SharedFunctionInfo::kFormalParameterCountOffset));
2425 #if !V8_TARGET_ARCH_S390X
2426 __ SmiUntag(caller_args_count_reg);
2427 #endif
2428
2429 __ bind(&formal_parameter_count_loaded);
2430
2431 ParameterCount callee_args_count(args_reg);
2432 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
2433 scratch3);
2434 __ bind(&done);
2435 }
2436 } // namespace
2437
2438 // static
2439 void Builtins::Generate_CallFunction(MacroAssembler* masm,
2440 ConvertReceiverMode mode,
2441 TailCallMode tail_call_mode) {
2442 // ----------- S t a t e -------------
2443 // -- r2 : the number of arguments (not including the receiver)
2444 // -- r3 : the function to call (checked to be a JSFunction)
2445 // -----------------------------------
2446 __ AssertFunction(r3);
2447
2448 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
2449 // Check that the function is not a "classConstructor".
2450 Label class_constructor;
2451 __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
2452 __ LoadlW(r5, FieldMemOperand(r4, SharedFunctionInfo::kCompilerHintsOffset));
2453 __ TestBitMask(r5, FunctionKind::kClassConstructor
2454 << SharedFunctionInfo::kFunctionKindShift,
2455 r0);
2456 __ bne(&class_constructor);
2457
2458 // Enter the context of the function; ToObject has to run in the function
2459 // context, and we also need to take the global proxy from the function
2460 // context in case of conversion.
2461 __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));
2462 // We need to convert the receiver for non-native sloppy mode functions.
2463 Label done_convert;
2464 __ AndP(r0, r5, Operand((1 << SharedFunctionInfo::kStrictModeBit) |
2465 (1 << SharedFunctionInfo::kNativeBit)));
2466 __ bne(&done_convert);
2467 {
2468 // ----------- S t a t e -------------
2469 // -- r2 : the number of arguments (not including the receiver)
2470 // -- r3 : the function to call (checked to be a JSFunction)
2471 // -- r4 : the shared function info.
2472 // -- cp : the function context.
2473 // -----------------------------------
2474
2475 if (mode == ConvertReceiverMode::kNullOrUndefined) {
2476 // Patch receiver to global proxy.
2477 __ LoadGlobalProxy(r5);
2478 } else {
2479 Label convert_to_object, convert_receiver;
2480 __ ShiftLeftP(r5, r2, Operand(kPointerSizeLog2));
2481 __ LoadP(r5, MemOperand(sp, r5));
2482 __ JumpIfSmi(r5, &convert_to_object);
2483 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2484 __ CompareObjectType(r5, r6, r6, FIRST_JS_RECEIVER_TYPE);
2485 __ bge(&done_convert);
2486 if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
2487 Label convert_global_proxy;
2488 __ JumpIfRoot(r5, Heap::kUndefinedValueRootIndex,
2489 &convert_global_proxy);
2490 __ JumpIfNotRoot(r5, Heap::kNullValueRootIndex, &convert_to_object);
2491 __ bind(&convert_global_proxy);
2492 {
2493 // Patch receiver to global proxy.
2494 __ LoadGlobalProxy(r5);
2495 }
2496 __ b(&convert_receiver);
2497 }
2498 __ bind(&convert_to_object);
2499 {
2500 // Convert receiver using ToObject.
2501 // TODO(bmeurer): Inline the allocation here to avoid building the frame
2502 // in the fast case? (fall back to AllocateInNewSpace?)
2503 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2504 __ SmiTag(r2);
2505 __ Push(r2, r3);
2506 __ LoadRR(r2, r5);
2507 __ Push(cp);
2508 __ Call(masm->isolate()->builtins()->ToObject(),
2509 RelocInfo::CODE_TARGET);
2510 __ Pop(cp);
2511 __ LoadRR(r5, r2);
2512 __ Pop(r2, r3);
2513 __ SmiUntag(r2);
2514 }
2515 __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
2516 __ bind(&convert_receiver);
2517 }
2518 __ ShiftLeftP(r6, r2, Operand(kPointerSizeLog2));
2519 __ StoreP(r5, MemOperand(sp, r6));
2520 }
2521 __ bind(&done_convert);
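// Ignoring the ConvertReceiverMode fast paths, the conversion above is the
// ES6 sloppy-mode receiver coercion (sketch):
//   if (receiver == undefined || receiver == null) receiver = global_proxy;
//   else if (!receiver->IsJSReceiver()) receiver = ToObject(receiver);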
2522
2523 // ----------- S t a t e -------------
2524 // -- r2 : the number of arguments (not including the receiver)
2525 // -- r3 : the function to call (checked to be a JSFunction)
2526 // -- r4 : the shared function info.
2527 // -- cp : the function context.
2528 // -----------------------------------
2529
2530 if (tail_call_mode == TailCallMode::kAllow) {
2531 PrepareForTailCall(masm, r2, r5, r6, r7);
2532 }
2533
2534 __ LoadW(
2535 r4, FieldMemOperand(r4, SharedFunctionInfo::kFormalParameterCountOffset));
2536 #if !V8_TARGET_ARCH_S390X
2537 __ SmiUntag(r4);
2538 #endif
2539 ParameterCount actual(r2);
2540 ParameterCount expected(r4);
2541 __ InvokeFunctionCode(r3, no_reg, expected, actual, JUMP_FUNCTION,
2542 CheckDebugStepCallWrapper());
2543
2544 // The function is a "classConstructor", so we need to raise an exception.
2545 __ bind(&class_constructor);
2546 {
2547 FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL);
2548 __ push(r3);
2549 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
2550 }
2551 }
2552
2553 namespace {
2554
2555 void Generate_PushBoundArguments(MacroAssembler* masm) {
2556 // ----------- S t a t e -------------
2557 // -- r2 : the number of arguments (not including the receiver)
2558 // -- r3 : target (checked to be a JSBoundFunction)
2559 // -- r5 : new.target (only in case of [[Construct]])
2560 // -----------------------------------
2561
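// Hedged sketch of the transformation below: for bound arguments b1..bN and
// call arguments a1..aM (last argument at sp[0], receiver at sp[M]),
//   [receiver, a1..aM]  ==>  [receiver, b1..bN, a1..aM],  argc += N,
// with a real-stack-limit check before sp is lowered by N slots.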
2562 // Load [[BoundArguments]] into r4 and length of that into r6.
2563 Label no_bound_arguments;
2564 __ LoadP(r4, FieldMemOperand(r3, JSBoundFunction::kBoundArgumentsOffset));
2565 __ LoadP(r6, FieldMemOperand(r4, FixedArray::kLengthOffset));
2566 __ SmiUntag(r6);
2567 __ LoadAndTestP(r6, r6);
2568 __ beq(&no_bound_arguments);
2569 {
2570 // ----------- S t a t e -------------
2571 // -- r2 : the number of arguments (not including the receiver)
2572 // -- r3 : target (checked to be a JSBoundFunction)
2573 // -- r4 : the [[BoundArguments]] (implemented as FixedArray)
2574 // -- r5 : new.target (only in case of [[Construct]])
2575 // -- r6 : the number of [[BoundArguments]]
2576 // -----------------------------------
2577
2578 // Reserve stack space for the [[BoundArguments]].
2579 {
2580 Label done;
2581 __ LoadRR(r8, sp); // preserve previous stack pointer
2582 __ ShiftLeftP(r9, r6, Operand(kPointerSizeLog2));
2583 __ SubP(sp, sp, r9);
2584 // Check the stack for overflow. We are not trying to catch interruptions
2585 // (i.e. debug break and preemption) here, so check the "real stack
2586 // limit".
2587 __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
2588 __ bgt(&done); // Signed comparison.
2589 // Restore the stack pointer.
2590 __ LoadRR(sp, r8);
2591 {
2592 FrameScope scope(masm, StackFrame::MANUAL);
2593 __ EnterFrame(StackFrame::INTERNAL);
2594 __ CallRuntime(Runtime::kThrowStackOverflow);
2595 }
2596 __ bind(&done);
2597 }
2598
2599 // Relocate arguments down the stack.
2600 // -- r2 : the number of arguments (not including the receiver)
2601 // -- r8 : the previous stack pointer
2602 // -- r9: the size of the [[BoundArguments]]
2603 {
2604 Label skip, loop;
2605 __ LoadImmP(r7, Operand::Zero());
2606 __ CmpP(r2, Operand::Zero());
2607 __ beq(&skip);
2608 __ LoadRR(r1, r2);
2609 __ bind(&loop);
2610 __ LoadP(r0, MemOperand(r8, r7));
2611 __ StoreP(r0, MemOperand(sp, r7));
2612 __ AddP(r7, r7, Operand(kPointerSize));
2613 __ BranchOnCount(r1, &loop);
2614 __ bind(&skip);
2615 }
2616
2617 // Copy [[BoundArguments]] to the stack (below the arguments).
2618 {
2619 Label loop;
2620 __ AddP(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
2621 __ AddP(r4, r4, r9);
2622 __ LoadRR(r1, r6);
2623 __ bind(&loop);
2624 __ LoadP(r0, MemOperand(r4, -kPointerSize));
2625 __ lay(r4, MemOperand(r4, -kPointerSize));
2626 __ StoreP(r0, MemOperand(sp, r7));
2627 __ AddP(r7, r7, Operand(kPointerSize));
2628 __ BranchOnCount(r1, &loop);
2629 __ AddP(r2, r2, r6);
2630 }
2631 }
2632 __ bind(&no_bound_arguments);
2633 }
2634
2635 } // namespace
2636
2637 // static
2638 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
2639 TailCallMode tail_call_mode) {
2640 // ----------- S t a t e -------------
2641 // -- r2 : the number of arguments (not including the receiver)
2642 // -- r3 : the function to call (checked to be a JSBoundFunction)
2643 // -----------------------------------
2644 __ AssertBoundFunction(r3);
2645
2646 if (tail_call_mode == TailCallMode::kAllow) {
2647 PrepareForTailCall(masm, r2, r5, r6, r7);
2648 }
2649
2650 // Patch the receiver to [[BoundThis]].
2651 __ LoadP(ip, FieldMemOperand(r3, JSBoundFunction::kBoundThisOffset));
2652 __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2));
2653 __ StoreP(ip, MemOperand(sp, r1));
2654
2655 // Push the [[BoundArguments]] onto the stack.
2656 Generate_PushBoundArguments(masm);
2657
2658 // Call the [[BoundTargetFunction]] via the Call builtin.
2659 __ LoadP(r3,
2660 FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
2661 __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny,
2662 masm->isolate())));
2663 __ LoadP(ip, MemOperand(ip));
2664 __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
2665 __ JumpToJSEntry(ip);
2666 }
2667
2668 // static
2669 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
2670 TailCallMode tail_call_mode) {
2671 // ----------- S t a t e -------------
2672 // -- r2 : the number of arguments (not including the receiver)
2673 // -- r3 : the target to call (can be any Object).
2674 // -----------------------------------
2675
2676 Label non_callable, non_function, non_smi;
2677 __ JumpIfSmi(r3, &non_callable);
2678 __ bind(&non_smi);
2679 __ CompareObjectType(r3, r6, r7, JS_FUNCTION_TYPE);
2680 __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode),
2681 RelocInfo::CODE_TARGET, eq);
2682 __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE));
2683 __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode),
2684 RelocInfo::CODE_TARGET, eq);
2685
2686 // Check if target has a [[Call]] internal method.
2687 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
2688 __ TestBit(r6, Map::kIsCallable);
2689 __ beq(&non_callable);
2690
2691 __ CmpP(r7, Operand(JS_PROXY_TYPE));
2692 __ bne(&non_function);
2693
2694 // 0. Prepare for tail call if necessary.
2695 if (tail_call_mode == TailCallMode::kAllow) {
2696 PrepareForTailCall(masm, r2, r5, r6, r7);
2697 }
2698
2699 // 1. Runtime fallback for Proxy [[Call]].
2700 __ Push(r3);
2701 // Increase the arguments size to include the pushed function and the
2702 // existing receiver on the stack.
2703 __ AddP(r2, r2, Operand(2));
2704 // Tail-call to the runtime.
2705 __ JumpToExternalReference(
2706 ExternalReference(Runtime::kJSProxyCall, masm->isolate()));
2707
2708 // 2. Call to something else, which might have a [[Call]] internal method (if
2709 // not we raise an exception).
2710 __ bind(&non_function);
2711 // Overwrite the original receiver with the (original) target.
2712 __ ShiftLeftP(r7, r2, Operand(kPointerSizeLog2));
2713 __ StoreP(r3, MemOperand(sp, r7));
2714 // Let the "call_as_function_delegate" take care of the rest.
2715 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r3);
2716 __ Jump(masm->isolate()->builtins()->CallFunction(
2717 ConvertReceiverMode::kNotNullOrUndefined, tail_call_mode),
2718 RelocInfo::CODE_TARGET);
2719
2720 // 3. Call to something that is not callable.
2721 __ bind(&non_callable);
2722 {
2723 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2724 __ Push(r3);
2725 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2726 }
2727 }
2728
2729 // static
2730 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2731 // ----------- S t a t e -------------
2732 // -- r2 : the number of arguments (not including the receiver)
2733 // -- r3 : the constructor to call (checked to be a JSFunction)
2734 // -- r5 : the new target (checked to be a constructor)
2735 // -----------------------------------
2736 __ AssertFunction(r3);
2737
2738 // The calling convention for function-specific ConstructStubs requires
2739 // r4 to contain either an AllocationSite or undefined.
2740 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2741
2742 // Tail call to the function-specific construct stub (still in the caller
2743 // context at this point).
2744 __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
2745 __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kConstructStubOffset));
2746 __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
2747 __ JumpToJSEntry(ip);
2748 }
2749
2750 // static
2751 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2752 // ----------- S t a t e -------------
2753 // -- r2 : the number of arguments (not including the receiver)
2754 // -- r3 : the function to call (checked to be a JSBoundFunction)
2755 // -- r5 : the new target (checked to be a constructor)
2756 // -----------------------------------
2757 __ AssertBoundFunction(r3);
2758
2759 // Push the [[BoundArguments]] onto the stack.
2760 Generate_PushBoundArguments(masm);
2761
2762 // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2763 Label skip;
2764 __ CmpP(r3, r5);
2765 __ bne(&skip);
2766 __ LoadP(r5,
2767 FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
2768 __ bind(&skip);
2769
2770 // Construct the [[BoundTargetFunction]] via the Construct builtin.
2771 __ LoadP(r3,
2772 FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
2773 __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate())));
2774 __ LoadP(ip, MemOperand(ip));
2775 __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
2776 __ JumpToJSEntry(ip);
2777 }
2778
2779 // static
2780 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
2781 // ----------- S t a t e -------------
2782 // -- r2 : the number of arguments (not including the receiver)
2783 // -- r3 : the constructor to call (checked to be a JSProxy)
2784 // -- r5 : the new target (either the same as the constructor or
2785 // the JSFunction on which new was invoked initially)
2786 // -----------------------------------
2787
2788 // Call into the Runtime for Proxy [[Construct]].
2789 __ Push(r3, r5);
2790 // Include the pushed new_target, constructor and the receiver.
2791 __ AddP(r2, r2, Operand(3));
2792 // Tail-call to the runtime.
2793 __ JumpToExternalReference(
2794 ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
2795 }
2796
2797 // static
2798 void Builtins::Generate_Construct(MacroAssembler* masm) {
2799 // ----------- S t a t e -------------
2800 // -- r2 : the number of arguments (not including the receiver)
2801 // -- r3 : the constructor to call (can be any Object)
2802 // -- r5 : the new target (either the same as the constructor or
2803 // the JSFunction on which new was invoked initially)
2804 // -----------------------------------
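// Hedged sketch of the dispatch below:
//   if (target.IsSmi())              goto non_constructor;
//   if (target.IsJSFunction())       tail call ConstructFunction;
//   if (!map(target).is_constructor) goto non_constructor;
//   if (target.IsJSBoundFunction())  tail call ConstructBoundFunction;
//   if (target.IsJSProxy())          tail call ConstructProxy;
//   receiver = target;               // exotic object with [[Construct]]
//   target = CALL_AS_CONSTRUCTOR_DELEGATE; tail call CallFunction;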
2805
2806 // Check if target is a Smi.
2807 Label non_constructor;
2808 __ JumpIfSmi(r3, &non_constructor);
2809
2810 // Dispatch based on instance type.
2811 __ CompareObjectType(r3, r6, r7, JS_FUNCTION_TYPE);
2812 __ Jump(masm->isolate()->builtins()->ConstructFunction(),
2813 RelocInfo::CODE_TARGET, eq);
2814
2815 // Check if target has a [[Construct]] internal method.
2816 __ LoadlB(r4, FieldMemOperand(r6, Map::kBitFieldOffset));
2817 __ TestBit(r4, Map::kIsConstructor);
2818 __ beq(&non_constructor);
2819
2820 // Only dispatch to bound functions after checking whether they are
2821 // constructors.
2822 __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE));
2823 __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(),
2824 RelocInfo::CODE_TARGET, eq);
2825
2826 // Only dispatch to proxies after checking whether they are constructors.
2827 __ CmpP(r7, Operand(JS_PROXY_TYPE));
2828 __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET,
2829 eq);
2830
2831 // Called Construct on an exotic Object with a [[Construct]] internal method.
2832 {
2833 // Overwrite the original receiver with the (original) target.
2834 __ ShiftLeftP(r7, r2, Operand(kPointerSizeLog2));
2835 __ StoreP(r3, MemOperand(sp, r7));
2836 // Let the "call_as_constructor_delegate" take care of the rest.
2837 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r3);
2838 __ Jump(masm->isolate()->builtins()->CallFunction(),
2839 RelocInfo::CODE_TARGET);
2840 }
2841
2842 // Called Construct on an Object that doesn't have a [[Construct]] internal
2843 // method.
2844 __ bind(&non_constructor);
2845 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(),
2846 RelocInfo::CODE_TARGET);
2847 }
2848
2849 // static
2850 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
2851 // ----------- S t a t e -------------
2852 // -- r3 : requested object size (untagged)
2853 // -- lr : return address
2854 // -----------------------------------
2855 __ SmiTag(r3);
2856 __ Push(r3);
2857 __ LoadSmiLiteral(cp, Smi::kZero);
2858 __ TailCallRuntime(Runtime::kAllocateInNewSpace);
2859 }
2860
2861 // static
2862 void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
2863 // ----------- S t a t e -------------
2864 // -- r3 : requested object size (untagged)
2865 // -- lr : return address
2866 // -----------------------------------
2867 __ SmiTag(r3);
2868 __ LoadSmiLiteral(r4, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
2869 __ Push(r3, r4);
2870 __ LoadSmiLiteral(cp, Smi::kZero);
2871 __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
2872 }
2873
2874 // static
2875 void Builtins::Generate_Abort(MacroAssembler* masm) {
2876 // ----------- S t a t e -------------
2877 // -- r3 : message_id as Smi
2878 // -- lr : return address
2879 // -----------------------------------
2880 __ push(r3);
2881 __ LoadSmiLiteral(cp, Smi::kZero);
2882 __ TailCallRuntime(Runtime::kAbort);
2883 }
2884
2885 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2886 // ----------- S t a t e -------------
2887 // -- r2 : actual number of arguments
2888 // -- r3 : function (passed through to callee)
2889 // -- r4 : expected number of arguments
2890 // -- r5 : new target (passed through to callee)
2891 // -----------------------------------
2892
2893 Label invoke, dont_adapt_arguments, stack_overflow;
2894
2895 Label enough, too_few;
2896 __ LoadP(ip, FieldMemOperand(r3, JSFunction::kCodeEntryOffset));
2897 __ CmpP(r2, r4);
2898 __ blt(&too_few);
2899 __ CmpP(r4, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
2900 __ beq(&dont_adapt_arguments);
2901
2902 { // Enough parameters: actual >= expected
2903 __ bind(&enough);
2904 EnterArgumentsAdaptorFrame(masm);
2905 Generate_StackOverflowCheck(masm, r4, r7, &stack_overflow);
2906
2907 // Calculate copy start address into r2 and copy end address into r6.
2908 // r2: actual number of arguments as a smi
2909 // r3: function
2910 // r4: expected number of arguments
2911 // r5: new target (passed through to callee)
2912 // ip: code entry to call
2913 __ SmiToPtrArrayOffset(r2, r2);
2914 __ AddP(r2, fp);
2915 // adjust for return address and receiver
2916 __ AddP(r2, r2, Operand(2 * kPointerSize));
2917 __ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2));
2918 __ SubP(r6, r2, r6);
2919
2920 // Copy the arguments (including the receiver) to the new stack frame.
2921 // r2: copy start address
2922 // r3: function
2923 // r4: expected number of arguments
2924 // r5: new target (passed through to callee)
2925 // r6: copy end address
2926 // ip: code entry to call
2927
2928 Label copy;
2929 __ bind(&copy);
2930 __ LoadP(r0, MemOperand(r2, 0));
2931 __ push(r0);
2932 __ CmpP(r2, r6); // Compare before moving to next argument.
2933 __ lay(r2, MemOperand(r2, -kPointerSize));
2934 __ bne(&copy);
2935
2936 __ b(&invoke);
2937 }
2938
2939 { // Too few parameters: actual < expected
2940 __ bind(&too_few);
2941
2942 EnterArgumentsAdaptorFrame(masm);
2943 Generate_StackOverflowCheck(masm, r4, r7, &stack_overflow);
2944
2945 // Calculate the copy start address into r2; the copy end address is fp.
2946 // r2: actual number of arguments as a smi
2947 // r3: function
2948 // r4: expected number of arguments
2949 // r5: new target (passed through to callee)
2950 // ip: code entry to call
2951 __ SmiToPtrArrayOffset(r2, r2);
2952 __ lay(r2, MemOperand(r2, fp));
2953
2954 // Copy the arguments (including the receiver) to the new stack frame.
2955 // r2: copy start address
2956 // r3: function
2957 // r4: expected number of arguments
2958 // r5: new target (passed through to callee)
2959 // ip: code entry to call
2960 Label copy;
2961 __ bind(&copy);
2962 // Adjust load for return address and receiver.
2963 __ LoadP(r0, MemOperand(r2, 2 * kPointerSize));
2964 __ push(r0);
2965 __ CmpP(r2, fp); // Compare before moving to next argument.
2966 __ lay(r2, MemOperand(r2, -kPointerSize));
2967 __ bne(&copy);
2968
2969 // Fill the remaining expected arguments with undefined.
2970 // r3: function
2971 // r4: expected number of arguments
2972 // ip: code entry to call
2973 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2974 __ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2));
2975 __ SubP(r6, fp, r6);
2976 // Adjust for frame.
2977 __ SubP(r6, r6, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
2978 2 * kPointerSize));
2979
2980 Label fill;
2981 __ bind(&fill);
2982 __ push(r0);
2983 __ CmpP(sp, r6);
2984 __ bne(&fill);
2985 }
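// At this point (sketch) the new frame holds, from the receiver down:
//   [receiver, a1..a_actual, undefined x (expected - actual)]
// so the callee observes exactly `expected` arguments.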
2986
2987 // Call the entry point.
2988 __ bind(&invoke);
2989 __ LoadRR(r2, r4);
2990 // r2 : expected number of arguments
2991 // r3 : function (passed through to callee)
2992 // r5 : new target (passed through to callee)
2993 __ CallJSEntry(ip);
2994
2995 // Store offset of return address for deoptimizer.
2996 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2997
2998 // Exit frame and return.
2999 LeaveArgumentsAdaptorFrame(masm);
3000 __ Ret();
3001
3002 // -------------------------------------------
3003 // Don't adapt arguments.
3004 // -------------------------------------------
3005 __ bind(&dont_adapt_arguments);
3006 __ JumpToJSEntry(ip);
3007
3008 __ bind(&stack_overflow);
3009 {
3010 FrameScope frame(masm, StackFrame::MANUAL);
3011 __ CallRuntime(Runtime::kThrowStackOverflow);
3012 __ bkpt(0);
3013 }
3014 }
3015
3016 #undef __
3017
3018 } // namespace internal
3019 } // namespace v8
3020
3021 #endif // V8_TARGET_ARCH_S390
3022