1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #if V8_TARGET_ARCH_S390
6
7 #include "src/assembler-inl.h"
8 #include "src/code-factory.h"
9 #include "src/code-stubs.h"
10 #include "src/debug/debug.h"
11 #include "src/deoptimizer.h"
12 #include "src/frame-constants.h"
13 #include "src/frames.h"
14 #include "src/objects/js-generator.h"
15 #include "src/runtime/runtime.h"
16 #include "src/wasm/wasm-objects.h"
17
18 namespace v8 {
19 namespace internal {
20
21 #define __ ACCESS_MASM(masm)
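// The '__' shorthand below expands (via ACCESS_MASM) to 'masm->', the usual
// V8 convention for emitting instructions through the MacroAssembler.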
22
23 void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
24 ExitFrameType exit_frame_type) {
25 __ Move(kJavaScriptCallExtraArg1Register, ExternalReference::Create(address));
26 if (exit_frame_type == BUILTIN_EXIT) {
27 __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
28 RelocInfo::CODE_TARGET);
29 } else {
30 DCHECK(exit_frame_type == EXIT);
31 __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
32 RelocInfo::CODE_TARGET);
33 }
34 }
35
36 void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
37 // ----------- S t a t e -------------
38 // -- r2 : number of arguments
39 // -- lr : return address
40 // -- sp[...]: constructor arguments
41 // -----------------------------------
42 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
43
44 if (FLAG_debug_code) {
45 // Initial map for the builtin InternalArray functions should be maps.
46 __ LoadP(r4, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
47 __ TestIfSmi(r4);
48 __ Assert(ne, AbortReason::kUnexpectedInitialMapForInternalArrayFunction,
49 cr0);
50 __ CompareObjectType(r4, r5, r6, MAP_TYPE);
51 __ Assert(eq, AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
52 }
53
54 // Run the native code for the InternalArray function called as a normal
55 // function.
56 // tail call a stub
57 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
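// (r4 is set to undefined here in place of the AllocationSite feedback
// argument, which the internal Array constructor does not use.)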
58 __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
59 RelocInfo::CODE_TARGET);
60 }
61
62 static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
63 Runtime::FunctionId function_id) {
64 // ----------- S t a t e -------------
65 // -- r2 : argument count (preserved for callee)
66 // -- r3 : target function (preserved for callee)
67 // -- r5 : new target (preserved for callee)
68 // -----------------------------------
69 {
70 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
71 // Push the number of arguments to the callee.
72 // Push a copy of the target function and the new target.
73 // Push function as parameter to the runtime call.
74 __ SmiTag(r2);
75 __ Push(r2, r3, r5, r3);
76
77 __ CallRuntime(function_id, 1);
78 __ LoadRR(r4, r2);
79
80 // Restore target function and new target.
81 __ Pop(r2, r3, r5);
82 __ SmiUntag(r2);
83 }
84 static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
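// The runtime call returned a Code object (now in r4); skip its header to
// reach the first instruction and tail-call into it.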
85 __ AddP(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
86 __ JumpToJSEntry(r4);
87 }
88
89 namespace {
90
91 void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
92 Label post_instantiation_deopt_entry;
93 // ----------- S t a t e -------------
94 // -- r2 : number of arguments
95 // -- r3 : constructor function
96 // -- r5 : new target
97 // -- cp : context
98 // -- lr : return address
99 // -- sp[...]: constructor arguments
100 // -----------------------------------
101
102 // Enter a construct frame.
103 {
104 FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
105
106 // Preserve the incoming parameters on the stack.
107 __ SmiTag(r2);
108 __ Push(cp, r2);
109 __ SmiUntag(r2);
110 // The receiver for the builtin/api call.
111 __ PushRoot(Heap::kTheHoleValueRootIndex);
112 // Set up pointer to last argument.
113 __ la(r6, MemOperand(fp, StandardFrameConstants::kCallerSPOffset));
114
115 // Copy arguments and receiver to the expression stack.
116 // r2: number of arguments
117 // r3: constructor function
118 // r6: address of last argument (caller sp)
119 // r5: new target
120 // cr0: condition indicating whether r2 is zero
121 // sp[0]: receiver (the hole)
122 // sp[1]: number of arguments (smi-tagged)
123 // sp[2]: context
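// The loop below reserves r2 * kPointerSize bytes of stack and copies the
// arguments downwards: ip runs from r2 * kPointerSize down to zero and is
// used as an offset off both r6 (caller SP, the source) and sp (the
// destination), so the arguments keep their original order.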
124 Label loop, no_args;
125 __ beq(&no_args);
126 __ ShiftLeftP(ip, r2, Operand(kPointerSizeLog2));
127 __ SubP(sp, sp, ip);
128 __ LoadRR(r1, r2);
129 __ bind(&loop);
130 __ lay(ip, MemOperand(ip, -kPointerSize));
131 __ LoadP(r0, MemOperand(ip, r6));
132 __ StoreP(r0, MemOperand(ip, sp));
133 __ BranchOnCount(r1, &loop);
134 __ bind(&no_args);
135
136 // Call the function.
137 // r2: number of arguments
138 // r3: constructor function
139 // r5: new target
140
141 ParameterCount actual(r2);
142 __ InvokeFunction(r3, r5, actual, CALL_FUNCTION);
143
144 // Restore context from the frame.
145 __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
146 // Restore smi-tagged arguments count from the frame.
147 __ LoadP(r3, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
148
149 // Leave construct frame.
150 }
151 // Remove caller arguments from the stack and return.
152 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
153
154 __ SmiToPtrArrayOffset(r3, r3);
155 __ AddP(sp, sp, r3);
156 __ AddP(sp, sp, Operand(kPointerSize));
157 __ Ret();
158 }
159
160 } // namespace
161
162 // The construct stub for ES5 constructor functions and ES6 class constructors.
163 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
164 // ----------- S t a t e -------------
165 // -- r2: number of arguments (untagged)
166 // -- r3: constructor function
167 // -- r5: new target
168 // -- cp: context
169 // -- lr: return address
170 // -- sp[...]: constructor arguments
171 // -----------------------------------
172
173 // Enter a construct frame.
174 {
175 FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
176 Label post_instantiation_deopt_entry, not_create_implicit_receiver;
177
178 // Preserve the incoming parameters on the stack.
179 __ SmiTag(r2);
180 __ Push(cp, r2, r3);
181 __ PushRoot(Heap::kUndefinedValueRootIndex);
182 __ Push(r5);
183
184 // ----------- S t a t e -------------
185 // -- sp[0*kPointerSize]: new target
186 // -- sp[1*kPointerSize]: padding
187 // -- r3 and sp[2*kPointerSize]: constructor function
188 // -- sp[3*kPointerSize]: number of arguments (tagged)
189 // -- sp[4*kPointerSize]: context
190 // -----------------------------------
191
192 __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
193 __ LoadlW(r6, FieldMemOperand(r6, SharedFunctionInfo::kFlagsOffset));
194 __ TestBitMask(r6, SharedFunctionInfo::IsDerivedConstructorBit::kMask, r0);
195 __ bne(&not_create_implicit_receiver);
196
197 // If not derived class constructor: Allocate the new receiver object.
198 __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
199 r6, r7);
200 __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
201 RelocInfo::CODE_TARGET);
202 __ b(&post_instantiation_deopt_entry);
203
204 // Else: use TheHoleValue as receiver for constructor call
205 __ bind(&not_create_implicit_receiver);
206 __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
207
208 // ----------- S t a t e -------------
209 // -- r2: receiver
210 // -- Slot 4 / sp[0*kPointerSize]: new target
211 // -- Slot 3 / sp[1*kPointerSize]: padding
212 // -- Slot 2 / sp[2*kPointerSize]: constructor function
213 // -- Slot 1 / sp[3*kPointerSize]: number of arguments (tagged)
214 // -- Slot 0 / sp[4*kPointerSize]: context
215 // -----------------------------------
216 // Deoptimizer enters here.
217 masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
218 masm->pc_offset());
219 __ bind(&post_instantiation_deopt_entry);
220
221 // Restore new target.
222 __ Pop(r5);
223 // Push the allocated receiver to the stack. We need two copies
224 // because we may have to return the original one and the calling
225 // conventions dictate that the called function pops the receiver.
226 __ Push(r2, r2);
227
228 // ----------- S t a t e -------------
229 // -- r5: new target
230 // -- sp[0*kPointerSize]: implicit receiver
231 // -- sp[1*kPointerSize]: implicit receiver
232 // -- sp[2*kPointerSize]: padding
233 // -- sp[3*kPointerSize]: constructor function
234 // -- sp[4*kPointerSize]: number of arguments (tagged)
235 // -- sp[5*kPointerSize]: context
236 // -----------------------------------
237
238 // Restore constructor function and argument count.
239 __ LoadP(r3, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
240 __ LoadP(r2, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
241 __ SmiUntag(r2);
242
243 // Set up pointer to last argument.
244 __ la(r6, MemOperand(fp, StandardFrameConstants::kCallerSPOffset));
245
246 // Copy arguments and receiver to the expression stack.
247 Label loop, no_args;
248 // ----------- S t a t e -------------
249 // -- r2: number of arguments (untagged)
250 // -- r5: new target
251 // -- r6: pointer to last argument
252 // -- cr0: condition indicating whether r2 is zero
253 // -- sp[0*kPointerSize]: implicit receiver
254 // -- sp[1*kPointerSize]: implicit receiver
255 // -- sp[2*kPointerSize]: padding
256 // -- r3 and sp[3*kPointerSize]: constructor function
257 // -- sp[4*kPointerSize]: number of arguments (tagged)
258 // -- sp[5*kPointerSize]: context
259 // -----------------------------------
260
261 __ beq(&no_args);
262 __ ShiftLeftP(ip, r2, Operand(kPointerSizeLog2));
263 __ SubP(sp, sp, ip);
264 __ LoadRR(r1, r2);
265 __ bind(&loop);
266 __ lay(ip, MemOperand(ip, -kPointerSize));
267 __ LoadP(r0, MemOperand(ip, r6));
268 __ StoreP(r0, MemOperand(ip, sp));
269 __ BranchOnCount(r1, &loop);
270 __ bind(&no_args);
271
272 // Call the function.
273 ParameterCount actual(r2);
274 __ InvokeFunction(r3, r5, actual, CALL_FUNCTION);
275
276 // ----------- S t a t e -------------
277 // -- r2: constructor result
278 // -- sp[0*kPointerSize]: implicit receiver
279 // -- sp[1*kPointerSize]: padding
280 // -- sp[2*kPointerSize]: constructor function
281 // -- sp[3*kPointerSize]: number of arguments
282 // -- sp[4*kPointerSize]: context
283 // -----------------------------------
284
285 // Store offset of return address for deoptimizer.
286 masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
287 masm->pc_offset());
288
289 // Restore the context from the frame.
290 __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
291
292 // If the result is an object (in the ECMA sense), we should get rid
293 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
294 // on page 74.
295 Label use_receiver, do_throw, leave_frame;
296
297 // If the result is undefined, we jump out to using the implicit receiver.
298 __ JumpIfRoot(r2, Heap::kUndefinedValueRootIndex, &use_receiver);
299
300 // Otherwise we do a smi check and fall through to check if the return value
301 // is a valid receiver.
302
303 // If the result is a smi, it is *not* an object in the ECMA sense.
304 __ JumpIfSmi(r2, &use_receiver);
305
306 // If the type of the result (stored in its map) is less than
307 // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
308 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
309 __ CompareObjectType(r2, r6, r6, FIRST_JS_RECEIVER_TYPE);
310 __ bge(&leave_frame);
311 __ b(&use_receiver);
312
313 __ bind(&do_throw);
314 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
315
316 // Throw away the result of the constructor invocation and use the
317 // on-stack receiver as the result.
318 __ bind(&use_receiver);
319 __ LoadP(r2, MemOperand(sp));
320 __ JumpIfRoot(r2, Heap::kTheHoleValueRootIndex, &do_throw);
321
322 __ bind(&leave_frame);
323 // Restore smi-tagged arguments count from the frame.
324 __ LoadP(r3, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
325 // Leave construct frame.
326 }
327
328 // Remove caller arguments from the stack and return.
329 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
330
331 __ SmiToPtrArrayOffset(r3, r3);
332 __ AddP(sp, sp, r3);
333 __ AddP(sp, sp, Operand(kPointerSize));
334 __ Ret();
335 }
336
337 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
338 Generate_JSBuiltinsConstructStubHelper(masm);
339 }
340
341 static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
342 Register sfi_data,
343 Register scratch1) {
344 Label done;
345
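// sfi_data is either a BytecodeArray or an InterpreterData (a pair of
// BytecodeArray and interpreter trampoline); in the latter case unwrap it so
// the caller always ends up with the BytecodeArray.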
346 __ CompareObjectType(sfi_data, scratch1, scratch1, INTERPRETER_DATA_TYPE);
347 __ bne(&done, Label::kNear);
348 __ LoadP(sfi_data,
349 FieldMemOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));
350 __ bind(&done);
351 }
352
353 // static
354 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
355 // ----------- S t a t e -------------
356 // -- r2 : the value to pass to the generator
357 // -- r3 : the JSGeneratorObject to resume
358 // -- lr : return address
359 // -----------------------------------
360 __ AssertGeneratorObject(r3);
361
362 // Store input value into generator object.
363 __ StoreP(r2, FieldMemOperand(r3, JSGeneratorObject::kInputOrDebugPosOffset),
364 r0);
365 __ RecordWriteField(r3, JSGeneratorObject::kInputOrDebugPosOffset, r2, r5,
366 kLRHasNotBeenSaved, kDontSaveFPRegs);
367
368 // Load suspended function and context.
369 __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
370 __ LoadP(cp, FieldMemOperand(r6, JSFunction::kContextOffset));
371
372 // Flood function if we are stepping.
373 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
374 Label stepping_prepared;
375 ExternalReference debug_hook =
376 ExternalReference::debug_hook_on_function_call_address(masm->isolate());
377 __ Move(ip, debug_hook);
378 __ LoadB(ip, MemOperand(ip));
379 __ CmpSmiLiteral(ip, Smi::kZero, r0);
380 __ bne(&prepare_step_in_if_stepping);
381
382 // Flood function if we need to continue stepping in the suspended generator.
383
384 ExternalReference debug_suspended_generator =
385 ExternalReference::debug_suspended_generator_address(masm->isolate());
386
387 __ Move(ip, debug_suspended_generator);
388 __ LoadP(ip, MemOperand(ip));
389 __ CmpP(ip, r3);
390 __ beq(&prepare_step_in_suspended_generator);
391 __ bind(&stepping_prepared);
392
393 // Check the stack for overflow. We are not trying to catch interruptions
394 // (i.e. debug break and preemption) here, so check the "real stack limit".
395 Label stack_overflow;
396 __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
397 __ blt(&stack_overflow);
398
399 // Push receiver.
400 __ LoadP(ip, FieldMemOperand(r3, JSGeneratorObject::kReceiverOffset));
401 __ Push(ip);
402
403 // ----------- S t a t e -------------
404 // -- r3 : the JSGeneratorObject to resume
405 // -- r6 : generator function
406 // -- cp : generator context
407 // -- lr : return address
408 // -- sp[0] : generator receiver
409 // -----------------------------------
410
411 // Copy the function arguments from the generator object's register file.
412 __ LoadP(r5, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset));
413 __ LoadLogicalHalfWordP(
414 r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
415 __ LoadP(r4, FieldMemOperand(
416 r3, JSGeneratorObject::kParametersAndRegistersOffset));
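// The block below reserves stack space for the formal parameters and copies
// them out of the generator's parameters-and-registers FixedArray: r5 counts
// down in bytes over the array while ip counts up over the destination slots.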
417 {
418 Label loop, done_loop;
419 __ ShiftLeftP(r5, r5, Operand(kPointerSizeLog2));
420 __ SubP(sp, r5);
421
422 // ip = stack offset
423 // r5 = parameter array offset
424 __ LoadImmP(ip, Operand::Zero());
425 __ SubP(r5, Operand(kPointerSize));
426 __ blt(&done_loop);
427
428 __ lgfi(r1, Operand(-kPointerSize));
429
430 __ bind(&loop);
431
432 // parameter copy loop
433 __ LoadP(r0, FieldMemOperand(r4, r5, FixedArray::kHeaderSize));
434 __ StoreP(r0, MemOperand(sp, ip));
435
436 // update offsets
437 __ lay(ip, MemOperand(ip, kPointerSize));
438
439 __ BranchRelativeOnIdxHighP(r5, r1, &loop);
440
441 __ bind(&done_loop);
442 }
443
444 // Underlying function needs to have bytecode available.
445 if (FLAG_debug_code) {
446 __ LoadP(r5, FieldMemOperand(r6, JSFunction::kSharedFunctionInfoOffset));
447 __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kFunctionDataOffset));
448 GetSharedFunctionInfoBytecode(masm, r5, ip);
449 __ CompareObjectType(r5, r5, r5, BYTECODE_ARRAY_TYPE);
450 __ Assert(eq, AbortReason::kMissingBytecodeArray);
451 }
452
453 // Resume (Ignition/TurboFan) generator object.
454 {
455 // We abuse new.target both to indicate that this is a resume call and to
456 // pass in the generator object. In ordinary calls, new.target is always
457 // undefined because generator functions are non-constructable.
458 __ LoadRR(r5, r3);
459 __ LoadRR(r3, r6);
460 static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
461 __ LoadP(r4, FieldMemOperand(r3, JSFunction::kCodeOffset));
462 __ AddP(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
463 __ JumpToJSEntry(r4);
464 }
465
466 __ bind(&prepare_step_in_if_stepping);
467 {
468 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
469 __ Push(r3, r6);
470 // Push hole as receiver since we do not use it for stepping.
471 __ PushRoot(Heap::kTheHoleValueRootIndex);
472 __ CallRuntime(Runtime::kDebugOnFunctionCall);
473 __ Pop(r3);
474 __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
475 }
476 __ b(&stepping_prepared);
477
478 __ bind(&prepare_step_in_suspended_generator);
479 {
480 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
481 __ Push(r3);
482 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
483 __ Pop(r3);
484 __ LoadP(r6, FieldMemOperand(r3, JSGeneratorObject::kFunctionOffset));
485 }
486 __ b(&stepping_prepared);
487
488 __ bind(&stack_overflow);
489 {
490 FrameScope scope(masm, StackFrame::INTERNAL);
491 __ CallRuntime(Runtime::kThrowStackOverflow);
492 __ bkpt(0); // This should be unreachable.
493 }
494 }
495
496 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
497 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
498 __ push(r3);
499 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
500 }
501
502 // Clobbers r4; preserves all other registers.
503 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) {
504 // Check the stack for overflow. We are not trying to catch
505 // interruptions (e.g. debug break and preemption) here, so the "real stack
506 // limit" is checked.
507 Label okay;
508 __ LoadRoot(r4, Heap::kRealStackLimitRootIndex);
509 // Make r4 the space we have left. The stack might already be overflowed
510 // here which will cause r4 to become negative.
511 __ SubP(r4, sp, r4);
512 // Check if the arguments will overflow the stack.
513 __ ShiftLeftP(r0, argc, Operand(kPointerSizeLog2));
514 __ CmpP(r4, r0);
515 __ bgt(&okay); // Signed comparison.
516
517 // Out of stack space.
518 __ CallRuntime(Runtime::kThrowStackOverflow);
519
520 __ bind(&okay);
521 }
522
523 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
524 bool is_construct) {
525 // Called from Generate_JS_Entry
526 // r2: new.target
527 // r3: function
528 // r4: receiver
529 // r5: argc
530 // r6: argv
531 // r0,r7-r9, cp may be clobbered
532 ProfileEntryHookStub::MaybeCallEntryHook(masm);
533
534 // Enter an internal frame.
535 {
536 // FrameScope ends up calling MacroAssembler::EnterFrame here
537 FrameScope scope(masm, StackFrame::INTERNAL);
538
539 // Setup the context (we need to use the caller context from the isolate).
540 ExternalReference context_address = ExternalReference::Create(
541 IsolateAddressId::kContextAddress, masm->isolate());
542 __ Move(cp, context_address);
543 __ LoadP(cp, MemOperand(cp));
544
545 // Push the function and the receiver onto the stack.
546 __ Push(r3, r4);
547
548 // Check if we have enough stack space to push all arguments.
549 // Clobbers r4.
550 Generate_CheckStackOverflow(masm, r5);
551
552 // Copy arguments to the stack in a loop from argv to sp.
553 // The arguments are actually placed in reverse order on sp
554 // compared to argv (i.e. arg1 is highest memory in sp).
555 // r3: function
556 // r5: argc
557 // r6: argv, i.e. points to first arg
558 // r7: scratch reg to hold scaled argc
559 // r8: scratch reg to hold arg handle
560 // r9: scratch reg to hold index into argv
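// Note that argv holds handle locations (Object**) rather than raw object
// pointers, which is why each entry is dereferenced once more before being
// pushed onto the JS stack.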
561 Label argLoop, argExit;
562 intptr_t zero = 0;
563 __ ShiftLeftP(r7, r5, Operand(kPointerSizeLog2));
564 __ SubRR(sp, r7); // Buy the stack frame to fit args
565 __ LoadImmP(r9, Operand(zero)); // Initialize argv index
566 __ bind(&argLoop);
567 __ CmpPH(r7, Operand(zero));
568 __ beq(&argExit, Label::kNear);
569 __ lay(r7, MemOperand(r7, -kPointerSize));
570 __ LoadP(r8, MemOperand(r9, r6)); // read next parameter
571 __ la(r9, MemOperand(r9, kPointerSize)); // r9++;
572 __ LoadP(r0, MemOperand(r8)); // dereference handle
573 __ StoreP(r0, MemOperand(r7, sp)); // push parameter
574 __ b(&argLoop);
575 __ bind(&argExit);
576
577 // Setup new.target and argc.
578 __ LoadRR(r6, r2);
579 __ LoadRR(r2, r5);
580 __ LoadRR(r5, r6);
581
582 // Initialize all JavaScript callee-saved registers, since they will be seen
583 // by the garbage collector as part of handlers.
584 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
585 __ LoadRR(r7, r6);
586 __ LoadRR(r8, r6);
587 __ LoadRR(r9, r6);
588
589 // Invoke the code.
590 Handle<Code> builtin = is_construct
591 ? BUILTIN_CODE(masm->isolate(), Construct)
592 : masm->isolate()->builtins()->Call();
593 __ Call(builtin, RelocInfo::CODE_TARGET);
594
595 // Exit the JS frame and remove the parameters (except function), and
596 // return.
597 }
598 __ b(r14);
599
600 // r2: result
601 }
602
603 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
604 Generate_JSEntryTrampolineHelper(masm, false);
605 }
606
607 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
608 Generate_JSEntryTrampolineHelper(masm, true);
609 }
610
611 static void ReplaceClosureCodeWithOptimizedCode(
612 MacroAssembler* masm, Register optimized_code, Register closure,
613 Register scratch1, Register scratch2, Register scratch3) {
614 // Store code entry in the closure.
615 __ StoreP(optimized_code, FieldMemOperand(closure, JSFunction::kCodeOffset),
616 r0);
617 __ LoadRR(scratch1,
618 optimized_code); // Write barrier clobbers scratch1 below.
619 __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
620 kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
621 OMIT_SMI_CHECK);
622 }
623
624 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
625 Register args_count = scratch;
626
627 // Get the arguments + receiver count.
628 __ LoadP(args_count,
629 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
630 __ LoadlW(args_count,
631 FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));
632
633 // Leave the frame (also dropping the register file).
634 __ LeaveFrame(StackFrame::INTERPRETED);
635
636 __ AddP(sp, sp, args_count);
637 }
638
639 // Tail-call |function_id| if |smi_entry| == |marker|
640 static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
641 Register smi_entry,
642 OptimizationMarker marker,
643 Runtime::FunctionId function_id) {
644 Label no_match;
645 __ CmpSmiLiteral(smi_entry, Smi::FromEnum(marker), r0);
646 __ bne(&no_match);
647 GenerateTailCallToReturnedCode(masm, function_id);
648 __ bind(&no_match);
649 }
650
651 static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
652 Register feedback_vector,
653 Register scratch1, Register scratch2,
654 Register scratch3) {
655 // ----------- S t a t e -------------
656 // -- r2 : argument count (preserved for callee if needed, and caller)
657 // -- r5 : new target (preserved for callee if needed, and caller)
658 // -- r3 : target function (preserved for callee if needed, and caller)
659 // -- feedback vector (preserved for caller if needed)
660 // -----------------------------------
661 DCHECK(
662 !AreAliased(feedback_vector, r2, r3, r5, scratch1, scratch2, scratch3));
663
664 Label optimized_code_slot_is_weak_ref, fallthrough;
665
666 Register closure = r3;
667 Register optimized_code_entry = scratch1;
668
669 __ LoadP(
670 optimized_code_entry,
671 FieldMemOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));
672
673 // Check if the code entry is a Smi. If yes, we interpret it as an
674 // optimisation marker. Otherwise, interpret it as a weak reference to a code
675 // object.
676 __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);
677
678 {
679 // Optimized code slot is a Smi optimization marker.
680
681 // Fall through if no optimization trigger.
682 __ CmpSmiLiteral(optimized_code_entry,
683 Smi::FromEnum(OptimizationMarker::kNone), r0);
684 __ beq(&fallthrough);
685
686 TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
687 OptimizationMarker::kLogFirstExecution,
688 Runtime::kFunctionFirstExecution);
689 TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
690 OptimizationMarker::kCompileOptimized,
691 Runtime::kCompileOptimized_NotConcurrent);
692 TailCallRuntimeIfMarkerEquals(
693 masm, optimized_code_entry,
694 OptimizationMarker::kCompileOptimizedConcurrent,
695 Runtime::kCompileOptimized_Concurrent);
696
697 {
698 // Otherwise, the marker is InOptimizationQueue, so fall through hoping
699 // that an interrupt will eventually update the slot with optimized code.
700 if (FLAG_debug_code) {
701 __ CmpSmiLiteral(
702 optimized_code_entry,
703 Smi::FromEnum(OptimizationMarker::kInOptimizationQueue), r0);
704 __ Assert(eq, AbortReason::kExpectedOptimizationSentinel);
705 }
706 __ b(&fallthrough, Label::kNear);
707 }
708 }
709
710 {
711 // Optimized code slot is a weak reference.
712 __ bind(&optimized_code_slot_is_weak_ref);
713
714 __ LoadWeakValue(optimized_code_entry, optimized_code_entry, &fallthrough);
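// If the weak reference has already been cleared (the optimized code was
// collected), LoadWeakValue branches to the fallthrough path and the closure
// keeps running its unoptimized code.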
715
716 // Check if the optimized code is marked for deopt. If it is, call the
717 // runtime to clear it.
718 Label found_deoptimized_code;
719 __ LoadP(scratch2, FieldMemOperand(optimized_code_entry,
720 Code::kCodeDataContainerOffset));
721 __ LoadW(
722 scratch2,
723 FieldMemOperand(scratch2, CodeDataContainer::kKindSpecificFlagsOffset));
724 __ TestBit(scratch2, Code::kMarkedForDeoptimizationBit, r0);
725 __ bne(&found_deoptimized_code);
726
727 // Optimized code is good, get it into the closure and link the closure into
728 // the optimized functions list, then tail call the optimized code.
729 // The feedback vector is no longer used, so re-use it as a scratch
730 // register.
731 ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
732 scratch2, scratch3, feedback_vector);
733 static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
734 __ AddP(r4, optimized_code_entry,
735 Operand(Code::kHeaderSize - kHeapObjectTag));
736 __ Jump(r4);
737
738 // Optimized code slot contains deoptimized code, evict it and re-enter the
739 // closure's code.
740 __ bind(&found_deoptimized_code);
741 GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
742 }
743
744 // Fall-through if the optimized code cell is clear and there is no
745 // optimization marker.
746 __ bind(&fallthrough);
747 }
748
749 // Advance the current bytecode offset. This simulates what all bytecode
750 // handlers do upon completion of the underlying operation. Will bail out to a
751 // label if the bytecode (without prefix) is a return bytecode.
752 static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
753 Register bytecode_array,
754 Register bytecode_offset,
755 Register bytecode, Register scratch1,
756 Label* if_return) {
757 Register bytecode_size_table = scratch1;
758 Register scratch2 = bytecode;
759 DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
760 bytecode));
761 __ Move(bytecode_size_table,
762 ExternalReference::bytecode_size_table_address());
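// The size table holds three consecutive sections of kBytecodeCount int
// entries (single, wide and extra-wide operand scale); the prefix handling
// below simply advances the table pointer into the matching section.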
763
764 // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
765 Label process_bytecode, extra_wide;
766 STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
767 STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
768 STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
769 STATIC_ASSERT(3 ==
770 static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
771 __ CmpP(bytecode, Operand(0x3));
772 __ bgt(&process_bytecode);
773 __ tmll(bytecode, Operand(0x1));
774 __ bne(&extra_wide);
775
776 // Load the next bytecode and update table to the wide scaled table.
777 __ AddP(bytecode_offset, bytecode_offset, Operand(1));
778 __ LoadlB(bytecode, MemOperand(bytecode_array, bytecode_offset));
779 __ AddP(bytecode_size_table, bytecode_size_table,
780 Operand(kIntSize * interpreter::Bytecodes::kBytecodeCount));
781 __ b(&process_bytecode);
782
783 __ bind(&extra_wide);
784 // Load the next bytecode and update table to the extra wide scaled table.
785 __ AddP(bytecode_offset, bytecode_offset, Operand(1));
786 __ LoadlB(bytecode, MemOperand(bytecode_array, bytecode_offset));
787 __ AddP(bytecode_size_table, bytecode_size_table,
788 Operand(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));
789
790 // Load the size of the current bytecode.
791 __ bind(&process_bytecode);
792
793 // Bailout to the return label if this is a return bytecode.
794 #define JUMP_IF_EQUAL(NAME) \
795 __ CmpP(bytecode, \
796 Operand(static_cast<int>(interpreter::Bytecode::k##NAME))); \
797 __ beq(if_return);
798 RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
799 #undef JUMP_IF_EQUAL
800
801 // Otherwise, load the size of the current bytecode and advance the offset.
802 __ ShiftLeftP(scratch2, bytecode, Operand(2));
803 __ LoadlW(scratch2, MemOperand(bytecode_size_table, scratch2));
804 __ AddP(bytecode_offset, bytecode_offset, scratch2);
805 }
806
807 // Generate code for entering a JS function with the interpreter.
808 // On entry to the function the receiver and arguments have been pushed on the
809 // stack left to right. The actual argument count matches the formal parameter
810 // count expected by the function.
811 //
812 // The live registers are:
813 // o r3: the JS function object being called.
814 // o r5: the incoming new target or generator object
815 // o cp: our context
816 // o pp: the caller's constant pool pointer (if enabled)
817 // o fp: the caller's frame pointer
818 // o sp: stack pointer
819 // o lr: return address
820 //
821 // The function builds an interpreter frame. See InterpreterFrameConstants in
822 // frames.h for its layout.
823 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
824 ProfileEntryHookStub::MaybeCallEntryHook(masm);
825
826 Register closure = r3;
827 Register feedback_vector = r4;
828
829 // Load the feedback vector from the closure.
830 __ LoadP(feedback_vector,
831 FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
832 __ LoadP(feedback_vector,
833 FieldMemOperand(feedback_vector, Cell::kValueOffset));
834 // Read off the optimized code slot in the feedback vector, and if there
835 // is optimized code or an optimization marker, call that instead.
836 MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, r6, r8, r7);
837
838 // Open a frame scope to indicate that there is a frame on the stack. The
839 // MANUAL indicates that the scope shouldn't actually generate code to set up
840 // the frame (that is done below).
841 FrameScope frame_scope(masm, StackFrame::MANUAL);
842 __ PushStandardFrame(closure);
843
844 // Get the bytecode array from the function object and load it into
845 // kInterpreterBytecodeArrayRegister.
846 __ LoadP(r2, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
847 // Load original bytecode array or the debug copy.
848 __ LoadP(kInterpreterBytecodeArrayRegister,
849 FieldMemOperand(r2, SharedFunctionInfo::kFunctionDataOffset));
850 GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, r6);
851
852 // Increment invocation count for the function.
853 __ LoadW(r1, FieldMemOperand(feedback_vector,
854 FeedbackVector::kInvocationCountOffset));
855 __ AddP(r1, r1, Operand(1));
856 __ StoreW(r1, FieldMemOperand(feedback_vector,
857 FeedbackVector::kInvocationCountOffset));
858
859 // Check function data field is actually a BytecodeArray object.
860 if (FLAG_debug_code) {
861 __ TestIfSmi(kInterpreterBytecodeArrayRegister);
862 __ Assert(
863 ne, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
864 __ CompareObjectType(kInterpreterBytecodeArrayRegister, r2, no_reg,
865 BYTECODE_ARRAY_TYPE);
866 __ Assert(
867 eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
868 }
869
870 // Reset code age.
871 __ mov(r1, Operand(BytecodeArray::kNoAgeBytecodeAge));
872 __ StoreByte(r1, FieldMemOperand(kInterpreterBytecodeArrayRegister,
873 BytecodeArray::kBytecodeAgeOffset),
874 r0);
875
876 // Load the initial bytecode offset.
877 __ mov(kInterpreterBytecodeOffsetRegister,
878 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
879
880 // Push bytecode array and Smi tagged bytecode array offset.
881 __ SmiTag(r4, kInterpreterBytecodeOffsetRegister);
882 __ Push(kInterpreterBytecodeArrayRegister, r4);
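// These two slots form part of the fixed interpreter frame; they are read
// back below (and by the frame walker) through
// InterpreterFrameConstants::kBytecodeArrayFromFp/kBytecodeOffsetFromFp.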
883
884 // Allocate the local and temporary register file on the stack.
885 {
886 // Load frame size (word) from the BytecodeArray object.
887 __ LoadlW(r4, FieldMemOperand(kInterpreterBytecodeArrayRegister,
888 BytecodeArray::kFrameSizeOffset));
889
890 // Do a stack check to ensure we don't go over the limit.
891 Label ok;
892 __ SubP(r8, sp, r4);
893 __ LoadRoot(r0, Heap::kRealStackLimitRootIndex);
894 __ CmpLogicalP(r8, r0);
895 __ bge(&ok);
896 __ CallRuntime(Runtime::kThrowStackOverflow);
897 __ bind(&ok);
898
899 // If ok, push undefined as the initial value for all register file entries.
900 // TODO(rmcilroy): Consider doing more than one push per loop iteration.
901 Label loop, no_args;
902 __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
903 __ ShiftRightP(r4, r4, Operand(kPointerSizeLog2));
904 __ LoadAndTestP(r4, r4);
905 __ beq(&no_args);
906 __ LoadRR(r1, r4);
907 __ bind(&loop);
908 __ push(r8);
909 __ SubP(r1, Operand(1));
910 __ bne(&loop);
911 __ bind(&no_args);
912 }
913
914 // If the bytecode array has a valid incoming new target or generator object
915 // register, initialize it with the incoming value that was passed in r5.
916 Label no_incoming_new_target_or_generator_register;
917 __ LoadW(r8, FieldMemOperand(
918 kInterpreterBytecodeArrayRegister,
919 BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
920 __ CmpP(r8, Operand::Zero());
921 __ beq(&no_incoming_new_target_or_generator_register);
922 __ ShiftLeftP(r8, r8, Operand(kPointerSizeLog2));
923 __ StoreP(r5, MemOperand(fp, r8));
924 __ bind(&no_incoming_new_target_or_generator_register);
925
926 // Load accumulator with undefined.
927 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
928 // Load the dispatch table into a register and dispatch to the bytecode
929 // handler at the current bytecode offset.
930 Label do_dispatch;
931 __ bind(&do_dispatch);
932 __ mov(kInterpreterDispatchTableRegister,
933 Operand(ExternalReference::interpreter_dispatch_table_address(
934 masm->isolate())));
935
936 __ LoadlB(r5, MemOperand(kInterpreterBytecodeArrayRegister,
937 kInterpreterBytecodeOffsetRegister));
938 __ ShiftLeftP(r5, r5, Operand(kPointerSizeLog2));
939 __ LoadP(kJavaScriptCallCodeStartRegister,
940 MemOperand(kInterpreterDispatchTableRegister, r5));
941 __ Call(kJavaScriptCallCodeStartRegister);
942
943 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
944
945 // Any returns to the entry trampoline are either due to the return bytecode
946 // or the interpreter tail calling a builtin and then a dispatch.
947
948 // Get bytecode array and bytecode offset from the stack frame.
949 __ LoadP(kInterpreterBytecodeArrayRegister,
950 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
951 __ LoadP(kInterpreterBytecodeOffsetRegister,
952 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
953 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
954
955 // Either return, or advance to the next bytecode and dispatch.
956 Label do_return;
957 __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister,
958 kInterpreterBytecodeOffsetRegister));
959 AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
960 kInterpreterBytecodeOffsetRegister, r3, r4,
961 &do_return);
962 __ b(&do_dispatch);
963
964 __ bind(&do_return);
965 // The return value is in r2.
966 LeaveInterpreterFrame(masm, r4);
967 __ Ret();
968 }
969
970 static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
971 Register scratch,
972 Label* stack_overflow) {
973 // Check the stack for overflow. We are not trying to catch
974 // interruptions (e.g. debug break and preemption) here, so the "real stack
975 // limit" is checked.
976 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
977 // Make scratch the space we have left. The stack might already be overflowed
978 // here which will cause scratch to become negative.
979 __ SubP(scratch, sp, scratch);
980 // Check if the arguments will overflow the stack.
981 __ ShiftLeftP(r0, num_args, Operand(kPointerSizeLog2));
982 __ CmpP(scratch, r0);
983 __ ble(stack_overflow); // Signed comparison.
984 }
985
986 static void Generate_InterpreterPushArgs(MacroAssembler* masm,
987 Register num_args, Register index,
988 Register count, Register scratch) {
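// Pushes |count| arguments onto the stack, reading them from |index| towards
// lower addresses; r0 serves as the loop counter.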
989 Label loop, skip;
990 __ CmpP(count, Operand::Zero());
991 __ beq(&skip);
992 __ AddP(index, index, Operand(kPointerSize)); // Bias up for LoadPU
993 __ LoadRR(r0, count);
994 __ bind(&loop);
995 __ LoadP(scratch, MemOperand(index, -kPointerSize));
996 __ lay(index, MemOperand(index, -kPointerSize));
997 __ push(scratch);
998 __ SubP(r0, Operand(1));
999 __ bne(&loop);
1000 __ bind(&skip);
1001 }
1002
1003 // static
1004 void Builtins::Generate_InterpreterPushArgsThenCallImpl(
1005 MacroAssembler* masm, ConvertReceiverMode receiver_mode,
1006 InterpreterPushArgsMode mode) {
1007 DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
1008 // ----------- S t a t e -------------
1009 // -- r2 : the number of arguments (not including the receiver)
1010 // -- r4 : the address of the first argument to be pushed. Subsequent
1011 // arguments should be consecutive above this, in the same order as
1012 // they are to be pushed onto the stack.
1013 // -- r3 : the target to call (can be any Object).
1014 // -----------------------------------
1015 Label stack_overflow;
1016
1017 // Calculate number of arguments (add one for receiver).
1018 __ AddP(r5, r2, Operand(1));
1019 Generate_StackOverflowCheck(masm, r5, ip, &stack_overflow);
1020
1021 // Push "undefined" as the receiver arg if we need to.
1022 if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1023 __ PushRoot(Heap::kUndefinedValueRootIndex);
1024 __ LoadRR(r5, r2); // Argument count is correct.
1025 }
1026
1027 // Push the arguments.
1028 Generate_InterpreterPushArgs(masm, r5, r4, r5, r6);
1029 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1030 __ Pop(r4); // Pass the spread in a register
1031 __ SubP(r2, r2, Operand(1)); // Subtract one for spread
1032 }
1033
1034 // Call the target.
1035 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1036 __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
1037 RelocInfo::CODE_TARGET);
1038 } else {
1039 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
1040 RelocInfo::CODE_TARGET);
1041 }
1042
1043 __ bind(&stack_overflow);
1044 {
1045 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1046 // Unreachable Code.
1047 __ bkpt(0);
1048 }
1049 }
1050
1051 // static
1052 void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
1053 MacroAssembler* masm, InterpreterPushArgsMode mode) {
1054 // ----------- S t a t e -------------
1055 // -- r2 : argument count (not including receiver)
1056 // -- r5 : new target
1057 // -- r3 : constructor to call
1058 // -- r4 : allocation site feedback if available, undefined otherwise.
1059 // -- r6 : address of the first argument
1060 // -----------------------------------
1061 Label stack_overflow;
1062
1063 // Push a slot for the receiver to be constructed.
1064 __ LoadImmP(r0, Operand::Zero());
1065 __ push(r0);
1066
1067 // Push the arguments (skip if none).
1068 Label skip;
1069 __ CmpP(r2, Operand::Zero());
1070 __ beq(&skip);
1071 Generate_StackOverflowCheck(masm, r2, ip, &stack_overflow);
1072 Generate_InterpreterPushArgs(masm, r2, r6, r2, r7);
1073 __ bind(&skip);
1074
1075 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1076 __ Pop(r4); // Pass the spread in a register
1077 __ SubP(r2, r2, Operand(1)); // Subtract one for spread
1078 } else {
1079 __ AssertUndefinedOrAllocationSite(r4, r7);
1080 }
1081 if (mode == InterpreterPushArgsMode::kArrayFunction) {
1082 __ AssertFunction(r3);
1083
1084 // Tail call to the array construct stub (still in the caller
1085 // context at this point).
1086 Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
1087 __ Jump(code, RelocInfo::CODE_TARGET);
1088 } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1089 // Call the constructor with r2, r3, and r5 unmodified.
1090 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
1091 RelocInfo::CODE_TARGET);
1092 } else {
1093 DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
1094 // Call the constructor with r2, r3, and r5 unmodified.
1095 __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
1096 }
1097
1098 __ bind(&stack_overflow);
1099 {
1100 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1101 // Unreachable Code.
1102 __ bkpt(0);
1103 }
1104 }
1105
1106 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1107 // Set the return address to the correct point in the interpreter entry
1108 // trampoline.
1109 Label builtin_trampoline, trampoline_loaded;
1110 Smi* interpreter_entry_return_pc_offset(
1111 masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1112 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
1113
1114 // If the SFI function_data is an InterpreterData, get the trampoline stored
1115 // in it, otherwise get the trampoline from the builtins list.
1116 __ LoadP(r4, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
1117 __ LoadP(r4, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
1118 __ LoadP(r4, FieldMemOperand(r4, SharedFunctionInfo::kFunctionDataOffset));
1119 __ CompareObjectType(r4, kInterpreterDispatchTableRegister,
1120 kInterpreterDispatchTableRegister,
1121 INTERPRETER_DATA_TYPE);
1122 __ bne(&builtin_trampoline);
1123
1124 __ LoadP(r4,
1125 FieldMemOperand(r4, InterpreterData::kInterpreterTrampolineOffset));
1126 __ b(&trampoline_loaded);
1127
1128 __ bind(&builtin_trampoline);
1129 __ Move(r4, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));
1130
1131 __ bind(&trampoline_loaded);
1132 __ AddP(r14, r4, Operand(interpreter_entry_return_pc_offset->value() +
1133 Code::kHeaderSize - kHeapObjectTag));
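// r14 now points into the interpreter entry trampoline just past its dispatch
// call, so a return from the bytecode handler resumes the normal dispatch
// loop there.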
1134
1135 // Initialize the dispatch table register.
1136 __ Move(
1137 kInterpreterDispatchTableRegister,
1138 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
1139
1140 // Get the bytecode array pointer from the frame.
1141 __ LoadP(kInterpreterBytecodeArrayRegister,
1142 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1143
1144 if (FLAG_debug_code) {
1145 // Check function data field is actually a BytecodeArray object.
1146 __ TestIfSmi(kInterpreterBytecodeArrayRegister);
1147 __ Assert(
1148 ne, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1149 __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
1150 BYTECODE_ARRAY_TYPE);
1151 __ Assert(
1152 eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1153 }
1154
1155 // Get the target bytecode offset from the frame.
1156 __ LoadP(kInterpreterBytecodeOffsetRegister,
1157 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1158 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1159
1160 // Dispatch to the target bytecode.
1161 __ LoadlB(ip, MemOperand(kInterpreterBytecodeArrayRegister,
1162 kInterpreterBytecodeOffsetRegister));
1163 __ ShiftLeftP(ip, ip, Operand(kPointerSizeLog2));
1164 __ LoadP(kJavaScriptCallCodeStartRegister,
1165 MemOperand(kInterpreterDispatchTableRegister, ip));
1166 __ Jump(kJavaScriptCallCodeStartRegister);
1167 }
1168
1169 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
1170 // Get bytecode array and bytecode offset from the stack frame.
1171 __ LoadP(kInterpreterBytecodeArrayRegister,
1172 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1173 __ LoadP(kInterpreterBytecodeOffsetRegister,
1174 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1175 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1176
1177 // Load the current bytecode.
1178 __ LoadlB(r3, MemOperand(kInterpreterBytecodeArrayRegister,
1179 kInterpreterBytecodeOffsetRegister));
1180
1181 // Advance to the next bytecode.
1182 Label if_return;
1183 AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
1184 kInterpreterBytecodeOffsetRegister, r3, r4,
1185 &if_return);
1186
1187 // Convert new bytecode offset to a Smi and save in the stackframe.
1188 __ SmiTag(r4, kInterpreterBytecodeOffsetRegister);
1189 __ StoreP(r4,
1190 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1191
1192 Generate_InterpreterEnterBytecode(masm);
1193
1194 // We should never take the if_return path.
1195 __ bind(&if_return);
1196 __ Abort(AbortReason::kInvalidBytecodeAdvance);
1197 }
1198
1199 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1200 Generate_InterpreterEnterBytecode(masm);
1201 }
1202
1203 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1204 // ----------- S t a t e -------------
1205 // -- r2 : argument count (preserved for callee)
1206 // -- r3 : new target (preserved for callee)
1207 // -- r5 : target function (preserved for callee)
1208 // -----------------------------------
1209 Label failed;
1210 {
1211 FrameScope scope(masm, StackFrame::INTERNAL);
1212 // Preserve argument count for later compare.
1213 __ Move(r6, r2);
1214 // Push a copy of the target function and the new target.
1215 __ SmiTag(r2);
1216 // Push another copy as a parameter to the runtime call.
1217 __ Push(r2, r3, r5, r3);
1218
1219 // Copy arguments from caller (stdlib, foreign, heap).
1220 Label args_done;
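// Depending on the actual argument count (0, 1, 2 or 3+), push the arguments
// that were provided and pad the remainder with undefined, so the runtime
// call below always receives the stdlib, foreign and heap slots.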
1221 for (int j = 0; j < 4; ++j) {
1222 Label over;
1223 if (j < 3) {
1224 __ CmpP(r6, Operand(j));
1225 __ b(ne, &over);
1226 }
1227 for (int i = j - 1; i >= 0; --i) {
1228 __ LoadP(r6, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
1229 i * kPointerSize));
1230 __ push(r6);
1231 }
1232 for (int i = 0; i < 3 - j; ++i) {
1233 __ PushRoot(Heap::kUndefinedValueRootIndex);
1234 }
1235 if (j < 3) {
1236 __ jmp(&args_done);
1237 __ bind(&over);
1238 }
1239 }
1240 __ bind(&args_done);
1241
1242 // Call runtime; on success, unwind this frame and then the parent frame.
1243 __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
1244 // A smi 0 is returned on failure, an object on success.
1245 __ JumpIfSmi(r2, &failed);
1246
1247 __ Drop(2);
1248 __ pop(r6);
1249 __ SmiUntag(r6);
1250 scope.GenerateLeaveFrame();
1251
1252 __ AddP(r6, r6, Operand(1));
1253 __ Drop(r6);
1254 __ Ret();
1255
1256 __ bind(&failed);
1257 // Restore target function and new target.
1258 __ Pop(r2, r3, r5);
1259 __ SmiUntag(r2);
1260 }
1261 // On failure, tail call back to regular js by re-calling the function
1262 // which has been reset to the compile lazy builtin.
1263 static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
1264 __ LoadP(r4, FieldMemOperand(r3, JSFunction::kCodeOffset));
1265 __ AddP(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
1266 __ JumpToJSEntry(r4);
1267 }
1268
1269 namespace {
1270 void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
1271 bool java_script_builtin,
1272 bool with_result) {
1273 const RegisterConfiguration* config(RegisterConfiguration::Default());
1274 int allocatable_register_count = config->num_allocatable_general_registers();
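// The deoptimizer has materialized a continuation frame containing all
// allocatable registers followed by the fixed frame. Restore the registers
// from it, reload fp and the return address, then tail-call the continuation
// builtin whose Code object is popped into ip below.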
1275 if (with_result) {
1276 // Overwrite the hole inserted by the deoptimizer with the return value from
1277 // the LAZY deopt point.
1278 __ StoreP(
1279 r2, MemOperand(
1280 sp, config->num_allocatable_general_registers() * kPointerSize +
1281 BuiltinContinuationFrameConstants::kFixedFrameSize));
1282 }
1283 for (int i = allocatable_register_count - 1; i >= 0; --i) {
1284 int code = config->GetAllocatableGeneralCode(i);
1285 __ Pop(Register::from_code(code));
1286 if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
1287 __ SmiUntag(Register::from_code(code));
1288 }
1289 }
1290 __ LoadP(
1291 fp,
1292 MemOperand(sp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
1293 __ Pop(ip);
1294 __ AddP(sp, sp,
1295 Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
1296 __ Pop(r0);
1297 __ LoadRR(r14, r0);
1298 __ AddP(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
1299 __ Jump(ip);
1300 }
1301 } // namespace
1302
1303 void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
1304 Generate_ContinueToBuiltinHelper(masm, false, false);
1305 }
1306
1307 void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
1308 MacroAssembler* masm) {
1309 Generate_ContinueToBuiltinHelper(masm, false, true);
1310 }
1311
1312 void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
1313 Generate_ContinueToBuiltinHelper(masm, true, false);
1314 }
1315
1316 void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
1317 MacroAssembler* masm) {
1318 Generate_ContinueToBuiltinHelper(masm, true, true);
1319 }
1320
1321 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1322 {
1323 FrameScope scope(masm, StackFrame::INTERNAL);
1324 __ CallRuntime(Runtime::kNotifyDeoptimized);
1325 }
1326
1327 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r2.code());
1328 __ pop(r2);
1329 __ Ret();
1330 }
1331
1332 static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
1333 bool has_handler_frame) {
1334 // Lookup the function in the JavaScript frame.
1335 if (has_handler_frame) {
1336 __ LoadP(r2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1337 __ LoadP(r2, MemOperand(r2, JavaScriptFrameConstants::kFunctionOffset));
1338 } else {
1339 __ LoadP(r2, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1340 }
1341
1342 {
1343 FrameScope scope(masm, StackFrame::INTERNAL);
1344 // Pass function as argument.
1345 __ push(r2);
1346 __ CallRuntime(Runtime::kCompileForOnStackReplacement);
1347 }
1348
1349 // If the code object is null, just return to the caller.
1350 Label skip;
1351 __ CmpSmiLiteral(r2, Smi::kZero, r0);
1352 __ bne(&skip);
1353 __ Ret();
1354
1355 __ bind(&skip);
1356
1357 // Drop any potential handler frame that may be sitting on top of the actual
1358 // JavaScript frame. This is the case when OSR is triggered from bytecode.
1359 if (has_handler_frame) {
1360 __ LeaveFrame(StackFrame::STUB);
1361 }
1362
1363 // Load deoptimization data from the code object.
1364 // <deopt_data> = <code>[#deoptimization_data_offset]
1365 __ LoadP(r3, FieldMemOperand(r2, Code::kDeoptimizationDataOffset));
1366
1367 // Load the OSR entrypoint offset from the deoptimization data.
1368 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1369 __ LoadP(r3, FieldMemOperand(r3, FixedArray::OffsetOfElementAt(
1370 DeoptimizationData::kOsrPcOffsetIndex)));
1371 __ SmiUntag(r3);
1372
1373 // Compute the target address = code_obj + header_size + osr_offset
1374 // <entry_addr> = <code_obj> + #header_size + <osr_offset>
1375 __ AddP(r2, r3);
1376 __ AddP(r0, r2, Operand(Code::kHeaderSize - kHeapObjectTag));
1377 __ LoadRR(r14, r0);
1378
1379 // And "return" to the OSR entry point of the function.
1380 __ Ret();
1381 }
1382
1383 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1384 Generate_OnStackReplacementHelper(masm, false);
1385 }
1386
1387 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
1388 Generate_OnStackReplacementHelper(masm, true);
1389 }
1390
1391 // static
1392 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1393 // ----------- S t a t e -------------
1394 // -- r2 : argc
1395 // -- sp[0] : argArray
1396 // -- sp[4] : thisArg
1397 // -- sp[8] : receiver
1398 // -----------------------------------
1399
1400 // 1. Load receiver into r3, argArray into r4 (if present), remove all
1401 // arguments from the stack (including the receiver), and push thisArg (if
1402 // present) instead.
1403 {
1404 Label skip;
1405 Register arg_size = r7;
1406 Register new_sp = r5;
1407 Register scratch = r6;
1408 __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
1409 __ AddP(new_sp, sp, arg_size);
1410 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
1411 __ LoadRR(r4, scratch);
1412 __ LoadP(r3, MemOperand(new_sp, 0)); // receiver
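// The single compare against kPointerSize below drives both checks: blt
// skips everything when there are no arguments, and the following beq skips
// the argArray load when there is exactly one argument.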
1413 __ CmpP(arg_size, Operand(kPointerSize));
1414 __ blt(&skip);
1415 __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize)); // thisArg
1416 __ beq(&skip);
1417 __ LoadP(r4, MemOperand(new_sp, 2 * -kPointerSize)); // argArray
1418 __ bind(&skip);
1419 __ LoadRR(sp, new_sp);
1420 __ StoreP(scratch, MemOperand(sp, 0));
1421 }
1422
1423 // ----------- S t a t e -------------
1424 // -- r4 : argArray
1425 // -- r3 : receiver
1426 // -- sp[0] : thisArg
1427 // -----------------------------------
1428
1429 // 2. We don't need to check explicitly for callable receiver here,
1430 // since that's the first thing the Call/CallWithArrayLike builtins
1431 // will do.
1432
1433 // 3. Tail call with no arguments if argArray is null or undefined.
1434 Label no_arguments;
1435 __ JumpIfRoot(r4, Heap::kNullValueRootIndex, &no_arguments);
1436 __ JumpIfRoot(r4, Heap::kUndefinedValueRootIndex, &no_arguments);
1437
1438 // 4a. Apply the receiver to the given argArray.
1439 __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1440 RelocInfo::CODE_TARGET);
1441
1442 // 4b. The argArray is either null or undefined, so we tail call without any
1443 // arguments to the receiver.
1444 __ bind(&no_arguments);
1445 {
1446 __ LoadImmP(r2, Operand::Zero());
1447 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1448 }
1449 }
1450
1451 // static
Generate_FunctionPrototypeCall(MacroAssembler * masm)1452 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1453 // 1. Make sure we have at least one argument.
1454 // r2: actual number of arguments
1455 {
1456 Label done;
1457 __ CmpP(r2, Operand::Zero());
1458 __ bne(&done, Label::kNear);
1459 __ PushRoot(Heap::kUndefinedValueRootIndex);
1460 __ AddP(r2, Operand(1));
1461 __ bind(&done);
1462 }
1463
1464 // r2: actual number of arguments
1465 // 2. Get the callable to call (passed as receiver) from the stack.
1466 __ ShiftLeftP(r4, r2, Operand(kPointerSizeLog2));
1467 __ LoadP(r3, MemOperand(sp, r4));
1468
1469 // 3. Shift arguments and return address one slot down on the stack
1470 // (overwriting the original receiver). Adjust argument count to make
1471 // the original first argument the new receiver.
1472 // r2: actual number of arguments
1473 // r3: callable
1474 {
1475 Label loop;
1476 // Calculate the copy start address (destination). Copy end address is sp.
1477 __ AddP(r4, sp, r4);
1478
1479 __ bind(&loop);
1480 __ LoadP(ip, MemOperand(r4, -kPointerSize));
1481 __ StoreP(ip, MemOperand(r4));
1482 __ SubP(r4, Operand(kPointerSize));
1483 __ CmpP(r4, sp);
1484 __ bne(&loop);
1485 // Adjust the actual number of arguments and remove the top element
1486 // (which is a copy of the last argument).
1487 __ SubP(r2, Operand(1));
1488 __ pop();
1489 }
1490
1491 // 4. Call the callable.
1492 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1493 }
1494
Generate_ReflectApply(MacroAssembler * masm)1495 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1496 // ----------- S t a t e -------------
1497 // -- r2 : argc
1498 // -- sp[0] : argumentsList
1499 // -- sp[4] : thisArgument
1500 // -- sp[8] : target
1501 // -- sp[12] : receiver
1502 // -----------------------------------
1503
1504 // 1. Load target into r3 (if present), argumentsList into r4 (if present),
1505 // remove all arguments from the stack (including the receiver), and push
1506 // thisArgument (if present) instead.
1507 {
1508 Label skip;
1509 Register arg_size = r7;
1510 Register new_sp = r5;
1511 Register scratch = r6;
1512 __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
1513 __ AddP(new_sp, sp, arg_size);
1514 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1515 __ LoadRR(scratch, r3);
1516 __ LoadRR(r4, r3);
1517 __ CmpP(arg_size, Operand(kPointerSize));
1518 __ blt(&skip);
1519 __ LoadP(r3, MemOperand(new_sp, 1 * -kPointerSize)); // target
1520 __ beq(&skip);
1521 __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize)); // thisArgument
1522 __ CmpP(arg_size, Operand(2 * kPointerSize));
1523 __ beq(&skip);
1524 __ LoadP(r4, MemOperand(new_sp, 3 * -kPointerSize)); // argumentsList
1525 __ bind(&skip);
1526 __ LoadRR(sp, new_sp);
1527 __ StoreP(scratch, MemOperand(sp, 0));
1528 }
1529
1530 // ----------- S t a t e -------------
1531 // -- r4 : argumentsList
1532 // -- r3 : target
1533 // -- sp[0] : thisArgument
1534 // -----------------------------------
1535
1536 // 2. We don't need to check explicitly for callable target here,
1537 // since that's the first thing the Call/CallWithArrayLike builtins
1538 // will do.
1539
1540  // 3. Apply the target to the given argumentsList.
1541 __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1542 RelocInfo::CODE_TARGET);
1543 }
1544
Generate_ReflectConstruct(MacroAssembler * masm)1545 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1546 // ----------- S t a t e -------------
1547 // -- r2 : argc
1548 // -- sp[0] : new.target (optional)
1549 // -- sp[4] : argumentsList
1550 // -- sp[8] : target
1551 // -- sp[12] : receiver
1552 // -----------------------------------
1553
1554 // 1. Load target into r3 (if present), argumentsList into r4 (if present),
1555 // new.target into r5 (if present, otherwise use target), remove all
1556 // arguments from the stack (including the receiver), and push thisArgument
1557 // (if present) instead.
1558 {
1559 Label skip;
1560 Register arg_size = r7;
1561 Register new_sp = r6;
1562 __ ShiftLeftP(arg_size, r2, Operand(kPointerSizeLog2));
1563 __ AddP(new_sp, sp, arg_size);
1564 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex);
1565 __ LoadRR(r4, r3);
1566 __ LoadRR(r5, r3);
1567 __ StoreP(r3, MemOperand(new_sp, 0)); // receiver (undefined)
1568 __ CmpP(arg_size, Operand(kPointerSize));
1569 __ blt(&skip);
1570 __ LoadP(r3, MemOperand(new_sp, 1 * -kPointerSize)); // target
1571 __ LoadRR(r5, r3); // new.target defaults to target
1572 __ beq(&skip);
1573 __ LoadP(r4, MemOperand(new_sp, 2 * -kPointerSize)); // argumentsList
1574 __ CmpP(arg_size, Operand(2 * kPointerSize));
1575 __ beq(&skip);
1576 __ LoadP(r5, MemOperand(new_sp, 3 * -kPointerSize)); // new.target
1577 __ bind(&skip);
1578 __ LoadRR(sp, new_sp);
1579 }
1580
1581 // ----------- S t a t e -------------
1582 // -- r4 : argumentsList
1583 // -- r5 : new.target
1584 // -- r3 : target
1585 // -- sp[0] : receiver (undefined)
1586 // -----------------------------------
1587
1588 // 2. We don't need to check explicitly for constructor target here,
1589 // since that's the first thing the Construct/ConstructWithArrayLike
1590 // builtins will do.
1591
1592 // 3. We don't need to check explicitly for constructor new.target here,
1593 // since that's the second thing the Construct/ConstructWithArrayLike
1594 // builtins will do.
1595
1596 // 4. Construct the target with the given new.target and argumentsList.
1597 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
1598 RelocInfo::CODE_TARGET);
1599 }
1600
EnterArgumentsAdaptorFrame(MacroAssembler * masm)1601 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1602 __ SmiTag(r2);
1603 __ Load(r6, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
1604  // Stack is updated as follows:
1605 // old SP --->
1606 // R14 Return Addr
1607 // Old FP <--- New FP
1608 // Argument Adapter SMI
1609 // Function
1610 // ArgC as SMI
1611 // Padding <--- New SP
1612 __ lay(sp, MemOperand(sp, -5 * kPointerSize));
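  // Five slots are reserved here; the padding slot is pushed separately below,
  // completing the layout sketched above.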
1613
1614 // Cleanse the top nibble of 31-bit pointers.
1615 __ CleanseP(r14);
1616 __ StoreP(r14, MemOperand(sp, 4 * kPointerSize));
1617 __ StoreP(fp, MemOperand(sp, 3 * kPointerSize));
1618 __ StoreP(r6, MemOperand(sp, 2 * kPointerSize));
1619 __ StoreP(r3, MemOperand(sp, 1 * kPointerSize));
1620 __ StoreP(r2, MemOperand(sp, 0 * kPointerSize));
1621 __ Push(Smi::kZero); // Padding.
1622 __ la(fp,
1623 MemOperand(sp, ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp));
1624 }
1625
LeaveArgumentsAdaptorFrame(MacroAssembler * masm)1626 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1627 // ----------- S t a t e -------------
1628 // -- r2 : result being passed through
1629 // -----------------------------------
1630 // Get the number of arguments passed (as a smi), tear down the frame and
1631 // then tear down the parameters.
1632 __ LoadP(r3, MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1633 int stack_adjustment = kPointerSize; // adjust for receiver
1634 __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
1635 __ SmiToPtrArrayOffset(r3, r3);
1636 __ lay(sp, MemOperand(sp, r3));
1637 }
1638
1639 // static
Generate_CallOrConstructVarargs(MacroAssembler * masm,Handle<Code> code)1640 void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
1641 Handle<Code> code) {
1642 // ----------- S t a t e -------------
1643 // -- r3 : target
1644 // -- r2 : number of parameters on the stack (not including the receiver)
1645 // -- r4 : arguments list (a FixedArray)
1646 // -- r6 : len (number of elements to push from args)
1647 // -- r5 : new.target (for [[Construct]])
1648 // -----------------------------------
1649
1650 Register scratch = ip;
1651
1652 if (masm->emit_debug_code()) {
1653 // Allow r4 to be a FixedArray, or a FixedDoubleArray if r6 == 0.
1654 Label ok, fail;
1655 __ AssertNotSmi(r4);
1656 __ LoadP(scratch, FieldMemOperand(r4, HeapObject::kMapOffset));
1657 __ LoadHalfWordP(scratch,
1658 FieldMemOperand(scratch, Map::kInstanceTypeOffset));
1659 __ CmpP(scratch, Operand(FIXED_ARRAY_TYPE));
1660 __ beq(&ok);
1661 __ CmpP(scratch, Operand(FIXED_DOUBLE_ARRAY_TYPE));
1662 __ bne(&fail);
1663 __ CmpP(r6, Operand::Zero());
1664 __ beq(&ok);
1665 // Fall through.
1666 __ bind(&fail);
1667 __ Abort(AbortReason::kOperandIsNotAFixedArray);
1668
1669 __ bind(&ok);
1670 }
1671
1672 // Check for stack overflow.
1673 {
1674 // Check the stack for overflow. We are not trying to catch interruptions
1675 // (i.e. debug break and preemption) here, so check the "real stack limit".
1676 Label done;
1677 __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
1678 // Make ip the space we have left. The stack might already be overflowed
1679 // here which will cause ip to become negative.
1680 __ SubP(ip, sp, ip);
1681 // Check if the arguments will overflow the stack.
1682 __ ShiftLeftP(r0, r6, Operand(kPointerSizeLog2));
1683 __ CmpP(ip, r0); // Signed comparison.
1684 __ bgt(&done);
1685 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1686 __ bind(&done);
1687 }
1688
1689 // Push arguments onto the stack (thisArgument is already on the stack).
1690 {
1691 Label loop, no_args, skip;
1692 __ CmpP(r6, Operand::Zero());
1693 __ beq(&no_args);
1694 __ AddP(r4, r4,
1695 Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
1696 __ LoadRR(r1, r6);
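    // r1 is the loop counter for BranchOnCount, which decrements it and
    // branches back to the label while it remains non-zero.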
1697 __ bind(&loop);
1698 __ LoadP(ip, MemOperand(r4, kPointerSize));
1699 __ la(r4, MemOperand(r4, kPointerSize));
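    // Elided elements are stored as the hole; replace them with undefined so
    // the callee only ever sees real values.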
1700 __ CompareRoot(ip, Heap::kTheHoleValueRootIndex);
1701 __ bne(&skip, Label::kNear);
1702 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1703 __ bind(&skip);
1704 __ push(ip);
1705 __ BranchOnCount(r1, &loop);
1706 __ bind(&no_args);
1707 __ AddP(r2, r2, r6);
1708 }
1709
1710 // Tail-call to the actual Call or Construct builtin.
1711 __ Jump(code, RelocInfo::CODE_TARGET);
1712 }
1713
1714 // static
Generate_CallOrConstructForwardVarargs(MacroAssembler * masm,CallOrConstructMode mode,Handle<Code> code)1715 void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
1716 CallOrConstructMode mode,
1717 Handle<Code> code) {
1718 // ----------- S t a t e -------------
1719 // -- r2 : the number of arguments (not including the receiver)
1720 // -- r5 : the new.target (for [[Construct]] calls)
1721 // -- r3 : the target to call (can be any Object)
1722 // -- r4 : start index (to support rest parameters)
1723 // -----------------------------------
1724
1725 Register scratch = r8;
1726
1727 if (mode == CallOrConstructMode::kConstruct) {
1728 Label new_target_constructor, new_target_not_constructor;
1729 __ JumpIfSmi(r5, &new_target_not_constructor);
1730 __ LoadP(scratch, FieldMemOperand(r5, HeapObject::kMapOffset));
1731 __ LoadlB(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
1732 __ tmll(scratch, Operand(Map::IsConstructorBit::kShift));
1733 __ bne(&new_target_constructor);
1734 __ bind(&new_target_not_constructor);
1735 {
1736 FrameScope scope(masm, StackFrame::MANUAL);
1737 __ EnterFrame(StackFrame::INTERNAL);
1738 __ Push(r5);
1739 __ CallRuntime(Runtime::kThrowNotConstructor);
1740 }
1741 __ bind(&new_target_constructor);
1742 }
1743
1744 // Check if we have an arguments adaptor frame below the function frame.
1745 Label arguments_adaptor, arguments_done;
1746 __ LoadP(r6, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1747 __ LoadP(ip, MemOperand(r6, CommonFrameConstants::kContextOrFrameTypeOffset));
1748 __ CmpP(ip, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
1749 __ beq(&arguments_adaptor);
1750 {
1751 __ LoadP(r7, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1752 __ LoadP(r7, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
1753 __ LoadLogicalHalfWordP(
1754 r7,
1755 FieldMemOperand(r7, SharedFunctionInfo::kFormalParameterCountOffset));
1756 __ LoadRR(r6, fp);
1757 }
1758 __ b(&arguments_done);
1759 __ bind(&arguments_adaptor);
1760 {
1761 // Load the length from the ArgumentsAdaptorFrame.
1762 __ LoadP(r7, MemOperand(r6, ArgumentsAdaptorFrameConstants::kLengthOffset));
1763 __ SmiUntag(r7);
1764 }
1765 __ bind(&arguments_done);
1766
1767 Label stack_done, stack_overflow;
1768 __ SubP(r7, r7, r4);
1769 __ CmpP(r7, Operand::Zero());
1770 __ ble(&stack_done);
1771 {
1772 // Check for stack overflow.
1773 Generate_StackOverflowCheck(masm, r7, r4, &stack_overflow);
1774
1775 // Forward the arguments from the caller frame.
1776 {
1777 Label loop;
1778 __ AddP(r6, r6, Operand(kPointerSize));
1779 __ AddP(r2, r2, r7);
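      // Push the r7 forwarded arguments starting from the highest slot, so
      // they retain their original order on the new stack.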
1780 __ bind(&loop);
1781 {
1782 __ ShiftLeftP(ip, r7, Operand(kPointerSizeLog2));
1783 __ LoadP(ip, MemOperand(r6, ip));
1784 __ push(ip);
1785 __ SubP(r7, r7, Operand(1));
1786 __ CmpP(r7, Operand::Zero());
1787 __ bne(&loop);
1788 }
1789 }
1790 }
1791 __ b(&stack_done);
1792 __ bind(&stack_overflow);
1793 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1794 __ bind(&stack_done);
1795
1796 // Tail-call to the {code} handler.
1797 __ Jump(code, RelocInfo::CODE_TARGET);
1798 }
1799
1800 // static
Generate_CallFunction(MacroAssembler * masm,ConvertReceiverMode mode)1801 void Builtins::Generate_CallFunction(MacroAssembler* masm,
1802 ConvertReceiverMode mode) {
1803 // ----------- S t a t e -------------
1804 // -- r2 : the number of arguments (not including the receiver)
1805 // -- r3 : the function to call (checked to be a JSFunction)
1806 // -----------------------------------
1807 __ AssertFunction(r3);
1808
1809 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
1810 // Check that the function is not a "classConstructor".
1811 Label class_constructor;
1812 __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
1813 __ LoadlW(r5, FieldMemOperand(r4, SharedFunctionInfo::kFlagsOffset));
1814 __ TestBitMask(r5, SharedFunctionInfo::IsClassConstructorBit::kMask, r0);
1815 __ bne(&class_constructor);
1816
1817 // Enter the context of the function; ToObject has to run in the function
1818 // context, and we also need to take the global proxy from the function
1819 // context in case of conversion.
1820 __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset));
1821 // We need to convert the receiver for non-native sloppy mode functions.
1822 Label done_convert;
1823 __ AndP(r0, r5,
1824 Operand(SharedFunctionInfo::IsStrictBit::kMask |
1825 SharedFunctionInfo::IsNativeBit::kMask));
1826 __ bne(&done_convert);
1827 {
1828 // ----------- S t a t e -------------
1829 // -- r2 : the number of arguments (not including the receiver)
1830 // -- r3 : the function to call (checked to be a JSFunction)
1831 // -- r4 : the shared function info.
1832 // -- cp : the function context.
1833 // -----------------------------------
1834
1835 if (mode == ConvertReceiverMode::kNullOrUndefined) {
1836 // Patch receiver to global proxy.
1837 __ LoadGlobalProxy(r5);
1838 } else {
1839 Label convert_to_object, convert_receiver;
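      // The receiver sits above the arguments, at sp + argc * kPointerSize.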
1840 __ ShiftLeftP(r5, r2, Operand(kPointerSizeLog2));
1841 __ LoadP(r5, MemOperand(sp, r5));
1842 __ JumpIfSmi(r5, &convert_to_object);
1843 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
1844 __ CompareObjectType(r5, r6, r6, FIRST_JS_RECEIVER_TYPE);
1845 __ bge(&done_convert);
1846 if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
1847 Label convert_global_proxy;
1848 __ JumpIfRoot(r5, Heap::kUndefinedValueRootIndex,
1849 &convert_global_proxy);
1850 __ JumpIfNotRoot(r5, Heap::kNullValueRootIndex, &convert_to_object);
1851 __ bind(&convert_global_proxy);
1852 {
1853 // Patch receiver to global proxy.
1854 __ LoadGlobalProxy(r5);
1855 }
1856 __ b(&convert_receiver);
1857 }
1858 __ bind(&convert_to_object);
1859 {
1860 // Convert receiver using ToObject.
1861 // TODO(bmeurer): Inline the allocation here to avoid building the frame
1862 // in the fast case? (fall back to AllocateInNewSpace?)
1863 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1864 __ SmiTag(r2);
1865 __ Push(r2, r3);
1866 __ LoadRR(r2, r5);
1867 __ Push(cp);
1868 __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
1869 RelocInfo::CODE_TARGET);
1870 __ Pop(cp);
1871 __ LoadRR(r5, r2);
1872 __ Pop(r2, r3);
1873 __ SmiUntag(r2);
1874 }
1875 __ LoadP(r4, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
1876 __ bind(&convert_receiver);
1877 }
1878 __ ShiftLeftP(r6, r2, Operand(kPointerSizeLog2));
1879 __ StoreP(r5, MemOperand(sp, r6));
1880 }
1881 __ bind(&done_convert);
1882
1883 // ----------- S t a t e -------------
1884 // -- r2 : the number of arguments (not including the receiver)
1885 // -- r3 : the function to call (checked to be a JSFunction)
1886 // -- r4 : the shared function info.
1887 // -- cp : the function context.
1888 // -----------------------------------
1889
1890 __ LoadLogicalHalfWordP(
1891 r4, FieldMemOperand(r4, SharedFunctionInfo::kFormalParameterCountOffset));
1892 ParameterCount actual(r2);
1893 ParameterCount expected(r4);
1894 __ InvokeFunctionCode(r3, no_reg, expected, actual, JUMP_FUNCTION);
1895
1896 // The function is a "classConstructor", need to raise an exception.
1897 __ bind(&class_constructor);
1898 {
1899 FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL);
1900 __ push(r3);
1901 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
1902 }
1903 }
1904
1905 namespace {
1906
Generate_PushBoundArguments(MacroAssembler * masm)1907 void Generate_PushBoundArguments(MacroAssembler* masm) {
1908 // ----------- S t a t e -------------
1909 // -- r2 : the number of arguments (not including the receiver)
1910 // -- r3 : target (checked to be a JSBoundFunction)
1911 // -- r5 : new.target (only in case of [[Construct]])
1912 // -----------------------------------
1913
1914 // Load [[BoundArguments]] into r4 and length of that into r6.
1915 Label no_bound_arguments;
1916 __ LoadP(r4, FieldMemOperand(r3, JSBoundFunction::kBoundArgumentsOffset));
1917 __ LoadP(r6, FieldMemOperand(r4, FixedArray::kLengthOffset));
1918 __ SmiUntag(r6);
1919 __ LoadAndTestP(r6, r6);
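  // Nothing to do if the bound-arguments list is empty.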
1920 __ beq(&no_bound_arguments);
1921 {
1922 // ----------- S t a t e -------------
1923 // -- r2 : the number of arguments (not including the receiver)
1924 // -- r3 : target (checked to be a JSBoundFunction)
1925 // -- r4 : the [[BoundArguments]] (implemented as FixedArray)
1926 // -- r5 : new.target (only in case of [[Construct]])
1927 // -- r6 : the number of [[BoundArguments]]
1928 // -----------------------------------
1929
1930 // Reserve stack space for the [[BoundArguments]].
1931 {
1932 Label done;
1933 __ LoadRR(r8, sp); // preserve previous stack pointer
1934 __ ShiftLeftP(r9, r6, Operand(kPointerSizeLog2));
1935 __ SubP(sp, sp, r9);
1936 // Check the stack for overflow. We are not trying to catch interruptions
1937 // (i.e. debug break and preemption) here, so check the "real stack
1938 // limit".
1939 __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
1940 __ bgt(&done); // Signed comparison.
1941 // Restore the stack pointer.
1942 __ LoadRR(sp, r8);
1943 {
1944 FrameScope scope(masm, StackFrame::MANUAL);
1945 __ EnterFrame(StackFrame::INTERNAL);
1946 __ CallRuntime(Runtime::kThrowStackOverflow);
1947 }
1948 __ bind(&done);
1949 }
1950
1951 // Relocate arguments down the stack.
1952 // -- r2 : the number of arguments (not including the receiver)
1953 // -- r8 : the previous stack pointer
1954 // -- r9: the size of the [[BoundArguments]]
1955 {
1956 Label skip, loop;
1957 __ LoadImmP(r7, Operand::Zero());
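      // r7 is the running byte offset from the new sp; it continues to advance
      // through the bound-arguments copy below.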
1958 __ CmpP(r2, Operand::Zero());
1959 __ beq(&skip);
1960 __ LoadRR(r1, r2);
1961 __ bind(&loop);
1962 __ LoadP(r0, MemOperand(r8, r7));
1963 __ StoreP(r0, MemOperand(sp, r7));
1964 __ AddP(r7, r7, Operand(kPointerSize));
1965 __ BranchOnCount(r1, &loop);
1966 __ bind(&skip);
1967 }
1968
1969 // Copy [[BoundArguments]] to the stack (below the arguments).
1970 {
1971 Label loop;
1972 __ AddP(r4, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1973 __ AddP(r4, r4, r9);
1974 __ LoadRR(r1, r6);
1975 __ bind(&loop);
1976 __ LoadP(r0, MemOperand(r4, -kPointerSize));
1977 __ lay(r4, MemOperand(r4, -kPointerSize));
1978 __ StoreP(r0, MemOperand(sp, r7));
1979 __ AddP(r7, r7, Operand(kPointerSize));
1980 __ BranchOnCount(r1, &loop);
1981 __ AddP(r2, r2, r6);
1982 }
1983 }
1984 __ bind(&no_bound_arguments);
1985 }
1986
1987 } // namespace
1988
1989 // static
Generate_CallBoundFunctionImpl(MacroAssembler * masm)1990 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
1991 // ----------- S t a t e -------------
1992 // -- r2 : the number of arguments (not including the receiver)
1993 // -- r3 : the function to call (checked to be a JSBoundFunction)
1994 // -----------------------------------
1995 __ AssertBoundFunction(r3);
1996
1997 // Patch the receiver to [[BoundThis]].
1998 __ LoadP(ip, FieldMemOperand(r3, JSBoundFunction::kBoundThisOffset));
1999 __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2));
2000 __ StoreP(ip, MemOperand(sp, r1));
2001
2002 // Push the [[BoundArguments]] onto the stack.
2003 Generate_PushBoundArguments(masm);
2004
2005 // Call the [[BoundTargetFunction]] via the Call builtin.
2006 __ LoadP(r3,
2007 FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
2008 __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
2009 RelocInfo::CODE_TARGET);
2010 }
2011
2012 // static
Generate_Call(MacroAssembler * masm,ConvertReceiverMode mode)2013 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
2014 // ----------- S t a t e -------------
2015 // -- r2 : the number of arguments (not including the receiver)
2016 // -- r3 : the target to call (can be any Object).
2017 // -----------------------------------
2018
2019 Label non_callable, non_function, non_smi;
2020 __ JumpIfSmi(r3, &non_callable);
2021 __ bind(&non_smi);
2022 __ CompareObjectType(r3, r6, r7, JS_FUNCTION_TYPE);
2023 __ Jump(masm->isolate()->builtins()->CallFunction(mode),
2024 RelocInfo::CODE_TARGET, eq);
2025 __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE));
2026 __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
2027 RelocInfo::CODE_TARGET, eq);
2028
2029 // Check if target has a [[Call]] internal method.
2030 __ LoadlB(r6, FieldMemOperand(r6, Map::kBitFieldOffset));
2031 __ TestBit(r6, Map::IsCallableBit::kShift);
2032 __ beq(&non_callable);
2033
2034  // Check if target is a proxy and call the CallProxy external builtin.
2035 __ CmpP(r7, Operand(JS_PROXY_TYPE));
2036 __ bne(&non_function);
2037 __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);
2038
2039 // 2. Call to something else, which might have a [[Call]] internal method (if
2040 // not we raise an exception).
2041 __ bind(&non_function);
2042  // Overwrite the original receiver with the (original) target.
2043 __ ShiftLeftP(r7, r2, Operand(kPointerSizeLog2));
2044 __ StoreP(r3, MemOperand(sp, r7));
2045 // Let the "call_as_function_delegate" take care of the rest.
2046 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r3);
2047 __ Jump(masm->isolate()->builtins()->CallFunction(
2048 ConvertReceiverMode::kNotNullOrUndefined),
2049 RelocInfo::CODE_TARGET);
2050
2051 // 3. Call to something that is not callable.
2052 __ bind(&non_callable);
2053 {
2054 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2055 __ Push(r3);
2056 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2057 }
2058 }
2059
2060 // static
Generate_ConstructFunction(MacroAssembler * masm)2061 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2062 // ----------- S t a t e -------------
2063 // -- r2 : the number of arguments (not including the receiver)
2064 // -- r3 : the constructor to call (checked to be a JSFunction)
2065 // -- r5 : the new target (checked to be a constructor)
2066 // -----------------------------------
2067 __ AssertConstructor(r3, r1);
2068 __ AssertFunction(r3);
2069
2070  // The calling convention for function-specific ConstructStubs requires
2071  // r4 to contain either an AllocationSite or undefined.
2072 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
2073
2074 Label call_generic_stub;
2075
2076 // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
2077 __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset));
2078 __ LoadlW(r6, FieldMemOperand(r6, SharedFunctionInfo::kFlagsOffset));
2079 __ AndP(r6, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
2080 __ beq(&call_generic_stub);
2081
2082 __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
2083 RelocInfo::CODE_TARGET);
2084
2085 __ bind(&call_generic_stub);
2086 __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
2087 RelocInfo::CODE_TARGET);
2088 }
2089
2090 // static
Generate_ConstructBoundFunction(MacroAssembler * masm)2091 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2092 // ----------- S t a t e -------------
2093 // -- r2 : the number of arguments (not including the receiver)
2094 // -- r3 : the function to call (checked to be a JSBoundFunction)
2095 // -- r5 : the new target (checked to be a constructor)
2096 // -----------------------------------
2097 __ AssertConstructor(r3, r1);
2098 __ AssertBoundFunction(r3);
2099
2100 // Push the [[BoundArguments]] onto the stack.
2101 Generate_PushBoundArguments(masm);
2102
2103 // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2104 Label skip;
2105 __ CmpP(r3, r5);
2106 __ bne(&skip);
2107 __ LoadP(r5,
2108 FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
2109 __ bind(&skip);
2110
2111 // Construct the [[BoundTargetFunction]] via the Construct builtin.
2112 __ LoadP(r3,
2113 FieldMemOperand(r3, JSBoundFunction::kBoundTargetFunctionOffset));
2114 __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
2115 }
2116
2117 // static
Generate_Construct(MacroAssembler * masm)2118 void Builtins::Generate_Construct(MacroAssembler* masm) {
2119 // ----------- S t a t e -------------
2120 // -- r2 : the number of arguments (not including the receiver)
2121 // -- r3 : the constructor to call (can be any Object)
2122 // -- r5 : the new target (either the same as the constructor or
2123 // the JSFunction on which new was invoked initially)
2124 // -----------------------------------
2125
2126 // Check if target is a Smi.
2127 Label non_constructor, non_proxy;
2128 __ JumpIfSmi(r3, &non_constructor);
2129
2130 // Check if target has a [[Construct]] internal method.
2131 __ LoadP(r6, FieldMemOperand(r3, HeapObject::kMapOffset));
2132 __ LoadlB(r4, FieldMemOperand(r6, Map::kBitFieldOffset));
2133 __ TestBit(r4, Map::IsConstructorBit::kShift);
2134 __ beq(&non_constructor);
2135
2136 // Dispatch based on instance type.
2137 __ CompareInstanceType(r6, r7, JS_FUNCTION_TYPE);
2138 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
2139 RelocInfo::CODE_TARGET, eq);
2140
2141 // Only dispatch to bound functions after checking whether they are
2142 // constructors.
2143 __ CmpP(r7, Operand(JS_BOUND_FUNCTION_TYPE));
2144 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
2145 RelocInfo::CODE_TARGET, eq);
2146
2147 // Only dispatch to proxies after checking whether they are constructors.
2148 __ CmpP(r7, Operand(JS_PROXY_TYPE));
2149 __ bne(&non_proxy);
2150 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
2151 RelocInfo::CODE_TARGET);
2152
2153 // Called Construct on an exotic Object with a [[Construct]] internal method.
2154 __ bind(&non_proxy);
2155 {
2156 // Overwrite the original receiver with the (original) target.
2157 __ ShiftLeftP(r7, r2, Operand(kPointerSizeLog2));
2158 __ StoreP(r3, MemOperand(sp, r7));
2159 // Let the "call_as_constructor_delegate" take care of the rest.
2160 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r3);
2161 __ Jump(masm->isolate()->builtins()->CallFunction(),
2162 RelocInfo::CODE_TARGET);
2163 }
2164
2165 // Called Construct on an Object that doesn't have a [[Construct]] internal
2166 // method.
2167 __ bind(&non_constructor);
2168 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
2169 RelocInfo::CODE_TARGET);
2170 }
2171
Generate_ArgumentsAdaptorTrampoline(MacroAssembler * masm)2172 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2173 // ----------- S t a t e -------------
2174 // -- r2 : actual number of arguments
2175 // -- r3 : function (passed through to callee)
2176 // -- r4 : expected number of arguments
2177 // -- r5 : new target (passed through to callee)
2178 // -----------------------------------
2179
2180 Label invoke, dont_adapt_arguments, stack_overflow;
2181
2182 Label enough, too_few;
2183 __ tmll(r4, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
2184 __ b(Condition(1), &dont_adapt_arguments);
2185 __ CmpLogicalP(r2, r4);
2186 __ blt(&too_few);
2187
2188 { // Enough parameters: actual >= expected
2189 __ bind(&enough);
2190 EnterArgumentsAdaptorFrame(masm);
2191 Generate_StackOverflowCheck(masm, r4, r7, &stack_overflow);
2192
2193 // Calculate copy start address into r2 and copy end address into r6.
2194 // r2: actual number of arguments as a smi
2195 // r3: function
2196 // r4: expected number of arguments
2197 // r5: new target (passed through to callee)
2198 __ SmiToPtrArrayOffset(r2, r2);
2199 __ AddP(r2, fp);
2200 // adjust for return address and receiver
2201 __ AddP(r2, r2, Operand(2 * kPointerSize));
2202 __ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2));
2203 __ SubP(r6, r2, r6);
2204
2205 // Copy the arguments (including the receiver) to the new stack frame.
2206 // r2: copy start address
2207 // r3: function
2208 // r4: expected number of arguments
2209 // r5: new target (passed through to callee)
2210 // r6: copy end address
2211
2212 Label copy;
2213    __ bind(&copy);
2214 __ LoadP(r0, MemOperand(r2, 0));
2215 __ push(r0);
2216 __ CmpP(r2, r6); // Compare before moving to next argument.
2217 __ lay(r2, MemOperand(r2, -kPointerSize));
2218    __ bne(&copy);
2219
2220 __ b(&invoke);
2221 }
2222
2223 { // Too few parameters: Actual < expected
2224 __ bind(&too_few);
2225
2226 EnterArgumentsAdaptorFrame(masm);
2227 Generate_StackOverflowCheck(masm, r4, r7, &stack_overflow);
2228
2229    // Calculate the copy start address into r2; the copy end address is fp.
2230 // r2: actual number of arguments as a smi
2231 // r3: function
2232 // r4: expected number of arguments
2233 // r5: new target (passed through to callee)
2234 __ SmiToPtrArrayOffset(r2, r2);
2235 __ lay(r2, MemOperand(r2, fp));
2236
2237 // Copy the arguments (including the receiver) to the new stack frame.
2238 // r2: copy start address
2239 // r3: function
2240 // r4: expected number of arguments
2241 // r5: new target (passed through to callee)
2242 Label copy;
2243    __ bind(&copy);
2244 // Adjust load for return address and receiver.
2245 __ LoadP(r0, MemOperand(r2, 2 * kPointerSize));
2246 __ push(r0);
2247 __ CmpP(r2, fp); // Compare before moving to next argument.
2248 __ lay(r2, MemOperand(r2, -kPointerSize));
2249    __ bne(&copy);
2250
2251 // Fill the remaining expected arguments with undefined.
2252 // r3: function
2253    // r4: expected number of arguments
2254 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2255 __ ShiftLeftP(r6, r4, Operand(kPointerSizeLog2));
2256 __ SubP(r6, fp, r6);
2257 // Adjust for frame.
2258 __ SubP(r6, r6,
2259 Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp +
2260 kPointerSize));
2261
2262 Label fill;
2263 __ bind(&fill);
2264 __ push(r0);
2265 __ CmpP(sp, r6);
2266 __ bne(&fill);
2267 }
2268
2269 // Call the entry point.
2270 __ bind(&invoke);
2271 __ LoadRR(r2, r4);
2272 // r2 : expected number of arguments
2273 // r3 : function (passed through to callee)
2274 // r5 : new target (passed through to callee)
2275 static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
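  // The entry point is Code::kHeaderSize past the start of the Code object;
  // subtracting kHeapObjectTag removes the heap-object tag from the pointer.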
2276 __ LoadP(r4, FieldMemOperand(r3, JSFunction::kCodeOffset));
2277 __ AddP(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
2278 __ CallJSEntry(r4);
2279
2280 // Store offset of return address for deoptimizer.
2281 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2282
2283 // Exit frame and return.
2284 LeaveArgumentsAdaptorFrame(masm);
2285 __ Ret();
2286
2287 // -------------------------------------------
2288  // Don't adapt arguments.
2289 // -------------------------------------------
2290 __ bind(&dont_adapt_arguments);
2291 static_assert(kJavaScriptCallCodeStartRegister == r4, "ABI mismatch");
2292 __ LoadP(r4, FieldMemOperand(r3, JSFunction::kCodeOffset));
2293 __ AddP(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
2294 __ JumpToJSEntry(r4);
2295
2296 __ bind(&stack_overflow);
2297 {
2298 FrameScope frame(masm, StackFrame::MANUAL);
2299 __ CallRuntime(Runtime::kThrowStackOverflow);
2300 __ bkpt(0);
2301 }
2302 }
2303
Generate_WasmCompileLazy(MacroAssembler * masm)2304 void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
2305 // The function index was put in r7 by the jump table trampoline.
2306 // Convert to Smi for the runtime call.
2307 __ SmiTag(r7, r7);
2308 {
2309 HardAbortScope hard_abort(masm); // Avoid calls to Abort.
2310 FrameAndConstantPoolScope scope(masm, StackFrame::WASM_COMPILE_LAZY);
2311
2312 // Save all parameter registers (see wasm-linkage.cc). They might be
2313 // overwritten in the runtime call below. We don't have any callee-saved
2314 // registers in wasm, so no need to store anything else.
2315 constexpr RegList gp_regs = Register::ListOf<r2, r3, r4, r5, r6>();
2316 #if V8_TARGET_ARCH_S390X
2317 constexpr RegList fp_regs = DoubleRegister::ListOf<d0, d2, d4, d6>();
2318 #else
2319 constexpr RegList fp_regs = DoubleRegister::ListOf<d0, d2>();
2320 #endif
2321 __ MultiPush(gp_regs);
2322 __ MultiPushDoubles(fp_regs);
2323
2324 // Pass instance and function index as explicit arguments to the runtime
2325 // function.
2326 __ Push(kWasmInstanceRegister, r7);
2327 // Load the correct CEntry builtin from the instance object.
2328 __ LoadP(r4, FieldMemOperand(kWasmInstanceRegister,
2329 WasmInstanceObject::kCEntryStubOffset));
2330 // Initialize the JavaScript context with 0. CEntry will use it to
2331 // set the current context on the isolate.
2332 __ LoadSmiLiteral(cp, Smi::kZero);
2333 __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, r4);
2334 // The entrypoint address is the return value.
2335 __ LoadRR(ip, r2);
2336
2337 // Restore registers.
2338 __ MultiPopDoubles(fp_regs);
2339 __ MultiPop(gp_regs);
2340 }
2341 // Finally, jump to the entrypoint.
2342 __ Jump(ip);
2343 }
2344
Generate_CEntry(MacroAssembler * masm,int result_size,SaveFPRegsMode save_doubles,ArgvMode argv_mode,bool builtin_exit_frame)2345 void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
2346 SaveFPRegsMode save_doubles, ArgvMode argv_mode,
2347 bool builtin_exit_frame) {
2348 // Called from JavaScript; parameters are on stack as if calling JS function.
2349 // r2: number of arguments including receiver
2350 // r3: pointer to builtin function
2351 // fp: frame pointer (restored after C call)
2352 // sp: stack pointer (restored as callee's sp after C call)
2353 // cp: current context (C callee-saved)
2354 //
2355 // If argv_mode == kArgvInRegister:
2356 // r4: pointer to the first argument
2357 ProfileEntryHookStub::MaybeCallEntryHook(masm);
2358
2359 __ LoadRR(r7, r3);
2360
2361 if (argv_mode == kArgvInRegister) {
2362 // Move argv into the correct register.
2363 __ LoadRR(r3, r4);
2364 } else {
2365 // Compute the argv pointer.
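    // argv = sp + (argc - 1) * kPointerSize, the address of the first argument.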
2366 __ ShiftLeftP(r3, r2, Operand(kPointerSizeLog2));
2367 __ lay(r3, MemOperand(r3, sp, -kPointerSize));
2368 }
2369
2370 // Enter the exit frame that transitions from JavaScript to C++.
2371 FrameScope scope(masm, StackFrame::MANUAL);
2372
2373 // Need at least one extra slot for return address location.
2374 int arg_stack_space = 1;
2375
2376 // Pass buffer for return value on stack if necessary
2377 bool needs_return_buffer =
2378 result_size == 2 && !ABI_RETURNS_OBJECTPAIR_IN_REGS;
2379 if (needs_return_buffer) {
2380 arg_stack_space += result_size;
2381 }
2382
2383 #if V8_TARGET_ARCH_S390X
2384  // On 64-bit Linux, the Argument object is passed by reference, not by value.
2385 arg_stack_space += 2;
2386 #endif
2387
2388 __ EnterExitFrame(
2389 save_doubles, arg_stack_space,
2390 builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
2391
2392 // Store a copy of argc, argv in callee-saved registers for later.
2393 __ LoadRR(r6, r2);
2394 __ LoadRR(r8, r3);
2395 // r2, r6: number of arguments including receiver (C callee-saved)
2396 // r3, r8: pointer to the first argument
2397 // r7: pointer to builtin function (C callee-saved)
2398
2399 // Result returned in registers or stack, depending on result size and ABI.
2400
2401 Register isolate_reg = r4;
2402 if (needs_return_buffer) {
2403    // The return value is a 16-byte non-scalar value.
2404    // Use frame storage reserved by the calling function to pass the return
2405    // buffer as an implicit first argument in R2. Shift the original
2406    // parameters by one register each.
2407 __ LoadRR(r4, r3);
2408 __ LoadRR(r3, r2);
2409 __ la(r2, MemOperand(sp, (kStackFrameExtraParamSlot + 1) * kPointerSize));
2410 isolate_reg = r5;
2411 }
2412 // Call C built-in.
2413 __ Move(isolate_reg, ExternalReference::isolate_address(masm->isolate()));
2414
2415 Register target = r7;
2416
2417 // To let the GC traverse the return address of the exit frames, we need to
2418 // know where the return address is. The CEntryStub is unmovable, so
2419 // we can store the address on the stack to be able to find it again and
2420 // we never have to restore it, because it will not change.
2421 {
2422 Label return_label;
2423 __ larl(r14, &return_label); // Generate the return addr of call later.
2424 __ StoreP(r14, MemOperand(sp, kStackFrameRASlot * kPointerSize));
2425
2426 // zLinux ABI requires caller's frame to have sufficient space for callee
2427    // preserved register save area.
2428 // __ lay(sp, MemOperand(sp, -kCalleeRegisterSaveAreaSize));
2429 __ b(target);
2430 __ bind(&return_label);
2431 // __ la(sp, MemOperand(sp, +kCalleeRegisterSaveAreaSize));
2432 }
2433
2434 // If return value is on the stack, pop it to registers.
2435 if (needs_return_buffer) {
2436 __ LoadP(r3, MemOperand(r2, kPointerSize));
2437 __ LoadP(r2, MemOperand(r2));
2438 }
2439
2440 // Check result for exception sentinel.
2441 Label exception_returned;
2442 __ CompareRoot(r2, Heap::kExceptionRootIndex);
2443 __ beq(&exception_returned, Label::kNear);
2444
2445 // Check that there is no pending exception, otherwise we
2446 // should have returned the exception sentinel.
2447 if (FLAG_debug_code) {
2448 Label okay;
2449 ExternalReference pending_exception_address = ExternalReference::Create(
2450 IsolateAddressId::kPendingExceptionAddress, masm->isolate());
2451 __ Move(r1, pending_exception_address);
2452 __ LoadP(r1, MemOperand(r1));
2453 __ CompareRoot(r1, Heap::kTheHoleValueRootIndex);
2454    // Cannot use Check here, as it attempts to generate a call into the runtime.
2455 __ beq(&okay, Label::kNear);
2456 __ stop("Unexpected pending exception");
2457 __ bind(&okay);
2458 }
2459
2460 // Exit C frame and return.
2461 // r2:r3: result
2462 // sp: stack pointer
2463 // fp: frame pointer
2464 Register argc = argv_mode == kArgvInRegister
2465 // We don't want to pop arguments so set argc to no_reg.
2466 ? no_reg
2467 // r6: still holds argc (callee-saved).
2468 : r6;
2469 __ LeaveExitFrame(save_doubles, argc);
2470 __ b(r14);
2471
2472 // Handling of exception.
2473 __ bind(&exception_returned);
2474
2475 ExternalReference pending_handler_context_address = ExternalReference::Create(
2476 IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
2477 ExternalReference pending_handler_entrypoint_address =
2478 ExternalReference::Create(
2479 IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
2480 ExternalReference pending_handler_fp_address = ExternalReference::Create(
2481 IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
2482 ExternalReference pending_handler_sp_address = ExternalReference::Create(
2483 IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
2484
2485 // Ask the runtime for help to determine the handler. This will set r3 to
2486 // contain the current pending exception, don't clobber it.
2487 ExternalReference find_handler =
2488 ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
2489 {
2490 FrameScope scope(masm, StackFrame::MANUAL);
2491 __ PrepareCallCFunction(3, 0, r2);
2492 __ LoadImmP(r2, Operand::Zero());
2493 __ LoadImmP(r3, Operand::Zero());
2494 __ Move(r4, ExternalReference::isolate_address(masm->isolate()));
2495 __ CallCFunction(find_handler, 3);
2496 }
2497
2498 // Retrieve the handler context, SP and FP.
2499 __ Move(cp, pending_handler_context_address);
2500 __ LoadP(cp, MemOperand(cp));
2501 __ Move(sp, pending_handler_sp_address);
2502 __ LoadP(sp, MemOperand(sp));
2503 __ Move(fp, pending_handler_fp_address);
2504 __ LoadP(fp, MemOperand(fp));
2505
2506 // If the handler is a JS frame, restore the context to the frame. Note that
2507 // the context will be set to (cp == 0) for non-JS frames.
2508 Label skip;
2509 __ CmpP(cp, Operand::Zero());
2510 __ beq(&skip, Label::kNear);
2511 __ StoreP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2512 __ bind(&skip);
2513
2514 // Reset the masking register.
2515 if (FLAG_branch_load_poisoning) {
2516 __ ResetSpeculationPoisonRegister();
2517 }
2518
2519 // Compute the handler entry address and jump to it.
2520 __ Move(r3, pending_handler_entrypoint_address);
2521 __ LoadP(r3, MemOperand(r3));
2522 __ Jump(r3);
2523 }
2524
Generate_DoubleToI(MacroAssembler * masm)2525 void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
2526 Label out_of_range, only_low, negate, done, fastpath_done;
2527 Register result_reg = r2;
2528
2529 HardAbortScope hard_abort(masm); // Avoid calls to Abort.
2530
2531 // Immediate values for this stub fit in instructions, so it's safe to use ip.
2532 Register scratch = GetRegisterThatIsNotOneOf(result_reg);
2533 Register scratch_low = GetRegisterThatIsNotOneOf(result_reg, scratch);
2534 Register scratch_high =
2535 GetRegisterThatIsNotOneOf(result_reg, scratch, scratch_low);
2536 DoubleRegister double_scratch = kScratchDoubleReg;
2537
2538 __ Push(result_reg, scratch);
2539 // Account for saved regs.
2540 int argument_offset = 2 * kPointerSize;
2541
2542 // Load double input.
2543 __ LoadDouble(double_scratch, MemOperand(sp, argument_offset));
2544
2545 // Do fast-path convert from double to int.
2546 __ ConvertDoubleToInt64(result_reg, double_scratch);
2547
2548 // Test for overflow
2549 __ TestIfInt32(result_reg);
2550 __ beq(&fastpath_done, Label::kNear);
2551
2552 __ Push(scratch_high, scratch_low);
2553 // Account for saved regs.
2554 argument_offset += 2 * kPointerSize;
2555
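  // Slow path: the input does not fit in an int32 via the fast conversion.
  // Decompose the double into its exponent and 32-bit mantissa words and
  // assemble the low 32 bits of the truncated value by hand.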
2556 __ LoadlW(scratch_high,
2557 MemOperand(sp, argument_offset + Register::kExponentOffset));
2558 __ LoadlW(scratch_low,
2559 MemOperand(sp, argument_offset + Register::kMantissaOffset));
2560
2561 __ ExtractBitMask(scratch, scratch_high, HeapNumber::kExponentMask);
2562 // Load scratch with exponent - 1. This is faster than loading
2563  // with the exponent, because Bias + 1 = 1024, which is an *S390* immediate value.
2564 STATIC_ASSERT(HeapNumber::kExponentBias + 1 == 1024);
2565 __ SubP(scratch, Operand(HeapNumber::kExponentBias + 1));
2566 // If exponent is greater than or equal to 84, the 32 less significant
2567 // bits are 0s (2^84 = 1, 52 significant bits, 32 uncoded bits),
2568 // the result is 0.
2569 // Compare exponent with 84 (compare exponent - 1 with 83).
2570 __ CmpP(scratch, Operand(83));
2571 __ bge(&out_of_range, Label::kNear);
2572
2573 // If we reach this code, 31 <= exponent <= 83.
2574 // So, we don't have to handle cases where 0 <= exponent <= 20 for
2575 // which we would need to shift right the high part of the mantissa.
2576 // Scratch contains exponent - 1.
2577 // Load scratch with 52 - exponent (load with 51 - (exponent - 1)).
2578 __ Load(r0, Operand(51));
2579 __ SubP(scratch, r0, scratch);
2580 __ CmpP(scratch, Operand::Zero());
2581 __ ble(&only_low, Label::kNear);
2582 // 21 <= exponent <= 51, shift scratch_low and scratch_high
2583 // to generate the result.
2584 __ ShiftRight(scratch_low, scratch_low, scratch);
2585 // Scratch contains: 52 - exponent.
2586  // We need: exponent - 20.
2587 // So we use: 32 - scratch = 32 - 52 + exponent = exponent - 20.
2588 __ Load(r0, Operand(32));
2589 __ SubP(scratch, r0, scratch);
2590 __ ExtractBitMask(result_reg, scratch_high, HeapNumber::kMantissaMask);
2591 // Set the implicit 1 before the mantissa part in scratch_high.
2592 STATIC_ASSERT(HeapNumber::kMantissaBitsInTopWord >= 16);
2593 __ Load(r0, Operand(1 << ((HeapNumber::kMantissaBitsInTopWord)-16)));
2594 __ ShiftLeftP(r0, r0, Operand(16));
2595 __ OrP(result_reg, result_reg, r0);
2596 __ ShiftLeft(r0, result_reg, scratch);
2597 __ OrP(result_reg, scratch_low, r0);
2598 __ b(&negate, Label::kNear);
2599
2600 __ bind(&out_of_range);
2601 __ mov(result_reg, Operand::Zero());
2602 __ b(&done, Label::kNear);
2603
2604 __ bind(&only_low);
2605 // 52 <= exponent <= 83, shift only scratch_low.
2606 // On entry, scratch contains: 52 - exponent.
2607 __ LoadComplementRR(scratch, scratch);
2608 __ ShiftLeft(result_reg, scratch_low, scratch);
2609
2610 __ bind(&negate);
2611 // If input was positive, scratch_high ASR 31 equals 0 and
2612 // scratch_high LSR 31 equals zero.
2613 // New result = (result eor 0) + 0 = result.
2614 // If the input was negative, we have to negate the result.
2615 // Input_high ASR 31 equals 0xFFFFFFFF and scratch_high LSR 31 equals 1.
2616 // New result = (result eor 0xFFFFFFFF) + 1 = 0 - result.
2617 __ ShiftRightArith(r0, scratch_high, Operand(31));
2618 #if V8_TARGET_ARCH_S390X
2619 __ lgfr(r0, r0);
2620 __ ShiftRightP(r0, r0, Operand(32));
2621 #endif
2622 __ XorP(result_reg, r0);
2623 __ ShiftRight(r0, scratch_high, Operand(31));
2624 __ AddP(result_reg, r0);
2625
2626 __ bind(&done);
2627 __ Pop(scratch_high, scratch_low);
2628 argument_offset -= 2 * kPointerSize;
2629
2630 __ bind(&fastpath_done);
2631 __ StoreP(result_reg, MemOperand(sp, argument_offset));
2632 __ Pop(result_reg, scratch);
2633
2634 __ Ret();
2635 }
2636
Generate_MathPowInternal(MacroAssembler * masm)2637 void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
2638 const Register exponent = r4;
2639 const DoubleRegister double_base = d1;
2640 const DoubleRegister double_exponent = d2;
2641 const DoubleRegister double_result = d3;
2642 const DoubleRegister double_scratch = d0;
2643 const Register scratch = r1;
2644 const Register scratch2 = r9;
2645
2646 Label call_runtime, done, int_exponent;
2647
2648 // Detect integer exponents stored as double.
2649 __ TryDoubleToInt32Exact(scratch, double_exponent, scratch2, double_scratch);
2650 __ beq(&int_exponent, Label::kNear);
2651
2652 __ push(r14);
2653 {
2654 AllowExternalCallThatCantCauseGC scope(masm);
2655 __ PrepareCallCFunction(0, 2, scratch);
2656 __ MovToFloatParameters(double_base, double_exponent);
2657 __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
2658 }
2659 __ pop(r14);
2660 __ MovFromFloatResult(double_result);
2661 __ b(&done);
2662
2663 // Calculate power with integer exponent.
2664 __ bind(&int_exponent);
2665
2666 // Get two copies of exponent in the registers scratch and exponent.
2667 // Exponent has previously been stored into scratch as untagged integer.
2668 __ LoadRR(exponent, scratch);
2669
2670 __ ldr(double_scratch, double_base); // Back up base.
2671 __ LoadImmP(scratch2, Operand(1));
2672 __ ConvertIntToDouble(double_result, scratch2);
2673
2674 // Get absolute value of exponent.
2675 Label positive_exponent;
2676 __ CmpP(scratch, Operand::Zero());
2677 __ bge(&positive_exponent, Label::kNear);
2678 __ LoadComplementRR(scratch, scratch);
2679 __ bind(&positive_exponent);
2680
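  // Compute base^|exponent| by binary exponentiation: whenever the low bit of
  // the exponent is set, multiply the result by the current power of the base;
  // then square that power and shift the exponent right until it reaches zero.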
2681 Label while_true, no_carry, loop_end;
2682 __ bind(&while_true);
2683 __ mov(scratch2, Operand(1));
2684 __ AndP(scratch2, scratch);
2685 __ beq(&no_carry, Label::kNear);
2686 __ mdbr(double_result, double_scratch);
2687 __ bind(&no_carry);
2688 __ ShiftRightP(scratch, scratch, Operand(1));
2689 __ LoadAndTestP(scratch, scratch);
2690 __ beq(&loop_end, Label::kNear);
2691 __ mdbr(double_scratch, double_scratch);
2692 __ b(&while_true);
2693 __ bind(&loop_end);
2694
2695 __ CmpP(exponent, Operand::Zero());
2696 __ bge(&done);
2697
2698 // get 1/double_result:
2699 __ ldr(double_scratch, double_result);
2700 __ LoadImmP(scratch2, Operand(1));
2701 __ ConvertIntToDouble(double_result, scratch2);
2702 __ ddbr(double_result, double_scratch);
2703
2704 // Test whether result is zero. Bail out to check for subnormal result.
2705 // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
2706 __ lzdr(kDoubleRegZero);
2707 __ cdbr(double_result, kDoubleRegZero);
2708 __ bne(&done, Label::kNear);
2709  // double_exponent may not contain the exponent value if the input was a
2710  // Smi. We set it to the exponent value before bailing out.
2711 __ ConvertIntToDouble(double_exponent, exponent);
2712
2713 // Returning or bailing out.
2714 __ push(r14);
2715 {
2716 AllowExternalCallThatCantCauseGC scope(masm);
2717 __ PrepareCallCFunction(0, 2, scratch);
2718 __ MovToFloatParameters(double_base, double_exponent);
2719 __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
2720 }
2721 __ pop(r14);
2722 __ MovFromFloatResult(double_result);
2723
2724 __ bind(&done);
2725 __ Ret();
2726 }
2727
2728 namespace {
2729
GenerateInternalArrayConstructorCase(MacroAssembler * masm,ElementsKind kind)2730 void GenerateInternalArrayConstructorCase(MacroAssembler* masm,
2731 ElementsKind kind) {
2732 __ CmpLogicalP(r2, Operand(1));
2733
2734 __ Jump(CodeFactory::InternalArrayNoArgumentConstructor(masm->isolate(), kind)
2735 .code(),
2736 RelocInfo::CODE_TARGET, lt);
2737
2738 __ Jump(BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor),
2739 RelocInfo::CODE_TARGET, gt);
2740
2741 if (IsFastPackedElementsKind(kind)) {
2742    // We might need to create a holey array;
2743    // look at the first argument.
2744 __ LoadP(r5, MemOperand(sp, 0));
2745 __ CmpP(r5, Operand::Zero());
2746
2747 __ Jump(CodeFactory::InternalArraySingleArgumentConstructor(
2748 masm->isolate(), GetHoleyElementsKind(kind))
2749 .code(),
2750 RelocInfo::CODE_TARGET, ne);
2751 }
2752
2753 __ Jump(
2754 CodeFactory::InternalArraySingleArgumentConstructor(masm->isolate(), kind)
2755 .code(),
2756 RelocInfo::CODE_TARGET);
2757 }
2758
2759 } // namespace
2760
Generate_InternalArrayConstructorImpl(MacroAssembler * masm)2761 void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
2762 // ----------- S t a t e -------------
2763 // -- r2 : argc
2764 // -- r3 : constructor
2765 // -- sp[0] : return address
2766 // -- sp[4] : last argument
2767 // -----------------------------------
2768
2769 if (FLAG_debug_code) {
2770 // The array construct code is only set for the global and natives
2771 // builtin Array functions which always have maps.
2772
2773 // Initial map for the builtin Array function should be a map.
2774 __ LoadP(r5, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
2775    // The Smi check below catches both a nullptr and a Smi.
2776 __ TestIfSmi(r5);
2777 __ Assert(ne, AbortReason::kUnexpectedInitialMapForArrayFunction, cr0);
2778 __ CompareObjectType(r5, r5, r6, MAP_TYPE);
2779 __ Assert(eq, AbortReason::kUnexpectedInitialMapForArrayFunction);
2780 }
2781
2782 // Figure out the right elements kind
2783 __ LoadP(r5, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset));
2784 // Load the map's "bit field 2" into |result|.
2785 __ LoadlB(r5, FieldMemOperand(r5, Map::kBitField2Offset));
2786 // Retrieve elements_kind from bit field 2.
2787 __ DecodeField<Map::ElementsKindBits>(r5);
2788
2789 if (FLAG_debug_code) {
2790 Label done;
2791 __ CmpP(r5, Operand(PACKED_ELEMENTS));
2792 __ beq(&done);
2793 __ CmpP(r5, Operand(HOLEY_ELEMENTS));
2794 __ Assert(
2795 eq,
2796 AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray);
2797 __ bind(&done);
2798 }
2799
2800 Label fast_elements_case;
2801 __ CmpP(r5, Operand(PACKED_ELEMENTS));
2802 __ beq(&fast_elements_case);
2803 GenerateInternalArrayConstructorCase(masm, HOLEY_ELEMENTS);
2804
2805 __ bind(&fast_elements_case);
2806 GenerateInternalArrayConstructorCase(masm, PACKED_ELEMENTS);
2807 }
2808
2809 #undef __
2810
2811 } // namespace internal
2812 } // namespace v8
2813
2814 #endif // V8_TARGET_ARCH_S390
2815