1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #if V8_TARGET_ARCH_PPC
6
7 #include "src/assembler-inl.h"
8 #include "src/code-factory.h"
9 #include "src/code-stubs.h"
10 #include "src/debug/debug.h"
11 #include "src/deoptimizer.h"
12 #include "src/frame-constants.h"
13 #include "src/frames.h"
14 #include "src/objects/js-generator.h"
15 #include "src/runtime/runtime.h"
16 #include "src/wasm/wasm-objects.h"
17
18 namespace v8 {
19 namespace internal {
20
21 #define __ ACCESS_MASM(masm)
22
23 void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
24 ExitFrameType exit_frame_type) {
25 __ Move(kJavaScriptCallExtraArg1Register, ExternalReference::Create(address));
26 if (exit_frame_type == BUILTIN_EXIT) {
27 __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
28 RelocInfo::CODE_TARGET);
29 } else {
30 DCHECK(exit_frame_type == EXIT);
31 __ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
32 RelocInfo::CODE_TARGET);
33 }
34 }
35
36 void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
37 // ----------- S t a t e -------------
38 // -- r3 : number of arguments
39 // -- lr : return address
40 // -- sp[...]: constructor arguments
41 // -----------------------------------
42 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
43
44 if (FLAG_debug_code) {
45 // The initial map for the builtin InternalArray function should be a map.
46 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
47 __ TestIfSmi(r5, r0);
48 __ Assert(ne, AbortReason::kUnexpectedInitialMapForInternalArrayFunction,
49 cr0);
50 __ CompareObjectType(r5, r6, r7, MAP_TYPE);
51 __ Assert(eq, AbortReason::kUnexpectedInitialMapForInternalArrayFunction);
52 }
53
54 // Run the native code for the InternalArray function called as a normal
55 // function.
56 // Tail call a stub.
57 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
58 __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
59 RelocInfo::CODE_TARGET);
60 }
61
62 static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
63 Runtime::FunctionId function_id) {
64 // ----------- S t a t e -------------
65 // -- r3 : argument count (preserved for callee)
66 // -- r4 : target function (preserved for callee)
67 // -- r6 : new target (preserved for callee)
68 // -----------------------------------
69 {
70 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
71 // Push the number of arguments to the callee.
72 // Push a copy of the target function and the new target.
73 // Push function as parameter to the runtime call.
74 __ SmiTag(r3);
75 __ Push(r3, r4, r6, r4);
76
77 __ CallRuntime(function_id, 1);
78 __ mr(r5, r3);
79
80 // Restore target function and new target.
81 __ Pop(r3, r4, r6);
82 __ SmiUntag(r3);
83 }
84 static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
85 __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
86 __ JumpToJSEntry(r5);
87 }
88
89 namespace {
90
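// Shared helper for the builtins construct stub: sets up a construct frame,
// pushes the hole as the receiver, copies the arguments from the caller's
// frame, invokes the constructor, and finally drops the caller's arguments.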
91 void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
92 Label post_instantiation_deopt_entry;
93 // ----------- S t a t e -------------
94 // -- r3 : number of arguments
95 // -- r4 : constructor function
96 // -- r6 : new target
97 // -- cp : context
98 // -- lr : return address
99 // -- sp[...]: constructor arguments
100 // -----------------------------------
101
102 // Enter a construct frame.
103 {
104 FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
105
106 // Preserve the incoming parameters on the stack.
107
108 __ SmiTag(r3);
109 __ Push(cp, r3);
110 __ SmiUntag(r3, SetRC);
111 // The receiver for the builtin/api call.
112 __ PushRoot(Heap::kTheHoleValueRootIndex);
113 // Set up pointer to last argument.
114 __ addi(r7, fp, Operand(StandardFrameConstants::kCallerSPOffset));
115
116 // Copy arguments and receiver to the expression stack.
117
118 Label loop, no_args;
119 // ----------- S t a t e -------------
120 // -- r3: number of arguments (untagged)
121 // -- r4: constructor function
122 // -- r6: new target
123 // -- r7: pointer to last argument
124 // -- cr0: condition indicating whether r3 is zero
125 // -- sp[0*kPointerSize]: the hole (receiver)
126 // -- sp[1*kPointerSize]: number of arguments (tagged)
127 // -- sp[2*kPointerSize]: context
128 // -----------------------------------
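// ip is the byte offset of the slot currently being copied, counting down
// from argc * kPointerSize; ctr holds the number of remaining iterations.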
129 __ beq(&no_args, cr0);
130 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
131 __ sub(sp, sp, ip);
132 __ mtctr(r3);
133 __ bind(&loop);
134 __ subi(ip, ip, Operand(kPointerSize));
135 __ LoadPX(r0, MemOperand(r7, ip));
136 __ StorePX(r0, MemOperand(sp, ip));
137 __ bdnz(&loop);
138 __ bind(&no_args);
139
140 // Call the function.
141 // r3: number of arguments (untagged)
142 // r4: constructor function
143 // r6: new target
144 {
145 ConstantPoolUnavailableScope constant_pool_unavailable(masm);
146 ParameterCount actual(r3);
147 __ InvokeFunction(r4, r6, actual, CALL_FUNCTION);
148 }
149
150 // Restore context from the frame.
151 __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
152 // Restore smi-tagged arguments count from the frame.
153 __ LoadP(r4, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
154
155 // Leave construct frame.
156 }
157 // Remove caller arguments from the stack and return.
158 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
159
160 __ SmiToPtrArrayOffset(r4, r4);
161 __ add(sp, sp, r4);
162 __ addi(sp, sp, Operand(kPointerSize));
163 __ blr();
164 }
165
166 } // namespace
167
168 // The construct stub for ES5 constructor functions and ES6 class constructors.
169 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
170 // ----------- S t a t e -------------
171 // -- r3: number of arguments (untagged)
172 // -- r4: constructor function
173 // -- r6: new target
174 // -- cp: context
175 // -- lr: return address
176 // -- sp[...]: constructor arguments
177 // -----------------------------------
178
179 // Enter a construct frame.
180 {
181 FrameAndConstantPoolScope scope(masm, StackFrame::CONSTRUCT);
182 Label post_instantiation_deopt_entry, not_create_implicit_receiver;
183
184 // Preserve the incoming parameters on the stack.
185 __ SmiTag(r3);
186 __ Push(cp, r3, r4);
187 __ PushRoot(Heap::kUndefinedValueRootIndex);
188 __ Push(r6);
189
190 // ----------- S t a t e -------------
191 // -- sp[0*kPointerSize]: new target
192 // -- sp[1*kPointerSize]: padding
193 // -- r4 and sp[2*kPointerSize]: constructor function
194 // -- sp[3*kPointerSize]: number of arguments (tagged)
195 // -- sp[4*kPointerSize]: context
196 // -----------------------------------
197
198 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
199 __ lwz(r7, FieldMemOperand(r7, SharedFunctionInfo::kFlagsOffset));
200 __ TestBitMask(r7, SharedFunctionInfo::IsDerivedConstructorBit::kMask, r0);
201 __ bne(&not_create_implicit_receiver, cr0);
202
203 // If not derived class constructor: Allocate the new receiver object.
204 __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
205 r7, r8);
206 __ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
207 RelocInfo::CODE_TARGET);
208 __ b(&post_instantiation_deopt_entry);
209
210 // Else: use TheHoleValue as receiver for constructor call
211 __ bind(&not_create_implicit_receiver);
212 __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
213
214 // ----------- S t a t e -------------
215 // -- r3: receiver
216 // -- Slot 4 / sp[0*kPointerSize]: new target
217 // -- Slot 3 / sp[1*kPointerSize]: padding
218 // -- Slot 2 / sp[2*kPointerSize]: constructor function
219 // -- Slot 1 / sp[3*kPointerSize]: number of arguments (tagged)
220 // -- Slot 0 / sp[4*kPointerSize]: context
221 // -----------------------------------
222 // Deoptimizer enters here.
223 masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
224 masm->pc_offset());
225 __ bind(&post_instantiation_deopt_entry);
226
227 // Restore new target.
228 __ Pop(r6);
229 // Push the allocated receiver to the stack. We need two copies
230 // because we may have to return the original one and the calling
231 // conventions dictate that the called function pops the receiver.
232 __ Push(r3, r3);
233
234 // ----------- S t a t e -------------
235 // -- r6: new target
236 // -- sp[0*kPointerSize]: implicit receiver
237 // -- sp[1*kPointerSize]: implicit receiver
238 // -- sp[2*kPointerSize]: padding
239 // -- sp[3*kPointerSize]: constructor function
240 // -- sp[4*kPointerSize]: number of arguments (tagged)
241 // -- sp[5*kPointerSize]: context
242 // -----------------------------------
243
244 // Restore constructor function and argument count.
245 __ LoadP(r4, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
246 __ LoadP(r3, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
247 __ SmiUntag(r3, SetRC);
248
249 // Set up pointer to last argument.
250 __ addi(r7, fp, Operand(StandardFrameConstants::kCallerSPOffset));
251
252 // Copy arguments and receiver to the expression stack.
253 Label loop, no_args;
254 // ----------- S t a t e -------------
255 // -- r3: number of arguments (untagged)
256 // -- r6: new target
257 // -- r7: pointer to last argument
258 // -- cr0: condition indicating whether r3 is zero
259 // -- sp[0*kPointerSize]: implicit receiver
260 // -- sp[1*kPointerSize]: implicit receiver
261 // -- sp[2*kPointerSize]: padding
262 // -- r4 and sp[3*kPointerSize]: constructor function
263 // -- sp[4*kPointerSize]: number of arguments (tagged)
264 // -- sp[5*kPointerSize]: context
265 // -----------------------------------
266 __ beq(&no_args, cr0);
267 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2));
268 __ sub(sp, sp, ip);
269 __ mtctr(r3);
270 __ bind(&loop);
271 __ subi(ip, ip, Operand(kPointerSize));
272 __ LoadPX(r0, MemOperand(r7, ip));
273 __ StorePX(r0, MemOperand(sp, ip));
274 __ bdnz(&loop);
275 __ bind(&no_args);
276
277 // Call the function.
278 {
279 ConstantPoolUnavailableScope constant_pool_unavailable(masm);
280 ParameterCount actual(r3);
281 __ InvokeFunction(r4, r6, actual, CALL_FUNCTION);
282 }
283
284 // ----------- S t a t e -------------
285 // -- r0: constructor result
286 // -- sp[0*kPointerSize]: implicit receiver
287 // -- sp[1*kPointerSize]: padding
288 // -- sp[2*kPointerSize]: constructor function
289 // -- sp[3*kPointerSize]: number of arguments
290 // -- sp[4*kPointerSize]: context
291 // -----------------------------------
292
293 // Store offset of return address for deoptimizer.
294 masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
295 masm->pc_offset());
296
297 // Restore the context from the frame.
298 __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
299
300 // If the result is an object (in the ECMA sense), we should get rid
301 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
302 // on page 74.
303 Label use_receiver, do_throw, leave_frame;
304
305 // If the result is undefined, we jump out to using the implicit receiver.
306 __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex, &use_receiver);
307
308 // Otherwise we do a smi check and fall through to check if the return value
309 // is a valid receiver.
310
311 // If the result is a smi, it is *not* an object in the ECMA sense.
312 __ JumpIfSmi(r3, &use_receiver);
313
314 // If the type of the result (stored in its map) is less than
315 // FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
316 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
317 __ CompareObjectType(r3, r7, r7, FIRST_JS_RECEIVER_TYPE);
318 __ bge(&leave_frame);
319 __ b(&use_receiver);
320
321 __ bind(&do_throw);
322 __ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
323
324 // Throw away the result of the constructor invocation and use the
325 // on-stack receiver as the result.
326 __ bind(&use_receiver);
327 __ LoadP(r3, MemOperand(sp));
328 __ JumpIfRoot(r3, Heap::kTheHoleValueRootIndex, &do_throw);
329
330 __ bind(&leave_frame);
331 // Restore smi-tagged arguments count from the frame.
332 __ LoadP(r4, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
333 // Leave construct frame.
334 }
335
336 // Remove caller arguments from the stack and return.
337 STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
338
339 __ SmiToPtrArrayOffset(r4, r4);
340 __ add(sp, sp, r4);
341 __ addi(sp, sp, Operand(kPointerSize));
342 __ blr();
343 }
344
345 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
346 Generate_JSBuiltinsConstructStubHelper(masm);
347 }
348
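// If the SharedFunctionInfo's function data (|sfi_data|) is an
// InterpreterData, replace it with the BytecodeArray it wraps; otherwise it
// is left unchanged.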
349 static void GetSharedFunctionInfoBytecode(MacroAssembler* masm,
350 Register sfi_data,
351 Register scratch1) {
352 Label done;
353
354 __ CompareObjectType(sfi_data, scratch1, scratch1, INTERPRETER_DATA_TYPE);
355 __ bne(&done);
356 __ LoadP(sfi_data,
357 FieldMemOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));
358 __ bind(&done);
359 }
360
361 // static
362 void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
363 // ----------- S t a t e -------------
364 // -- r3 : the value to pass to the generator
365 // -- r4 : the JSGeneratorObject to resume
366 // -- lr : return address
367 // -----------------------------------
368 __ AssertGeneratorObject(r4);
369
370 // Store input value into generator object.
371 __ StoreP(r3, FieldMemOperand(r4, JSGeneratorObject::kInputOrDebugPosOffset),
372 r0);
373 __ RecordWriteField(r4, JSGeneratorObject::kInputOrDebugPosOffset, r3, r6,
374 kLRHasNotBeenSaved, kDontSaveFPRegs);
375
376 // Load suspended function and context.
377 __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
378 __ LoadP(cp, FieldMemOperand(r7, JSFunction::kContextOffset));
379
380 // Flood function if we are stepping.
381 Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
382 Label stepping_prepared;
383 ExternalReference debug_hook =
384 ExternalReference::debug_hook_on_function_call_address(masm->isolate());
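// Load the flag byte at that address; a non-zero value means a debug hook is
// installed and the function must be flooded for stepping.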
385 __ Move(ip, debug_hook);
386 __ LoadByte(ip, MemOperand(ip), r0);
387 __ extsb(ip, ip);
388 __ CmpSmiLiteral(ip, Smi::kZero, r0);
389 __ bne(&prepare_step_in_if_stepping);
390
391 // Flood function if we need to continue stepping in the suspended generator.
392
393 ExternalReference debug_suspended_generator =
394 ExternalReference::debug_suspended_generator_address(masm->isolate());
395
396 __ Move(ip, debug_suspended_generator);
397 __ LoadP(ip, MemOperand(ip));
398 __ cmp(ip, r4);
399 __ beq(&prepare_step_in_suspended_generator);
400 __ bind(&stepping_prepared);
401
402 // Check the stack for overflow. We are not trying to catch interruptions
403 // (i.e. debug break and preemption) here, so check the "real stack limit".
404 Label stack_overflow;
405 __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
406 __ blt(&stack_overflow);
407
408 // Push receiver.
409 __ LoadP(ip, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset));
410 __ Push(ip);
411
412 // ----------- S t a t e -------------
413 // -- r4 : the JSGeneratorObject to resume
414 // -- r7 : generator function
415 // -- cp : generator context
416 // -- lr : return address
417 // -- sp[0] : generator receiver
418 // -----------------------------------
419
420 // Copy the function arguments from the generator object's register file.
421 __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
422 __ LoadHalfWord(
423 r6, FieldMemOperand(r6, SharedFunctionInfo::kFormalParameterCountOffset));
424 __ LoadP(r5, FieldMemOperand(
425 r4, JSGeneratorObject::kParametersAndRegistersOffset));
426 {
427 Label loop, done_loop;
428 __ cmpi(r6, Operand::Zero());
429 __ ble(&done_loop);
430
431 // Set up r9 to the first element's address minus kPointerSize.
432 __ addi(r9, r5,
433 Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
434
435 __ mtctr(r6);
436 __ bind(&loop);
437 __ LoadPU(ip, MemOperand(r9, kPointerSize));
438 __ push(ip);
439 __ bdnz(&loop);
440
441 __ bind(&done_loop);
442 }
443
444 // Underlying function needs to have bytecode available.
445 if (FLAG_debug_code) {
446 __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset));
447 __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset));
448 GetSharedFunctionInfoBytecode(masm, r6, r3);
449 __ CompareObjectType(r6, r6, r6, BYTECODE_ARRAY_TYPE);
450 __ Assert(eq, AbortReason::kMissingBytecodeArray);
451 }
452
453 // Resume (Ignition/TurboFan) generator object.
454 {
455 // We abuse new.target both to indicate that this is a resume call and to
456 // pass in the generator object. In ordinary calls, new.target is always
457 // undefined because generator functions are non-constructable.
458 __ mr(r6, r4);
459 __ mr(r4, r7);
460 static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
461 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kCodeOffset));
462 __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
463 __ JumpToJSEntry(r5);
464 }
465
466 __ bind(&prepare_step_in_if_stepping);
467 {
468 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
469 __ Push(r4, r7);
470 // Push hole as receiver since we do not use it for stepping.
471 __ PushRoot(Heap::kTheHoleValueRootIndex);
472 __ CallRuntime(Runtime::kDebugOnFunctionCall);
473 __ Pop(r4);
474 __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
475 }
476 __ b(&stepping_prepared);
477
478 __ bind(&prepare_step_in_suspended_generator);
479 {
480 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
481 __ Push(r4);
482 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
483 __ Pop(r4);
484 __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset));
485 }
486 __ b(&stepping_prepared);
487
488 __ bind(&stack_overflow);
489 {
490 FrameScope scope(masm, StackFrame::INTERNAL);
491 __ CallRuntime(Runtime::kThrowStackOverflow);
492 __ bkpt(0); // This should be unreachable.
493 }
494 }
495
496 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
497 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
498 __ push(r4);
499 __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
500 }
501
502 // Clobbers r5; preserves all other registers.
503 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc) {
504 // Check the stack for overflow. We are not trying to catch
505 // interruptions (e.g. debug break and preemption) here, so the "real stack
506 // limit" is checked.
507 Label okay;
508 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex);
509 // Make r5 the space we have left. The stack might already be overflowed
510 // here which will cause r5 to become negative.
511 __ sub(r5, sp, r5);
512 // Check if the arguments will overflow the stack.
513 __ ShiftLeftImm(r0, argc, Operand(kPointerSizeLog2));
514 __ cmp(r5, r0);
515 __ bgt(&okay); // Signed comparison.
516
517 // Out of stack space.
518 __ CallRuntime(Runtime::kThrowStackOverflow);
519
520 __ bind(&okay);
521 }
522
523 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
524 bool is_construct) {
525 // Called from Generate_JS_Entry
526 // r3: new.target
527 // r4: function
528 // r5: receiver
529 // r6: argc
530 // r7: argv
531 // r0,r8-r9, cp may be clobbered
532 ProfileEntryHookStub::MaybeCallEntryHook(masm);
533
534 // Enter an internal frame.
535 {
536 FrameScope scope(masm, StackFrame::INTERNAL);
537
538 // Set up the context (we need to use the caller context from the isolate).
539 ExternalReference context_address = ExternalReference::Create(
540 IsolateAddressId::kContextAddress, masm->isolate());
541 __ Move(cp, context_address);
542 __ LoadP(cp, MemOperand(cp));
543
544 // Push the function and the receiver onto the stack.
545 __ Push(r4, r5);
546
547 // Check if we have enough stack space to push all arguments.
548 // Clobbers r5.
549 Generate_CheckStackOverflow(masm, r6);
550
551 // Copy arguments to the stack in a loop.
552 // r4: function
553 // r6: argc
554 // r7: argv, i.e. points to first arg
555 Label loop, entry;
556 __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2));
557 __ add(r5, r7, r0);
558 // r5 points past last arg.
559 __ b(&entry);
560 __ bind(&loop);
561 __ LoadP(r8, MemOperand(r7)); // read next parameter
562 __ addi(r7, r7, Operand(kPointerSize));
563 __ LoadP(r0, MemOperand(r8)); // dereference handle
564 __ push(r0); // push parameter
565 __ bind(&entry);
566 __ cmp(r7, r5);
567 __ bne(&loop);
568
569 // Setup new.target and argc.
570 __ mr(r7, r3);
571 __ mr(r3, r6);
572 __ mr(r6, r7);
573
574 // Initialize all JavaScript callee-saved registers, since they will be seen
575 // by the garbage collector as part of handlers.
576 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex);
577 __ mr(r14, r7);
578 __ mr(r15, r7);
579 __ mr(r16, r7);
580 __ mr(r17, r7);
581
582 // Invoke the code.
583 Handle<Code> builtin = is_construct
584 ? BUILTIN_CODE(masm->isolate(), Construct)
585 : masm->isolate()->builtins()->Call();
586 __ Call(builtin, RelocInfo::CODE_TARGET);
587
588 // Exit the JS frame and remove the parameters (except function), and
589 // return.
590 }
591 __ blr();
592
593 // r3: result
594 }
595
596 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
597 Generate_JSEntryTrampolineHelper(masm, false);
598 }
599
600 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
601 Generate_JSEntryTrampolineHelper(masm, true);
602 }
603
604 static void ReplaceClosureCodeWithOptimizedCode(
605 MacroAssembler* masm, Register optimized_code, Register closure,
606 Register scratch1, Register scratch2, Register scratch3) {
607 // Store code entry in the closure.
608 __ StoreP(optimized_code, FieldMemOperand(closure, JSFunction::kCodeOffset),
609 r0);
610 __ mr(scratch1, optimized_code); // Write barrier clobbers scratch1 below.
611 __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
612 kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
613 OMIT_SMI_CHECK);
614 }
615
616 static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
617 Register args_count = scratch;
618
619 // Get the arguments + receiver count.
620 __ LoadP(args_count,
621 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
622 __ lwz(args_count,
623 FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));
624
625 // Leave the frame (also dropping the register file).
626 __ LeaveFrame(StackFrame::INTERPRETED);
627
628 __ add(sp, sp, args_count);
629 }
630
631 // Tail-call |function_id| if |smi_entry| == |marker|
632 static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
633 Register smi_entry,
634 OptimizationMarker marker,
635 Runtime::FunctionId function_id) {
636 Label no_match;
637 __ CmpSmiLiteral(smi_entry, Smi::FromEnum(marker), r0);
638 __ bne(&no_match);
639 GenerateTailCallToReturnedCode(masm, function_id);
640 __ bind(&no_match);
641 }
642
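// Checks the optimized code slot in the feedback vector. A Smi in the slot is
// an optimization marker and is dispatched to the matching runtime function;
// a weak reference to a Code object is installed in the closure and
// tail-called, unless the code is marked for deoptimization, in which case
// the slot is evicted. Falls through when there is nothing to do.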
643 static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
644 Register feedback_vector,
645 Register scratch1, Register scratch2,
646 Register scratch3) {
647 // ----------- S t a t e -------------
648 // -- r3 : argument count (preserved for callee if needed, and caller)
649 // -- r6 : new target (preserved for callee if needed, and caller)
650 // -- r4 : target function (preserved for callee if needed, and caller)
651 // -- feedback vector (preserved for caller if needed)
652 // -----------------------------------
653 DCHECK(
654 !AreAliased(feedback_vector, r3, r4, r6, scratch1, scratch2, scratch3));
655
656 Label optimized_code_slot_is_weak_ref, fallthrough;
657
658 Register closure = r4;
659 Register optimized_code_entry = scratch1;
660
661 __ LoadP(
662 optimized_code_entry,
663 FieldMemOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));
664
665 // Check if the code entry is a Smi. If yes, we interpret it as an
666 // optimization marker. Otherwise, interpret it as a weak reference to a code
667 // object.
668 __ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_weak_ref);
669
670 {
671 // Optimized code slot is a Smi optimization marker.
672
673 // Fall through if no optimization trigger.
674 __ CmpSmiLiteral(optimized_code_entry,
675 Smi::FromEnum(OptimizationMarker::kNone), r0);
676 __ beq(&fallthrough);
677
678 TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
679 OptimizationMarker::kLogFirstExecution,
680 Runtime::kFunctionFirstExecution);
681 TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
682 OptimizationMarker::kCompileOptimized,
683 Runtime::kCompileOptimized_NotConcurrent);
684 TailCallRuntimeIfMarkerEquals(
685 masm, optimized_code_entry,
686 OptimizationMarker::kCompileOptimizedConcurrent,
687 Runtime::kCompileOptimized_Concurrent);
688
689 {
690 // Otherwise, the marker is InOptimizationQueue, so fall through hoping
691 // that an interrupt will eventually update the slot with optimized code.
692 if (FLAG_debug_code) {
693 __ CmpSmiLiteral(
694 optimized_code_entry,
695 Smi::FromEnum(OptimizationMarker::kInOptimizationQueue), r0);
696 __ Assert(eq, AbortReason::kExpectedOptimizationSentinel);
697 }
698 __ b(&fallthrough);
699 }
700 }
701
702 {
703 // Optimized code slot is a weak reference.
704 __ bind(&optimized_code_slot_is_weak_ref);
705
706 __ LoadWeakValue(optimized_code_entry, optimized_code_entry, &fallthrough);
707
708 // Check if the optimized code is marked for deopt. If it is, call the
709 // runtime to clear it.
710 Label found_deoptimized_code;
711 __ LoadP(scratch2, FieldMemOperand(optimized_code_entry,
712 Code::kCodeDataContainerOffset));
713 __ LoadWordArith(
714 scratch2,
715 FieldMemOperand(scratch2, CodeDataContainer::kKindSpecificFlagsOffset));
716 __ TestBit(scratch2, Code::kMarkedForDeoptimizationBit, r0);
717 __ bne(&found_deoptimized_code, cr0);
718
719 // Optimized code is good, get it into the closure and link the closure into
720 // the optimized functions list, then tail call the optimized code.
721 // The feedback vector is no longer used, so re-use it as a scratch
722 // register.
723 ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
724 scratch2, scratch3, feedback_vector);
725 static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
726 __ addi(r5, optimized_code_entry,
727 Operand(Code::kHeaderSize - kHeapObjectTag));
728 __ Jump(r5);
729
730 // Optimized code slot contains deoptimized code, evict it and re-enter the
731 // closure's code.
732 __ bind(&found_deoptimized_code);
733 GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
734 }
735
736 // Fall-through if the optimized code cell is clear and there is no
737 // optimization marker.
738 __ bind(&fallthrough);
739 }
740
741 // Advance the current bytecode offset. This simulates what all bytecode
742 // handlers do upon completion of the underlying operation. Will bail out to a
743 // label if the bytecode (without prefix) is a return bytecode.
744 static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
745 Register bytecode_array,
746 Register bytecode_offset,
747 Register bytecode, Register scratch1,
748 Label* if_return) {
749 Register bytecode_size_table = scratch1;
750 Register scratch2 = bytecode;
751 DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
752 bytecode));
753 __ Move(bytecode_size_table,
754 ExternalReference::bytecode_size_table_address());
755
756 // Check if the bytecode is a Wide or ExtraWide prefix bytecode.
757 Label process_bytecode, extra_wide;
758 STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
759 STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
760 STATIC_ASSERT(2 == static_cast<int>(interpreter::Bytecode::kDebugBreakWide));
761 STATIC_ASSERT(3 ==
762 static_cast<int>(interpreter::Bytecode::kDebugBreakExtraWide));
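// The four prefix bytecodes occupy values 0-3 (see the asserts above); even
// values select the wide table, odd values the extra-wide table.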
763 __ cmpi(bytecode, Operand(0x3));
764 __ bgt(&process_bytecode);
765 __ andi(r0, bytecode, Operand(0x1));
766 __ bne(&extra_wide, cr0);
767
768 // Load the next bytecode and update table to the wide scaled table.
769 __ addi(bytecode_offset, bytecode_offset, Operand(1));
770 __ lbzx(bytecode, MemOperand(bytecode_array, bytecode_offset));
771 __ addi(bytecode_size_table, bytecode_size_table,
772 Operand(kIntSize * interpreter::Bytecodes::kBytecodeCount));
773 __ b(&process_bytecode);
774
775 __ bind(&extra_wide);
776 // Load the next bytecode and update table to the extra wide scaled table.
777 __ addi(bytecode_offset, bytecode_offset, Operand(1));
778 __ lbzx(bytecode, MemOperand(bytecode_array, bytecode_offset));
779 __ addi(bytecode_size_table, bytecode_size_table,
780 Operand(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));
781
782 // Load the size of the current bytecode.
783 __ bind(&process_bytecode);
784
785 // Bailout to the return label if this is a return bytecode.
786 #define JUMP_IF_EQUAL(NAME) \
787 __ cmpi(bytecode, \
788 Operand(static_cast<int>(interpreter::Bytecode::k##NAME))); \
789 __ beq(if_return);
790 RETURN_BYTECODE_LIST(JUMP_IF_EQUAL)
791 #undef JUMP_IF_EQUAL
792
793 // Otherwise, load the size of the current bytecode and advance the offset.
794 __ ShiftLeftImm(scratch2, bytecode, Operand(2));
795 __ lwzx(scratch2, MemOperand(bytecode_size_table, scratch2));
796 __ add(bytecode_offset, bytecode_offset, scratch2);
797 }
798 // Generate code for entering a JS function with the interpreter.
799 // On entry to the function the receiver and arguments have been pushed on the
800 // stack left to right. The actual argument count matches the formal parameter
801 // count expected by the function.
802 //
803 // The live registers are:
804 // o r4: the JS function object being called.
805 // o r6: the incoming new target or generator object
806 // o cp: our context
807 // o pp: the caller's constant pool pointer (if enabled)
808 // o fp: the caller's frame pointer
809 // o sp: stack pointer
810 // o lr: return address
811 //
812 // The function builds an interpreter frame. See InterpreterFrameConstants in
813 // frames.h for its layout.
814 void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
815 ProfileEntryHookStub::MaybeCallEntryHook(masm);
816
817 Register closure = r4;
818 Register feedback_vector = r5;
819
820 // Load the feedback vector from the closure.
821 __ LoadP(feedback_vector,
822 FieldMemOperand(closure, JSFunction::kFeedbackCellOffset));
823 __ LoadP(feedback_vector,
824 FieldMemOperand(feedback_vector, Cell::kValueOffset));
825 // Read off the optimized code slot in the feedback vector, and if there
826 // is optimized code or an optimization marker, call that instead.
827 MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, r7, r9, r8);
828
829 // Open a frame scope to indicate that there is a frame on the stack. The
830 // MANUAL indicates that the scope shouldn't actually generate code to set up
831 // the frame (that is done below).
832 FrameScope frame_scope(masm, StackFrame::MANUAL);
833 __ PushStandardFrame(closure);
834
835 // Get the bytecode array from the function object and load it into
836 // kInterpreterBytecodeArrayRegister.
837 __ LoadP(r3, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
838 // Load original bytecode array or the debug copy.
839 __ LoadP(kInterpreterBytecodeArrayRegister,
840 FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
841 GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, r7);
842
843 // Increment invocation count for the function.
844 __ LoadWord(
845 r8,
846 FieldMemOperand(feedback_vector, FeedbackVector::kInvocationCountOffset),
847 r0);
848 __ addi(r8, r8, Operand(1));
849 __ StoreWord(
850 r8,
851 FieldMemOperand(feedback_vector, FeedbackVector::kInvocationCountOffset),
852 r0);
853
854 // Check function data field is actually a BytecodeArray object.
855
856 if (FLAG_debug_code) {
857 __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
858 __ Assert(ne,
859 AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
860 cr0);
861 __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg,
862 BYTECODE_ARRAY_TYPE);
863 __ Assert(
864 eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
865 }
866
867 // Reset code age.
868 __ mov(r8, Operand(BytecodeArray::kNoAgeBytecodeAge));
869 __ StoreByte(r8, FieldMemOperand(kInterpreterBytecodeArrayRegister,
870 BytecodeArray::kBytecodeAgeOffset),
871 r0);
872
873 // Load initial bytecode offset.
874 __ mov(kInterpreterBytecodeOffsetRegister,
875 Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
876
877 // Push bytecode array and Smi tagged bytecode array offset.
878 __ SmiTag(r3, kInterpreterBytecodeOffsetRegister);
879 __ Push(kInterpreterBytecodeArrayRegister, r3);
880
881 // Allocate the local and temporary register file on the stack.
882 {
883 // Load frame size (word) from the BytecodeArray object.
884 __ lwz(r5, FieldMemOperand(kInterpreterBytecodeArrayRegister,
885 BytecodeArray::kFrameSizeOffset));
886
887 // Do a stack check to ensure we don't go over the limit.
888 Label ok;
889 __ sub(r8, sp, r5);
890 __ LoadRoot(r0, Heap::kRealStackLimitRootIndex);
891 __ cmpl(r8, r0);
892 __ bge(&ok);
893 __ CallRuntime(Runtime::kThrowStackOverflow);
894 __ bind(&ok);
895
896 // If ok, push undefined as the initial value for all register file entries.
897 // TODO(rmcilroy): Consider doing more than one push per loop iteration.
898 Label loop, no_args;
899 __ LoadRoot(r8, Heap::kUndefinedValueRootIndex);
900 __ ShiftRightImm(r5, r5, Operand(kPointerSizeLog2), SetRC);
901 __ beq(&no_args, cr0);
902 __ mtctr(r5);
903 __ bind(&loop);
904 __ push(r8);
905 __ bdnz(&loop);
906 __ bind(&no_args);
907 }
908
909 // If the bytecode array has a valid incoming new target or generator object
910 // register, initialize it with incoming value which was passed in r6.
911 Label no_incoming_new_target_or_generator_register;
912 __ LoadWordArith(
913 r8, FieldMemOperand(
914 kInterpreterBytecodeArrayRegister,
915 BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
916 __ cmpi(r8, Operand::Zero());
917 __ beq(&no_incoming_new_target_or_generator_register);
918 __ ShiftLeftImm(r8, r8, Operand(kPointerSizeLog2));
919 __ StorePX(r6, MemOperand(fp, r8));
920 __ bind(&no_incoming_new_target_or_generator_register);
921
922 // Load accumulator with undefined.
923 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
924 // Load the dispatch table into a register and dispatch to the bytecode
925 // handler at the current bytecode offset.
926 Label do_dispatch;
927 __ bind(&do_dispatch);
928 __ Move(
929 kInterpreterDispatchTableRegister,
930 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
931 __ lbzx(r6, MemOperand(kInterpreterBytecodeArrayRegister,
932 kInterpreterBytecodeOffsetRegister));
933 __ ShiftLeftImm(r6, r6, Operand(kPointerSizeLog2));
934 __ LoadPX(kJavaScriptCallCodeStartRegister,
935 MemOperand(kInterpreterDispatchTableRegister, r6));
936 __ Call(kJavaScriptCallCodeStartRegister);
937
938 masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
939
940 // Any returns to the entry trampoline are either due to the return bytecode
941 // or the interpreter tail calling a builtin and then a dispatch.
942
943 // Get bytecode array and bytecode offset from the stack frame.
944 __ LoadP(kInterpreterBytecodeArrayRegister,
945 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
946 __ LoadP(kInterpreterBytecodeOffsetRegister,
947 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
948 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
949
950 // Either return, or advance to the next bytecode and dispatch.
951 Label do_return;
952 __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
953 kInterpreterBytecodeOffsetRegister));
954 AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
955 kInterpreterBytecodeOffsetRegister, r4, r5,
956 &do_return);
957 __ b(&do_dispatch);
958
959 __ bind(&do_return);
960 // The return value is in r3.
961 LeaveInterpreterFrame(masm, r5);
962 __ blr();
963 }
964
965 static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
966 Register scratch,
967 Label* stack_overflow) {
968 // Check the stack for overflow. We are not trying to catch
969 // interruptions (e.g. debug break and preemption) here, so the "real stack
970 // limit" is checked.
971 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
972 // Make scratch the space we have left. The stack might already be overflowed
973 // here which will cause scratch to become negative.
974 __ sub(scratch, sp, scratch);
975 // Check if the arguments will overflow the stack.
976 __ ShiftLeftImm(r0, num_args, Operand(kPointerSizeLog2));
977 __ cmp(scratch, r0);
978 __ ble(stack_overflow); // Signed comparison.
979 }
980
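// Pushes |count| arguments onto the stack, reading them from the address in
// |index| downwards in memory; |index| and |scratch| are clobbered.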
981 static void Generate_InterpreterPushArgs(MacroAssembler* masm,
982 Register num_args, Register index,
983 Register count, Register scratch) {
984 Label loop, skip;
985 __ cmpi(count, Operand::Zero());
986 __ beq(&skip);
987 __ addi(index, index, Operand(kPointerSize)); // Bias up for LoadPU
988 __ mtctr(count);
989 __ bind(&loop);
990 __ LoadPU(scratch, MemOperand(index, -kPointerSize));
991 __ push(scratch);
992 __ bdnz(&loop);
993 __ bind(&skip);
994 }
995
996 // static
997 void Builtins::Generate_InterpreterPushArgsThenCallImpl(
998 MacroAssembler* masm, ConvertReceiverMode receiver_mode,
999 InterpreterPushArgsMode mode) {
1000 DCHECK(mode != InterpreterPushArgsMode::kArrayFunction);
1001 // ----------- S t a t e -------------
1002 // -- r3 : the number of arguments (not including the receiver)
1003 // -- r5 : the address of the first argument to be pushed. Subsequent
1004 // arguments should be consecutive above this, in the same order as
1005 // they are to be pushed onto the stack.
1006 // -- r4 : the target to call (can be any Object).
1007 // -----------------------------------
1008 Label stack_overflow;
1009
1010 // Calculate number of arguments (add one for receiver).
1011 __ addi(r6, r3, Operand(1));
1012
1013 Generate_StackOverflowCheck(masm, r6, ip, &stack_overflow);
1014
1015 // Push "undefined" as the receiver arg if we need to.
1016 if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
1017 __ PushRoot(Heap::kUndefinedValueRootIndex);
1018 __ mr(r6, r3); // Argument count is correct.
1019 }
1020
1021 // Push the arguments. r5, r6, r7 will be modified.
1022 Generate_InterpreterPushArgs(masm, r6, r5, r6, r7);
1023
1024 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1025 __ Pop(r5); // Pass the spread in a register
1026 __ subi(r3, r3, Operand(1)); // Subtract one for spread
1027 }
1028
1029 // Call the target.
1030 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1031 __ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
1032 RelocInfo::CODE_TARGET);
1033 } else {
1034 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
1035 RelocInfo::CODE_TARGET);
1036 }
1037
1038 __ bind(&stack_overflow);
1039 {
1040 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1041 // Unreachable Code.
1042 __ bkpt(0);
1043 }
1044 }
1045
1046 // static
1047 void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
1048 MacroAssembler* masm, InterpreterPushArgsMode mode) {
1049 // ----------- S t a t e -------------
1050 // -- r3 : argument count (not including receiver)
1051 // -- r6 : new target
1052 // -- r4 : constructor to call
1053 // -- r5 : allocation site feedback if available, undefined otherwise.
1054 // -- r7 : address of the first argument
1055 // -----------------------------------
1056 Label stack_overflow;
1057
1058 // Push a slot for the receiver to be constructed.
1059 __ li(r0, Operand::Zero());
1060 __ push(r0);
1061
1062 // Push the arguments (skip if none).
1063 Label skip;
1064 __ cmpi(r3, Operand::Zero());
1065 __ beq(&skip);
1066 Generate_StackOverflowCheck(masm, r3, ip, &stack_overflow);
1067 // Push the arguments. r8, r7, r9 will be modified.
1068 Generate_InterpreterPushArgs(masm, r3, r7, r3, r9);
1069 __ bind(&skip);
1070 if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1071 __ Pop(r5); // Pass the spread in a register
1072 __ subi(r3, r3, Operand(1)); // Subtract one for spread
1073 } else {
1074 __ AssertUndefinedOrAllocationSite(r5, r8);
1075 }
1076 if (mode == InterpreterPushArgsMode::kArrayFunction) {
1077 __ AssertFunction(r4);
1078
1079 // Tail call to the array construct stub (still in the caller
1080 // context at this point).
1081 Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
1082 __ Jump(code, RelocInfo::CODE_TARGET);
1083 } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
1084 // Call the constructor with r3, r4, and r6 unmodified.
1085 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
1086 RelocInfo::CODE_TARGET);
1087 } else {
1088 DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
1089 // Call the constructor with r3, r4, and r6 unmodified.
1090 __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
1091 }
1092
1093 __ bind(&stack_overflow);
1094 {
1095 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1096 // Unreachable Code.
1097 __ bkpt(0);
1098 }
1099 }
1100
1101 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
1102 // Set the return address to the correct point in the interpreter entry
1103 // trampoline.
1104 Label builtin_trampoline, trampoline_loaded;
1105 Smi* interpreter_entry_return_pc_offset(
1106 masm->isolate()->heap()->interpreter_entry_return_pc_offset());
1107 DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
1108
1109 // If the SFI function_data is an InterpreterData, get the trampoline stored
1110 // in it, otherwise get the trampoline from the builtins list.
1111 __ LoadP(r5, MemOperand(fp, StandardFrameConstants::kFunctionOffset));
1112 __ LoadP(r5, FieldMemOperand(r5, JSFunction::kSharedFunctionInfoOffset));
1113 __ LoadP(r5, FieldMemOperand(r5, SharedFunctionInfo::kFunctionDataOffset));
1114 __ CompareObjectType(r5, kInterpreterDispatchTableRegister,
1115 kInterpreterDispatchTableRegister,
1116 INTERPRETER_DATA_TYPE);
1117 __ bne(&builtin_trampoline);
1118
1119 __ LoadP(r5,
1120 FieldMemOperand(r5, InterpreterData::kInterpreterTrampolineOffset));
1121 __ b(&trampoline_loaded);
1122
1123 __ bind(&builtin_trampoline);
1124 __ Move(r5, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));
1125
1126 __ bind(&trampoline_loaded);
1127 __ addi(r0, r5, Operand(interpreter_entry_return_pc_offset->value() +
1128 Code::kHeaderSize - kHeapObjectTag));
1129 __ mtlr(r0);
1130
1131 // Initialize the dispatch table register.
1132 __ Move(
1133 kInterpreterDispatchTableRegister,
1134 ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
1135
1136 // Get the bytecode array pointer from the frame.
1137 __ LoadP(kInterpreterBytecodeArrayRegister,
1138 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1139
1140 if (FLAG_debug_code) {
1141 // Check function data field is actually a BytecodeArray object.
1142 __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0);
1143 __ Assert(ne,
1144 AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry,
1145 cr0);
1146 __ CompareObjectType(kInterpreterBytecodeArrayRegister, r4, no_reg,
1147 BYTECODE_ARRAY_TYPE);
1148 __ Assert(
1149 eq, AbortReason::kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
1150 }
1151
1152 // Get the target bytecode offset from the frame.
1153 __ LoadP(kInterpreterBytecodeOffsetRegister,
1154 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1155 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1156
1157 // Dispatch to the target bytecode.
1158 __ lbzx(ip, MemOperand(kInterpreterBytecodeArrayRegister,
1159 kInterpreterBytecodeOffsetRegister));
1160 __ ShiftLeftImm(ip, ip, Operand(kPointerSizeLog2));
1161 __ LoadPX(kJavaScriptCallCodeStartRegister,
1162 MemOperand(kInterpreterDispatchTableRegister, ip));
1163 __ Jump(kJavaScriptCallCodeStartRegister);
1164 }
1165
1166 void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
1167 // Get bytecode array and bytecode offset from the stack frame.
1168 __ LoadP(kInterpreterBytecodeArrayRegister,
1169 MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
1170 __ LoadP(kInterpreterBytecodeOffsetRegister,
1171 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1172 __ SmiUntag(kInterpreterBytecodeOffsetRegister);
1173
1174 // Load the current bytecode.
1175 __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister,
1176 kInterpreterBytecodeOffsetRegister));
1177
1178 // Advance to the next bytecode.
1179 Label if_return;
1180 AdvanceBytecodeOffsetOrReturn(masm, kInterpreterBytecodeArrayRegister,
1181 kInterpreterBytecodeOffsetRegister, r4, r5,
1182 &if_return);
1183
1184 // Convert new bytecode offset to a Smi and save in the stackframe.
1185 __ SmiTag(r5, kInterpreterBytecodeOffsetRegister);
1186 __ StoreP(r5,
1187 MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
1188
1189 Generate_InterpreterEnterBytecode(masm);
1190
1191 // We should never take the if_return path.
1192 __ bind(&if_return);
1193 __ Abort(AbortReason::kInvalidBytecodeAdvance);
1194 }
1195
1196 void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
1197 Generate_InterpreterEnterBytecode(masm);
1198 }
1199
1200 void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
1201 // ----------- S t a t e -------------
1202 // -- r3 : argument count (preserved for callee)
1203 // -- r4 : new target (preserved for callee)
1204 // -- r6 : target function (preserved for callee)
1205 // -----------------------------------
1206 Label failed;
1207 {
1208 FrameScope scope(masm, StackFrame::INTERNAL);
1209 // Preserve argument count for later compare.
1210 __ Move(r7, r3);
1211 // Push a copy of the target function and the new target.
1212 // Push function as parameter to the runtime call.
1213 __ SmiTag(r3);
1214 __ Push(r3, r4, r6, r4);
1215
1216 // Copy arguments from caller (stdlib, foreign, heap).
1217 Label args_done;
1218 for (int j = 0; j < 4; ++j) {
1219 Label over;
1220 if (j < 3) {
1221 __ cmpi(r7, Operand(j));
1222 __ bne(&over);
1223 }
1224 for (int i = j - 1; i >= 0; --i) {
1225 __ LoadP(r7, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
1226 i * kPointerSize));
1227 __ push(r7);
1228 }
1229 for (int i = 0; i < 3 - j; ++i) {
1230 __ PushRoot(Heap::kUndefinedValueRootIndex);
1231 }
1232 if (j < 3) {
1233 __ jmp(&args_done);
1234 __ bind(&over);
1235 }
1236 }
1237 __ bind(&args_done);
1238
1239 // Call the runtime; on success, unwind this frame and then the parent frame.
1240 __ CallRuntime(Runtime::kInstantiateAsmJs, 4);
1241 // A smi 0 is returned on failure, an object on success.
1242 __ JumpIfSmi(r3, &failed);
1243
1244 __ Drop(2);
1245 __ pop(r7);
1246 __ SmiUntag(r7);
1247 scope.GenerateLeaveFrame();
1248
1249 __ addi(r7, r7, Operand(1));
1250 __ Drop(r7);
1251 __ Ret();
1252
1253 __ bind(&failed);
1254 // Restore target function and new target.
1255 __ Pop(r3, r4, r6);
1256 __ SmiUntag(r3);
1257 }
1258 // On failure, tail call back to regular JS by re-calling the function
1259 // which has been reset to the CompileLazy builtin.
1260 static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
1261 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kCodeOffset));
1262 __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
1263 __ JumpToJSEntry(r5);
1264 }
1265
1266 namespace {
1267 void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
1268 bool java_script_builtin,
1269 bool with_result) {
1270 const RegisterConfiguration* config(RegisterConfiguration::Default());
1271 int allocatable_register_count = config->num_allocatable_general_registers();
1272 if (with_result) {
1273 // Overwrite the hole inserted by the deoptimizer with the return value from
1274 // the LAZY deopt point.
1275 __ StoreP(
1276 r3, MemOperand(
1277 sp, config->num_allocatable_general_registers() * kPointerSize +
1278 BuiltinContinuationFrameConstants::kFixedFrameSize));
1279 }
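// Restore the allocatable general registers from the continuation frame in
// reverse order; the argument count register holds a Smi and is untagged when
// continuing to a JavaScript builtin.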
1280 for (int i = allocatable_register_count - 1; i >= 0; --i) {
1281 int code = config->GetAllocatableGeneralCode(i);
1282 __ Pop(Register::from_code(code));
1283 if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
1284 __ SmiUntag(Register::from_code(code));
1285 }
1286 }
1287 __ LoadP(
1288 fp,
1289 MemOperand(sp, BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
1290 __ Pop(ip);
1291 __ addi(sp, sp,
1292 Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
1293 __ Pop(r0);
1294 __ mtlr(r0);
1295 __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag));
1296 __ Jump(ip);
1297 }
1298 } // namespace
1299
1300 void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
1301 Generate_ContinueToBuiltinHelper(masm, false, false);
1302 }
1303
1304 void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
1305 MacroAssembler* masm) {
1306 Generate_ContinueToBuiltinHelper(masm, false, true);
1307 }
1308
1309 void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
1310 Generate_ContinueToBuiltinHelper(masm, true, false);
1311 }
1312
1313 void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
1314 MacroAssembler* masm) {
1315 Generate_ContinueToBuiltinHelper(masm, true, true);
1316 }
1317
1318 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1319 {
1320 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1321 __ CallRuntime(Runtime::kNotifyDeoptimized);
1322 }
1323
1324 DCHECK_EQ(kInterpreterAccumulatorRegister.code(), r3.code());
1325 __ LoadP(r3, MemOperand(sp, 0 * kPointerSize));
1326 __ addi(sp, sp, Operand(1 * kPointerSize));
1327 __ Ret();
1328 }
1329
1330 static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
1331 bool has_handler_frame) {
1332 // Lookup the function in the JavaScript frame.
1333 if (has_handler_frame) {
1334 __ LoadP(r3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1335 __ LoadP(r3, MemOperand(r3, JavaScriptFrameConstants::kFunctionOffset));
1336 } else {
1337 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1338 }
1339
1340 {
1341 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1342 // Pass function as argument.
1343 __ push(r3);
1344 __ CallRuntime(Runtime::kCompileForOnStackReplacement);
1345 }
1346
1347 // If the code object is null, just return to the caller.
1348 Label skip;
1349 __ CmpSmiLiteral(r3, Smi::kZero, r0);
1350 __ bne(&skip);
1351 __ Ret();
1352
1353 __ bind(&skip);
1354
1355 // Drop any potential handler frame that may be sitting on top of the actual
1356 // JavaScript frame. This is the case when OSR is triggered from bytecode.
1357 if (has_handler_frame) {
1358 __ LeaveFrame(StackFrame::STUB);
1359 }
1360
1361 // Load deoptimization data from the code object.
1362 // <deopt_data> = <code>[#deoptimization_data_offset]
1363 __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset));
1364
1365 {
1366 ConstantPoolUnavailableScope constant_pool_unavailable(masm);
1367 __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start
1368
1369 if (FLAG_enable_embedded_constant_pool) {
1370 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3);
1371 }
1372
1373 // Load the OSR entrypoint offset from the deoptimization data.
1374 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1375 __ LoadP(r4,
1376 FieldMemOperand(r4, FixedArray::OffsetOfElementAt(
1377 DeoptimizationData::kOsrPcOffsetIndex)));
1378 __ SmiUntag(r4);
1379
1380 // Compute the target address = code start + osr_offset
1381 __ add(r0, r3, r4);
1382
1383 // And "return" to the OSR entry point of the function.
1384 __ mtlr(r0);
1385 __ blr();
1386 }
1387 }
1388
1389 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1390 Generate_OnStackReplacementHelper(masm, false);
1391 }
1392
1393 void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
1394 Generate_OnStackReplacementHelper(masm, true);
1395 }
1396
1397 // static
1398 void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
1399 // ----------- S t a t e -------------
1400 // -- r3 : argc
1401 // -- sp[0] : argArray
1402 // -- sp[4] : thisArg
1403 // -- sp[8] : receiver
1404 // -----------------------------------
1405
1406 // 1. Load receiver into r4, argArray into r5 (if present), remove all
1407 // arguments from the stack (including the receiver), and push thisArg (if
1408 // present) instead.
1409 {
1410 Label skip;
1411 Register arg_size = r8;
1412 Register new_sp = r6;
1413 Register scratch = r7;
1414 __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
1415 __ add(new_sp, sp, arg_size);
1416 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
1417 __ mr(r5, scratch);
1418 __ LoadP(r4, MemOperand(new_sp, 0)); // receiver
1419 __ cmpi(arg_size, Operand(kPointerSize));
1420 __ blt(&skip);
1421 __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize)); // thisArg
1422 __ beq(&skip);
1423 __ LoadP(r5, MemOperand(new_sp, 2 * -kPointerSize)); // argArray
1424 __ bind(&skip);
1425 __ mr(sp, new_sp);
1426 __ StoreP(scratch, MemOperand(sp, 0));
1427 }
1428
1429 // ----------- S t a t e -------------
1430 // -- r5 : argArray
1431 // -- r4 : receiver
1432 // -- sp[0] : thisArg
1433 // -----------------------------------
1434
1435 // 2. We don't need to check explicitly for callable receiver here,
1436 // since that's the first thing the Call/CallWithArrayLike builtins
1437 // will do.
1438
1439 // 3. Tail call with no arguments if argArray is null or undefined.
1440 Label no_arguments;
1441 __ JumpIfRoot(r5, Heap::kNullValueRootIndex, &no_arguments);
1442 __ JumpIfRoot(r5, Heap::kUndefinedValueRootIndex, &no_arguments);
1443
1444 // 4a. Apply the receiver to the given argArray.
1445 __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1446 RelocInfo::CODE_TARGET);
1447
1448 // 4b. The argArray is either null or undefined, so we tail call without any
1449 // arguments to the receiver.
1450 __ bind(&no_arguments);
1451 {
1452 __ li(r3, Operand::Zero());
1453 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1454 }
1455 }
1456
1457 // static
1458 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
1459 // 1. Make sure we have at least one argument.
1460 // r3: actual number of arguments
1461 {
1462 Label done;
1463 __ cmpi(r3, Operand::Zero());
1464 __ bne(&done);
1465 __ PushRoot(Heap::kUndefinedValueRootIndex);
1466 __ addi(r3, r3, Operand(1));
1467 __ bind(&done);
1468 }
1469
1470 // 2. Get the callable to call (passed as receiver) from the stack.
1471 // r3: actual number of arguments
1472 __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2));
1473 __ LoadPX(r4, MemOperand(sp, r5));
1474
1475 // 3. Shift arguments and return address one slot down on the stack
1476 // (overwriting the original receiver). Adjust argument count to make
1477 // the original first argument the new receiver.
1478 // r3: actual number of arguments
1479 // r4: callable
1480 {
1481 Label loop;
1482 // Calculate the copy start address (destination). Copy end address is sp.
1483 __ add(r5, sp, r5);
1484
1485 __ mtctr(r3);
1486 __ bind(&loop);
1487 __ LoadP(ip, MemOperand(r5, -kPointerSize));
1488 __ StoreP(ip, MemOperand(r5));
1489 __ subi(r5, r5, Operand(kPointerSize));
1490 __ bdnz(&loop);
1491 // Adjust the actual number of arguments and remove the top element
1492 // (which is a copy of the last argument).
1493 __ subi(r3, r3, Operand(1));
1494 __ pop();
1495 }
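  // For example, for fn.call(obj, a, b) the stack (from sp upwards) was
  // [b, a, obj, fn] with r3 == 3; after the shift and pop it is [b, a, obj]
  // with r3 == 2, and obj acts as the receiver for the Call builtin below.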
1496
1497 // 4. Call the callable.
1498 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
1499 }
1500
1501 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
1502 // ----------- S t a t e -------------
1503 // -- r3 : argc
1504 // -- sp[0] : argumentsList
1505 // -- sp[4] : thisArgument
1506 // -- sp[8] : target
1507 // -- sp[12] : receiver
1508 // -----------------------------------
1509
1510 // 1. Load target into r4 (if present), argumentsList into r5 (if present),
1511 // remove all arguments from the stack (including the receiver), and push
1512 // thisArgument (if present) instead.
1513 {
1514 Label skip;
1515 Register arg_size = r8;
1516 Register new_sp = r6;
1517 Register scratch = r7;
1518 __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
1519 __ add(new_sp, sp, arg_size);
1520 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
1521 __ mr(scratch, r4);
1522 __ mr(r5, r4);
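    // target (r4), thisArgument (scratch) and argumentsList (r5) all start
    // out as undefined and are overwritten below, depending on how many
    // arguments were actually passed.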
1523 __ cmpi(arg_size, Operand(kPointerSize));
1524 __ blt(&skip);
1525 __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize)); // target
1526 __ beq(&skip);
1527 __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize)); // thisArgument
1528 __ cmpi(arg_size, Operand(2 * kPointerSize));
1529 __ beq(&skip);
1530 __ LoadP(r5, MemOperand(new_sp, 3 * -kPointerSize)); // argumentsList
1531 __ bind(&skip);
1532 __ mr(sp, new_sp);
1533 __ StoreP(scratch, MemOperand(sp, 0));
1534 }
1535
1536 // ----------- S t a t e -------------
1537 // -- r5 : argumentsList
1538 // -- r4 : target
1539 // -- sp[0] : thisArgument
1540 // -----------------------------------
1541
1542 // 2. We don't need to check explicitly for callable target here,
1543 // since that's the first thing the Call/CallWithArrayLike builtins
1544 // will do.
1545
1546 // 3. Apply the target to the given argumentsList.
1547 __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
1548 RelocInfo::CODE_TARGET);
1549 }
1550
1551 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
1552 // ----------- S t a t e -------------
1553 // -- r3 : argc
1554 // -- sp[0] : new.target (optional)
1555 // -- sp[4] : argumentsList
1556 // -- sp[8] : target
1557 // -- sp[12] : receiver
1558 // -----------------------------------
1559
1560 // 1. Load target into r4 (if present), argumentsList into r5 (if present),
1561 // new.target into r6 (if present, otherwise use target), remove all
1562 // arguments from the stack (including the receiver), and push thisArgument
1563 // (if present) instead.
1564 {
1565 Label skip;
1566 Register arg_size = r8;
1567 Register new_sp = r7;
1568 __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2));
1569 __ add(new_sp, sp, arg_size);
1570 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
1571 __ mr(r5, r4);
1572 __ mr(r6, r4);
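    // target (r4), argumentsList (r5) and new.target (r6) all default to
    // undefined; the loads below overwrite them depending on the argument
    // count, with new.target falling back to target when absent.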
1573 __ StoreP(r4, MemOperand(new_sp, 0)); // receiver (undefined)
1574 __ cmpi(arg_size, Operand(kPointerSize));
1575 __ blt(&skip);
1576 __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize)); // target
1577 __ mr(r6, r4); // new.target defaults to target
1578 __ beq(&skip);
1579 __ LoadP(r5, MemOperand(new_sp, 2 * -kPointerSize)); // argumentsList
1580 __ cmpi(arg_size, Operand(2 * kPointerSize));
1581 __ beq(&skip);
1582 __ LoadP(r6, MemOperand(new_sp, 3 * -kPointerSize)); // new.target
1583 __ bind(&skip);
1584 __ mr(sp, new_sp);
1585 }
1586
1587 // ----------- S t a t e -------------
1588 // -- r5 : argumentsList
1589 // -- r6 : new.target
1590 // -- r4 : target
1591 // -- sp[0] : receiver (undefined)
1592 // -----------------------------------
1593
1594 // 2. We don't need to check explicitly for constructor target here,
1595 // since that's the first thing the Construct/ConstructWithArrayLike
1596 // builtins will do.
1597
1598 // 3. We don't need to check explicitly for constructor new.target here,
1599 // since that's the second thing the Construct/ConstructWithArrayLike
1600 // builtins will do.
1601
1602 // 4. Construct the target with the given new.target and argumentsList.
1603 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
1604 RelocInfo::CODE_TARGET);
1605 }
1606
1607 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1608 __ SmiTag(r3);
1609 __ mov(r7, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
1610 __ mflr(r0);
1611 __ push(r0);
1612 if (FLAG_enable_embedded_constant_pool) {
1613 __ Push(fp, kConstantPoolRegister, r7, r4, r3);
1614 } else {
1615 __ Push(fp, r7, r4, r3);
1616 }
1617 __ Push(Smi::kZero); // Padding.
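  // The adaptor frame now holds, from the saved lr downwards: the caller's
  // fp, an optional constant pool pointer, the ARGUMENTS_ADAPTOR marker, the
  // target function, the smi-tagged argument count and a padding slot.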
1618 __ addi(fp, sp,
1619 Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp));
1620 }
1621
1622 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1623 // ----------- S t a t e -------------
1624 // -- r3 : result being passed through
1625 // -----------------------------------
1626 // Get the number of arguments passed (as a smi), tear down the frame and
1627 // then tear down the parameters.
1628 __ LoadP(r4, MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
1629 int stack_adjustment = kPointerSize; // adjust for receiver
1630 __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment);
1631 __ SmiToPtrArrayOffset(r0, r4);
1632 __ add(sp, sp, r0);
1633 }
1634
1635 // static
1636 void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
1637 Handle<Code> code) {
1638 // ----------- S t a t e -------------
1639 // -- r4 : target
1640 // -- r3 : number of parameters on the stack (not including the receiver)
1641 // -- r5 : arguments list (a FixedArray)
1642 // -- r7 : len (number of elements to push from args)
1643 // -- r6 : new.target (for [[Construct]])
1644 // -----------------------------------
1645
1646 Register scratch = ip;
1647
1648 if (masm->emit_debug_code()) {
1649 // Allow r5 to be a FixedArray, or a FixedDoubleArray if r7 == 0.
1650 Label ok, fail;
1651 __ AssertNotSmi(r5);
1652 __ LoadP(scratch, FieldMemOperand(r5, HeapObject::kMapOffset));
1653 __ LoadHalfWord(scratch,
1654 FieldMemOperand(scratch, Map::kInstanceTypeOffset));
1655 __ cmpi(scratch, Operand(FIXED_ARRAY_TYPE));
1656 __ beq(&ok);
1657 __ cmpi(scratch, Operand(FIXED_DOUBLE_ARRAY_TYPE));
1658 __ bne(&fail);
1659 __ cmpi(r7, Operand::Zero());
1660 __ beq(&ok);
1661 // Fall through.
1662 __ bind(&fail);
1663 __ Abort(AbortReason::kOperandIsNotAFixedArray);
1664
1665 __ bind(&ok);
1666 }
1667
1668 // Check for stack overflow.
1669 {
1670 // Check the stack for overflow. We are not trying to catch interruptions
1671 // (i.e. debug break and preemption) here, so check the "real stack limit".
1672 Label done;
1673 __ LoadRoot(ip, Heap::kRealStackLimitRootIndex);
1674 // Make ip the space we have left. The stack might already be overflowed
1675 // here which will cause ip to become negative.
1676 __ sub(ip, sp, ip);
1677 // Check if the arguments will overflow the stack.
1678 __ ShiftLeftImm(r0, r7, Operand(kPointerSizeLog2));
1679 __ cmp(ip, r0); // Signed comparison.
1680 __ bgt(&done);
1681 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1682 __ bind(&done);
1683 }
1684
1685 // Push arguments onto the stack (thisArgument is already on the stack).
1686 {
1687 Label loop, no_args, skip;
1688 __ cmpi(r7, Operand::Zero());
1689 __ beq(&no_args);
1690 __ addi(r5, r5,
1691 Operand(FixedArray::kHeaderSize - kHeapObjectTag - kPointerSize));
1692 __ mtctr(r7);
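    // Holes in the arguments array (possible for holey elements kinds) are
    // replaced with undefined before being pushed.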
1693 __ bind(&loop);
1694 __ LoadPU(ip, MemOperand(r5, kPointerSize));
1695 __ CompareRoot(ip, Heap::kTheHoleValueRootIndex);
1696 __ bne(&skip);
1697 __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
1698 __ bind(&skip);
1699 __ push(ip);
1700 __ bdnz(&loop);
1701 __ bind(&no_args);
1702 __ add(r3, r3, r7);
1703 }
1704
1705 // Tail-call to the actual Call or Construct builtin.
1706 __ Jump(code, RelocInfo::CODE_TARGET);
1707 }
1708
1709 // static
1710 void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
1711 CallOrConstructMode mode,
1712 Handle<Code> code) {
1713 // ----------- S t a t e -------------
1714 // -- r3 : the number of arguments (not including the receiver)
1715 // -- r6 : the new.target (for [[Construct]] calls)
1716 // -- r4 : the target to call (can be any Object)
1717 // -- r5 : start index (to support rest parameters)
1718 // -----------------------------------
1719
1720 Register scratch = r9;
1721
1722 if (mode == CallOrConstructMode::kConstruct) {
1723 Label new_target_constructor, new_target_not_constructor;
1724 __ JumpIfSmi(r6, &new_target_not_constructor);
1725 __ LoadP(scratch, FieldMemOperand(r6, HeapObject::kMapOffset));
1726 __ lbz(scratch, FieldMemOperand(scratch, Map::kBitFieldOffset));
1727 __ TestBit(scratch, Map::IsConstructorBit::kShift, r0);
1728 __ bne(&new_target_constructor, cr0);
1729 __ bind(&new_target_not_constructor);
1730 {
1731 FrameScope scope(masm, StackFrame::MANUAL);
1732 __ EnterFrame(StackFrame::INTERNAL);
1733 __ Push(r6);
1734 __ CallRuntime(Runtime::kThrowNotConstructor);
1735 }
1736 __ bind(&new_target_constructor);
1737 }
1738
1739 // Check if we have an arguments adaptor frame below the function frame.
1740 Label arguments_adaptor, arguments_done;
1741 __ LoadP(r7, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
1742 __ LoadP(ip, MemOperand(r7, CommonFrameConstants::kContextOrFrameTypeOffset));
1743 __ cmpi(ip, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)));
1744 __ beq(&arguments_adaptor);
1745 {
1746 __ LoadP(r8, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1747 __ LoadP(r8, FieldMemOperand(r8, JSFunction::kSharedFunctionInfoOffset));
1748 __ LoadHalfWord(
1749 r8,
1750 FieldMemOperand(r8, SharedFunctionInfo::kFormalParameterCountOffset));
1751 __ mr(r7, fp);
1752 }
1753 __ b(&arguments_done);
1754 __ bind(&arguments_adaptor);
1755 {
1756 // Load the length from the ArgumentsAdaptorFrame.
1757 __ LoadP(r8, MemOperand(r7, ArgumentsAdaptorFrameConstants::kLengthOffset));
1758 __ SmiUntag(r8);
1759 }
1760 __ bind(&arguments_done);
1761
1762 Label stack_done, stack_overflow;
1763 __ sub(r8, r8, r5);
1764 __ cmpi(r8, Operand::Zero());
1765 __ ble(&stack_done);
1766 {
1767 // Check for stack overflow.
1768 Generate_StackOverflowCheck(masm, r8, r5, &stack_overflow);
1769
1770 // Forward the arguments from the caller frame.
1771 {
1772 Label loop;
1773 __ addi(r7, r7, Operand(kPointerSize));
1774 __ add(r3, r3, r8);
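      // r7 points one slot above the caller's frame pointer; r8 counts down
      // to zero, pushing one forwarded argument per iteration.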
1775 __ bind(&loop);
1776 {
1777 __ ShiftLeftImm(ip, r8, Operand(kPointerSizeLog2));
1778 __ LoadPX(ip, MemOperand(r7, ip));
1779 __ push(ip);
1780 __ subi(r8, r8, Operand(1));
1781 __ cmpi(r8, Operand::Zero());
1782 __ bne(&loop);
1783 }
1784 }
1785 }
1786 __ b(&stack_done);
1787 __ bind(&stack_overflow);
1788 __ TailCallRuntime(Runtime::kThrowStackOverflow);
1789 __ bind(&stack_done);
1790
1791 // Tail-call to the {code} handler.
1792 __ Jump(code, RelocInfo::CODE_TARGET);
1793 }
1794
1795 // static
1796 void Builtins::Generate_CallFunction(MacroAssembler* masm,
1797 ConvertReceiverMode mode) {
1798 // ----------- S t a t e -------------
1799 // -- r3 : the number of arguments (not including the receiver)
1800 // -- r4 : the function to call (checked to be a JSFunction)
1801 // -----------------------------------
1802 __ AssertFunction(r4);
1803
1804 // See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
1805 // Check that the function is not a "classConstructor".
1806 Label class_constructor;
1807 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
1808 __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kFlagsOffset));
1809 __ TestBitMask(r6, SharedFunctionInfo::IsClassConstructorBit::kMask, r0);
1810 __ bne(&class_constructor, cr0);
1811
1812 // Enter the context of the function; ToObject has to run in the function
1813 // context, and we also need to take the global proxy from the function
1814 // context in case of conversion.
1815 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
1816 // We need to convert the receiver for non-native sloppy mode functions.
1817 Label done_convert;
1818 __ andi(r0, r6,
1819 Operand(SharedFunctionInfo::IsStrictBit::kMask |
1820 SharedFunctionInfo::IsNativeBit::kMask));
1821 __ bne(&done_convert, cr0);
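  // If either the strict bit or the native bit is set, the receiver is left
  // untouched; only sloppy-mode, non-native functions need the ToObject
  // conversion below.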
1822 {
1823 // ----------- S t a t e -------------
1824 // -- r3 : the number of arguments (not including the receiver)
1825 // -- r4 : the function to call (checked to be a JSFunction)
1826 // -- r5 : the shared function info.
1827 // -- cp : the function context.
1828 // -----------------------------------
1829
1830 if (mode == ConvertReceiverMode::kNullOrUndefined) {
1831 // Patch receiver to global proxy.
1832 __ LoadGlobalProxy(r6);
1833 } else {
1834 Label convert_to_object, convert_receiver;
1835 __ ShiftLeftImm(r6, r3, Operand(kPointerSizeLog2));
1836 __ LoadPX(r6, MemOperand(sp, r6));
1837 __ JumpIfSmi(r6, &convert_to_object);
1838 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
1839 __ CompareObjectType(r6, r7, r7, FIRST_JS_RECEIVER_TYPE);
1840 __ bge(&done_convert);
1841 if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
1842 Label convert_global_proxy;
1843 __ JumpIfRoot(r6, Heap::kUndefinedValueRootIndex,
1844 &convert_global_proxy);
1845 __ JumpIfNotRoot(r6, Heap::kNullValueRootIndex, &convert_to_object);
1846 __ bind(&convert_global_proxy);
1847 {
1848 // Patch receiver to global proxy.
1849 __ LoadGlobalProxy(r6);
1850 }
1851 __ b(&convert_receiver);
1852 }
1853 __ bind(&convert_to_object);
1854 {
1855 // Convert receiver using ToObject.
1856 // TODO(bmeurer): Inline the allocation here to avoid building the frame
1857 // in the fast case? (fall back to AllocateInNewSpace?)
1858 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
1859 __ SmiTag(r3);
1860 __ Push(r3, r4);
1861 __ mr(r3, r6);
1862 __ Push(cp);
1863 __ Call(BUILTIN_CODE(masm->isolate(), ToObject),
1864 RelocInfo::CODE_TARGET);
1865 __ Pop(cp);
1866 __ mr(r6, r3);
1867 __ Pop(r3, r4);
1868 __ SmiUntag(r3);
1869 }
1870 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
1871 __ bind(&convert_receiver);
1872 }
1873 __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2));
1874 __ StorePX(r6, MemOperand(sp, r7));
1875 }
1876 __ bind(&done_convert);
1877
1878 // ----------- S t a t e -------------
1879 // -- r3 : the number of arguments (not including the receiver)
1880 // -- r4 : the function to call (checked to be a JSFunction)
1881 // -- r5 : the shared function info.
1882 // -- cp : the function context.
1883 // -----------------------------------
1884
1885 __ LoadHalfWord(
1886 r5, FieldMemOperand(r5, SharedFunctionInfo::kFormalParameterCountOffset));
1887 ParameterCount actual(r3);
1888 ParameterCount expected(r5);
1889 __ InvokeFunctionCode(r4, no_reg, expected, actual, JUMP_FUNCTION);
1890
1891 // The function is a "classConstructor", need to raise an exception.
1892 __ bind(&class_constructor);
1893 {
1894 FrameAndConstantPoolScope frame(masm, StackFrame::INTERNAL);
1895 __ push(r4);
1896 __ CallRuntime(Runtime::kThrowConstructorNonCallableError);
1897 }
1898 }
1899
1900 namespace {
1901
1902 void Generate_PushBoundArguments(MacroAssembler* masm) {
1903 // ----------- S t a t e -------------
1904 // -- r3 : the number of arguments (not including the receiver)
1905 // -- r4 : target (checked to be a JSBoundFunction)
1906 // -- r6 : new.target (only in case of [[Construct]])
1907 // -----------------------------------
1908
1909 // Load [[BoundArguments]] into r5 and length of that into r7.
1910 Label no_bound_arguments;
1911 __ LoadP(r5, FieldMemOperand(r4, JSBoundFunction::kBoundArgumentsOffset));
1912 __ LoadP(r7, FieldMemOperand(r5, FixedArray::kLengthOffset));
1913 __ SmiUntag(r7, SetRC);
1914 __ beq(&no_bound_arguments, cr0);
1915 {
1916 // ----------- S t a t e -------------
1917 // -- r3 : the number of arguments (not including the receiver)
1918 // -- r4 : target (checked to be a JSBoundFunction)
1919 // -- r5 : the [[BoundArguments]] (implemented as FixedArray)
1920 // -- r6 : new.target (only in case of [[Construct]])
1921 // -- r7 : the number of [[BoundArguments]]
1922 // -----------------------------------
1923
1924 // Reserve stack space for the [[BoundArguments]].
1925 {
1926 Label done;
1927 __ mr(r9, sp); // preserve previous stack pointer
1928 __ ShiftLeftImm(r10, r7, Operand(kPointerSizeLog2));
1929 __ sub(sp, sp, r10);
1930 // Check the stack for overflow. We are not trying to catch interruptions
1931 // (i.e. debug break and preemption) here, so check the "real stack
1932 // limit".
1933 __ CompareRoot(sp, Heap::kRealStackLimitRootIndex);
1934 __ bgt(&done); // Signed comparison.
1935 // Restore the stack pointer.
1936 __ mr(sp, r9);
1937 {
1938 FrameScope scope(masm, StackFrame::MANUAL);
1939 __ EnterFrame(StackFrame::INTERNAL);
1940 __ CallRuntime(Runtime::kThrowStackOverflow);
1941 }
1942 __ bind(&done);
1943 }
1944
1945 // Relocate arguments down the stack.
1946 // -- r3 : the number of arguments (not including the receiver)
1947 // -- r9 : the previous stack pointer
1948 // -- r10: the size of the [[BoundArguments]]
1949 {
1950 Label skip, loop;
1951 __ li(r8, Operand::Zero());
1952 __ cmpi(r3, Operand::Zero());
1953 __ beq(&skip);
1954 __ mtctr(r3);
1955 __ bind(&loop);
1956 __ LoadPX(r0, MemOperand(r9, r8));
1957 __ StorePX(r0, MemOperand(sp, r8));
1958 __ addi(r8, r8, Operand(kPointerSize));
1959 __ bdnz(&loop);
1960 __ bind(&skip);
1961 }
1962
1963 // Copy [[BoundArguments]] to the stack (below the arguments).
1964 {
1965 Label loop;
1966 __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1967 __ add(r5, r5, r10);
1968 __ mtctr(r7);
1969 __ bind(&loop);
1970 __ LoadPU(r0, MemOperand(r5, -kPointerSize));
1971 __ StorePX(r0, MemOperand(sp, r8));
1972 __ addi(r8, r8, Operand(kPointerSize));
1973 __ bdnz(&loop);
1974 __ add(r3, r3, r7);
1975 }
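    // The stack now holds, from higher to lower addresses: the receiver, the
    // [[BoundArguments]] in order, then the original call arguments; r3 has
    // been increased by the number of bound arguments.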
1976 }
1977 __ bind(&no_bound_arguments);
1978 }
1979
1980 } // namespace
1981
1982 // static
1983 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
1984 // ----------- S t a t e -------------
1985 // -- r3 : the number of arguments (not including the receiver)
1986 // -- r4 : the function to call (checked to be a JSBoundFunction)
1987 // -----------------------------------
1988 __ AssertBoundFunction(r4);
1989
1990 // Patch the receiver to [[BoundThis]].
1991 __ LoadP(ip, FieldMemOperand(r4, JSBoundFunction::kBoundThisOffset));
1992 __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2));
1993 __ StorePX(ip, MemOperand(sp, r0));
1994
1995 // Push the [[BoundArguments]] onto the stack.
1996 Generate_PushBoundArguments(masm);
1997
1998 // Call the [[BoundTargetFunction]] via the Call builtin.
1999 __ LoadP(r4,
2000 FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
2001 __ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
2002 RelocInfo::CODE_TARGET);
2003 }
2004
2005 // static
2006 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
2007 // ----------- S t a t e -------------
2008 // -- r3 : the number of arguments (not including the receiver)
2009 // -- r4 : the target to call (can be any Object).
2010 // -----------------------------------
2011
2012 Label non_callable, non_function, non_smi;
2013 __ JumpIfSmi(r4, &non_callable);
2014 __ bind(&non_smi);
2015 __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE);
2016 __ Jump(masm->isolate()->builtins()->CallFunction(mode),
2017 RelocInfo::CODE_TARGET, eq);
2018 __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
2019 __ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
2020 RelocInfo::CODE_TARGET, eq);
2021
2022 // Check if target has a [[Call]] internal method.
2023 __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset));
2024 __ TestBit(r7, Map::IsCallableBit::kShift, r0);
2025 __ beq(&non_callable, cr0);
2026
2027 // Check if target is a proxy and call CallProxy external builtin
2028 __ cmpi(r8, Operand(JS_PROXY_TYPE));
2029 __ bne(&non_function);
2030 __ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);
2031
2032 // 2. Call to something else, which might have a [[Call]] internal method (if
2033 // not we raise an exception).
2034 __ bind(&non_function);
2035   // Overwrite the original receiver with the (original) target.
2036 __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
2037 __ StorePX(r4, MemOperand(sp, r8));
2038 // Let the "call_as_function_delegate" take care of the rest.
2039 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r4);
2040 __ Jump(masm->isolate()->builtins()->CallFunction(
2041 ConvertReceiverMode::kNotNullOrUndefined),
2042 RelocInfo::CODE_TARGET);
2043
2044 // 3. Call to something that is not callable.
2045 __ bind(&non_callable);
2046 {
2047 FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
2048 __ Push(r4);
2049 __ CallRuntime(Runtime::kThrowCalledNonCallable);
2050 }
2051 }
2052
2053 // static
2054 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
2055 // ----------- S t a t e -------------
2056 // -- r3 : the number of arguments (not including the receiver)
2057 // -- r4 : the constructor to call (checked to be a JSFunction)
2058 // -- r6 : the new target (checked to be a constructor)
2059 // -----------------------------------
2060 __ AssertConstructor(r4);
2061 __ AssertFunction(r4);
2062
2063   // The calling convention for function-specific ConstructStubs requires
2064   // r5 to contain either an AllocationSite or undefined.
2065 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex);
2066
2067 Label call_generic_stub;
2068
2069 // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
2070 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
2071 __ lwz(r7, FieldMemOperand(r7, SharedFunctionInfo::kFlagsOffset));
2072 __ mov(ip, Operand(SharedFunctionInfo::ConstructAsBuiltinBit::kMask));
2073 __ and_(r7, r7, ip, SetRC);
2074 __ beq(&call_generic_stub, cr0);
2075
2076 __ Jump(BUILTIN_CODE(masm->isolate(), JSBuiltinsConstructStub),
2077 RelocInfo::CODE_TARGET);
2078
2079 __ bind(&call_generic_stub);
2080 __ Jump(BUILTIN_CODE(masm->isolate(), JSConstructStubGeneric),
2081 RelocInfo::CODE_TARGET);
2082 }
2083
2084 // static
2085 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
2086 // ----------- S t a t e -------------
2087 // -- r3 : the number of arguments (not including the receiver)
2088 // -- r4 : the function to call (checked to be a JSBoundFunction)
2089 // -- r6 : the new target (checked to be a constructor)
2090 // -----------------------------------
2091 __ AssertConstructor(r4);
2092 __ AssertBoundFunction(r4);
2093
2094 // Push the [[BoundArguments]] onto the stack.
2095 Generate_PushBoundArguments(masm);
2096
2097 // Patch new.target to [[BoundTargetFunction]] if new.target equals target.
2098 Label skip;
2099 __ cmp(r4, r6);
2100 __ bne(&skip);
2101 __ LoadP(r6,
2102 FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
2103 __ bind(&skip);
2104
2105 // Construct the [[BoundTargetFunction]] via the Construct builtin.
2106 __ LoadP(r4,
2107 FieldMemOperand(r4, JSBoundFunction::kBoundTargetFunctionOffset));
2108 __ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
2109 }
2110
2111 // static
2112 void Builtins::Generate_Construct(MacroAssembler* masm) {
2113 // ----------- S t a t e -------------
2114 // -- r3 : the number of arguments (not including the receiver)
2115 // -- r4 : the constructor to call (can be any Object)
2116 // -- r6 : the new target (either the same as the constructor or
2117 // the JSFunction on which new was invoked initially)
2118 // -----------------------------------
2119
2120 // Check if target is a Smi.
2121 Label non_constructor, non_proxy;
2122 __ JumpIfSmi(r4, &non_constructor);
2123
2124 // Check if target has a [[Construct]] internal method.
2125 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset));
2126 __ lbz(r5, FieldMemOperand(r7, Map::kBitFieldOffset));
2127 __ TestBit(r5, Map::IsConstructorBit::kShift, r0);
2128 __ beq(&non_constructor, cr0);
2129
2130 // Dispatch based on instance type.
2131 __ CompareInstanceType(r7, r8, JS_FUNCTION_TYPE);
2132 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
2133 RelocInfo::CODE_TARGET, eq);
2134
2135 // Only dispatch to bound functions after checking whether they are
2136 // constructors.
2137 __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE));
2138 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
2139 RelocInfo::CODE_TARGET, eq);
2140
2141 // Only dispatch to proxies after checking whether they are constructors.
2142 __ cmpi(r8, Operand(JS_PROXY_TYPE));
2143 __ bne(&non_proxy);
2144 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
2145 RelocInfo::CODE_TARGET);
2146
2147 // Called Construct on an exotic Object with a [[Construct]] internal method.
2148 __ bind(&non_proxy);
2149 {
2150 // Overwrite the original receiver with the (original) target.
2151 __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2));
2152 __ StorePX(r4, MemOperand(sp, r8));
2153 // Let the "call_as_constructor_delegate" take care of the rest.
2154 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r4);
2155 __ Jump(masm->isolate()->builtins()->CallFunction(),
2156 RelocInfo::CODE_TARGET);
2157 }
2158
2159 // Called Construct on an Object that doesn't have a [[Construct]] internal
2160 // method.
2161 __ bind(&non_constructor);
2162 __ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
2163 RelocInfo::CODE_TARGET);
2164 }
2165
2166 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
2167 // ----------- S t a t e -------------
2168 // -- r3 : actual number of arguments
2169 // -- r4 : function (passed through to callee)
2170 // -- r5 : expected number of arguments
2171 // -- r6 : new target (passed through to callee)
2172 // -----------------------------------
2173
2174 Label invoke, dont_adapt_arguments, stack_overflow;
2175
2176 Label enough, too_few;
2177 __ cmpli(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
2178 __ beq(&dont_adapt_arguments);
2179 __ cmp(r3, r5);
2180 __ blt(&too_few);
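  // Example: calling a function that declares three parameters with only two
  // arguments takes the "too few" path and the missing argument is filled
  // with undefined; three or more arguments take the "enough" path.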
2181
2182 { // Enough parameters: actual >= expected
2183 __ bind(&enough);
2184 EnterArgumentsAdaptorFrame(masm);
2185 Generate_StackOverflowCheck(masm, r5, r8, &stack_overflow);
2186
2187 // Calculate copy start address into r3 and copy end address into r7.
2188 // r3: actual number of arguments as a smi
2189 // r4: function
2190 // r5: expected number of arguments
2191 // r6: new target (passed through to callee)
2192 __ SmiToPtrArrayOffset(r3, r3);
2193 __ add(r3, r3, fp);
2194 // adjust for return address and receiver
2195 __ addi(r3, r3, Operand(2 * kPointerSize));
2196 __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
2197 __ sub(r7, r3, r7);
2198
2199 // Copy the arguments (including the receiver) to the new stack frame.
2200 // r3: copy start address
2201 // r4: function
2202 // r5: expected number of arguments
2203 // r6: new target (passed through to callee)
2204 // r7: copy end address
2205
2206 Label copy;
2207     __ bind(&copy);
2208 __ LoadP(r0, MemOperand(r3, 0));
2209 __ push(r0);
2210 __ cmp(r3, r7); // Compare before moving to next argument.
2211 __ subi(r3, r3, Operand(kPointerSize));
2212     __ bne(&copy);
2213
2214 __ b(&invoke);
2215 }
2216
2217 { // Too few parameters: Actual < expected
2218 __ bind(&too_few);
2219
2220 EnterArgumentsAdaptorFrame(masm);
2221 Generate_StackOverflowCheck(masm, r5, r8, &stack_overflow);
2222
2223     // Calculate the copy start address into r3; the copy end address is fp.
2224 // r3: actual number of arguments as a smi
2225 // r4: function
2226 // r5: expected number of arguments
2227 // r6: new target (passed through to callee)
2228 __ SmiToPtrArrayOffset(r3, r3);
2229 __ add(r3, r3, fp);
2230
2231 // Copy the arguments (including the receiver) to the new stack frame.
2232 // r3: copy start address
2233 // r4: function
2234 // r5: expected number of arguments
2235 // r6: new target (passed through to callee)
2236 Label copy;
2237     __ bind(&copy);
2238 // Adjust load for return address and receiver.
2239 __ LoadP(r0, MemOperand(r3, 2 * kPointerSize));
2240 __ push(r0);
2241 __ cmp(r3, fp); // Compare before moving to next argument.
2242 __ subi(r3, r3, Operand(kPointerSize));
2243     __ bne(&copy);
2244
2245 // Fill the remaining expected arguments with undefined.
2246 // r4: function
2247 // r5: expected number of arguments
2248 // r6: new target (passed through to callee)
2249 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
2250 __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2));
2251 __ sub(r7, fp, r7);
2252 // Adjust for frame.
2253 __ subi(r7, r7,
2254 Operand(ArgumentsAdaptorFrameConstants::kFixedFrameSizeFromFp +
2255 kPointerSize));
2256
2257 Label fill;
2258 __ bind(&fill);
2259 __ push(r0);
2260 __ cmp(sp, r7);
2261 __ bne(&fill);
2262 }
2263
2264 // Call the entry point.
2265 __ bind(&invoke);
2266 __ mr(r3, r5);
2267 // r3 : expected number of arguments
2268 // r4 : function (passed through to callee)
2269 // r6 : new target (passed through to callee)
2270 static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
2271 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kCodeOffset));
2272 __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
2273 __ CallJSEntry(r5);
2274
2275 // Store offset of return address for deoptimizer.
2276 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
2277
2278 // Exit frame and return.
2279 LeaveArgumentsAdaptorFrame(masm);
2280 __ blr();
2281
2282 // -------------------------------------------
2283   // Don't adapt arguments.
2284 // -------------------------------------------
2285 __ bind(&dont_adapt_arguments);
2286 static_assert(kJavaScriptCallCodeStartRegister == r5, "ABI mismatch");
2287 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kCodeOffset));
2288 __ addi(r5, r5, Operand(Code::kHeaderSize - kHeapObjectTag));
2289 __ JumpToJSEntry(r5);
2290
2291 __ bind(&stack_overflow);
2292 {
2293 FrameScope frame(masm, StackFrame::MANUAL);
2294 __ CallRuntime(Runtime::kThrowStackOverflow);
2295 __ bkpt(0);
2296 }
2297 }
2298
2299 void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
2300 // The function index was put in r15 by the jump table trampoline.
2301 // Convert to Smi for the runtime call.
2302 __ SmiTag(r15, r15);
2303 {
2304 HardAbortScope hard_abort(masm); // Avoid calls to Abort.
2305 FrameAndConstantPoolScope scope(masm, StackFrame::WASM_COMPILE_LAZY);
2306
2307 // Save all parameter registers (see wasm-linkage.cc). They might be
2308 // overwritten in the runtime call below. We don't have any callee-saved
2309 // registers in wasm, so no need to store anything else.
2310 constexpr RegList gp_regs =
2311 Register::ListOf<r3, r4, r5, r6, r7, r8, r9, r10>();
2312 constexpr RegList fp_regs =
2313 DoubleRegister::ListOf<d1, d2, d3, d4, d5, d6, d7, d8>();
2314 __ MultiPush(gp_regs);
2315 __ MultiPushDoubles(fp_regs);
2316
2317 // Pass instance and function index as explicit arguments to the runtime
2318 // function.
2319 __ Push(kWasmInstanceRegister, r15);
2320 // Load the correct CEntry builtin from the instance object.
2321 __ LoadP(r5, FieldMemOperand(kWasmInstanceRegister,
2322 WasmInstanceObject::kCEntryStubOffset));
2323 // Initialize the JavaScript context with 0. CEntry will use it to
2324 // set the current context on the isolate.
2325 __ LoadSmiLiteral(cp, Smi::kZero);
2326 __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, r5);
2327 // The entrypoint address is the return value.
2328 __ mr(r11, kReturnRegister0);
2329
2330 // Restore registers.
2331 __ MultiPopDoubles(fp_regs);
2332 __ MultiPop(gp_regs);
2333 }
2334 // Finally, jump to the entrypoint.
2335 __ Jump(r11);
2336 }
2337
2338 void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
2339 SaveFPRegsMode save_doubles, ArgvMode argv_mode,
2340 bool builtin_exit_frame) {
2341 // Called from JavaScript; parameters are on stack as if calling JS function.
2342 // r3: number of arguments including receiver
2343 // r4: pointer to builtin function
2344 // fp: frame pointer (restored after C call)
2345 // sp: stack pointer (restored as callee's sp after C call)
2346 // cp: current context (C callee-saved)
2347 //
2348 // If argv_mode == kArgvInRegister:
2349 // r5: pointer to the first argument
2350 ProfileEntryHookStub::MaybeCallEntryHook(masm);
2351
2352 __ mr(r15, r4);
2353
2354 if (argv_mode == kArgvInRegister) {
2355 // Move argv into the correct register.
2356 __ mr(r4, r5);
2357 } else {
2358 // Compute the argv pointer.
2359 __ ShiftLeftImm(r4, r3, Operand(kPointerSizeLog2));
2360 __ add(r4, r4, sp);
2361 __ subi(r4, r4, Operand(kPointerSize));
2362 }
2363
2364 // Enter the exit frame that transitions from JavaScript to C++.
2365 FrameScope scope(masm, StackFrame::MANUAL);
2366
2367 // Need at least one extra slot for return address location.
2368 int arg_stack_space = 1;
2369
2370 // Pass buffer for return value on stack if necessary
2371 bool needs_return_buffer =
2372 (result_size == 2 && !ABI_RETURNS_OBJECT_PAIRS_IN_REGS);
2373 if (needs_return_buffer) {
2374 arg_stack_space += result_size;
2375 }
2376
2377 __ EnterExitFrame(
2378 save_doubles, arg_stack_space,
2379 builtin_exit_frame ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
2380
2381 // Store a copy of argc in callee-saved registers for later.
2382 __ mr(r14, r3);
2383
2384 // r3, r14: number of arguments including receiver (C callee-saved)
2385 // r4: pointer to the first argument
2386 // r15: pointer to builtin function (C callee-saved)
2387
2388 // Result returned in registers or stack, depending on result size and ABI.
2389
2390 Register isolate_reg = r5;
2391 if (needs_return_buffer) {
2392 // The return value is a non-scalar value.
2393     // Use the frame storage reserved by the calling function to pass the
2394     // return buffer as an implicit first argument.
2395 __ mr(r5, r4);
2396 __ mr(r4, r3);
2397 __ addi(r3, sp, Operand((kStackFrameExtraParamSlot + 1) * kPointerSize));
2398 isolate_reg = r6;
2399 }
2400
2401 // Call C built-in.
2402 __ Move(isolate_reg, ExternalReference::isolate_address(masm->isolate()));
2403
2404 Register target = r15;
2405 if (ABI_USES_FUNCTION_DESCRIPTORS) {
2406 // AIX/PPC64BE Linux use a function descriptor.
2407 __ LoadP(ToRegister(ABI_TOC_REGISTER), MemOperand(r15, kPointerSize));
2408 __ LoadP(ip, MemOperand(r15, 0)); // Instruction address
2409 target = ip;
2410 } else if (ABI_CALL_VIA_IP) {
2411 __ Move(ip, r15);
2412 target = ip;
2413 }
2414
2415 // To let the GC traverse the return address of the exit frames, we need to
2416 // know where the return address is. The CEntryStub is unmovable, so
2417 // we can store the address on the stack to be able to find it again and
2418 // we never have to restore it, because it will not change.
2419 Label start_call;
2420 constexpr int after_call_offset = 5 * kInstrSize;
2421 DCHECK_NE(r7, target);
2422 __ LoadPC(r7);
2423 __ bind(&start_call);
2424 __ addi(r7, r7, Operand(after_call_offset));
2425 __ StoreP(r7, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize));
2426 __ Call(target);
2427 DCHECK_EQ(after_call_offset - kInstrSize,
2428 __ SizeOfCodeGeneratedSince(&start_call));
2429
2430 // If return value is on the stack, pop it to registers.
2431 if (needs_return_buffer) {
2432 __ LoadP(r4, MemOperand(r3, kPointerSize));
2433 __ LoadP(r3, MemOperand(r3));
2434 }
2435
2436 // Check result for exception sentinel.
2437 Label exception_returned;
2438 __ CompareRoot(r3, Heap::kExceptionRootIndex);
2439 __ beq(&exception_returned);
2440
2441 // Check that there is no pending exception, otherwise we
2442 // should have returned the exception sentinel.
2443 if (FLAG_debug_code) {
2444 Label okay;
2445 ExternalReference pending_exception_address = ExternalReference::Create(
2446 IsolateAddressId::kPendingExceptionAddress, masm->isolate());
2447
2448 __ Move(r6, pending_exception_address);
2449 __ LoadP(r6, MemOperand(r6));
2450 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex);
2451 // Cannot use check here as it attempts to generate call into runtime.
2452 __ beq(&okay);
2453 __ stop("Unexpected pending exception");
2454 __ bind(&okay);
2455 }
2456
2457 // Exit C frame and return.
2458 // r3:r4: result
2459 // sp: stack pointer
2460 // fp: frame pointer
2461 Register argc = argv_mode == kArgvInRegister
2462 // We don't want to pop arguments so set argc to no_reg.
2463 ? no_reg
2464 // r14: still holds argc (callee-saved).
2465 : r14;
2466 __ LeaveExitFrame(save_doubles, argc);
2467 __ blr();
2468
2469 // Handling of exception.
2470 __ bind(&exception_returned);
2471
2472 ExternalReference pending_handler_context_address = ExternalReference::Create(
2473 IsolateAddressId::kPendingHandlerContextAddress, masm->isolate());
2474 ExternalReference pending_handler_entrypoint_address =
2475 ExternalReference::Create(
2476 IsolateAddressId::kPendingHandlerEntrypointAddress, masm->isolate());
2477 ExternalReference pending_handler_constant_pool_address =
2478 ExternalReference::Create(
2479 IsolateAddressId::kPendingHandlerConstantPoolAddress,
2480 masm->isolate());
2481 ExternalReference pending_handler_fp_address = ExternalReference::Create(
2482 IsolateAddressId::kPendingHandlerFPAddress, masm->isolate());
2483 ExternalReference pending_handler_sp_address = ExternalReference::Create(
2484 IsolateAddressId::kPendingHandlerSPAddress, masm->isolate());
2485
2486   // Ask the runtime for help to determine the handler. This will set r3 to
2487   // contain the current pending exception; don't clobber it.
2488 ExternalReference find_handler =
2489 ExternalReference::Create(Runtime::kUnwindAndFindExceptionHandler);
2490 {
2491 FrameScope scope(masm, StackFrame::MANUAL);
2492 __ PrepareCallCFunction(3, 0, r3);
2493 __ li(r3, Operand::Zero());
2494 __ li(r4, Operand::Zero());
2495 __ Move(r5, ExternalReference::isolate_address(masm->isolate()));
2496 __ CallCFunction(find_handler, 3);
2497 }
2498
2499 // Retrieve the handler context, SP and FP.
2500 __ Move(cp, pending_handler_context_address);
2501 __ LoadP(cp, MemOperand(cp));
2502 __ Move(sp, pending_handler_sp_address);
2503 __ LoadP(sp, MemOperand(sp));
2504 __ Move(fp, pending_handler_fp_address);
2505 __ LoadP(fp, MemOperand(fp));
2506
2507 // If the handler is a JS frame, restore the context to the frame. Note that
2508 // the context will be set to (cp == 0) for non-JS frames.
2509 Label skip;
2510 __ cmpi(cp, Operand::Zero());
2511 __ beq(&skip);
2512 __ StoreP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
2513 __ bind(&skip);
2514
2515 // Reset the masking register.
2516 if (FLAG_branch_load_poisoning) {
2517 __ ResetSpeculationPoisonRegister();
2518 }
2519
2520 // Compute the handler entry address and jump to it.
2521 ConstantPoolUnavailableScope constant_pool_unavailable(masm);
2522 __ Move(ip, pending_handler_entrypoint_address);
2523 __ LoadP(ip, MemOperand(ip));
2524 if (FLAG_enable_embedded_constant_pool) {
2525 __ Move(kConstantPoolRegister, pending_handler_constant_pool_address);
2526 __ LoadP(kConstantPoolRegister, MemOperand(kConstantPoolRegister));
2527 }
2528 __ Jump(ip);
2529 }
2530
2531 void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
2532 Label out_of_range, only_low, negate, done, fastpath_done;
2533 Register result_reg = r3;
2534
2535 HardAbortScope hard_abort(masm); // Avoid calls to Abort.
2536
2537 // Immediate values for this stub fit in instructions, so it's safe to use ip.
2538 Register scratch = GetRegisterThatIsNotOneOf(result_reg);
2539 Register scratch_low = GetRegisterThatIsNotOneOf(result_reg, scratch);
2540 Register scratch_high =
2541 GetRegisterThatIsNotOneOf(result_reg, scratch, scratch_low);
2542 DoubleRegister double_scratch = kScratchDoubleReg;
2543
2544 __ Push(result_reg, scratch);
2545 // Account for saved regs.
2546 int argument_offset = 2 * kPointerSize;
2547
2548 // Load double input.
2549 __ lfd(double_scratch, MemOperand(sp, argument_offset));
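  // This stub truncates the double at sp[argument_offset] to an int32 and
  // writes the result back into the same stack slot before returning.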
2550
2551 // Do fast-path convert from double to int.
2552 __ ConvertDoubleToInt64(double_scratch,
2553 #if !V8_TARGET_ARCH_PPC64
2554 scratch,
2555 #endif
2556 result_reg, d0);
2557
2558 // Test for overflow
2559 #if V8_TARGET_ARCH_PPC64
2560 __ TestIfInt32(result_reg, r0);
2561 #else
2562 __ TestIfInt32(scratch, result_reg, r0);
2563 #endif
2564 __ beq(&fastpath_done);
2565
2566 __ Push(scratch_high, scratch_low);
2567 // Account for saved regs.
2568 argument_offset += 2 * kPointerSize;
2569
2570 __ lwz(scratch_high,
2571 MemOperand(sp, argument_offset + Register::kExponentOffset));
2572 __ lwz(scratch_low,
2573 MemOperand(sp, argument_offset + Register::kMantissaOffset));
2574
2575 __ ExtractBitMask(scratch, scratch_high, HeapNumber::kExponentMask);
2576 // Load scratch with exponent - 1. This is faster than loading
2577 // with exponent because Bias + 1 = 1024 which is a *PPC* immediate value.
2578 STATIC_ASSERT(HeapNumber::kExponentBias + 1 == 1024);
2579 __ subi(scratch, scratch, Operand(HeapNumber::kExponentBias + 1));
2580   // If the exponent is greater than or equal to 84, the 32 least significant
2581   // bits are 0s (2^84 = 1 implicit bit + 52 mantissa bits + 32 zero bits), so
2582   // the result is 0.
2583 // Compare exponent with 84 (compare exponent - 1 with 83).
2584 __ cmpi(scratch, Operand(83));
2585 __ bge(&out_of_range);
2586
2587 // If we reach this code, 31 <= exponent <= 83.
2588 // So, we don't have to handle cases where 0 <= exponent <= 20 for
2589 // which we would need to shift right the high part of the mantissa.
2590 // Scratch contains exponent - 1.
2591 // Load scratch with 52 - exponent (load with 51 - (exponent - 1)).
2592 __ subfic(scratch, scratch, Operand(51));
2593 __ cmpi(scratch, Operand::Zero());
2594 __ ble(&only_low);
2595 // 21 <= exponent <= 51, shift scratch_low and scratch_high
2596 // to generate the result.
2597 __ srw(scratch_low, scratch_low, scratch);
2598 // Scratch contains: 52 - exponent.
2599   // We need: exponent - 20.
2600 // So we use: 32 - scratch = 32 - 52 + exponent = exponent - 20.
2601 __ subfic(scratch, scratch, Operand(32));
2602 __ ExtractBitMask(result_reg, scratch_high, HeapNumber::kMantissaMask);
2603 // Set the implicit 1 before the mantissa part in scratch_high.
2604 STATIC_ASSERT(HeapNumber::kMantissaBitsInTopWord >= 16);
2605 __ oris(result_reg, result_reg,
2606 Operand(1 << ((HeapNumber::kMantissaBitsInTopWord)-16)));
2607 __ slw(r0, result_reg, scratch);
2608 __ orx(result_reg, scratch_low, r0);
2609 __ b(&negate);
2610
2611 __ bind(&out_of_range);
2612 __ mov(result_reg, Operand::Zero());
2613 __ b(&done);
2614
2615 __ bind(&only_low);
2616 // 52 <= exponent <= 83, shift only scratch_low.
2617 // On entry, scratch contains: 52 - exponent.
2618 __ neg(scratch, scratch);
2619 __ slw(result_reg, scratch_low, scratch);
2620
2621 __ bind(&negate);
2622 // If input was positive, scratch_high ASR 31 equals 0 and
2623 // scratch_high LSR 31 equals zero.
2624 // New result = (result eor 0) + 0 = result.
2625 // If the input was negative, we have to negate the result.
2626   // scratch_high ASR 31 equals 0xFFFFFFFF and scratch_high LSR 31 equals 1.
2627 // New result = (result eor 0xFFFFFFFF) + 1 = 0 - result.
2628 __ srawi(r0, scratch_high, 31);
2629 #if V8_TARGET_ARCH_PPC64
2630 __ srdi(r0, r0, Operand(32));
2631 #endif
2632 __ xor_(result_reg, result_reg, r0);
2633 __ srwi(r0, scratch_high, Operand(31));
2634 __ add(result_reg, result_reg, r0);
2635
2636 __ bind(&done);
2637 __ Pop(scratch_high, scratch_low);
2638 // Account for saved regs.
2639 argument_offset -= 2 * kPointerSize;
2640
2641 __ bind(&fastpath_done);
2642 __ StoreP(result_reg, MemOperand(sp, argument_offset));
2643 __ Pop(result_reg, scratch);
2644
2645 __ Ret();
2646 }
2647
2648 void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
2649 const Register exponent = r5;
2650 const DoubleRegister double_base = d1;
2651 const DoubleRegister double_exponent = d2;
2652 const DoubleRegister double_result = d3;
2653 const DoubleRegister double_scratch = d0;
2654 const Register scratch = r11;
2655 const Register scratch2 = r10;
2656
2657 Label call_runtime, done, int_exponent;
2658
2659 // Detect integer exponents stored as double.
2660 __ TryDoubleToInt32Exact(scratch, double_exponent, scratch2, double_scratch);
2661 __ beq(&int_exponent);
2662
2663 __ mflr(r0);
2664 __ push(r0);
2665 {
2666 AllowExternalCallThatCantCauseGC scope(masm);
2667 __ PrepareCallCFunction(0, 2, scratch);
2668 __ MovToFloatParameters(double_base, double_exponent);
2669 __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
2670 }
2671 __ pop(r0);
2672 __ mtlr(r0);
2673 __ MovFromFloatResult(double_result);
2674 __ b(&done);
2675
2676 // Calculate power with integer exponent.
2677 __ bind(&int_exponent);
2678
2679 // Get two copies of exponent in the registers scratch and exponent.
2680 // Exponent has previously been stored into scratch as untagged integer.
2681 __ mr(exponent, scratch);
2682
2683 __ fmr(double_scratch, double_base); // Back up base.
2684 __ li(scratch2, Operand(1));
2685 __ ConvertIntToDouble(scratch2, double_result);
2686
2687 // Get absolute value of exponent.
2688 __ cmpi(scratch, Operand::Zero());
2689 if (CpuFeatures::IsSupported(ISELECT)) {
2690 __ neg(scratch2, scratch);
2691 __ isel(lt, scratch, scratch2, scratch);
2692 } else {
2693 Label positive_exponent;
2694 __ bge(&positive_exponent);
2695 __ neg(scratch, scratch);
2696 __ bind(&positive_exponent);
2697 }
2698
2699 Label while_true, no_carry, loop_end;
2700 __ bind(&while_true);
2701 __ andi(scratch2, scratch, Operand(1));
2702 __ beq(&no_carry, cr0);
2703 __ fmul(double_result, double_result, double_scratch);
2704 __ bind(&no_carry);
2705 __ ShiftRightImm(scratch, scratch, Operand(1), SetRC);
2706 __ beq(&loop_end, cr0);
2707 __ fmul(double_scratch, double_scratch, double_scratch);
2708 __ b(&while_true);
2709 __ bind(&loop_end);
2710
2711 __ cmpi(exponent, Operand::Zero());
2712 __ bge(&done);
2713
2714 __ li(scratch2, Operand(1));
2715 __ ConvertIntToDouble(scratch2, double_scratch);
2716 __ fdiv(double_result, double_scratch, double_result);
2717 // Test whether result is zero. Bail out to check for subnormal result.
2718 // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
2719 __ fcmpu(double_result, kDoubleRegZero);
2720 __ bne(&done);
2721   // double_exponent may not contain the exponent value if the input was a
2722   // smi. We set it to the exponent value before bailing out.
2723 __ ConvertIntToDouble(exponent, double_exponent);
2724
2725 // Returning or bailing out.
2726 __ mflr(r0);
2727 __ push(r0);
2728 {
2729 AllowExternalCallThatCantCauseGC scope(masm);
2730 __ PrepareCallCFunction(0, 2, scratch);
2731 __ MovToFloatParameters(double_base, double_exponent);
2732 __ CallCFunction(ExternalReference::power_double_double_function(), 0, 2);
2733 }
2734 __ pop(r0);
2735 __ mtlr(r0);
2736 __ MovFromFloatResult(double_result);
2737
2738 __ bind(&done);
2739 __ Ret();
2740 }
2741
2742 namespace {
2743
2744 void GenerateInternalArrayConstructorCase(MacroAssembler* masm,
2745 ElementsKind kind) {
2746 __ cmpli(r3, Operand(1));
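  // Dispatch on argc: fewer than one argument uses the no-argument
  // constructor, more than one uses the N-argument constructor, and exactly
  // one falls through to the single-argument cases below.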
2747
2748 __ Jump(CodeFactory::InternalArrayNoArgumentConstructor(masm->isolate(), kind)
2749 .code(),
2750 RelocInfo::CODE_TARGET, lt);
2751
2752 __ Jump(BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor),
2753 RelocInfo::CODE_TARGET, gt);
2754
2755 if (IsFastPackedElementsKind(kind)) {
2756     // We might need to create a holey array;
2757     // look at the first argument.
2758 __ LoadP(r6, MemOperand(sp, 0));
2759 __ cmpi(r6, Operand::Zero());
2760
2761 __ Jump(CodeFactory::InternalArraySingleArgumentConstructor(
2762 masm->isolate(), GetHoleyElementsKind(kind))
2763 .code(),
2764 RelocInfo::CODE_TARGET, ne);
2765 }
2766
2767 __ Jump(
2768 CodeFactory::InternalArraySingleArgumentConstructor(masm->isolate(), kind)
2769 .code(),
2770 RelocInfo::CODE_TARGET);
2771 }
2772
2773 } // namespace
2774
2775 void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
2776 // ----------- S t a t e -------------
2777 // -- r3 : argc
2778 // -- r4 : constructor
2779 // -- sp[0] : return address
2780 // -- sp[4] : last argument
2781 // -----------------------------------
2782
2783 if (FLAG_debug_code) {
2784 // The array construct code is only set for the global and natives
2785 // builtin Array functions which always have maps.
2786
2787 // Initial map for the builtin Array function should be a map.
2788 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
2789 // Will both indicate a nullptr and a Smi.
2790 __ TestIfSmi(r6, r0);
2791 __ Assert(ne, AbortReason::kUnexpectedInitialMapForArrayFunction, cr0);
2792 __ CompareObjectType(r6, r6, r7, MAP_TYPE);
2793 __ Assert(eq, AbortReason::kUnexpectedInitialMapForArrayFunction);
2794 }
2795
2796 // Figure out the right elements kind
2797 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset));
2798 // Load the map's "bit field 2" into |result|.
2799 __ lbz(r6, FieldMemOperand(r6, Map::kBitField2Offset));
2800 // Retrieve elements_kind from bit field 2.
2801 __ DecodeField<Map::ElementsKindBits>(r6);
2802
2803 if (FLAG_debug_code) {
2804 Label done;
2805 __ cmpi(r6, Operand(PACKED_ELEMENTS));
2806 __ beq(&done);
2807 __ cmpi(r6, Operand(HOLEY_ELEMENTS));
2808 __ Assert(
2809 eq,
2810 AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray);
2811 __ bind(&done);
2812 }
2813
2814 Label fast_elements_case;
2815 __ cmpi(r6, Operand(PACKED_ELEMENTS));
2816 __ beq(&fast_elements_case);
2817 GenerateInternalArrayConstructorCase(masm, HOLEY_ELEMENTS);
2818
2819 __ bind(&fast_elements_case);
2820 GenerateInternalArrayConstructorCase(masm, PACKED_ELEMENTS);
2821 }
2822
2823 #undef __
2824 } // namespace internal
2825 } // namespace v8
2826
2827 #endif // V8_TARGET_ARCH_PPC
2828