Lines Matching full:__

16 #define __ ACCESS_MASM(masm)  macro
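Note: ACCESS_MASM(masm) expands to masm->, so each "__ <op>(...)" line below is shorthand for a MacroAssembler call that emits the named PPC instruction or macro-instruction. A minimal sketch of the convention (Generate_Example is a hypothetical builtin used only for illustration, not taken from this file):

    #define __ ACCESS_MASM(masm)   // "__ foo(...)" becomes "masm->foo(...)"

    void Builtins::Generate_Example(MacroAssembler* masm) {
      __ push(r3);                  // masm->push(r3)
      __ Ret();                     // masm->Ret()
    }

    #undef __   // files using this convention undefine __ at the end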
29 __ AssertFunction(r4); in Generate_Adaptor()
35 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset)); in Generate_Adaptor()
40 __ addi(r3, r3, Operand(num_extra_args + 1)); in Generate_Adaptor()
43 __ SmiTag(r3); in Generate_Adaptor()
44 __ Push(r3, r4, r6); in Generate_Adaptor()
45 __ SmiUntag(r3); in Generate_Adaptor()
47 __ JumpToExternalReference(ExternalReference(address, masm->isolate()), in Generate_Adaptor()
55 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result); in GenerateLoadInternalArrayFunction()
61 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result); in GenerateLoadArrayFunction()
77 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset)); in Generate_InternalArrayCode()
78 __ TestIfSmi(r5, r0); in Generate_InternalArrayCode()
79 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, cr0); in Generate_InternalArrayCode()
80 __ CompareObjectType(r5, r6, r7, MAP_TYPE); in Generate_InternalArrayCode()
81 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction); in Generate_InternalArrayCode()
88 __ TailCallStub(&stub); in Generate_InternalArrayCode()
104 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset)); in Generate_ArrayCode()
105 __ TestIfSmi(r5, r0); in Generate_ArrayCode()
106 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0); in Generate_ArrayCode()
107 __ CompareObjectType(r5, r6, r7, MAP_TYPE); in Generate_ArrayCode()
108 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); in Generate_ArrayCode()
111 __ mr(r6, r4); in Generate_ArrayCode()
114 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); in Generate_ArrayCode()
116 __ TailCallStub(&stub); in Generate_ArrayCode()
137 __ LoadRoot(r8, root_index); in Generate_MathMaxMin()
138 __ lfd(d1, FieldMemOperand(r8, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
143 __ addi(r7, r3, Operand(1)); in Generate_MathMaxMin()
146 __ mr(r7, r3); in Generate_MathMaxMin()
147 __ bind(&loop); in Generate_MathMaxMin()
150 __ subi(r7, r7, Operand(1)); in Generate_MathMaxMin()
151 __ cmpi(r7, Operand::Zero()); in Generate_MathMaxMin()
152 __ blt(&done_loop); in Generate_MathMaxMin()
155 __ ShiftLeftImm(r5, r7, Operand(kPointerSizeLog2)); in Generate_MathMaxMin()
156 __ LoadPX(r5, MemOperand(sp, r5)); in Generate_MathMaxMin()
161 __ bind(&convert); in Generate_MathMaxMin()
162 __ JumpIfSmi(r5, &convert_smi); in Generate_MathMaxMin()
163 __ LoadP(r6, FieldMemOperand(r5, HeapObject::kMapOffset)); in Generate_MathMaxMin()
164 __ JumpIfRoot(r6, Heap::kHeapNumberMapRootIndex, &convert_number); in Generate_MathMaxMin()
168 __ SmiTag(r3); in Generate_MathMaxMin()
169 __ SmiTag(r7); in Generate_MathMaxMin()
170 __ EnterBuiltinFrame(cp, r4, r3); in Generate_MathMaxMin()
171 __ Push(r7, r8); in Generate_MathMaxMin()
172 __ mr(r3, r5); in Generate_MathMaxMin()
173 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); in Generate_MathMaxMin()
174 __ mr(r5, r3); in Generate_MathMaxMin()
175 __ Pop(r7, r8); in Generate_MathMaxMin()
176 __ LeaveBuiltinFrame(cp, r4, r3); in Generate_MathMaxMin()
177 __ SmiUntag(r7); in Generate_MathMaxMin()
178 __ SmiUntag(r3); in Generate_MathMaxMin()
182 __ SmiToDouble(d1, r8); in Generate_MathMaxMin()
183 __ JumpIfSmi(r8, &done_restore); in Generate_MathMaxMin()
184 __ lfd(d1, FieldMemOperand(r8, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
185 __ bind(&done_restore); in Generate_MathMaxMin()
188 __ b(&convert); in Generate_MathMaxMin()
189 __ bind(&convert_number); in Generate_MathMaxMin()
190 __ lfd(d2, FieldMemOperand(r5, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
191 __ b(&done_convert); in Generate_MathMaxMin()
192 __ bind(&convert_smi); in Generate_MathMaxMin()
193 __ SmiToDouble(d2, r5); in Generate_MathMaxMin()
194 __ bind(&done_convert); in Generate_MathMaxMin()
199 __ fcmpu(d1, d2); in Generate_MathMaxMin()
200 __ bunordered(&compare_nan); in Generate_MathMaxMin()
201 __ b(cond_done, &loop); in Generate_MathMaxMin()
202 __ b(CommuteCondition(cond_done), &compare_swap); in Generate_MathMaxMin()
205 __ TestDoubleIsMinusZero(reg, r9, r0); in Generate_MathMaxMin()
206 __ bne(&loop); in Generate_MathMaxMin()
209 __ bind(&compare_swap); in Generate_MathMaxMin()
210 __ fmr(d1, d2); in Generate_MathMaxMin()
211 __ mr(r8, r5); in Generate_MathMaxMin()
212 __ b(&loop); in Generate_MathMaxMin()
216 __ bind(&compare_nan); in Generate_MathMaxMin()
217 __ LoadRoot(r8, Heap::kNanValueRootIndex); in Generate_MathMaxMin()
218 __ lfd(d1, FieldMemOperand(r8, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
219 __ b(&loop); in Generate_MathMaxMin()
222 __ bind(&done_loop); in Generate_MathMaxMin()
224 __ addi(r3, r3, Operand(1)); in Generate_MathMaxMin()
225 __ Drop(r3); in Generate_MathMaxMin()
226 __ mr(r3, r8); in Generate_MathMaxMin()
227 __ Ret(); in Generate_MathMaxMin()
244 __ mr(r5, r3); // Store argc in r5. in Generate_NumberConstructor()
245 __ cmpi(r3, Operand::Zero()); in Generate_NumberConstructor()
246 __ beq(&no_arguments); in Generate_NumberConstructor()
247 __ subi(r3, r3, Operand(1)); in Generate_NumberConstructor()
248 __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2)); in Generate_NumberConstructor()
249 __ LoadPX(r3, MemOperand(sp, r3)); in Generate_NumberConstructor()
255 __ SmiTag(r5); in Generate_NumberConstructor()
256 __ EnterBuiltinFrame(cp, r4, r5); in Generate_NumberConstructor()
257 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); in Generate_NumberConstructor()
258 __ LeaveBuiltinFrame(cp, r4, r5); in Generate_NumberConstructor()
259 __ SmiUntag(r5); in Generate_NumberConstructor()
264 __ Drop(r5); in Generate_NumberConstructor()
265 __ Ret(1); in Generate_NumberConstructor()
269 __ bind(&no_arguments); in Generate_NumberConstructor()
270 __ LoadSmiLiteral(r3, Smi::kZero); in Generate_NumberConstructor()
271 __ Ret(1); in Generate_NumberConstructor()
287 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset)); in Generate_NumberConstructor_ConstructStub()
292 __ mr(r9, r3); // Store argc in r9. in Generate_NumberConstructor_ConstructStub()
293 __ cmpi(r3, Operand::Zero()); in Generate_NumberConstructor_ConstructStub()
294 __ beq(&no_arguments); in Generate_NumberConstructor_ConstructStub()
295 __ subi(r3, r3, Operand(1)); in Generate_NumberConstructor_ConstructStub()
296 __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2)); in Generate_NumberConstructor_ConstructStub()
297 __ LoadPX(r5, MemOperand(sp, r5)); in Generate_NumberConstructor_ConstructStub()
298 __ b(&done); in Generate_NumberConstructor_ConstructStub()
299 __ bind(&no_arguments); in Generate_NumberConstructor_ConstructStub()
300 __ LoadSmiLiteral(r5, Smi::kZero); in Generate_NumberConstructor_ConstructStub()
301 __ bind(&done); in Generate_NumberConstructor_ConstructStub()
307 __ JumpIfSmi(r5, &done_convert); in Generate_NumberConstructor_ConstructStub()
308 __ CompareObjectType(r5, r7, r7, HEAP_NUMBER_TYPE); in Generate_NumberConstructor_ConstructStub()
309 __ beq(&done_convert); in Generate_NumberConstructor_ConstructStub()
312 __ SmiTag(r9); in Generate_NumberConstructor_ConstructStub()
313 __ EnterBuiltinFrame(cp, r4, r9); in Generate_NumberConstructor_ConstructStub()
314 __ Push(r6); in Generate_NumberConstructor_ConstructStub()
315 __ mr(r3, r5); in Generate_NumberConstructor_ConstructStub()
316 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); in Generate_NumberConstructor_ConstructStub()
317 __ mr(r5, r3); in Generate_NumberConstructor_ConstructStub()
318 __ Pop(r6); in Generate_NumberConstructor_ConstructStub()
319 __ LeaveBuiltinFrame(cp, r4, r9); in Generate_NumberConstructor_ConstructStub()
320 __ SmiUntag(r9); in Generate_NumberConstructor_ConstructStub()
322 __ bind(&done_convert); in Generate_NumberConstructor_ConstructStub()
327 __ cmp(r4, r6); in Generate_NumberConstructor_ConstructStub()
328 __ bne(&new_object); in Generate_NumberConstructor_ConstructStub()
331 __ AllocateJSValue(r3, r4, r5, r7, r8, &new_object); in Generate_NumberConstructor_ConstructStub()
332 __ b(&drop_frame_and_ret); in Generate_NumberConstructor_ConstructStub()
335 __ bind(&new_object); in Generate_NumberConstructor_ConstructStub()
338 __ SmiTag(r9); in Generate_NumberConstructor_ConstructStub()
339 __ EnterBuiltinFrame(cp, r4, r9); in Generate_NumberConstructor_ConstructStub()
340 __ Push(r5); // first argument in Generate_NumberConstructor_ConstructStub()
341 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(), in Generate_NumberConstructor_ConstructStub()
343 __ Pop(r5); in Generate_NumberConstructor_ConstructStub()
344 __ LeaveBuiltinFrame(cp, r4, r9); in Generate_NumberConstructor_ConstructStub()
345 __ SmiUntag(r9); in Generate_NumberConstructor_ConstructStub()
347 __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0); in Generate_NumberConstructor_ConstructStub()
349 __ bind(&drop_frame_and_ret); in Generate_NumberConstructor_ConstructStub()
351 __ Drop(r9); in Generate_NumberConstructor_ConstructStub()
352 __ Ret(1); in Generate_NumberConstructor_ConstructStub()
370 __ mr(r5, r3); // Store argc in r5. in Generate_StringConstructor()
371 __ cmpi(r3, Operand::Zero()); in Generate_StringConstructor()
372 __ beq(&no_arguments); in Generate_StringConstructor()
373 __ subi(r3, r3, Operand(1)); in Generate_StringConstructor()
374 __ ShiftLeftImm(r3, r3, Operand(kPointerSizeLog2)); in Generate_StringConstructor()
375 __ LoadPX(r3, MemOperand(sp, r3)); in Generate_StringConstructor()
382 __ JumpIfSmi(r3, &to_string); in Generate_StringConstructor()
384 __ CompareObjectType(r3, r6, r6, FIRST_NONSTRING_TYPE); in Generate_StringConstructor()
385 __ bgt(&to_string); in Generate_StringConstructor()
386 __ beq(&symbol_descriptive_string); in Generate_StringConstructor()
387 __ b(&drop_frame_and_ret); in Generate_StringConstructor()
391 __ bind(&no_arguments); in Generate_StringConstructor()
393 __ LoadRoot(r3, Heap::kempty_stringRootIndex); in Generate_StringConstructor()
394 __ Ret(1); in Generate_StringConstructor()
398 __ bind(&to_string); in Generate_StringConstructor()
401 __ SmiTag(r5); in Generate_StringConstructor()
402 __ EnterBuiltinFrame(cp, r4, r5); in Generate_StringConstructor()
403 __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET); in Generate_StringConstructor()
404 __ LeaveBuiltinFrame(cp, r4, r5); in Generate_StringConstructor()
405 __ SmiUntag(r5); in Generate_StringConstructor()
407 __ b(&drop_frame_and_ret); in Generate_StringConstructor()
410 __ bind(&symbol_descriptive_string); in Generate_StringConstructor()
412 __ Drop(r5); in Generate_StringConstructor()
413 __ Drop(1); in Generate_StringConstructor()
414 __ Push(r3); in Generate_StringConstructor()
415 __ TailCallRuntime(Runtime::kSymbolDescriptiveString); in Generate_StringConstructor()
418 __ bind(&drop_frame_and_ret); in Generate_StringConstructor()
420 __ Drop(r5); in Generate_StringConstructor()
421 __ Ret(1); in Generate_StringConstructor()
438 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset)); in Generate_StringConstructor_ConstructStub()
443 __ mr(r9, r3); // Store argc in r9. in Generate_StringConstructor_ConstructStub()
444 __ cmpi(r3, Operand::Zero()); in Generate_StringConstructor_ConstructStub()
445 __ beq(&no_arguments); in Generate_StringConstructor_ConstructStub()
446 __ subi(r3, r3, Operand(1)); in Generate_StringConstructor_ConstructStub()
447 __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2)); in Generate_StringConstructor_ConstructStub()
448 __ LoadPX(r5, MemOperand(sp, r5)); in Generate_StringConstructor_ConstructStub()
449 __ b(&done); in Generate_StringConstructor_ConstructStub()
450 __ bind(&no_arguments); in Generate_StringConstructor_ConstructStub()
451 __ LoadRoot(r5, Heap::kempty_stringRootIndex); in Generate_StringConstructor_ConstructStub()
452 __ bind(&done); in Generate_StringConstructor_ConstructStub()
458 __ JumpIfSmi(r5, &convert); in Generate_StringConstructor_ConstructStub()
459 __ CompareObjectType(r5, r7, r7, FIRST_NONSTRING_TYPE); in Generate_StringConstructor_ConstructStub()
460 __ blt(&done_convert); in Generate_StringConstructor_ConstructStub()
461 __ bind(&convert); in Generate_StringConstructor_ConstructStub()
464 __ SmiTag(r9); in Generate_StringConstructor_ConstructStub()
465 __ EnterBuiltinFrame(cp, r4, r9); in Generate_StringConstructor_ConstructStub()
466 __ Push(r6); in Generate_StringConstructor_ConstructStub()
467 __ mr(r3, r5); in Generate_StringConstructor_ConstructStub()
468 __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET); in Generate_StringConstructor_ConstructStub()
469 __ mr(r5, r3); in Generate_StringConstructor_ConstructStub()
470 __ Pop(r6); in Generate_StringConstructor_ConstructStub()
471 __ LeaveBuiltinFrame(cp, r4, r9); in Generate_StringConstructor_ConstructStub()
472 __ SmiUntag(r9); in Generate_StringConstructor_ConstructStub()
474 __ bind(&done_convert); in Generate_StringConstructor_ConstructStub()
479 __ cmp(r4, r6); in Generate_StringConstructor_ConstructStub()
480 __ bne(&new_object); in Generate_StringConstructor_ConstructStub()
483 __ AllocateJSValue(r3, r4, r5, r7, r8, &new_object); in Generate_StringConstructor_ConstructStub()
484 __ b(&drop_frame_and_ret); in Generate_StringConstructor_ConstructStub()
487 __ bind(&new_object); in Generate_StringConstructor_ConstructStub()
490 __ SmiTag(r9); in Generate_StringConstructor_ConstructStub()
491 __ EnterBuiltinFrame(cp, r4, r9); in Generate_StringConstructor_ConstructStub()
492 __ Push(r5); // first argument in Generate_StringConstructor_ConstructStub()
493 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(), in Generate_StringConstructor_ConstructStub()
495 __ Pop(r5); in Generate_StringConstructor_ConstructStub()
496 __ LeaveBuiltinFrame(cp, r4, r9); in Generate_StringConstructor_ConstructStub()
497 __ SmiUntag(r9); in Generate_StringConstructor_ConstructStub()
499 __ StoreP(r5, FieldMemOperand(r3, JSValue::kValueOffset), r0); in Generate_StringConstructor_ConstructStub()
501 __ bind(&drop_frame_and_ret); in Generate_StringConstructor_ConstructStub()
503 __ Drop(r9); in Generate_StringConstructor_ConstructStub()
504 __ Ret(1); in Generate_StringConstructor_ConstructStub()
509 __ LoadP(ip, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); in GenerateTailCallToSharedCode()
510 __ LoadP(ip, FieldMemOperand(ip, SharedFunctionInfo::kCodeOffset)); in GenerateTailCallToSharedCode()
511 __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); in GenerateTailCallToSharedCode()
512 __ JumpToJSEntry(ip); in GenerateTailCallToSharedCode()
527 __ SmiTag(r3); in GenerateTailCallToReturnedCode()
528 __ Push(r3, r4, r6, r4); in GenerateTailCallToReturnedCode()
530 __ CallRuntime(function_id, 1); in GenerateTailCallToReturnedCode()
531 __ mr(r5, r3); in GenerateTailCallToReturnedCode()
534 __ Pop(r3, r4, r6); in GenerateTailCallToReturnedCode()
535 __ SmiUntag(r3); in GenerateTailCallToReturnedCode()
537 __ addi(ip, r5, Operand(Code::kHeaderSize - kHeapObjectTag)); in GenerateTailCallToReturnedCode()
538 __ JumpToJSEntry(ip); in GenerateTailCallToReturnedCode()
548 __ LoadRoot(ip, Heap::kStackLimitRootIndex); in Generate_InOptimizationQueue()
549 __ cmpl(sp, ip); in Generate_InOptimizationQueue()
550 __ bge(&ok); in Generate_InOptimizationQueue()
554 __ bind(&ok); in Generate_InOptimizationQueue()
582 __ SmiTag(r7, r3, SetRC); in Generate_JSConstructStubHelper()
583 __ Push(cp, r7); in Generate_JSConstructStubHelper()
584 __ PushRoot(Heap::kTheHoleValueRootIndex); in Generate_JSConstructStubHelper()
586 __ SmiTag(r3); in Generate_JSConstructStubHelper()
587 __ Push(cp, r3); in Generate_JSConstructStubHelper()
590 __ Push(r4, r6); in Generate_JSConstructStubHelper()
591 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(), in Generate_JSConstructStubHelper()
593 __ mr(r7, r3); in Generate_JSConstructStubHelper()
594 __ Pop(r4, r6); in Generate_JSConstructStubHelper()
603 __ LoadP(r3, MemOperand(sp)); in Generate_JSConstructStubHelper()
604 __ SmiUntag(r3, SetRC); in Generate_JSConstructStubHelper()
609 __ Push(r7, r7); in Generate_JSConstructStubHelper()
613 __ bind(&post_instantiation_deopt_entry); in Generate_JSConstructStubHelper()
616 __ addi(r5, fp, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate_JSConstructStubHelper()
628 __ beq(&no_args, cr0); in Generate_JSConstructStubHelper()
629 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2)); in Generate_JSConstructStubHelper()
630 __ sub(sp, sp, ip); in Generate_JSConstructStubHelper()
631 __ mtctr(r3); in Generate_JSConstructStubHelper()
632 __ bind(&loop); in Generate_JSConstructStubHelper()
633 __ subi(ip, ip, Operand(kPointerSize)); in Generate_JSConstructStubHelper()
634 __ LoadPX(r0, MemOperand(r5, ip)); in Generate_JSConstructStubHelper()
635 __ StorePX(r0, MemOperand(sp, ip)); in Generate_JSConstructStubHelper()
636 __ bdnz(&loop); in Generate_JSConstructStubHelper()
637 __ bind(&no_args); in Generate_JSConstructStubHelper()
646 __ InvokeFunction(r4, r6, actual, CALL_FUNCTION, in Generate_JSConstructStubHelper()
660 __ LoadP(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset)); in Generate_JSConstructStubHelper()
672 __ JumpIfSmi(r3, &use_receiver); in Generate_JSConstructStubHelper()
676 __ CompareObjectType(r3, r4, r6, FIRST_JS_RECEIVER_TYPE); in Generate_JSConstructStubHelper()
677 __ bge(&exit); in Generate_JSConstructStubHelper()
681 __ bind(&use_receiver); in Generate_JSConstructStubHelper()
682 __ LoadP(r3, MemOperand(sp)); in Generate_JSConstructStubHelper()
686 __ bind(&exit); in Generate_JSConstructStubHelper()
690 __ LoadP(r4, MemOperand(sp, 1 * kPointerSize)); in Generate_JSConstructStubHelper()
692 __ LoadP(r4, MemOperand(sp)); in Generate_JSConstructStubHelper()
703 __ JumpIfNotSmi(r3, &dont_throw); in Generate_JSConstructStubHelper()
706 __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject); in Generate_JSConstructStubHelper()
708 __ bind(&dont_throw); in Generate_JSConstructStubHelper()
711 __ SmiToPtrArrayOffset(r4, r4); in Generate_JSConstructStubHelper()
712 __ add(sp, sp, r4); in Generate_JSConstructStubHelper()
713 __ addi(sp, sp, Operand(kPointerSize)); in Generate_JSConstructStubHelper()
715 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r4, r5); in Generate_JSConstructStubHelper()
717 __ blr(); in Generate_JSConstructStubHelper()
730 __ pop(r4); in Generate_JSConstructStubHelper()
731 __ Push(r3, r3); in Generate_JSConstructStubHelper()
734 __ LoadP(r3, MemOperand(fp, ConstructFrameConstants::kLengthOffset)); in Generate_JSConstructStubHelper()
735 __ SmiUntag(r3); in Generate_JSConstructStubHelper()
739 __ addi(r6, fp, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate_JSConstructStubHelper()
740 __ ShiftLeftImm(ip, r3, Operand(kPointerSizeLog2)); in Generate_JSConstructStubHelper()
741 __ LoadPX(r6, MemOperand(r6, ip)); in Generate_JSConstructStubHelper()
744 __ b(&post_instantiation_deopt_entry); in Generate_JSConstructStubHelper()
775 __ AssertGeneratorObject(r4); in Generate_ResumeGeneratorTrampoline()
778 __ StoreP(r3, FieldMemOperand(r4, JSGeneratorObject::kInputOrDebugPosOffset), in Generate_ResumeGeneratorTrampoline()
780 __ RecordWriteField(r4, JSGeneratorObject::kInputOrDebugPosOffset, r3, r6, in Generate_ResumeGeneratorTrampoline()
784 __ StoreP(r5, FieldMemOperand(r4, JSGeneratorObject::kResumeModeOffset), r0); in Generate_ResumeGeneratorTrampoline()
787 __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset)); in Generate_ResumeGeneratorTrampoline()
788 __ LoadP(cp, FieldMemOperand(r7, JSFunction::kContextOffset)); in Generate_ResumeGeneratorTrampoline()
795 __ mov(ip, Operand(debug_hook)); in Generate_ResumeGeneratorTrampoline()
796 __ LoadByte(ip, MemOperand(ip), r0); in Generate_ResumeGeneratorTrampoline()
797 __ extsb(ip, ip); in Generate_ResumeGeneratorTrampoline()
798 __ CmpSmiLiteral(ip, Smi::kZero, r0); in Generate_ResumeGeneratorTrampoline()
799 __ bne(&prepare_step_in_if_stepping); in Generate_ResumeGeneratorTrampoline()
806 __ mov(ip, Operand(debug_suspended_generator)); in Generate_ResumeGeneratorTrampoline()
807 __ LoadP(ip, MemOperand(ip)); in Generate_ResumeGeneratorTrampoline()
808 __ cmp(ip, r4); in Generate_ResumeGeneratorTrampoline()
809 __ beq(&prepare_step_in_suspended_generator); in Generate_ResumeGeneratorTrampoline()
810 __ bind(&stepping_prepared); in Generate_ResumeGeneratorTrampoline()
813 __ LoadP(ip, FieldMemOperand(r4, JSGeneratorObject::kReceiverOffset)); in Generate_ResumeGeneratorTrampoline()
814 __ Push(ip); in Generate_ResumeGeneratorTrampoline()
829 __ LoadP(r6, FieldMemOperand(r7, JSFunction::kSharedFunctionInfoOffset)); in Generate_ResumeGeneratorTrampoline()
830 __ LoadWordArith( in Generate_ResumeGeneratorTrampoline()
834 __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); in Generate_ResumeGeneratorTrampoline()
836 __ cmpi(r3, Operand::Zero()); in Generate_ResumeGeneratorTrampoline()
837 __ beq(&done_loop); in Generate_ResumeGeneratorTrampoline()
839 __ SmiUntag(r3, SetRC); in Generate_ResumeGeneratorTrampoline()
840 __ beq(&done_loop, cr0); in Generate_ResumeGeneratorTrampoline()
842 __ mtctr(r3); in Generate_ResumeGeneratorTrampoline()
843 __ bind(&loop); in Generate_ResumeGeneratorTrampoline()
844 __ push(ip); in Generate_ResumeGeneratorTrampoline()
845 __ bdnz(&loop); in Generate_ResumeGeneratorTrampoline()
846 __ bind(&done_loop); in Generate_ResumeGeneratorTrampoline()
851 __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset)); in Generate_ResumeGeneratorTrampoline()
852 __ CompareObjectType(r6, r6, r6, BYTECODE_ARRAY_TYPE); in Generate_ResumeGeneratorTrampoline()
853 __ Assert(eq, kMissingBytecodeArray); in Generate_ResumeGeneratorTrampoline()
861 __ mr(r6, r4); in Generate_ResumeGeneratorTrampoline()
862 __ mr(r4, r7); in Generate_ResumeGeneratorTrampoline()
863 __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset)); in Generate_ResumeGeneratorTrampoline()
864 __ JumpToJSEntry(ip); in Generate_ResumeGeneratorTrampoline()
867 __ bind(&prepare_step_in_if_stepping); in Generate_ResumeGeneratorTrampoline()
870 __ Push(r4, r5, r7); in Generate_ResumeGeneratorTrampoline()
871 __ CallRuntime(Runtime::kDebugOnFunctionCall); in Generate_ResumeGeneratorTrampoline()
872 __ Pop(r4, r5); in Generate_ResumeGeneratorTrampoline()
873 __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset)); in Generate_ResumeGeneratorTrampoline()
875 __ b(&stepping_prepared); in Generate_ResumeGeneratorTrampoline()
877 __ bind(&prepare_step_in_suspended_generator); in Generate_ResumeGeneratorTrampoline()
880 __ Push(r4, r5); in Generate_ResumeGeneratorTrampoline()
881 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator); in Generate_ResumeGeneratorTrampoline()
882 __ Pop(r4, r5); in Generate_ResumeGeneratorTrampoline()
883 __ LoadP(r7, FieldMemOperand(r4, JSGeneratorObject::kFunctionOffset)); in Generate_ResumeGeneratorTrampoline()
885 __ b(&stepping_prepared); in Generate_ResumeGeneratorTrampoline()
890 __ push(r4); in Generate_ConstructedNonConstructable()
891 __ CallRuntime(Runtime::kThrowConstructedNonConstructable); in Generate_ConstructedNonConstructable()
903 __ LoadRoot(r5, Heap::kRealStackLimitRootIndex); in Generate_CheckStackOverflow()
906 __ sub(r5, sp, r5); in Generate_CheckStackOverflow()
909 __ SmiToPtrArrayOffset(r0, argc); in Generate_CheckStackOverflow()
912 __ ShiftLeftImm(r0, argc, Operand(kPointerSizeLog2)); in Generate_CheckStackOverflow()
914 __ cmp(r5, r0); in Generate_CheckStackOverflow()
915 __ bgt(&okay); // Signed comparison. in Generate_CheckStackOverflow()
918 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_CheckStackOverflow()
920 __ bind(&okay); in Generate_CheckStackOverflow()
941 __ mov(cp, Operand(context_address)); in Generate_JSEntryTrampolineHelper()
942 __ LoadP(cp, MemOperand(cp)); in Generate_JSEntryTrampolineHelper()
944 __ InitializeRootRegister(); in Generate_JSEntryTrampolineHelper()
947 __ Push(r4, r5); in Generate_JSEntryTrampolineHelper()
958 __ ShiftLeftImm(r0, r6, Operand(kPointerSizeLog2)); in Generate_JSEntryTrampolineHelper()
959 __ add(r5, r7, r0); in Generate_JSEntryTrampolineHelper()
961 __ b(&entry); in Generate_JSEntryTrampolineHelper()
962 __ bind(&loop); in Generate_JSEntryTrampolineHelper()
963 __ LoadP(r8, MemOperand(r7)); // read next parameter in Generate_JSEntryTrampolineHelper()
964 __ addi(r7, r7, Operand(kPointerSize)); in Generate_JSEntryTrampolineHelper()
965 __ LoadP(r0, MemOperand(r8)); // dereference handle in Generate_JSEntryTrampolineHelper()
966 __ push(r0); // push parameter in Generate_JSEntryTrampolineHelper()
967 __ bind(&entry); in Generate_JSEntryTrampolineHelper()
968 __ cmp(r7, r5); in Generate_JSEntryTrampolineHelper()
969 __ bne(&loop); in Generate_JSEntryTrampolineHelper()
972 __ mr(r7, r3); in Generate_JSEntryTrampolineHelper()
973 __ mr(r3, r6); in Generate_JSEntryTrampolineHelper()
974 __ mr(r6, r7); in Generate_JSEntryTrampolineHelper()
978 __ LoadRoot(r7, Heap::kUndefinedValueRootIndex); in Generate_JSEntryTrampolineHelper()
979 __ mr(r14, r7); in Generate_JSEntryTrampolineHelper()
980 __ mr(r15, r7); in Generate_JSEntryTrampolineHelper()
981 __ mr(r16, r7); in Generate_JSEntryTrampolineHelper()
982 __ mr(r17, r7); in Generate_JSEntryTrampolineHelper()
988 __ Call(builtin, RelocInfo::CODE_TARGET); in Generate_JSEntryTrampolineHelper()
993 __ blr(); in Generate_JSEntryTrampolineHelper()
1010 __ LoadP(args_count, in LeaveInterpreterFrame()
1012 __ lwz(args_count, in LeaveInterpreterFrame()
1016 __ LeaveFrame(StackFrame::JAVA_SCRIPT); in LeaveInterpreterFrame()
1018 __ add(sp, sp, args_count); in LeaveInterpreterFrame()
1044 __ PushStandardFrame(r4); in Generate_InterpreterEntryTrampoline()
1048 __ LoadP(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); in Generate_InterpreterEntryTrampoline()
1052 __ LoadP(debug_info, in Generate_InterpreterEntryTrampoline()
1055 __ LoadP(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1057 __ TestIfSmi(debug_info, r0); in Generate_InterpreterEntryTrampoline()
1058 __ beq(&array_done, cr0); in Generate_InterpreterEntryTrampoline()
1059 __ LoadP(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1061 __ bind(&array_done); in Generate_InterpreterEntryTrampoline()
1065 __ LoadP(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset)); in Generate_InterpreterEntryTrampoline()
1066 __ mov(ip, Operand(masm->CodeObject())); // Self-reference to this code. in Generate_InterpreterEntryTrampoline()
1067 __ cmp(r3, ip); in Generate_InterpreterEntryTrampoline()
1068 __ bne(&switch_to_different_code_kind); in Generate_InterpreterEntryTrampoline()
1071 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kFeedbackVectorOffset)); in Generate_InterpreterEntryTrampoline()
1072 __ LoadP(r7, FieldMemOperand(r7, Cell::kValueOffset)); in Generate_InterpreterEntryTrampoline()
1073 __ LoadP(r8, FieldMemOperand( in Generate_InterpreterEntryTrampoline()
1076 __ AddSmiLiteral(r8, r8, Smi::FromInt(1), r0); in Generate_InterpreterEntryTrampoline()
1077 __ StoreP(r8, FieldMemOperand( in Generate_InterpreterEntryTrampoline()
1085 __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0); in Generate_InterpreterEntryTrampoline()
1086 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, cr0); in Generate_InterpreterEntryTrampoline()
1087 __ CompareObjectType(kInterpreterBytecodeArrayRegister, r3, no_reg, in Generate_InterpreterEntryTrampoline()
1089 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); in Generate_InterpreterEntryTrampoline()
1093 __ mov(r8, Operand(BytecodeArray::kNoAgeBytecodeAge)); in Generate_InterpreterEntryTrampoline()
1094 __ StoreByte(r8, FieldMemOperand(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1099 __ mov(kInterpreterBytecodeOffsetRegister, in Generate_InterpreterEntryTrampoline()
1103 __ SmiTag(r3, kInterpreterBytecodeOffsetRegister); in Generate_InterpreterEntryTrampoline()
1104 __ Push(r6, kInterpreterBytecodeArrayRegister, r3); in Generate_InterpreterEntryTrampoline()
1109 __ lwz(r5, FieldMemOperand(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1114 __ sub(r6, sp, r5); in Generate_InterpreterEntryTrampoline()
1115 __ LoadRoot(r0, Heap::kRealStackLimitRootIndex); in Generate_InterpreterEntryTrampoline()
1116 __ cmpl(r6, r0); in Generate_InterpreterEntryTrampoline()
1117 __ bge(&ok); in Generate_InterpreterEntryTrampoline()
1118 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterEntryTrampoline()
1119 __ bind(&ok); in Generate_InterpreterEntryTrampoline()
1124 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex); in Generate_InterpreterEntryTrampoline()
1125 __ ShiftRightImm(r5, r5, Operand(kPointerSizeLog2), SetRC); in Generate_InterpreterEntryTrampoline()
1126 __ beq(&no_args, cr0); in Generate_InterpreterEntryTrampoline()
1127 __ mtctr(r5); in Generate_InterpreterEntryTrampoline()
1128 __ bind(&loop); in Generate_InterpreterEntryTrampoline()
1129 __ push(r6); in Generate_InterpreterEntryTrampoline()
1130 __ bdnz(&loop); in Generate_InterpreterEntryTrampoline()
1131 __ bind(&no_args); in Generate_InterpreterEntryTrampoline()
1135 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex); in Generate_InterpreterEntryTrampoline()
1136 __ mov(kInterpreterDispatchTableRegister, in Generate_InterpreterEntryTrampoline()
1141 __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1143 __ ShiftLeftImm(ip, r4, Operand(kPointerSizeLog2)); in Generate_InterpreterEntryTrampoline()
1144 __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip)); in Generate_InterpreterEntryTrampoline()
1145 __ Call(ip); in Generate_InterpreterEntryTrampoline()
1151 __ blr(); in Generate_InterpreterEntryTrampoline()
1156 __ bind(&switch_to_different_code_kind); in Generate_InterpreterEntryTrampoline()
1157 __ LeaveFrame(StackFrame::JAVA_SCRIPT); in Generate_InterpreterEntryTrampoline()
1158 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); in Generate_InterpreterEntryTrampoline()
1159 __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kCodeOffset)); in Generate_InterpreterEntryTrampoline()
1160 __ addi(r7, r7, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_InterpreterEntryTrampoline()
1161 __ StoreP(r7, FieldMemOperand(r4, JSFunction::kCodeEntryOffset), r0); in Generate_InterpreterEntryTrampoline()
1162 __ RecordWriteCodeEntryField(r4, r7, r8); in Generate_InterpreterEntryTrampoline()
1163 __ JumpToJSEntry(r7); in Generate_InterpreterEntryTrampoline()
1172 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex); in Generate_StackOverflowCheck()
1175 __ sub(scratch, sp, scratch); in Generate_StackOverflowCheck()
1177 __ ShiftLeftImm(r0, num_args, Operand(kPointerSizeLog2)); in Generate_StackOverflowCheck()
1178 __ cmp(scratch, r0); in Generate_StackOverflowCheck()
1179 __ ble(stack_overflow); // Signed comparison. in Generate_StackOverflowCheck()
1190 __ addi(index, index, Operand(kPointerSize)); // Bias up for LoadPU in Generate_InterpreterPushArgs()
1191 __ mtctr(count); in Generate_InterpreterPushArgs()
1192 __ bind(&loop); in Generate_InterpreterPushArgs()
1193 __ LoadPU(scratch, MemOperand(index, -kPointerSize)); in Generate_InterpreterPushArgs()
1194 __ push(scratch); in Generate_InterpreterPushArgs()
1195 __ bdnz(&loop); in Generate_InterpreterPushArgs()
1212 __ addi(r6, r3, Operand(1)); in Generate_InterpreterPushArgsAndCallImpl()
1219 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny, in Generate_InterpreterPushArgsAndCallImpl()
1223 __ Jump(masm->isolate()->builtins()->CallWithSpread(), in Generate_InterpreterPushArgsAndCallImpl()
1226 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, in Generate_InterpreterPushArgsAndCallImpl()
1231 __ bind(&stack_overflow); in Generate_InterpreterPushArgsAndCallImpl()
1233 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterPushArgsAndCallImpl()
1235 __ bkpt(0); in Generate_InterpreterPushArgsAndCallImpl()
1252 __ li(r0, Operand::Zero()); in Generate_InterpreterPushArgsAndConstructImpl()
1253 __ push(r0); in Generate_InterpreterPushArgsAndConstructImpl()
1257 __ cmpi(r3, Operand::Zero()); in Generate_InterpreterPushArgsAndConstructImpl()
1258 __ beq(&skip); in Generate_InterpreterPushArgsAndConstructImpl()
1261 __ bind(&skip); in Generate_InterpreterPushArgsAndConstructImpl()
1263 __ AssertUndefinedOrAllocationSite(r5, r8); in Generate_InterpreterPushArgsAndConstructImpl()
1265 __ AssertFunction(r4); in Generate_InterpreterPushArgsAndConstructImpl()
1269 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); in Generate_InterpreterPushArgsAndConstructImpl()
1270 __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset)); in Generate_InterpreterPushArgsAndConstructImpl()
1272 __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_InterpreterPushArgsAndConstructImpl()
1273 __ Jump(ip); in Generate_InterpreterPushArgsAndConstructImpl()
1276 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(), in Generate_InterpreterPushArgsAndConstructImpl()
1281 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate_InterpreterPushArgsAndConstructImpl()
1284 __ bind(&stack_overflow); in Generate_InterpreterPushArgsAndConstructImpl()
1286 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterPushArgsAndConstructImpl()
1288 __ bkpt(0); in Generate_InterpreterPushArgsAndConstructImpl()
1303 __ addi(r7, r3, Operand(1)); // Add one for receiver. in Generate_InterpreterPushArgsAndConstructArray()
1309 __ mr(r6, r4); in Generate_InterpreterPushArgsAndConstructArray()
1312 __ TailCallStub(&stub); in Generate_InterpreterPushArgsAndConstructArray()
1314 __ bind(&stack_overflow); in Generate_InterpreterPushArgsAndConstructArray()
1316 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterPushArgsAndConstructArray()
1318 __ bkpt(0); in Generate_InterpreterPushArgsAndConstructArray()
1328 __ Move(r5, masm->isolate()->builtins()->InterpreterEntryTrampoline()); in Generate_InterpreterEnterBytecode()
1329 __ addi(r0, r5, Operand(interpreter_entry_return_pc_offset->value() + in Generate_InterpreterEnterBytecode()
1331 __ mtlr(r0); in Generate_InterpreterEnterBytecode()
1334 __ mov(kInterpreterDispatchTableRegister, in Generate_InterpreterEnterBytecode()
1339 __ LoadP(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEnterBytecode()
1344 __ TestIfSmi(kInterpreterBytecodeArrayRegister, r0); in Generate_InterpreterEnterBytecode()
1345 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, cr0); in Generate_InterpreterEnterBytecode()
1346 __ CompareObjectType(kInterpreterBytecodeArrayRegister, r4, no_reg, in Generate_InterpreterEnterBytecode()
1348 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); in Generate_InterpreterEnterBytecode()
1352 __ LoadP(kInterpreterBytecodeOffsetRegister, in Generate_InterpreterEnterBytecode()
1354 __ SmiUntag(kInterpreterBytecodeOffsetRegister); in Generate_InterpreterEnterBytecode()
1357 __ lbzx(r4, MemOperand(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEnterBytecode()
1359 __ ShiftLeftImm(ip, r4, Operand(kPointerSizeLog2)); in Generate_InterpreterEnterBytecode()
1360 __ LoadPX(ip, MemOperand(kInterpreterDispatchTableRegister, ip)); in Generate_InterpreterEnterBytecode()
1361 __ Jump(ip); in Generate_InterpreterEnterBytecode()
1368 __ LoadP(r4, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp)); in Generate_InterpreterEnterBytecodeAdvance()
1369 __ LoadP(r5, in Generate_InterpreterEnterBytecodeAdvance()
1371 __ LoadP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); in Generate_InterpreterEnterBytecodeAdvance()
1374 __ Push(kInterpreterAccumulatorRegister, r4, r5); in Generate_InterpreterEnterBytecodeAdvance()
1375 __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset); in Generate_InterpreterEnterBytecodeAdvance()
1376 __ Move(r5, r3); // Result is the new bytecode offset. in Generate_InterpreterEnterBytecodeAdvance()
1377 __ Pop(kInterpreterAccumulatorRegister); in Generate_InterpreterEnterBytecodeAdvance()
1379 __ StoreP(r5, in Generate_InterpreterEnterBytecodeAdvance()
1405 __ LoadP(index, FieldMemOperand(closure, JSFunction::kFeedbackVectorOffset)); in Generate_CompileLazy()
1406 __ LoadP(index, FieldMemOperand(index, Cell::kValueOffset)); in Generate_CompileLazy()
1407 __ JumpIfRoot(index, Heap::kUndefinedValueRootIndex, &gotta_call_runtime); in Generate_CompileLazy()
1409 __ LoadP(map, in Generate_CompileLazy()
1411 __ LoadP(map, in Generate_CompileLazy()
1413 __ LoadP(index, FieldMemOperand(map, FixedArray::kLengthOffset)); in Generate_CompileLazy()
1414 __ CmpSmiLiteral(index, Smi::FromInt(2), r0); in Generate_CompileLazy()
1415 __ blt(&try_shared); in Generate_CompileLazy()
1423 __ LoadP(native_context, NativeContextMemOperand()); in Generate_CompileLazy()
1425 __ bind(&loop_top); in Generate_CompileLazy()
1430 __ SmiToPtrArrayOffset(array_pointer, index); in Generate_CompileLazy()
1431 __ add(array_pointer, map, array_pointer); in Generate_CompileLazy()
1432 __ LoadP(temp, FieldMemOperand(array_pointer, in Generate_CompileLazy()
1434 __ LoadP(temp, FieldMemOperand(temp, WeakCell::kValueOffset)); in Generate_CompileLazy()
1435 __ cmp(temp, native_context); in Generate_CompileLazy()
1436 __ bne(&loop_bottom); in Generate_CompileLazy()
1440 __ LoadP(entry, in Generate_CompileLazy()
1443 __ LoadP(entry, FieldMemOperand(entry, WeakCell::kValueOffset)); in Generate_CompileLazy()
1444 __ JumpIfSmi(entry, &try_shared); in Generate_CompileLazy()
1448 __ addi(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_CompileLazy()
1449 __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0); in Generate_CompileLazy()
1450 __ RecordWriteCodeEntryField(closure, entry, r8); in Generate_CompileLazy()
1456 __ LoadP( in Generate_CompileLazy()
1458 __ StoreP(r8, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset), in Generate_CompileLazy()
1460 __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, r8, temp, in Generate_CompileLazy()
1465 __ StoreP( in Generate_CompileLazy()
1469 __ mr(r8, closure); in Generate_CompileLazy()
1470 __ RecordWriteContextSlot(native_context, function_list_offset, r8, temp, in Generate_CompileLazy()
1472 __ JumpToJSEntry(entry); in Generate_CompileLazy()
1474 __ bind(&loop_bottom); in Generate_CompileLazy()
1475 __ SubSmiLiteral(index, index, Smi::FromInt(SharedFunctionInfo::kEntryLength), in Generate_CompileLazy()
1477 __ CmpSmiLiteral(index, Smi::FromInt(1), r0); in Generate_CompileLazy()
1478 __ bgt(&loop_top); in Generate_CompileLazy()
1481 __ b(&gotta_call_runtime); in Generate_CompileLazy()
1483 __ bind(&try_shared); in Generate_CompileLazy()
1484 __ LoadP(entry, in Generate_CompileLazy()
1487 __ lbz(r8, FieldMemOperand(entry, in Generate_CompileLazy()
1489 __ TestBit(r8, SharedFunctionInfo::kMarkedForTierUpBitWithinByte, r0); in Generate_CompileLazy()
1490 __ bne(&gotta_call_runtime, cr0); in Generate_CompileLazy()
1493 __ LoadP(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset)); in Generate_CompileLazy()
1494 __ mov(r8, Operand(masm->CodeObject())); in Generate_CompileLazy()
1495 __ cmp(entry, r8); in Generate_CompileLazy()
1496 __ beq(&gotta_call_runtime); in Generate_CompileLazy()
1499 __ addi(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_CompileLazy()
1500 __ StoreP(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset), r0); in Generate_CompileLazy()
1501 __ RecordWriteCodeEntryField(closure, entry, r8); in Generate_CompileLazy()
1502 __ JumpToJSEntry(entry); in Generate_CompileLazy()
1504 __ bind(&gotta_call_runtime); in Generate_CompileLazy()
1531 __ Move(r7, r3); in Generate_InstantiateAsmJs()
1534 __ SmiTag(r3); in Generate_InstantiateAsmJs()
1535 __ Push(r3, r4, r6, r4); in Generate_InstantiateAsmJs()
1542 __ cmpi(r7, Operand(j)); in Generate_InstantiateAsmJs()
1543 __ bne(&over); in Generate_InstantiateAsmJs()
1546 __ LoadP(r7, MemOperand(fp, StandardFrameConstants::kCallerSPOffset + in Generate_InstantiateAsmJs()
1548 __ push(r7); in Generate_InstantiateAsmJs()
1551 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate_InstantiateAsmJs()
1554 __ jmp(&args_done); in Generate_InstantiateAsmJs()
1555 __ bind(&over); in Generate_InstantiateAsmJs()
1558 __ bind(&args_done); in Generate_InstantiateAsmJs()
1561 __ CallRuntime(Runtime::kInstantiateAsmJs, 4); in Generate_InstantiateAsmJs()
1563 __ JumpIfSmi(r3, &failed); in Generate_InstantiateAsmJs()
1565 __ Drop(2); in Generate_InstantiateAsmJs()
1566 __ pop(r7); in Generate_InstantiateAsmJs()
1567 __ SmiUntag(r7); in Generate_InstantiateAsmJs()
1570 __ addi(r7, r7, Operand(1)); in Generate_InstantiateAsmJs()
1571 __ Drop(r7); in Generate_InstantiateAsmJs()
1572 __ Ret(); in Generate_InstantiateAsmJs()
1574 __ bind(&failed); in Generate_InstantiateAsmJs()
1576 __ Pop(r3, r4, r6); in Generate_InstantiateAsmJs()
1577 __ SmiUntag(r3); in Generate_InstantiateAsmJs()
1591 __ mr(r3, ip); in GenerateMakeCodeYoungAgainCommon()
1600 __ mflr(r0); in GenerateMakeCodeYoungAgainCommon()
1601 __ MultiPush(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit()); in GenerateMakeCodeYoungAgainCommon()
1602 __ PrepareCallCFunction(2, 0, r5); in GenerateMakeCodeYoungAgainCommon()
1603 __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate()))); in GenerateMakeCodeYoungAgainCommon()
1604 __ CallCFunction( in GenerateMakeCodeYoungAgainCommon()
1606 __ MultiPop(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit()); in GenerateMakeCodeYoungAgainCommon()
1607 __ mtlr(r0); in GenerateMakeCodeYoungAgainCommon()
1608 __ mr(ip, r3); in GenerateMakeCodeYoungAgainCommon()
1609 __ Jump(ip); in GenerateMakeCodeYoungAgainCommon()
1627 __ mr(r3, ip); in CODE_AGE_LIST()
1636 __ mflr(r0); in CODE_AGE_LIST()
1637 __ MultiPush(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit()); in CODE_AGE_LIST()
1638 __ PrepareCallCFunction(2, 0, r5); in CODE_AGE_LIST()
1639 __ mov(r4, Operand(ExternalReference::isolate_address(masm->isolate()))); in CODE_AGE_LIST()
1640 __ CallCFunction( in CODE_AGE_LIST()
1643 __ MultiPop(r0.bit() | r3.bit() | r4.bit() | r6.bit() | fp.bit()); in CODE_AGE_LIST()
1644 __ mtlr(r0); in CODE_AGE_LIST()
1645 __ mr(ip, r3); in CODE_AGE_LIST()
1648 __ PushStandardFrame(r4); in CODE_AGE_LIST()
1651 __ addi(r3, ip, Operand(kNoCodeAgeSequenceLength)); in CODE_AGE_LIST()
1652 __ Jump(r3); in CODE_AGE_LIST()
1671 __ MultiPush(kJSCallerSaved | kCalleeSaved); in Generate_NotifyStubFailureHelper()
1673 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles); in Generate_NotifyStubFailureHelper()
1674 __ MultiPop(kJSCallerSaved | kCalleeSaved); in Generate_NotifyStubFailureHelper()
1677 __ addi(sp, sp, Operand(kPointerSize)); // Ignore state in Generate_NotifyStubFailureHelper()
1678 __ blr(); // Jump to miss handler in Generate_NotifyStubFailureHelper()
1694 __ LoadSmiLiteral(r3, Smi::FromInt(static_cast<int>(type))); in Generate_NotifyDeoptimizedHelper()
1695 __ push(r3); in Generate_NotifyDeoptimizedHelper()
1696 __ CallRuntime(Runtime::kNotifyDeoptimized); in Generate_NotifyDeoptimizedHelper()
1700 __ LoadP(r9, MemOperand(sp, 0 * kPointerSize)); in Generate_NotifyDeoptimizedHelper()
1701 __ SmiUntag(r9); in Generate_NotifyDeoptimizedHelper()
1704 __ cmpi( in Generate_NotifyDeoptimizedHelper()
1707 __ bne(&with_tos_register); in Generate_NotifyDeoptimizedHelper()
1708 __ addi(sp, sp, Operand(1 * kPointerSize)); // Remove state. in Generate_NotifyDeoptimizedHelper()
1709 __ Ret(); in Generate_NotifyDeoptimizedHelper()
1711 __ bind(&with_tos_register); in Generate_NotifyDeoptimizedHelper()
1713 __ LoadP(r3, MemOperand(sp, 1 * kPointerSize)); in Generate_NotifyDeoptimizedHelper()
1714 __ cmpi( in Generate_NotifyDeoptimizedHelper()
1717 __ bne(&unknown_state); in Generate_NotifyDeoptimizedHelper()
1718 __ addi(sp, sp, Operand(2 * kPointerSize)); // Remove state. in Generate_NotifyDeoptimizedHelper()
1719 __ Ret(); in Generate_NotifyDeoptimizedHelper()
1721 __ bind(&unknown_state); in Generate_NotifyDeoptimizedHelper()
1722 __ stop("no cases left"); in Generate_NotifyDeoptimizedHelper()
1747 __ LoadP(signature, FieldMemOperand(function_template_info, in CompatibleReceiverCheck()
1750 __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex, in CompatibleReceiverCheck()
1754 __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); in CompatibleReceiverCheck()
1756 __ bind(&prototype_loop_start); in CompatibleReceiverCheck()
1759 __ GetMapConstructor(constructor, map, scratch, scratch); in CompatibleReceiverCheck()
1760 __ cmpi(scratch, Operand(JS_FUNCTION_TYPE)); in CompatibleReceiverCheck()
1762 __ bne(&next_prototype); in CompatibleReceiverCheck()
1764 __ LoadP(type, in CompatibleReceiverCheck()
1766 __ LoadP(type, in CompatibleReceiverCheck()
1771 __ bind(&function_template_loop); in CompatibleReceiverCheck()
1774 __ cmp(signature, type); in CompatibleReceiverCheck()
1775 __ beq(&receiver_check_passed); in CompatibleReceiverCheck()
1779 __ JumpIfSmi(type, &next_prototype); in CompatibleReceiverCheck()
1780 __ CompareObjectType(type, scratch, scratch, FUNCTION_TEMPLATE_INFO_TYPE); in CompatibleReceiverCheck()
1781 __ bne(&next_prototype); in CompatibleReceiverCheck()
1784 __ LoadP(type, in CompatibleReceiverCheck()
1786 __ b(&function_template_loop); in CompatibleReceiverCheck()
1789 __ bind(&next_prototype); in CompatibleReceiverCheck()
1790 __ lwz(scratch, FieldMemOperand(map, Map::kBitField3Offset)); in CompatibleReceiverCheck()
1791 __ DecodeField<Map::HasHiddenPrototype>(scratch, SetRC); in CompatibleReceiverCheck()
1792 __ beq(receiver_check_failed, cr0); in CompatibleReceiverCheck()
1794 __ LoadP(receiver, FieldMemOperand(map, Map::kPrototypeOffset)); in CompatibleReceiverCheck()
1795 __ LoadP(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); in CompatibleReceiverCheck()
1797 __ b(&prototype_loop_start); in CompatibleReceiverCheck()
1799 __ bind(&receiver_check_passed); in CompatibleReceiverCheck()
1814 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); in Generate_HandleFastApiCall()
1815 __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kFunctionDataOffset)); in Generate_HandleFastApiCall()
1819 __ ShiftLeftImm(r11, r3, Operand(kPointerSizeLog2)); in Generate_HandleFastApiCall()
1820 __ LoadPX(r5, MemOperand(sp, r11)); in Generate_HandleFastApiCall()
1825 __ LoadP(r7, FieldMemOperand(r6, FunctionTemplateInfo::kCallCodeOffset)); in Generate_HandleFastApiCall()
1826 __ LoadP(r7, FieldMemOperand(r7, CallHandlerInfo::kFastHandlerOffset)); in Generate_HandleFastApiCall()
1827 __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_HandleFastApiCall()
1828 __ JumpToJSEntry(ip); in Generate_HandleFastApiCall()
1831 __ bind(&receiver_check_failed); in Generate_HandleFastApiCall()
1833 __ addi(r11, r11, Operand(kPointerSize)); in Generate_HandleFastApiCall()
1834 __ add(sp, sp, r11); in Generate_HandleFastApiCall()
1835 __ TailCallRuntime(Runtime::kThrowIllegalInvocation); in Generate_HandleFastApiCall()
1842 __ LoadP(r3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in Generate_OnStackReplacementHelper()
1843 __ LoadP(r3, MemOperand(r3, JavaScriptFrameConstants::kFunctionOffset)); in Generate_OnStackReplacementHelper()
1845 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); in Generate_OnStackReplacementHelper()
1851 __ push(r3); in Generate_OnStackReplacementHelper()
1852 __ CallRuntime(Runtime::kCompileForOnStackReplacement); in Generate_OnStackReplacementHelper()
1857 __ CmpSmiLiteral(r3, Smi::kZero, r0); in Generate_OnStackReplacementHelper()
1858 __ bne(&skip); in Generate_OnStackReplacementHelper()
1859 __ Ret(); in Generate_OnStackReplacementHelper()
1861 __ bind(&skip); in Generate_OnStackReplacementHelper()
1866 __ LeaveFrame(StackFrame::STUB); in Generate_OnStackReplacementHelper()
1871 __ LoadP(r4, FieldMemOperand(r3, Code::kDeoptimizationDataOffset)); in Generate_OnStackReplacementHelper()
1875 __ addi(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start in Generate_OnStackReplacementHelper()
1878 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r3); in Generate_OnStackReplacementHelper()
1883 __ LoadP(r4, FieldMemOperand( in Generate_OnStackReplacementHelper()
1886 __ SmiUntag(r4); in Generate_OnStackReplacementHelper()
1889 __ add(r0, r3, r4); in Generate_OnStackReplacementHelper()
1892 __ mtlr(r0); in Generate_OnStackReplacementHelper()
1893 __ blr(); in Generate_OnStackReplacementHelper()
1922 __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2)); in Generate_FunctionPrototypeApply()
1923 __ add(new_sp, sp, arg_size); in Generate_FunctionPrototypeApply()
1924 __ LoadRoot(r3, Heap::kUndefinedValueRootIndex); in Generate_FunctionPrototypeApply()
1925 __ mr(scratch, r3); in Generate_FunctionPrototypeApply()
1926 __ LoadP(r4, MemOperand(new_sp, 0)); // receiver in Generate_FunctionPrototypeApply()
1927 __ cmpi(arg_size, Operand(kPointerSize)); in Generate_FunctionPrototypeApply()
1928 __ blt(&skip); in Generate_FunctionPrototypeApply()
1929 __ LoadP(scratch, MemOperand(new_sp, 1 * -kPointerSize)); // thisArg in Generate_FunctionPrototypeApply()
1930 __ beq(&skip); in Generate_FunctionPrototypeApply()
1931 __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize)); // argArray in Generate_FunctionPrototypeApply()
1932 __ bind(&skip); in Generate_FunctionPrototypeApply()
1933 __ mr(sp, new_sp); in Generate_FunctionPrototypeApply()
1934 __ StoreP(scratch, MemOperand(sp, 0)); in Generate_FunctionPrototypeApply()
1945 __ JumpIfSmi(r4, &receiver_not_callable); in Generate_FunctionPrototypeApply()
1946 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset)); in Generate_FunctionPrototypeApply()
1947 __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset)); in Generate_FunctionPrototypeApply()
1948 __ TestBit(r7, Map::kIsCallable, r0); in Generate_FunctionPrototypeApply()
1949 __ beq(&receiver_not_callable, cr0); in Generate_FunctionPrototypeApply()
1953 __ JumpIfRoot(r3, Heap::kNullValueRootIndex, &no_arguments); in Generate_FunctionPrototypeApply()
1954 __ JumpIfRoot(r3, Heap::kUndefinedValueRootIndex, &no_arguments); in Generate_FunctionPrototypeApply()
1958 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex); in Generate_FunctionPrototypeApply()
1959 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); in Generate_FunctionPrototypeApply()
1963 __ bind(&no_arguments); in Generate_FunctionPrototypeApply()
1965 __ li(r3, Operand::Zero()); in Generate_FunctionPrototypeApply()
1966 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); in Generate_FunctionPrototypeApply()
1970 __ bind(&receiver_not_callable); in Generate_FunctionPrototypeApply()
1972 __ StoreP(r4, MemOperand(sp, 0)); in Generate_FunctionPrototypeApply()
1973 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); in Generate_FunctionPrototypeApply()
1983 __ cmpi(r3, Operand::Zero()); in Generate_FunctionPrototypeCall()
1984 __ bne(&done); in Generate_FunctionPrototypeCall()
1985 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate_FunctionPrototypeCall()
1986 __ addi(r3, r3, Operand(1)); in Generate_FunctionPrototypeCall()
1987 __ bind(&done); in Generate_FunctionPrototypeCall()
1992 __ ShiftLeftImm(r5, r3, Operand(kPointerSizeLog2)); in Generate_FunctionPrototypeCall()
1993 __ LoadPX(r4, MemOperand(sp, r5)); in Generate_FunctionPrototypeCall()
2003 __ add(r5, sp, r5); in Generate_FunctionPrototypeCall()
2005 __ mtctr(r3); in Generate_FunctionPrototypeCall()
2006 __ bind(&loop); in Generate_FunctionPrototypeCall()
2007 __ LoadP(ip, MemOperand(r5, -kPointerSize)); in Generate_FunctionPrototypeCall()
2008 __ StoreP(ip, MemOperand(r5)); in Generate_FunctionPrototypeCall()
2009 __ subi(r5, r5, Operand(kPointerSize)); in Generate_FunctionPrototypeCall()
2010 __ bdnz(&loop); in Generate_FunctionPrototypeCall()
2013 __ subi(r3, r3, Operand(1)); in Generate_FunctionPrototypeCall()
2014 __ pop(); in Generate_FunctionPrototypeCall()
2018 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); in Generate_FunctionPrototypeCall()
2038 __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2)); in Generate_ReflectApply()
2039 __ add(new_sp, sp, arg_size); in Generate_ReflectApply()
2040 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); in Generate_ReflectApply()
2041 __ mr(scratch, r4); in Generate_ReflectApply()
2042 __ mr(r3, r4); in Generate_ReflectApply()
2043 __ cmpi(arg_size, Operand(kPointerSize)); in Generate_ReflectApply()
2044 __ blt(&skip); in Generate_ReflectApply()
2045 __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize)); // target in Generate_ReflectApply()
2046 __ beq(&skip); in Generate_ReflectApply()
2047 __ LoadP(scratch, MemOperand(new_sp, 2 * -kPointerSize)); // thisArgument in Generate_ReflectApply()
2048 __ cmpi(arg_size, Operand(2 * kPointerSize)); in Generate_ReflectApply()
2049 __ beq(&skip); in Generate_ReflectApply()
2050 __ LoadP(r3, MemOperand(new_sp, 3 * -kPointerSize)); // argumentsList in Generate_ReflectApply()
2051 __ bind(&skip); in Generate_ReflectApply()
2052 __ mr(sp, new_sp); in Generate_ReflectApply()
2053 __ StoreP(scratch, MemOperand(sp, 0)); in Generate_ReflectApply()
2064 __ JumpIfSmi(r4, &target_not_callable); in Generate_ReflectApply()
2065 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset)); in Generate_ReflectApply()
2066 __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset)); in Generate_ReflectApply()
2067 __ TestBit(r7, Map::kIsCallable, r0); in Generate_ReflectApply()
2068 __ beq(&target_not_callable, cr0); in Generate_ReflectApply()
2072 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex); in Generate_ReflectApply()
2073 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); in Generate_ReflectApply()
2076 __ bind(&target_not_callable); in Generate_ReflectApply()
2078 __ StoreP(r4, MemOperand(sp, 0)); in Generate_ReflectApply()
2079 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); in Generate_ReflectApply()
2100 __ ShiftLeftImm(arg_size, r3, Operand(kPointerSizeLog2)); in Generate_ReflectConstruct()
2101 __ add(new_sp, sp, arg_size); in Generate_ReflectConstruct()
2102 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); in Generate_ReflectConstruct()
2103 __ mr(r3, r4); in Generate_ReflectConstruct()
2104 __ mr(r6, r4); in Generate_ReflectConstruct()
2105 __ StoreP(r4, MemOperand(new_sp, 0)); // receiver (undefined) in Generate_ReflectConstruct()
2106 __ cmpi(arg_size, Operand(kPointerSize)); in Generate_ReflectConstruct()
2107 __ blt(&skip); in Generate_ReflectConstruct()
2108 __ LoadP(r4, MemOperand(new_sp, 1 * -kPointerSize)); // target in Generate_ReflectConstruct()
2109 __ mr(r6, r4); // new.target defaults to target in Generate_ReflectConstruct()
2110 __ beq(&skip); in Generate_ReflectConstruct()
2111 __ LoadP(r3, MemOperand(new_sp, 2 * -kPointerSize)); // argumentsList in Generate_ReflectConstruct()
2112 __ cmpi(arg_size, Operand(2 * kPointerSize)); in Generate_ReflectConstruct()
2113 __ beq(&skip); in Generate_ReflectConstruct()
2114 __ LoadP(r6, MemOperand(new_sp, 3 * -kPointerSize)); // new.target in Generate_ReflectConstruct()
2115 __ bind(&skip); in Generate_ReflectConstruct()
2116 __ mr(sp, new_sp); in Generate_ReflectConstruct()
2128 __ JumpIfSmi(r4, &target_not_constructor); in Generate_ReflectConstruct()
2129 __ LoadP(r7, FieldMemOperand(r4, HeapObject::kMapOffset)); in Generate_ReflectConstruct()
2130 __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset)); in Generate_ReflectConstruct()
2131 __ TestBit(r7, Map::kIsConstructor, r0); in Generate_ReflectConstruct()
2132 __ beq(&target_not_constructor, cr0); in Generate_ReflectConstruct()
2136 __ JumpIfSmi(r6, &new_target_not_constructor); in Generate_ReflectConstruct()
2137 __ LoadP(r7, FieldMemOperand(r6, HeapObject::kMapOffset)); in Generate_ReflectConstruct()
2138 __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset)); in Generate_ReflectConstruct()
2139 __ TestBit(r7, Map::kIsConstructor, r0); in Generate_ReflectConstruct()
2140 __ beq(&new_target_not_constructor, cr0); in Generate_ReflectConstruct()
2143 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); in Generate_ReflectConstruct()
2146 __ bind(&target_not_constructor); in Generate_ReflectConstruct()
2148 __ StoreP(r4, MemOperand(sp, 0)); in Generate_ReflectConstruct()
2149 __ TailCallRuntime(Runtime::kThrowNotConstructor); in Generate_ReflectConstruct()
2153 __ bind(&new_target_not_constructor); in Generate_ReflectConstruct()
2155 __ StoreP(r6, MemOperand(sp, 0)); in Generate_ReflectConstruct()
2156 __ TailCallRuntime(Runtime::kThrowNotConstructor); in Generate_ReflectConstruct()
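The Generate_ReflectConstruct lines above compute new_sp = sp + argc * kPointerSize, store an undefined receiver there, and then read target, argumentsList, and new.target from the one, two, and three slots below it, with new.target defaulting to target when fewer than three arguments were pushed. As a minimal standalone C++ sketch of that decoding (a plain vector stands in for the machine stack; all names below are illustrative, not V8 API):

#include <cstdio>
#include <string>
#include <vector>

// Conceptual model: args[0..n-1] are the arguments passed to
// Reflect.construct(target, argumentsList, newTarget); the string
// "undefined" stands in for V8's undefined value.
struct ReflectConstructOperands {
  std::string target = "undefined";
  std::string arguments_list = "undefined";
  std::string new_target = "undefined";
};

ReflectConstructOperands DecodeReflectConstruct(
    const std::vector<std::string>& args) {
  ReflectConstructOperands out;
  if (args.size() >= 1) {
    out.target = args[0];
    out.new_target = args[0];  // new.target defaults to target
  }
  if (args.size() >= 2) out.arguments_list = args[1];
  if (args.size() >= 3) out.new_target = args[2];
  return out;
}

int main() {
  ReflectConstructOperands ops = DecodeReflectConstruct({"Point", "[1, 2]"});
  std::printf("target=%s args=%s new.target=%s\n", ops.target.c_str(),
              ops.arguments_list.c_str(), ops.new_target.c_str());
  return 0;
}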
2161 __ SmiTag(r3); in EnterArgumentsAdaptorFrame()
2162 __ mov(r7, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); in EnterArgumentsAdaptorFrame()
2163 __ mflr(r0); in EnterArgumentsAdaptorFrame()
2164 __ push(r0); in EnterArgumentsAdaptorFrame()
2166 __ Push(fp, kConstantPoolRegister, r7, r4, r3); in EnterArgumentsAdaptorFrame()
2168 __ Push(fp, r7, r4, r3); in EnterArgumentsAdaptorFrame()
2170 __ addi(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + in EnterArgumentsAdaptorFrame()
2180 __ LoadP(r4, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + in LeaveArgumentsAdaptorFrame()
2183 __ LeaveFrame(StackFrame::ARGUMENTS_ADAPTOR, stack_adjustment); in LeaveArgumentsAdaptorFrame()
2184 __ SmiToPtrArrayOffset(r0, r4); in LeaveArgumentsAdaptorFrame()
2185 __ add(sp, sp, r0); in LeaveArgumentsAdaptorFrame()
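The EnterArgumentsAdaptorFrame / LeaveArgumentsAdaptorFrame lines above build and tear down an ARGUMENTS_ADAPTOR frame: the return address and caller fp, optionally the constant pool register, the frame-type marker, the callee function, and the Smi-tagged actual argument count, which LeaveArgumentsAdaptorFrame later reloads to pop the caller's arguments. Purely as a mnemonic for that push order (field names are not V8's, and the struct is not a memory-layout-accurate type), a rough C++ picture:

#include <cstdint>

// Slots of the arguments-adaptor frame, listed in push order
// (first pushed = deepest on the stack). On PPC builds with an
// embedded constant pool there is one extra saved slot for the
// constant pool pointer, omitted here for brevity.
struct ArgumentsAdaptorFrame {
  intptr_t caller_lr;    // saved return address
  intptr_t caller_fp;    // saved frame pointer
  intptr_t frame_type;   // StackFrame::ARGUMENTS_ADAPTOR marker
  intptr_t function;     // the callee JSFunction (r4)
  intptr_t argc_smi;     // actual argument count, Smi-tagged (r3)
  // ...adapted arguments are pushed below this point...
};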
2201 __ JumpIfSmi(r3, &create_runtime); in Generate_Apply()
2204 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset)); in Generate_Apply()
2207 __ LoadP(r7, NativeContextMemOperand()); in Generate_Apply()
2210 __ LoadP(ip, ContextMemOperand(r7, Context::SLOPPY_ARGUMENTS_MAP_INDEX)); in Generate_Apply()
2211 __ cmp(ip, r5); in Generate_Apply()
2212 __ beq(&create_arguments); in Generate_Apply()
2213 __ LoadP(ip, ContextMemOperand(r7, Context::STRICT_ARGUMENTS_MAP_INDEX)); in Generate_Apply()
2214 __ cmp(ip, r5); in Generate_Apply()
2215 __ beq(&create_arguments); in Generate_Apply()
2218 __ CompareInstanceType(r5, ip, JS_ARRAY_TYPE); in Generate_Apply()
2219 __ beq(&create_array); in Generate_Apply()
2222 __ bind(&create_runtime); in Generate_Apply()
2225 __ Push(r4, r6, r3); in Generate_Apply()
2226 __ CallRuntime(Runtime::kCreateListFromArrayLike); in Generate_Apply()
2227 __ Pop(r4, r6); in Generate_Apply()
2228 __ LoadP(r5, FieldMemOperand(r3, FixedArray::kLengthOffset)); in Generate_Apply()
2229 __ SmiUntag(r5); in Generate_Apply()
2231 __ b(&done_create); in Generate_Apply()
2234 __ bind(&create_arguments); in Generate_Apply()
2235 __ LoadP(r5, FieldMemOperand(r3, JSArgumentsObject::kLengthOffset)); in Generate_Apply()
2236 __ LoadP(r7, FieldMemOperand(r3, JSObject::kElementsOffset)); in Generate_Apply()
2237 __ LoadP(ip, FieldMemOperand(r7, FixedArray::kLengthOffset)); in Generate_Apply()
2238 __ cmp(r5, ip); in Generate_Apply()
2239 __ bne(&create_runtime); in Generate_Apply()
2240 __ SmiUntag(r5); in Generate_Apply()
2241 __ mr(r3, r7); in Generate_Apply()
2242 __ b(&done_create); in Generate_Apply()
2246 __ bind(&create_holey_array); in Generate_Apply()
2247 __ LoadP(r5, FieldMemOperand(r5, Map::kPrototypeOffset)); in Generate_Apply()
2248 __ LoadP(r7, ContextMemOperand(r7, Context::INITIAL_ARRAY_PROTOTYPE_INDEX)); in Generate_Apply()
2249 __ cmp(r5, r7); in Generate_Apply()
2250 __ bne(&create_runtime); in Generate_Apply()
2251 __ LoadRoot(r7, Heap::kArrayProtectorRootIndex); in Generate_Apply()
2252 __ LoadP(r5, FieldMemOperand(r7, PropertyCell::kValueOffset)); in Generate_Apply()
2253 __ CmpSmiLiteral(r5, Smi::FromInt(Isolate::kProtectorValid), r0); in Generate_Apply()
2254 __ bne(&create_runtime); in Generate_Apply()
2255 __ LoadP(r5, FieldMemOperand(r3, JSArray::kLengthOffset)); in Generate_Apply()
2256 __ LoadP(r3, FieldMemOperand(r3, JSArray::kElementsOffset)); in Generate_Apply()
2257 __ SmiUntag(r5); in Generate_Apply()
2258 __ b(&done_create); in Generate_Apply()
2262 __ bind(&create_array); in Generate_Apply()
2263 __ lbz(r8, FieldMemOperand(r5, Map::kBitField2Offset)); in Generate_Apply()
2264 __ DecodeField<Map::ElementsKindBits>(r8); in Generate_Apply()
2269 __ cmpi(r8, Operand(FAST_HOLEY_ELEMENTS)); in Generate_Apply()
2270 __ bgt(&create_runtime); in Generate_Apply()
2272 __ TestBit(r8, Map::kHasNonInstancePrototype, r0); in Generate_Apply()
2273 __ bne(&create_holey_array, cr0); in Generate_Apply()
2275 __ LoadP(r5, FieldMemOperand(r3, JSArray::kLengthOffset)); in Generate_Apply()
2276 __ LoadP(r3, FieldMemOperand(r3, JSArray::kElementsOffset)); in Generate_Apply()
2277 __ SmiUntag(r5); in Generate_Apply()
2279 __ bind(&done_create); in Generate_Apply()
2287 __ LoadRoot(ip, Heap::kRealStackLimitRootIndex); in Generate_Apply()
2290 __ sub(ip, sp, ip); in Generate_Apply()
2292 __ ShiftLeftImm(r0, r5, Operand(kPointerSizeLog2)); in Generate_Apply()
2293 __ cmp(ip, r0); // Signed comparison. in Generate_Apply()
2294 __ bgt(&done); in Generate_Apply()
2295 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_Apply()
2296 __ bind(&done); in Generate_Apply()
2309 __ LoadRoot(r9, Heap::kUndefinedValueRootIndex); in Generate_Apply()
2311 __ cmpi(r5, Operand::Zero()); in Generate_Apply()
2312 __ beq(&no_args); in Generate_Apply()
2313 __ addi(r3, r3, in Generate_Apply()
2315 __ mtctr(r5); in Generate_Apply()
2316 __ bind(&loop); in Generate_Apply()
2317 __ LoadPU(ip, MemOperand(r3, kPointerSize)); in Generate_Apply()
2318 __ CompareRoot(ip, Heap::kTheHoleValueRootIndex); in Generate_Apply()
2319 __ bne(&skip); in Generate_Apply()
2320 __ mr(ip, r9); in Generate_Apply()
2321 __ bind(&skip); in Generate_Apply()
2322 __ push(ip); in Generate_Apply()
2323 __ bdnz(&loop); in Generate_Apply()
2324 __ bind(&no_args); in Generate_Apply()
2325 __ mr(r3, r5); in Generate_Apply()
2330 __ CompareRoot(r6, Heap::kUndefinedValueRootIndex); in Generate_Apply()
2331 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq); in Generate_Apply()
2332 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate_Apply()
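Two patterns in the Generate_Apply lines above recur throughout this file: the stack guard compares the headroom between sp and the real stack limit against the number of bytes about to be pushed, and the argument push loop replaces the-hole entries from a holey backing store with undefined before they become visible arguments. A self-contained C++ model of both, with no V8 types involved (the sentinel strings and function names are illustrative only):

#include <cstddef>
#include <stdexcept>
#include <string>
#include <vector>

constexpr const char* kTheHole = "<the_hole>";   // stand-in for V8's hole sentinel
constexpr const char* kUndefined = "undefined";  // stand-in for undefined

// Mirrors the guard: fail unless (sp - real_stack_limit) > argc * kPointerSize.
void CheckStackHeadroom(std::size_t headroom_bytes, std::size_t argc,
                        std::size_t pointer_size = sizeof(void*)) {
  if (headroom_bytes <= argc * pointer_size) {
    throw std::runtime_error("ThrowStackOverflow");
  }
}

// Mirrors the push loop: copy elements, turning holes into undefined.
std::vector<std::string> PushApplyArguments(
    const std::vector<std::string>& elements) {
  std::vector<std::string> pushed;
  pushed.reserve(elements.size());
  for (const std::string& e : elements) {
    pushed.push_back(e == kTheHole ? kUndefined : e);
  }
  return pushed;
}

int main() {
  CheckStackHeadroom(4096, 3);  // plenty of headroom, does not throw
  std::vector<std::string> args = PushApplyArguments({"1", kTheHole, "3"});
  return args.size() == 3 ? 0 : 1;
}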
2348 __ LoadP(r6, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in Generate_CallForwardVarargs()
2349 __ LoadP(ip, MemOperand(r6, CommonFrameConstants::kContextOrFrameTypeOffset)); in Generate_CallForwardVarargs()
2350 __ cmpi(ip, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); in Generate_CallForwardVarargs()
2351 __ beq(&arguments_adaptor); in Generate_CallForwardVarargs()
2353 __ LoadP(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); in Generate_CallForwardVarargs()
2354 __ LoadP(r3, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); in Generate_CallForwardVarargs()
2355 __ LoadWordArith( in Generate_CallForwardVarargs()
2358 __ mr(r6, fp); in Generate_CallForwardVarargs()
2360 __ b(&arguments_done); in Generate_CallForwardVarargs()
2361 __ bind(&arguments_adaptor); in Generate_CallForwardVarargs()
2364 __ LoadP(r3, MemOperand(r6, ArgumentsAdaptorFrameConstants::kLengthOffset)); in Generate_CallForwardVarargs()
2366 __ bind(&arguments_done); in Generate_CallForwardVarargs()
2369 __ SmiUntag(r3); in Generate_CallForwardVarargs()
2370 __ sub(r3, r3, r5); in Generate_CallForwardVarargs()
2371 __ cmpi(r3, Operand::Zero()); in Generate_CallForwardVarargs()
2372 __ ble(&stack_empty); in Generate_CallForwardVarargs()
2380 __ addi(r6, r6, Operand(kPointerSize)); in Generate_CallForwardVarargs()
2381 __ mr(r5, r3); in Generate_CallForwardVarargs()
2382 __ bind(&loop); in Generate_CallForwardVarargs()
2384 __ ShiftLeftImm(ip, r5, Operand(kPointerSizeLog2)); in Generate_CallForwardVarargs()
2385 __ LoadPX(ip, MemOperand(r6, ip)); in Generate_CallForwardVarargs()
2386 __ push(ip); in Generate_CallForwardVarargs()
2387 __ subi(r5, r5, Operand(1)); in Generate_CallForwardVarargs()
2388 __ cmpi(r5, Operand::Zero()); in Generate_CallForwardVarargs()
2389 __ bne(&loop); in Generate_CallForwardVarargs()
2393 __ b(&stack_done); in Generate_CallForwardVarargs()
2394 __ bind(&stack_overflow); in Generate_CallForwardVarargs()
2395 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_CallForwardVarargs()
2396 __ bind(&stack_empty); in Generate_CallForwardVarargs()
2399 __ mov(r3, Operand::Zero()); in Generate_CallForwardVarargs()
2401 __ bind(&stack_done); in Generate_CallForwardVarargs()
2403 __ Jump(code, RelocInfo::CODE_TARGET); in Generate_CallForwardVarargs()
2446 __ mov(scratch1, Operand(is_tail_call_elimination_enabled)); in PrepareForTailCall()
2447 __ lbz(scratch1, MemOperand(scratch1)); in PrepareForTailCall()
2448 __ cmpi(scratch1, Operand::Zero()); in PrepareForTailCall()
2449 __ beq(&done); in PrepareForTailCall()
2454 __ LoadP(scratch3, in PrepareForTailCall()
2456 __ cmpi(scratch3, Operand(StackFrame::TypeToMarker(StackFrame::STUB))); in PrepareForTailCall()
2457 __ bne(&no_interpreter_frame); in PrepareForTailCall()
2458 __ LoadP(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in PrepareForTailCall()
2459 __ bind(&no_interpreter_frame); in PrepareForTailCall()
2465 __ LoadP(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in PrepareForTailCall()
2466 __ LoadP( in PrepareForTailCall()
2469 __ cmpi(scratch3, in PrepareForTailCall()
2471 __ bne(&no_arguments_adaptor); in PrepareForTailCall()
2474 __ mr(fp, scratch2); in PrepareForTailCall()
2475 __ LoadP(caller_args_count_reg, in PrepareForTailCall()
2477 __ SmiUntag(caller_args_count_reg); in PrepareForTailCall()
2478 __ b(&formal_parameter_count_loaded); in PrepareForTailCall()
2480 __ bind(&no_arguments_adaptor); in PrepareForTailCall()
2482 __ LoadP(scratch1, in PrepareForTailCall()
2484 __ LoadP(scratch1, in PrepareForTailCall()
2486 __ LoadWordArith( in PrepareForTailCall()
2491 __ SmiUntag(caller_args_count_reg); in PrepareForTailCall()
2494 __ bind(&formal_parameter_count_loaded); in PrepareForTailCall()
2497 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2, in PrepareForTailCall()
2499 __ bind(&done); in PrepareForTailCall()
2511 __ AssertFunction(r4); in Generate_CallFunction()
2516 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); in Generate_CallFunction()
2517 __ lwz(r6, FieldMemOperand(r5, SharedFunctionInfo::kCompilerHintsOffset)); in Generate_CallFunction()
2518 __ TestBitMask(r6, FunctionKind::kClassConstructor in Generate_CallFunction()
2521 __ bne(&class_constructor, cr0); in Generate_CallFunction()
2526 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset)); in Generate_CallFunction()
2529 __ andi(r0, r6, Operand((1 << SharedFunctionInfo::kStrictModeBit) | in Generate_CallFunction()
2531 __ bne(&done_convert, cr0); in Generate_CallFunction()
2542 __ LoadGlobalProxy(r6); in Generate_CallFunction()
2545 __ ShiftLeftImm(r6, r3, Operand(kPointerSizeLog2)); in Generate_CallFunction()
2546 __ LoadPX(r6, MemOperand(sp, r6)); in Generate_CallFunction()
2547 __ JumpIfSmi(r6, &convert_to_object); in Generate_CallFunction()
2549 __ CompareObjectType(r6, r7, r7, FIRST_JS_RECEIVER_TYPE); in Generate_CallFunction()
2550 __ bge(&done_convert); in Generate_CallFunction()
2553 __ JumpIfRoot(r6, Heap::kUndefinedValueRootIndex, in Generate_CallFunction()
2555 __ JumpIfNotRoot(r6, Heap::kNullValueRootIndex, &convert_to_object); in Generate_CallFunction()
2556 __ bind(&convert_global_proxy); in Generate_CallFunction()
2559 __ LoadGlobalProxy(r6); in Generate_CallFunction()
2561 __ b(&convert_receiver); in Generate_CallFunction()
2563 __ bind(&convert_to_object); in Generate_CallFunction()
2569 __ SmiTag(r3); in Generate_CallFunction()
2570 __ Push(r3, r4); in Generate_CallFunction()
2571 __ mr(r3, r6); in Generate_CallFunction()
2572 __ Push(cp); in Generate_CallFunction()
2573 __ Call(masm->isolate()->builtins()->ToObject(), in Generate_CallFunction()
2575 __ Pop(cp); in Generate_CallFunction()
2576 __ mr(r6, r3); in Generate_CallFunction()
2577 __ Pop(r3, r4); in Generate_CallFunction()
2578 __ SmiUntag(r3); in Generate_CallFunction()
2580 __ LoadP(r5, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); in Generate_CallFunction()
2581 __ bind(&convert_receiver); in Generate_CallFunction()
2583 __ ShiftLeftImm(r7, r3, Operand(kPointerSizeLog2)); in Generate_CallFunction()
2584 __ StorePX(r6, MemOperand(sp, r7)); in Generate_CallFunction()
2586 __ bind(&done_convert); in Generate_CallFunction()
2599 __ LoadWordArith( in Generate_CallFunction()
2602 __ SmiUntag(r5); in Generate_CallFunction()
2606 __ InvokeFunctionCode(r4, no_reg, expected, actual, JUMP_FUNCTION, in Generate_CallFunction()
2610 __ bind(&class_constructor); in Generate_CallFunction()
2613 __ push(r4); in Generate_CallFunction()
2614 __ CallRuntime(Runtime::kThrowConstructorNonCallableError); in Generate_CallFunction()
2629 __ LoadP(r5, FieldMemOperand(r4, JSBoundFunction::kBoundArgumentsOffset)); in Generate_PushBoundArguments()
2630 __ LoadP(r7, FieldMemOperand(r5, FixedArray::kLengthOffset)); in Generate_PushBoundArguments()
2631 __ SmiUntag(r7, SetRC); in Generate_PushBoundArguments()
2632 __ beq(&no_bound_arguments, cr0); in Generate_PushBoundArguments()
2645 __ mr(r9, sp); // preserve previous stack pointer in Generate_PushBoundArguments()
2646 __ ShiftLeftImm(r10, r7, Operand(kPointerSizeLog2)); in Generate_PushBoundArguments()
2647 __ sub(sp, sp, r10); in Generate_PushBoundArguments()
2651 __ CompareRoot(sp, Heap::kRealStackLimitRootIndex); in Generate_PushBoundArguments()
2652 __ bgt(&done); // Signed comparison. in Generate_PushBoundArguments()
2654 __ mr(sp, r9); in Generate_PushBoundArguments()
2657 __ EnterFrame(StackFrame::INTERNAL); in Generate_PushBoundArguments()
2658 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_PushBoundArguments()
2660 __ bind(&done); in Generate_PushBoundArguments()
2669 __ li(r8, Operand::Zero()); in Generate_PushBoundArguments()
2670 __ cmpi(r3, Operand::Zero()); in Generate_PushBoundArguments()
2671 __ beq(&skip); in Generate_PushBoundArguments()
2672 __ mtctr(r3); in Generate_PushBoundArguments()
2673 __ bind(&loop); in Generate_PushBoundArguments()
2674 __ LoadPX(r0, MemOperand(r9, r8)); in Generate_PushBoundArguments()
2675 __ StorePX(r0, MemOperand(sp, r8)); in Generate_PushBoundArguments()
2676 __ addi(r8, r8, Operand(kPointerSize)); in Generate_PushBoundArguments()
2677 __ bdnz(&loop); in Generate_PushBoundArguments()
2678 __ bind(&skip); in Generate_PushBoundArguments()
2684 __ addi(r5, r5, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in Generate_PushBoundArguments()
2685 __ add(r5, r5, r10); in Generate_PushBoundArguments()
2686 __ mtctr(r7); in Generate_PushBoundArguments()
2687 __ bind(&loop); in Generate_PushBoundArguments()
2688 __ LoadPU(r0, MemOperand(r5, -kPointerSize)); in Generate_PushBoundArguments()
2689 __ StorePX(r0, MemOperand(sp, r8)); in Generate_PushBoundArguments()
2690 __ addi(r8, r8, Operand(kPointerSize)); in Generate_PushBoundArguments()
2691 __ bdnz(&loop); in Generate_PushBoundArguments()
2692 __ add(r3, r3, r7); in Generate_PushBoundArguments()
2695 __ bind(&no_bound_arguments); in Generate_PushBoundArguments()
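The Generate_PushBoundArguments lines above reserve room for the [[BoundArguments]] (after a headroom check), slide the existing arguments down into that room, copy the bound arguments out of their FixedArray (walking it backwards so they land in call order) into the gap just below the receiver, and finally add the bound-argument count to r3. The net effect on the argument list is the reshuffle sketched below in plain C++ (illustrative only, not V8 code):

#include <string>
#include <vector>

// stack_args holds the receiver followed by the explicit call arguments,
// in the order the callee consumes them; it always contains at least the
// receiver. Bound arguments are inserted between the receiver and the
// explicit arguments, which is what the two copy loops above achieve on
// the machine stack.
std::vector<std::string> PushBoundArguments(
    const std::vector<std::string>& stack_args,
    const std::vector<std::string>& bound_args) {
  std::vector<std::string> result;
  result.reserve(stack_args.size() + bound_args.size());
  result.push_back(stack_args.front());                               // receiver stays first
  result.insert(result.end(), bound_args.begin(), bound_args.end());  // then [[BoundArguments]]
  result.insert(result.end(), stack_args.begin() + 1, stack_args.end());  // then the call's own args
  return result;
}

int main() {
  // f.bind(recv, "a", "b")(1, 2) ends up invoking f with recv, "a", "b", 1, 2.
  std::vector<std::string> args = PushBoundArguments({"recv", "1", "2"}, {"a", "b"});
  return args.size() == 5 ? 0 : 1;
}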
2707 __ AssertBoundFunction(r4); in Generate_CallBoundFunctionImpl()
2714 __ LoadP(ip, FieldMemOperand(r4, JSBoundFunction::kBoundThisOffset)); in Generate_CallBoundFunctionImpl()
2715 __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2)); in Generate_CallBoundFunctionImpl()
2716 __ StorePX(ip, MemOperand(sp, r0)); in Generate_CallBoundFunctionImpl()
2722 __ LoadP(r4, in Generate_CallBoundFunctionImpl()
2724 __ mov(ip, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny, in Generate_CallBoundFunctionImpl()
2726 __ LoadP(ip, MemOperand(ip)); in Generate_CallBoundFunctionImpl()
2727 __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_CallBoundFunctionImpl()
2728 __ JumpToJSEntry(ip); in Generate_CallBoundFunctionImpl()
2740 __ JumpIfSmi(r4, &non_callable); in Generate_Call()
2741 __ bind(&non_smi); in Generate_Call()
2742 __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE); in Generate_Call()
2743 __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode), in Generate_Call()
2745 __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE)); in Generate_Call()
2746 __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode), in Generate_Call()
2750 __ lbz(r7, FieldMemOperand(r7, Map::kBitFieldOffset)); in Generate_Call()
2751 __ TestBit(r7, Map::kIsCallable, r0); in Generate_Call()
2752 __ beq(&non_callable, cr0); in Generate_Call()
2754 __ cmpi(r8, Operand(JS_PROXY_TYPE)); in Generate_Call()
2755 __ bne(&non_function); in Generate_Call()
2763 __ Push(r4); in Generate_Call()
2766 __ addi(r3, r3, Operand(2)); in Generate_Call()
2768 __ JumpToExternalReference( in Generate_Call()
2773 __ bind(&non_function); in Generate_Call()
2775 __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2)); in Generate_Call()
2776 __ StorePX(r4, MemOperand(sp, r8)); in Generate_Call()
2778 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, r4); in Generate_Call()
2779 __ Jump(masm->isolate()->builtins()->CallFunction( in Generate_Call()
2784 __ bind(&non_callable); in Generate_Call()
2787 __ Push(r4); in Generate_Call()
2788 __ CallRuntime(Runtime::kThrowCalledNonCallable); in Generate_Call()
2804 __ LoadP(spread, MemOperand(sp, 0)); in CheckSpreadAndPushToStack()
2805 __ JumpIfSmi(spread, &runtime_call); in CheckSpreadAndPushToStack()
2806 __ LoadP(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset)); in CheckSpreadAndPushToStack()
2809 __ CompareInstanceType(spread_map, scratch, JS_ARRAY_TYPE); in CheckSpreadAndPushToStack()
2810 __ bne(&runtime_call); in CheckSpreadAndPushToStack()
2813 __ LoadP(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset)); in CheckSpreadAndPushToStack()
2814 __ LoadP(scratch2, NativeContextMemOperand()); in CheckSpreadAndPushToStack()
2815 __ LoadP(scratch2, in CheckSpreadAndPushToStack()
2817 __ cmp(scratch, scratch2); in CheckSpreadAndPushToStack()
2818 __ bne(&runtime_call); in CheckSpreadAndPushToStack()
2822 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex); in CheckSpreadAndPushToStack()
2823 __ LoadP(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); in CheckSpreadAndPushToStack()
2824 __ CmpSmiLiteral(scratch, Smi::FromInt(Isolate::kProtectorValid), r0); in CheckSpreadAndPushToStack()
2825 __ bne(&runtime_call); in CheckSpreadAndPushToStack()
2828 __ LoadP(scratch2, NativeContextMemOperand()); in CheckSpreadAndPushToStack()
2829 __ LoadP(scratch, in CheckSpreadAndPushToStack()
2832 __ LoadP(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); in CheckSpreadAndPushToStack()
2833 __ LoadP(scratch2, in CheckSpreadAndPushToStack()
2836 __ cmp(scratch, scratch2); in CheckSpreadAndPushToStack()
2837 __ bne(&runtime_call); in CheckSpreadAndPushToStack()
2842 __ lbz(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset)); in CheckSpreadAndPushToStack()
2843 __ DecodeField<Map::ElementsKindBits>(scratch); in CheckSpreadAndPushToStack()
2844 __ cmpi(scratch, Operand(FAST_HOLEY_ELEMENTS)); in CheckSpreadAndPushToStack()
2845 __ bgt(&runtime_call); in CheckSpreadAndPushToStack()
2847 __ cmpi(scratch, Operand(FAST_SMI_ELEMENTS)); in CheckSpreadAndPushToStack()
2848 __ beq(&no_protector_check); in CheckSpreadAndPushToStack()
2849 __ cmpi(scratch, Operand(FAST_ELEMENTS)); in CheckSpreadAndPushToStack()
2850 __ beq(&no_protector_check); in CheckSpreadAndPushToStack()
2852 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); in CheckSpreadAndPushToStack()
2853 __ LoadP(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); in CheckSpreadAndPushToStack()
2854 __ CmpSmiLiteral(scratch, Smi::FromInt(Isolate::kProtectorValid), r0); in CheckSpreadAndPushToStack()
2855 __ bne(&runtime_call); in CheckSpreadAndPushToStack()
2857 __ bind(&no_protector_check); in CheckSpreadAndPushToStack()
2859 __ LoadP(spread_len, FieldMemOperand(spread, JSArray::kLengthOffset)); in CheckSpreadAndPushToStack()
2860 __ SmiUntag(spread_len); in CheckSpreadAndPushToStack()
2861 __ LoadP(spread, FieldMemOperand(spread, JSArray::kElementsOffset)); in CheckSpreadAndPushToStack()
2862 __ b(&push_args); in CheckSpreadAndPushToStack()
2864 __ bind(&runtime_call); in CheckSpreadAndPushToStack()
2868 __ SmiTag(argc); in CheckSpreadAndPushToStack()
2869 __ Push(constructor, new_target, argc, spread); in CheckSpreadAndPushToStack()
2870 __ CallRuntime(Runtime::kSpreadIterableFixed); in CheckSpreadAndPushToStack()
2871 __ mr(spread, r3); in CheckSpreadAndPushToStack()
2872 __ Pop(constructor, new_target, argc); in CheckSpreadAndPushToStack()
2873 __ SmiUntag(argc); in CheckSpreadAndPushToStack()
2878 __ LoadP(spread_len, FieldMemOperand(spread, FixedArray::kLengthOffset)); in CheckSpreadAndPushToStack()
2879 __ SmiUntag(spread_len); in CheckSpreadAndPushToStack()
2881 __ bind(&push_args); in CheckSpreadAndPushToStack()
2883 __ add(argc, argc, spread_len); in CheckSpreadAndPushToStack()
2884 __ subi(argc, argc, Operand(1)); in CheckSpreadAndPushToStack()
2887 __ Pop(scratch); in CheckSpreadAndPushToStack()
2895 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex); in CheckSpreadAndPushToStack()
2898 __ sub(scratch, sp, scratch); in CheckSpreadAndPushToStack()
2900 __ ShiftLeftImm(r0, spread_len, Operand(kPointerSizeLog2)); in CheckSpreadAndPushToStack()
2901 __ cmp(scratch, r0); in CheckSpreadAndPushToStack()
2902 __ bgt(&done); // Signed comparison. in CheckSpreadAndPushToStack()
2903 __ TailCallRuntime(Runtime::kThrowStackOverflow); in CheckSpreadAndPushToStack()
2904 __ bind(&done); in CheckSpreadAndPushToStack()
2909 __ li(scratch, Operand::Zero()); in CheckSpreadAndPushToStack()
2911 __ bind(&loop); in CheckSpreadAndPushToStack()
2912 __ cmp(scratch, spread_len); in CheckSpreadAndPushToStack()
2913 __ beq(&done); in CheckSpreadAndPushToStack()
2914 __ ShiftLeftImm(r0, scratch, Operand(kPointerSizeLog2)); in CheckSpreadAndPushToStack()
2915 __ add(scratch2, spread, r0); in CheckSpreadAndPushToStack()
2916 __ LoadP(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize)); in CheckSpreadAndPushToStack()
2917 __ JumpIfNotRoot(scratch2, Heap::kTheHoleValueRootIndex, &push); in CheckSpreadAndPushToStack()
2918 __ LoadRoot(scratch2, Heap::kUndefinedValueRootIndex); in CheckSpreadAndPushToStack()
2919 __ bind(&push); in CheckSpreadAndPushToStack()
2920 __ Push(scratch2); in CheckSpreadAndPushToStack()
2921 __ addi(scratch, scratch, Operand(1)); in CheckSpreadAndPushToStack()
2922 __ b(&loop); in CheckSpreadAndPushToStack()
2923 __ bind(&done); in CheckSpreadAndPushToStack()
2935 __ LoadRoot(r6, Heap::kUndefinedValueRootIndex); in Generate_CallWithSpread()
2937 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, in Generate_CallWithSpread()
2949 __ AssertFunction(r4); in Generate_ConstructFunction()
2953 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); in Generate_ConstructFunction()
2957 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); in Generate_ConstructFunction()
2958 __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset)); in Generate_ConstructFunction()
2959 __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_ConstructFunction()
2960 __ JumpToJSEntry(ip); in Generate_ConstructFunction()
2970 __ AssertBoundFunction(r4); in Generate_ConstructBoundFunction()
2977 __ cmp(r4, r6); in Generate_ConstructBoundFunction()
2978 __ bne(&skip); in Generate_ConstructBoundFunction()
2979 __ LoadP(r6, in Generate_ConstructBoundFunction()
2981 __ bind(&skip); in Generate_ConstructBoundFunction()
2984 __ LoadP(r4, in Generate_ConstructBoundFunction()
2986 __ mov(ip, Operand(ExternalReference(Builtins::kConstruct, masm->isolate()))); in Generate_ConstructBoundFunction()
2987 __ LoadP(ip, MemOperand(ip)); in Generate_ConstructBoundFunction()
2988 __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_ConstructBoundFunction()
2989 __ JumpToJSEntry(ip); in Generate_ConstructBoundFunction()
3002 __ Push(r4, r6); in Generate_ConstructProxy()
3004 __ addi(r3, r3, Operand(3)); in Generate_ConstructProxy()
3006 __ JumpToExternalReference( in Generate_ConstructProxy()
3021 __ JumpIfSmi(r4, &non_constructor); in Generate_Construct()
3024 __ CompareObjectType(r4, r7, r8, JS_FUNCTION_TYPE); in Generate_Construct()
3025 __ Jump(masm->isolate()->builtins()->ConstructFunction(), in Generate_Construct()
3029 __ lbz(r5, FieldMemOperand(r7, Map::kBitFieldOffset)); in Generate_Construct()
3030 __ TestBit(r5, Map::kIsConstructor, r0); in Generate_Construct()
3031 __ beq(&non_constructor, cr0); in Generate_Construct()
3035 __ cmpi(r8, Operand(JS_BOUND_FUNCTION_TYPE)); in Generate_Construct()
3036 __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(), in Generate_Construct()
3040 __ cmpi(r8, Operand(JS_PROXY_TYPE)); in Generate_Construct()
3041 __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET, in Generate_Construct()
3047 __ ShiftLeftImm(r8, r3, Operand(kPointerSizeLog2)); in Generate_Construct()
3048 __ StorePX(r4, MemOperand(sp, r8)); in Generate_Construct()
3050 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, r4); in Generate_Construct()
3051 __ Jump(masm->isolate()->builtins()->CallFunction(), in Generate_Construct()
3057 __ bind(&non_constructor); in Generate_Construct()
3058 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), in Generate_Construct()
3071 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate_ConstructWithSpread()
3080 __ SmiTag(r4); in Generate_AllocateInNewSpace()
3081 __ Push(r4); in Generate_AllocateInNewSpace()
3082 __ LoadSmiLiteral(cp, Smi::kZero); in Generate_AllocateInNewSpace()
3083 __ TailCallRuntime(Runtime::kAllocateInNewSpace); in Generate_AllocateInNewSpace()
3092 __ SmiTag(r4); in Generate_AllocateInOldSpace()
3093 __ LoadSmiLiteral(r5, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE))); in Generate_AllocateInOldSpace()
3094 __ Push(r4, r5); in Generate_AllocateInOldSpace()
3095 __ LoadSmiLiteral(cp, Smi::kZero); in Generate_AllocateInOldSpace()
3096 __ TailCallRuntime(Runtime::kAllocateInTargetSpace); in Generate_AllocateInOldSpace()
3105 __ push(r4); in Generate_Abort()
3106 __ LoadSmiLiteral(cp, Smi::kZero); in Generate_Abort()
3107 __ TailCallRuntime(Runtime::kAbort); in Generate_Abort()
3121 __ LoadP(ip, FieldMemOperand(r4, JSFunction::kCodeEntryOffset)); in Generate_ArgumentsAdaptorTrampoline()
3122 __ cmp(r3, r5); in Generate_ArgumentsAdaptorTrampoline()
3123 __ blt(&too_few); in Generate_ArgumentsAdaptorTrampoline()
3124 __ cmpi(r5, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel)); in Generate_ArgumentsAdaptorTrampoline()
3125 __ beq(&dont_adapt_arguments); in Generate_ArgumentsAdaptorTrampoline()
3128 __ bind(&enough); in Generate_ArgumentsAdaptorTrampoline()
3138 __ SmiToPtrArrayOffset(r3, r3); in Generate_ArgumentsAdaptorTrampoline()
3139 __ add(r3, r3, fp); in Generate_ArgumentsAdaptorTrampoline()
3141 __ addi(r3, r3, Operand(2 * kPointerSize)); in Generate_ArgumentsAdaptorTrampoline()
3142 __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2)); in Generate_ArgumentsAdaptorTrampoline()
3143 __ sub(r7, r3, r7); in Generate_ArgumentsAdaptorTrampoline()
3154 __ bind(&copy); in Generate_ArgumentsAdaptorTrampoline()
3155 __ LoadP(r0, MemOperand(r3, 0)); in Generate_ArgumentsAdaptorTrampoline()
3156 __ push(r0); in Generate_ArgumentsAdaptorTrampoline()
3157 __ cmp(r3, r7); // Compare before moving to next argument. in Generate_ArgumentsAdaptorTrampoline()
3158 __ subi(r3, r3, Operand(kPointerSize)); in Generate_ArgumentsAdaptorTrampoline()
3159 __ bne(&copy); in Generate_ArgumentsAdaptorTrampoline()
3161 __ b(&invoke); in Generate_ArgumentsAdaptorTrampoline()
3165 __ bind(&too_few); in Generate_ArgumentsAdaptorTrampoline()
3176 __ SmiToPtrArrayOffset(r3, r3); in Generate_ArgumentsAdaptorTrampoline()
3177 __ add(r3, r3, fp); in Generate_ArgumentsAdaptorTrampoline()
3186 __ bind(&copy); in Generate_ArgumentsAdaptorTrampoline()
3188 __ LoadP(r0, MemOperand(r3, 2 * kPointerSize)); in Generate_ArgumentsAdaptorTrampoline()
3189 __ push(r0); in Generate_ArgumentsAdaptorTrampoline()
3190 __ cmp(r3, fp); // Compare before moving to next argument. in Generate_ArgumentsAdaptorTrampoline()
3191 __ subi(r3, r3, Operand(kPointerSize)); in Generate_ArgumentsAdaptorTrampoline()
3192 __ bne(&copy); in Generate_ArgumentsAdaptorTrampoline()
3199 __ LoadRoot(r0, Heap::kUndefinedValueRootIndex); in Generate_ArgumentsAdaptorTrampoline()
3200 __ ShiftLeftImm(r7, r5, Operand(kPointerSizeLog2)); in Generate_ArgumentsAdaptorTrampoline()
3201 __ sub(r7, fp, r7); in Generate_ArgumentsAdaptorTrampoline()
3203 __ subi(r7, r7, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + in Generate_ArgumentsAdaptorTrampoline()
3207 __ bind(&fill); in Generate_ArgumentsAdaptorTrampoline()
3208 __ push(r0); in Generate_ArgumentsAdaptorTrampoline()
3209 __ cmp(sp, r7); in Generate_ArgumentsAdaptorTrampoline()
3210 __ bne(&fill); in Generate_ArgumentsAdaptorTrampoline()
3214 __ bind(&invoke); in Generate_ArgumentsAdaptorTrampoline()
3215 __ mr(r3, r5); in Generate_ArgumentsAdaptorTrampoline()
3219 __ CallJSEntry(ip); in Generate_ArgumentsAdaptorTrampoline()
3226 __ blr(); in Generate_ArgumentsAdaptorTrampoline()
3231 __ bind(&dont_adapt_arguments); in Generate_ArgumentsAdaptorTrampoline()
3232 __ JumpToJSEntry(ip); in Generate_ArgumentsAdaptorTrampoline()
3234 __ bind(&stack_overflow); in Generate_ArgumentsAdaptorTrampoline()
3237 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_ArgumentsAdaptorTrampoline()
3238 __ bkpt(0); in Generate_ArgumentsAdaptorTrampoline()
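The Generate_ArgumentsAdaptorTrampoline lines above reconcile the actual and expected argument counts: with enough arguments it copies the expected number (plus the receiver) into the adaptor frame, with too few it copies everything that was passed and fills the remaining expected slots with undefined, and it then calls the function's code entry with r3 holding the expected count. A standalone C++ sketch of that adaptation (the helper name is illustrative, not V8 API):

#include <cstddef>
#include <string>
#include <vector>

constexpr const char* kUndefined = "undefined";

// Given the actual arguments, produce exactly `expected` arguments:
// surplus arguments are left out of the adapted list, missing ones
// become undefined. This models the copy/fill loops of the trampoline.
std::vector<std::string> AdaptArguments(const std::vector<std::string>& actual,
                                        std::size_t expected) {
  std::vector<std::string> adapted;
  adapted.reserve(expected);
  for (std::size_t i = 0; i < expected; ++i) {
    adapted.push_back(i < actual.size() ? actual[i] : kUndefined);
  }
  return adapted;
}

int main() {
  std::vector<std::string> too_many = AdaptArguments({"1", "2", "3"}, 2);  // {"1", "2"}
  std::vector<std::string> too_few = AdaptArguments({"1"}, 3);   // {"1", "undefined", "undefined"}
  return (too_many.size() == 2 && too_few.size() == 3) ? 0 : 1;
}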
3242 #undef __