Lines Matching full:__
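Note on reading the matches below: the first hit (line 17) defines the __ macro, and every later hit is a MacroAssembler call made through it. In V8 this macro conventionally expands to masm->, so each "__ Foo(...)" line emits ARM64 code via the MacroAssembler passed to the generator. A minimal sketch of the idiom follows (illustrative only; it assumes the usual ACCESS_MASM definition, and Generate_Example is a hypothetical name, not part of this file):

    #define __ ACCESS_MASM(masm)   // in V8, ACCESS_MASM(masm) expands to masm->

    static void Generate_Example(MacroAssembler* masm) {
      __ Mov(x0, 0);   // expands to masm->Mov(x0, 0);
      __ Ret();        // expands to masm->Ret();
    }

    #undef __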

17 #define __ ACCESS_MASM(masm)  macro
22 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result); in GenerateLoadArrayFunction()
29 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result); in GenerateLoadInternalArrayFunction()
43 __ AssertFunction(x1); in Generate_Adaptor()
49 __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset)); in Generate_Adaptor()
54 __ Add(x0, x0, num_extra_args + 1); in Generate_Adaptor()
57 __ SmiTag(x0); in Generate_Adaptor()
58 __ Push(x0, x1, x3); in Generate_Adaptor()
59 __ SmiUntag(x0); in Generate_Adaptor()
61 __ JumpToExternalReference(ExternalReference(address, masm->isolate()), in Generate_Adaptor()
79 __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate_InternalArrayCode()
80 __ Tst(x10, kSmiTagMask); in Generate_InternalArrayCode()
81 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction); in Generate_InternalArrayCode()
82 __ CompareObjectType(x10, x11, x12, MAP_TYPE); in Generate_InternalArrayCode()
83 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction); in Generate_InternalArrayCode()
89 __ TailCallStub(&stub); in Generate_InternalArrayCode()
106 __ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate_ArrayCode()
107 __ Tst(x10, kSmiTagMask); in Generate_ArrayCode()
108 __ Assert(ne, kUnexpectedInitialMapForArrayFunction); in Generate_ArrayCode()
109 __ CompareObjectType(x10, x11, x12, MAP_TYPE); in Generate_ArrayCode()
110 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); in Generate_ArrayCode()
114 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); in Generate_ArrayCode()
115 __ Mov(x3, x1); in Generate_ArrayCode()
117 __ TailCallStub(&stub); in Generate_ArrayCode()
138 __ LoadRoot(x5, root_index); in Generate_MathMaxMin()
139 __ Ldr(d5, FieldMemOperand(x5, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
142 __ mov(x4, x0); in Generate_MathMaxMin()
143 __ Bind(&loop); in Generate_MathMaxMin()
146 __ Subs(x4, x4, 1); in Generate_MathMaxMin()
147 __ B(lt, &done_loop); in Generate_MathMaxMin()
150 __ Peek(x2, Operand(x4, LSL, kPointerSizeLog2)); in Generate_MathMaxMin()
155 __ JumpIfSmi(x2, &convert_smi); in Generate_MathMaxMin()
156 __ JumpIfHeapNumber(x2, &convert_number); in Generate_MathMaxMin()
160 __ SmiTag(x0); in Generate_MathMaxMin()
161 __ SmiTag(x4); in Generate_MathMaxMin()
162 __ EnterBuiltinFrame(cp, x1, x0); in Generate_MathMaxMin()
163 __ Push(x5, x4); in Generate_MathMaxMin()
164 __ Mov(x0, x2); in Generate_MathMaxMin()
165 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); in Generate_MathMaxMin()
166 __ Mov(x2, x0); in Generate_MathMaxMin()
167 __ Pop(x4, x5); in Generate_MathMaxMin()
168 __ LeaveBuiltinFrame(cp, x1, x0); in Generate_MathMaxMin()
169 __ SmiUntag(x4); in Generate_MathMaxMin()
170 __ SmiUntag(x0); in Generate_MathMaxMin()
174 __ SmiUntagToDouble(d5, x5, kSpeculativeUntag); in Generate_MathMaxMin()
175 __ JumpIfSmi(x5, &done_restore); in Generate_MathMaxMin()
176 __ Ldr(d5, FieldMemOperand(x5, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
177 __ Bind(&done_restore); in Generate_MathMaxMin()
180 __ AssertNumber(x2); in Generate_MathMaxMin()
181 __ JumpIfSmi(x2, &convert_smi); in Generate_MathMaxMin()
183 __ Bind(&convert_number); in Generate_MathMaxMin()
184 __ Ldr(d2, FieldMemOperand(x2, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
185 __ B(&done_convert); in Generate_MathMaxMin()
187 __ Bind(&convert_smi); in Generate_MathMaxMin()
188 __ SmiUntagToDouble(d2, x2); in Generate_MathMaxMin()
189 __ Bind(&done_convert); in Generate_MathMaxMin()
193 __ Fmov(x11, d5); in Generate_MathMaxMin()
195 __ Fmin(d5, d5, d2); in Generate_MathMaxMin()
198 __ Fmax(d5, d5, d2); in Generate_MathMaxMin()
200 __ Fmov(x10, d5); in Generate_MathMaxMin()
201 __ Cmp(x10, x11); in Generate_MathMaxMin()
202 __ Csel(x5, x5, x2, eq); in Generate_MathMaxMin()
203 __ B(&loop); in Generate_MathMaxMin()
206 __ Bind(&done_loop); in Generate_MathMaxMin()
208 __ Add(x0, x0, 1); in Generate_MathMaxMin()
209 __ Drop(x0); in Generate_MathMaxMin()
210 __ Mov(x0, x5); in Generate_MathMaxMin()
211 __ Ret(); in Generate_MathMaxMin()
229 __ Cbz(x0, &no_arguments); in Generate_NumberConstructor()
230 __ Mov(x2, x0); // Store argc in x2. in Generate_NumberConstructor()
231 __ Sub(x0, x0, 1); in Generate_NumberConstructor()
232 __ Ldr(x0, MemOperand(jssp, x0, LSL, kPointerSizeLog2)); in Generate_NumberConstructor()
238 __ SmiTag(x2); in Generate_NumberConstructor()
239 __ EnterBuiltinFrame(cp, x1, x2); in Generate_NumberConstructor()
240 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); in Generate_NumberConstructor()
241 __ LeaveBuiltinFrame(cp, x1, x2); in Generate_NumberConstructor()
242 __ SmiUntag(x2); in Generate_NumberConstructor()
247 __ Drop(x2); in Generate_NumberConstructor()
251 __ Bind(&no_arguments); in Generate_NumberConstructor()
252 __ Drop(1); in Generate_NumberConstructor()
253 __ Ret(); in Generate_NumberConstructor()
270 __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset)); in Generate_NumberConstructor_ConstructStub()
275 __ Move(x6, x0); // Store argc in x6. in Generate_NumberConstructor_ConstructStub()
276 __ Cbz(x0, &no_arguments); in Generate_NumberConstructor_ConstructStub()
277 __ Sub(x0, x0, 1); in Generate_NumberConstructor_ConstructStub()
278 __ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2)); in Generate_NumberConstructor_ConstructStub()
279 __ B(&done); in Generate_NumberConstructor_ConstructStub()
280 __ Bind(&no_arguments); in Generate_NumberConstructor_ConstructStub()
281 __ Mov(x2, Smi::kZero); in Generate_NumberConstructor_ConstructStub()
282 __ Bind(&done); in Generate_NumberConstructor_ConstructStub()
288 __ JumpIfSmi(x2, &done_convert); in Generate_NumberConstructor_ConstructStub()
289 __ JumpIfObjectType(x2, x4, x4, HEAP_NUMBER_TYPE, &done_convert, eq); in Generate_NumberConstructor_ConstructStub()
292 __ SmiTag(x6); in Generate_NumberConstructor_ConstructStub()
293 __ EnterBuiltinFrame(cp, x1, x6); in Generate_NumberConstructor_ConstructStub()
294 __ Push(x3); in Generate_NumberConstructor_ConstructStub()
295 __ Move(x0, x2); in Generate_NumberConstructor_ConstructStub()
296 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); in Generate_NumberConstructor_ConstructStub()
297 __ Move(x2, x0); in Generate_NumberConstructor_ConstructStub()
298 __ Pop(x3); in Generate_NumberConstructor_ConstructStub()
299 __ LeaveBuiltinFrame(cp, x1, x6); in Generate_NumberConstructor_ConstructStub()
300 __ SmiUntag(x6); in Generate_NumberConstructor_ConstructStub()
302 __ Bind(&done_convert); in Generate_NumberConstructor_ConstructStub()
307 __ Cmp(x1, x3); in Generate_NumberConstructor_ConstructStub()
308 __ B(ne, &new_object); in Generate_NumberConstructor_ConstructStub()
311 __ AllocateJSValue(x0, x1, x2, x4, x5, &new_object); in Generate_NumberConstructor_ConstructStub()
312 __ B(&drop_frame_and_ret); in Generate_NumberConstructor_ConstructStub()
315 __ bind(&new_object); in Generate_NumberConstructor_ConstructStub()
318 __ SmiTag(x6); in Generate_NumberConstructor_ConstructStub()
319 __ EnterBuiltinFrame(cp, x1, x6); in Generate_NumberConstructor_ConstructStub()
320 __ Push(x2); // first argument in Generate_NumberConstructor_ConstructStub()
321 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(), in Generate_NumberConstructor_ConstructStub()
323 __ Pop(x2); in Generate_NumberConstructor_ConstructStub()
324 __ LeaveBuiltinFrame(cp, x1, x6); in Generate_NumberConstructor_ConstructStub()
325 __ SmiUntag(x6); in Generate_NumberConstructor_ConstructStub()
327 __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset)); in Generate_NumberConstructor_ConstructStub()
329 __ bind(&drop_frame_and_ret); in Generate_NumberConstructor_ConstructStub()
331 __ Drop(x6); in Generate_NumberConstructor_ConstructStub()
332 __ Drop(1); in Generate_NumberConstructor_ConstructStub()
333 __ Ret(); in Generate_NumberConstructor_ConstructStub()
352 __ Cbz(x0, &no_arguments); in Generate_StringConstructor()
353 __ Mov(x2, x0); // Store argc in x2. in Generate_StringConstructor()
354 __ Sub(x0, x0, 1); in Generate_StringConstructor()
355 __ Ldr(x0, MemOperand(jssp, x0, LSL, kPointerSizeLog2)); in Generate_StringConstructor()
362 __ JumpIfSmi(x0, &to_string); in Generate_StringConstructor()
364 __ CompareObjectType(x0, x3, x3, FIRST_NONSTRING_TYPE); in Generate_StringConstructor()
365 __ B(hi, &to_string); in Generate_StringConstructor()
366 __ B(eq, &symbol_descriptive_string); in Generate_StringConstructor()
367 __ b(&drop_frame_and_ret); in Generate_StringConstructor()
371 __ Bind(&no_arguments); in Generate_StringConstructor()
373 __ LoadRoot(x0, Heap::kempty_stringRootIndex); in Generate_StringConstructor()
374 __ Drop(1); in Generate_StringConstructor()
375 __ Ret(); in Generate_StringConstructor()
379 __ Bind(&to_string); in Generate_StringConstructor()
382 __ SmiTag(x2); in Generate_StringConstructor()
383 __ EnterBuiltinFrame(cp, x1, x2); in Generate_StringConstructor()
384 __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET); in Generate_StringConstructor()
385 __ LeaveBuiltinFrame(cp, x1, x2); in Generate_StringConstructor()
386 __ SmiUntag(x2); in Generate_StringConstructor()
388 __ b(&drop_frame_and_ret); in Generate_StringConstructor()
391 __ Bind(&symbol_descriptive_string); in Generate_StringConstructor()
393 __ Drop(x2); in Generate_StringConstructor()
394 __ Drop(1); in Generate_StringConstructor()
395 __ Push(x0); in Generate_StringConstructor()
396 __ TailCallRuntime(Runtime::kSymbolDescriptiveString); in Generate_StringConstructor()
399 __ bind(&drop_frame_and_ret); in Generate_StringConstructor()
401 __ Drop(x2); in Generate_StringConstructor()
402 __ Drop(1); in Generate_StringConstructor()
403 __ Ret(); in Generate_StringConstructor()
421 __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset)); in Generate_StringConstructor_ConstructStub()
426 __ mov(x6, x0); // Store argc in x6. in Generate_StringConstructor_ConstructStub()
427 __ Cbz(x0, &no_arguments); in Generate_StringConstructor_ConstructStub()
428 __ Sub(x0, x0, 1); in Generate_StringConstructor_ConstructStub()
429 __ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2)); in Generate_StringConstructor_ConstructStub()
430 __ B(&done); in Generate_StringConstructor_ConstructStub()
431 __ Bind(&no_arguments); in Generate_StringConstructor_ConstructStub()
432 __ LoadRoot(x2, Heap::kempty_stringRootIndex); in Generate_StringConstructor_ConstructStub()
433 __ Bind(&done); in Generate_StringConstructor_ConstructStub()
439 __ JumpIfSmi(x2, &convert); in Generate_StringConstructor_ConstructStub()
440 __ JumpIfObjectType(x2, x4, x4, FIRST_NONSTRING_TYPE, &done_convert, lo); in Generate_StringConstructor_ConstructStub()
441 __ Bind(&convert); in Generate_StringConstructor_ConstructStub()
444 __ SmiTag(x6); in Generate_StringConstructor_ConstructStub()
445 __ EnterBuiltinFrame(cp, x1, x6); in Generate_StringConstructor_ConstructStub()
446 __ Push(x3); in Generate_StringConstructor_ConstructStub()
447 __ Move(x0, x2); in Generate_StringConstructor_ConstructStub()
448 __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET); in Generate_StringConstructor_ConstructStub()
449 __ Move(x2, x0); in Generate_StringConstructor_ConstructStub()
450 __ Pop(x3); in Generate_StringConstructor_ConstructStub()
451 __ LeaveBuiltinFrame(cp, x1, x6); in Generate_StringConstructor_ConstructStub()
452 __ SmiUntag(x6); in Generate_StringConstructor_ConstructStub()
454 __ Bind(&done_convert); in Generate_StringConstructor_ConstructStub()
459 __ Cmp(x1, x3); in Generate_StringConstructor_ConstructStub()
460 __ B(ne, &new_object); in Generate_StringConstructor_ConstructStub()
463 __ AllocateJSValue(x0, x1, x2, x4, x5, &new_object); in Generate_StringConstructor_ConstructStub()
464 __ B(&drop_frame_and_ret); in Generate_StringConstructor_ConstructStub()
467 __ bind(&new_object); in Generate_StringConstructor_ConstructStub()
470 __ SmiTag(x6); in Generate_StringConstructor_ConstructStub()
471 __ EnterBuiltinFrame(cp, x1, x6); in Generate_StringConstructor_ConstructStub()
472 __ Push(x2); // first argument in Generate_StringConstructor_ConstructStub()
473 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(), in Generate_StringConstructor_ConstructStub()
475 __ Pop(x2); in Generate_StringConstructor_ConstructStub()
476 __ LeaveBuiltinFrame(cp, x1, x6); in Generate_StringConstructor_ConstructStub()
477 __ SmiUntag(x6); in Generate_StringConstructor_ConstructStub()
479 __ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset)); in Generate_StringConstructor_ConstructStub()
481 __ bind(&drop_frame_and_ret); in Generate_StringConstructor_ConstructStub()
483 __ Drop(x6); in Generate_StringConstructor_ConstructStub()
484 __ Drop(1); in Generate_StringConstructor_ConstructStub()
485 __ Ret(); in Generate_StringConstructor_ConstructStub()
490 __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); in GenerateTailCallToSharedCode()
491 __ Ldr(x2, FieldMemOperand(x2, SharedFunctionInfo::kCodeOffset)); in GenerateTailCallToSharedCode()
492 __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag); in GenerateTailCallToSharedCode()
493 __ Br(x2); in GenerateTailCallToSharedCode()
507 __ SmiTag(x0); in GenerateTailCallToReturnedCode()
508 __ Push(x0, x1, x3, x1); in GenerateTailCallToReturnedCode()
510 __ CallRuntime(function_id, 1); in GenerateTailCallToReturnedCode()
511 __ Move(x2, x0); in GenerateTailCallToReturnedCode()
514 __ Pop(x3, x1, x0); in GenerateTailCallToReturnedCode()
515 __ SmiUntag(x0); in GenerateTailCallToReturnedCode()
518 __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag); in GenerateTailCallToReturnedCode()
519 __ Br(x2); in GenerateTailCallToReturnedCode()
529 __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex); in Generate_InOptimizationQueue()
530 __ B(hs, &ok); in Generate_InOptimizationQueue()
534 __ Bind(&ok); in Generate_InOptimizationQueue()
568 __ SmiTag(argc); in Generate_JSConstructStubHelper()
569 __ Push(cp, argc); in Generate_JSConstructStubHelper()
573 __ Push(constructor, new_target); in Generate_JSConstructStubHelper()
574 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(), in Generate_JSConstructStubHelper()
576 __ Mov(x4, x0); in Generate_JSConstructStubHelper()
577 __ Pop(new_target, constructor); in Generate_JSConstructStubHelper()
588 __ Peek(argc, 0); // Load number of arguments. in Generate_JSConstructStubHelper()
591 __ SmiUntag(argc); in Generate_JSConstructStubHelper()
597 __ Push(x4, x4); in Generate_JSConstructStubHelper()
599 __ PushRoot(Heap::kTheHoleValueRootIndex); in Generate_JSConstructStubHelper()
603 __ Bind(&post_instantiation_deopt_entry); in Generate_JSConstructStubHelper()
606 __ Add(x2, fp, StandardFrameConstants::kCallerSPOffset); in Generate_JSConstructStubHelper()
618 __ Add(x4, x2, Operand(argc, LSL, kPointerSizeLog2)); in Generate_JSConstructStubHelper()
620 __ B(&entry); in Generate_JSConstructStubHelper()
621 __ Bind(&loop); in Generate_JSConstructStubHelper()
622 __ Ldp(x10, x11, MemOperand(x4, -2 * kPointerSize, PreIndex)); in Generate_JSConstructStubHelper()
623 __ Push(x11, x10); in Generate_JSConstructStubHelper()
624 __ Bind(&entry); in Generate_JSConstructStubHelper()
625 __ Cmp(x4, x2); in Generate_JSConstructStubHelper()
626 __ B(gt, &loop); in Generate_JSConstructStubHelper()
629 __ B(eq, &done_copying_arguments); in Generate_JSConstructStubHelper()
630 __ Drop(1); in Generate_JSConstructStubHelper()
631 __ Bind(&done_copying_arguments); in Generate_JSConstructStubHelper()
638 __ InvokeFunction(constructor, new_target, actual, CALL_FUNCTION, in Generate_JSConstructStubHelper()
651 __ Ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset)); in Generate_JSConstructStubHelper()
663 __ JumpIfSmi(x0, &use_receiver); in Generate_JSConstructStubHelper()
667 __ JumpIfObjectType(x0, x1, x3, FIRST_JS_RECEIVER_TYPE, &exit, ge); in Generate_JSConstructStubHelper()
671 __ Bind(&use_receiver); in Generate_JSConstructStubHelper()
672 __ Peek(x0, 0); in Generate_JSConstructStubHelper()
676 __ Bind(&exit); in Generate_JSConstructStubHelper()
680 __ Peek(x1, 1 * kXRegSize); in Generate_JSConstructStubHelper()
682 __ Peek(x1, 0); in Generate_JSConstructStubHelper()
693 __ JumpIfNotSmi(x0, &dont_throw); in Generate_JSConstructStubHelper()
696 __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject); in Generate_JSConstructStubHelper()
698 __ Bind(&dont_throw); in Generate_JSConstructStubHelper()
701 __ DropBySMI(x1); in Generate_JSConstructStubHelper()
702 __ Drop(1); in Generate_JSConstructStubHelper()
704 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, x1, x2); in Generate_JSConstructStubHelper()
706 __ Ret(); in Generate_JSConstructStubHelper()
720 __ Pop(x1); in Generate_JSConstructStubHelper()
721 __ Push(x0, x0); in Generate_JSConstructStubHelper()
724 __ Ldr(x0, MemOperand(fp, ConstructFrameConstants::kLengthOffset)); in Generate_JSConstructStubHelper()
725 __ SmiUntag(x0); in Generate_JSConstructStubHelper()
729 __ Add(x3, fp, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate_JSConstructStubHelper()
730 __ Ldr(x3, MemOperand(x3, x0, LSL, kPointerSizeLog2)); in Generate_JSConstructStubHelper()
733 __ B(&post_instantiation_deopt_entry); in Generate_JSConstructStubHelper()
758 __ Push(x1); in Generate_ConstructedNonConstructable()
759 __ CallRuntime(Runtime::kThrowConstructedNonConstructable); in Generate_ConstructedNonConstructable()
770 __ AssertGeneratorObject(x1); in Generate_ResumeGeneratorTrampoline()
773 __ Str(x0, FieldMemOperand(x1, JSGeneratorObject::kInputOrDebugPosOffset)); in Generate_ResumeGeneratorTrampoline()
774 __ RecordWriteField(x1, JSGeneratorObject::kInputOrDebugPosOffset, x0, x3, in Generate_ResumeGeneratorTrampoline()
778 __ Str(x2, FieldMemOperand(x1, JSGeneratorObject::kResumeModeOffset)); in Generate_ResumeGeneratorTrampoline()
781 __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset)); in Generate_ResumeGeneratorTrampoline()
782 __ Ldr(cp, FieldMemOperand(x4, JSFunction::kContextOffset)); in Generate_ResumeGeneratorTrampoline()
789 __ Mov(x10, Operand(debug_hook)); in Generate_ResumeGeneratorTrampoline()
790 __ Ldrsb(x10, MemOperand(x10)); in Generate_ResumeGeneratorTrampoline()
791 __ CompareAndBranch(x10, Operand(0), ne, &prepare_step_in_if_stepping); in Generate_ResumeGeneratorTrampoline()
796 __ Mov(x10, Operand(debug_suspended_generator)); in Generate_ResumeGeneratorTrampoline()
797 __ Ldr(x10, MemOperand(x10)); in Generate_ResumeGeneratorTrampoline()
798 __ CompareAndBranch(x10, Operand(x1), eq, in Generate_ResumeGeneratorTrampoline()
800 __ Bind(&stepping_prepared); in Generate_ResumeGeneratorTrampoline()
803 __ Ldr(x5, FieldMemOperand(x1, JSGeneratorObject::kReceiverOffset)); in Generate_ResumeGeneratorTrampoline()
804 __ Push(x5); in Generate_ResumeGeneratorTrampoline()
819 __ Ldr(x10, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset)); in Generate_ResumeGeneratorTrampoline()
820 __ Ldr(w10, in Generate_ResumeGeneratorTrampoline()
822 __ LoadRoot(x11, Heap::kTheHoleValueRootIndex); in Generate_ResumeGeneratorTrampoline()
823 __ PushMultipleTimes(x11, w10); in Generate_ResumeGeneratorTrampoline()
827 __ Ldr(x3, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset)); in Generate_ResumeGeneratorTrampoline()
828 __ Ldr(x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset)); in Generate_ResumeGeneratorTrampoline()
829 __ CompareObjectType(x3, x3, x3, BYTECODE_ARRAY_TYPE); in Generate_ResumeGeneratorTrampoline()
830 __ Assert(eq, kMissingBytecodeArray); in Generate_ResumeGeneratorTrampoline()
835 __ Ldr(x0, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset)); in Generate_ResumeGeneratorTrampoline()
836 __ Ldr(w0, FieldMemOperand( in Generate_ResumeGeneratorTrampoline()
841 __ Move(x3, x1); in Generate_ResumeGeneratorTrampoline()
842 __ Move(x1, x4); in Generate_ResumeGeneratorTrampoline()
843 __ Ldr(x5, FieldMemOperand(x1, JSFunction::kCodeEntryOffset)); in Generate_ResumeGeneratorTrampoline()
844 __ Jump(x5); in Generate_ResumeGeneratorTrampoline()
847 __ Bind(&prepare_step_in_if_stepping); in Generate_ResumeGeneratorTrampoline()
850 __ Push(x1, x2, x4); in Generate_ResumeGeneratorTrampoline()
851 __ CallRuntime(Runtime::kDebugOnFunctionCall); in Generate_ResumeGeneratorTrampoline()
852 __ Pop(x2, x1); in Generate_ResumeGeneratorTrampoline()
853 __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset)); in Generate_ResumeGeneratorTrampoline()
855 __ B(&stepping_prepared); in Generate_ResumeGeneratorTrampoline()
857 __ Bind(&prepare_step_in_suspended_generator); in Generate_ResumeGeneratorTrampoline()
860 __ Push(x1, x2); in Generate_ResumeGeneratorTrampoline()
861 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator); in Generate_ResumeGeneratorTrampoline()
862 __ Pop(x2, x1); in Generate_ResumeGeneratorTrampoline()
863 __ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset)); in Generate_ResumeGeneratorTrampoline()
865 __ B(&stepping_prepared); in Generate_ResumeGeneratorTrampoline()
877 __ LoadRoot(x10, Heap::kRealStackLimitRootIndex); in Generate_CheckStackOverflow()
881 __ Sub(x10, jssp, x10); in Generate_CheckStackOverflow()
884 __ Cmp(x10, Operand::UntagSmiAndScale(argc, kPointerSizeLog2)); in Generate_CheckStackOverflow()
887 __ Cmp(x10, Operand(argc, LSL, kPointerSizeLog2)); in Generate_CheckStackOverflow()
889 __ B(gt, &enough_stack_space); in Generate_CheckStackOverflow()
890 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_CheckStackOverflow()
892 if (__ emit_debug_code()) { in Generate_CheckStackOverflow()
893 __ Unreachable(); in Generate_CheckStackOverflow()
896 __ Bind(&enough_stack_space); in Generate_CheckStackOverflow()
924 __ Mov(scratch, Operand(ExternalReference(Isolate::kContextAddress, in Generate_JSEntryTrampolineHelper()
926 __ Ldr(cp, MemOperand(scratch)); in Generate_JSEntryTrampolineHelper()
928 __ InitializeRootRegister(); in Generate_JSEntryTrampolineHelper()
931 __ Push(function, receiver); in Generate_JSEntryTrampolineHelper()
942 __ Add(scratch, argv, Operand(argc, LSL, kPointerSizeLog2)); in Generate_JSEntryTrampolineHelper()
944 __ B(&entry); in Generate_JSEntryTrampolineHelper()
945 __ Bind(&loop); in Generate_JSEntryTrampolineHelper()
946 __ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex)); in Generate_JSEntryTrampolineHelper()
947 __ Ldr(x12, MemOperand(x11)); // Dereference the handle. in Generate_JSEntryTrampolineHelper()
948 __ Push(x12); // Push the argument. in Generate_JSEntryTrampolineHelper()
949 __ Bind(&entry); in Generate_JSEntryTrampolineHelper()
950 __ Cmp(scratch, argv); in Generate_JSEntryTrampolineHelper()
951 __ B(ne, &loop); in Generate_JSEntryTrampolineHelper()
953 __ Mov(scratch, argc); in Generate_JSEntryTrampolineHelper()
954 __ Mov(argc, new_target); in Generate_JSEntryTrampolineHelper()
955 __ Mov(new_target, scratch); in Generate_JSEntryTrampolineHelper()
962 __ LoadRoot(x19, Heap::kUndefinedValueRootIndex); in Generate_JSEntryTrampolineHelper()
963 __ Mov(x20, x19); in Generate_JSEntryTrampolineHelper()
964 __ Mov(x21, x19); in Generate_JSEntryTrampolineHelper()
965 __ Mov(x22, x19); in Generate_JSEntryTrampolineHelper()
966 __ Mov(x23, x19); in Generate_JSEntryTrampolineHelper()
967 __ Mov(x24, x19); in Generate_JSEntryTrampolineHelper()
968 __ Mov(x25, x19); in Generate_JSEntryTrampolineHelper()
978 __ Call(builtin, RelocInfo::CODE_TARGET); in Generate_JSEntryTrampolineHelper()
985 __ Ret(); in Generate_JSEntryTrampolineHelper()
1000 __ ldr(args_count, in LeaveInterpreterFrame()
1002 __ Ldr(args_count.W(), in LeaveInterpreterFrame()
1006 __ LeaveFrame(StackFrame::JAVA_SCRIPT); in LeaveInterpreterFrame()
1009 __ Drop(args_count, 1); in LeaveInterpreterFrame()
1034 __ Push(lr, fp, cp, x1); in Generate_InterpreterEntryTrampoline()
1035 __ Add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp); in Generate_InterpreterEntryTrampoline()
1039 __ Ldr(x0, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); in Generate_InterpreterEntryTrampoline()
1043 __ Ldr(debug_info, FieldMemOperand(x0, SharedFunctionInfo::kDebugInfoOffset)); in Generate_InterpreterEntryTrampoline()
1044 __ JumpIfNotSmi(debug_info, &load_debug_bytecode_array); in Generate_InterpreterEntryTrampoline()
1045 __ Ldr(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1047 __ Bind(&bytecode_array_loaded); in Generate_InterpreterEntryTrampoline()
1051 __ Ldr(x0, FieldMemOperand(x0, SharedFunctionInfo::kCodeOffset)); in Generate_InterpreterEntryTrampoline()
1052 __ Cmp(x0, Operand(masm->CodeObject())); // Self-reference to this code. in Generate_InterpreterEntryTrampoline()
1053 __ B(ne, &switch_to_different_code_kind); in Generate_InterpreterEntryTrampoline()
1056 __ Ldr(x11, FieldMemOperand(x1, JSFunction::kFeedbackVectorOffset)); in Generate_InterpreterEntryTrampoline()
1057 __ Ldr(x11, FieldMemOperand(x11, Cell::kValueOffset)); in Generate_InterpreterEntryTrampoline()
1058 __ Ldr(x10, FieldMemOperand( in Generate_InterpreterEntryTrampoline()
1061 __ Add(x10, x10, Operand(Smi::FromInt(1))); in Generate_InterpreterEntryTrampoline()
1062 __ Str(x10, FieldMemOperand( in Generate_InterpreterEntryTrampoline()
1068 __ AssertNotSmi(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1070 __ CompareObjectType(kInterpreterBytecodeArrayRegister, x0, x0, in Generate_InterpreterEntryTrampoline()
1072 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); in Generate_InterpreterEntryTrampoline()
1076 __ Mov(x10, Operand(BytecodeArray::kNoAgeBytecodeAge)); in Generate_InterpreterEntryTrampoline()
1077 __ Strb(x10, FieldMemOperand(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1081 __ Mov(kInterpreterBytecodeOffsetRegister, in Generate_InterpreterEntryTrampoline()
1085 __ SmiTag(x0, kInterpreterBytecodeOffsetRegister); in Generate_InterpreterEntryTrampoline()
1086 __ Push(x3, kInterpreterBytecodeArrayRegister, x0); in Generate_InterpreterEntryTrampoline()
1091 __ Ldr(w11, FieldMemOperand(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1096 DCHECK(jssp.Is(__ StackPointer())); in Generate_InterpreterEntryTrampoline()
1097 __ Sub(x10, jssp, Operand(x11)); in Generate_InterpreterEntryTrampoline()
1098 __ CompareRoot(x10, Heap::kRealStackLimitRootIndex); in Generate_InterpreterEntryTrampoline()
1099 __ B(hs, &ok); in Generate_InterpreterEntryTrampoline()
1100 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterEntryTrampoline()
1101 __ Bind(&ok); in Generate_InterpreterEntryTrampoline()
1107 __ LoadRoot(x10, Heap::kUndefinedValueRootIndex); in Generate_InterpreterEntryTrampoline()
1110 __ Lsr(x11, x11, kPointerSizeLog2); in Generate_InterpreterEntryTrampoline()
1111 __ PushMultipleTimes(x10, x11); in Generate_InterpreterEntryTrampoline()
1112 __ Bind(&loop_header); in Generate_InterpreterEntryTrampoline()
1116 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex); in Generate_InterpreterEntryTrampoline()
1117 __ Mov(kInterpreterDispatchTableRegister, in Generate_InterpreterEntryTrampoline()
1122 __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1124 __ Mov(x1, Operand(x1, LSL, kPointerSizeLog2)); in Generate_InterpreterEntryTrampoline()
1125 __ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1)); in Generate_InterpreterEntryTrampoline()
1126 __ Call(ip0); in Generate_InterpreterEntryTrampoline()
1131 __ Ret(); in Generate_InterpreterEntryTrampoline()
1134 __ Bind(&load_debug_bytecode_array); in Generate_InterpreterEntryTrampoline()
1135 __ Ldr(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1137 __ B(&bytecode_array_loaded); in Generate_InterpreterEntryTrampoline()
1142 __ bind(&switch_to_different_code_kind); in Generate_InterpreterEntryTrampoline()
1143 __ LeaveFrame(StackFrame::JAVA_SCRIPT); in Generate_InterpreterEntryTrampoline()
1144 __ Ldr(x7, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); in Generate_InterpreterEntryTrampoline()
1145 __ Ldr(x7, FieldMemOperand(x7, SharedFunctionInfo::kCodeOffset)); in Generate_InterpreterEntryTrampoline()
1146 __ Add(x7, x7, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_InterpreterEntryTrampoline()
1147 __ Str(x7, FieldMemOperand(x1, JSFunction::kCodeEntryOffset)); in Generate_InterpreterEntryTrampoline()
1148 __ RecordWriteCodeEntryField(x1, x7, x5); in Generate_InterpreterEntryTrampoline()
1149 __ Jump(x7); in Generate_InterpreterEntryTrampoline()
1159 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex); in Generate_StackOverflowCheck()
1162 __ Sub(scratch, jssp, scratch); in Generate_StackOverflowCheck()
1164 __ Cmp(scratch, Operand(num_args, LSL, kPointerSizeLog2)); in Generate_StackOverflowCheck()
1165 __ B(le, stack_overflow); in Generate_StackOverflowCheck()
1176 __ Mov(scratch, num_args); in Generate_InterpreterPushArgs()
1177 __ lsl(scratch, scratch, kPointerSizeLog2); in Generate_InterpreterPushArgs()
1178 __ sub(last_arg, index, scratch); in Generate_InterpreterPushArgs()
1181 __ Mov(stack_addr, jssp); in Generate_InterpreterPushArgs()
1182 __ Claim(scratch, 1); in Generate_InterpreterPushArgs()
1186 __ B(&loop_check); in Generate_InterpreterPushArgs()
1187 __ Bind(&loop_header); in Generate_InterpreterPushArgs()
1189 __ Ldr(scratch, MemOperand(index, -kPointerSize, PostIndex)); in Generate_InterpreterPushArgs()
1190 __ Str(scratch, MemOperand(stack_addr, -kPointerSize, PreIndex)); in Generate_InterpreterPushArgs()
1191 __ Bind(&loop_check); in Generate_InterpreterPushArgs()
1192 __ Cmp(index, last_arg); in Generate_InterpreterPushArgs()
1193 __ B(gt, &loop_header); in Generate_InterpreterPushArgs()
1210 __ add(x3, x0, Operand(1)); in Generate_InterpreterPushArgsAndCallImpl()
1217 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny, in Generate_InterpreterPushArgsAndCallImpl()
1221 __ Jump(masm->isolate()->builtins()->CallWithSpread(), in Generate_InterpreterPushArgsAndCallImpl()
1224 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, in Generate_InterpreterPushArgsAndCallImpl()
1229 __ bind(&stack_overflow); in Generate_InterpreterPushArgsAndCallImpl()
1231 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterPushArgsAndCallImpl()
1232 __ Unreachable(); in Generate_InterpreterPushArgsAndCallImpl()
1249 __ Push(xzr); in Generate_InterpreterPushArgsAndConstructImpl()
1254 __ AssertUndefinedOrAllocationSite(x2, x6); in Generate_InterpreterPushArgsAndConstructImpl()
1256 __ AssertFunction(x1); in Generate_InterpreterPushArgsAndConstructImpl()
1260 __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); in Generate_InterpreterPushArgsAndConstructImpl()
1261 __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset)); in Generate_InterpreterPushArgsAndConstructImpl()
1262 __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag); in Generate_InterpreterPushArgsAndConstructImpl()
1263 __ Br(x4); in Generate_InterpreterPushArgsAndConstructImpl()
1266 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(), in Generate_InterpreterPushArgsAndConstructImpl()
1271 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate_InterpreterPushArgsAndConstructImpl()
1274 __ bind(&stack_overflow); in Generate_InterpreterPushArgsAndConstructImpl()
1276 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterPushArgsAndConstructImpl()
1277 __ Unreachable(); in Generate_InterpreterPushArgsAndConstructImpl()
1292 __ add(x4, x0, Operand(1)); // Add one for the receiver. in Generate_InterpreterPushArgsAndConstructArray()
1298 __ mov(x3, x1); in Generate_InterpreterPushArgsAndConstructArray()
1301 __ TailCallStub(&stub); in Generate_InterpreterPushArgsAndConstructArray()
1303 __ bind(&stack_overflow); in Generate_InterpreterPushArgsAndConstructArray()
1305 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterPushArgsAndConstructArray()
1306 __ Unreachable(); in Generate_InterpreterPushArgsAndConstructArray()
1316 __ LoadObject(x1, masm->isolate()->builtins()->InterpreterEntryTrampoline()); in Generate_InterpreterEnterBytecode()
1317 __ Add(lr, x1, Operand(interpreter_entry_return_pc_offset->value() + in Generate_InterpreterEnterBytecode()
1321 __ Mov(kInterpreterDispatchTableRegister, in Generate_InterpreterEnterBytecode()
1326 __ Ldr(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEnterBytecode()
1331 __ AssertNotSmi(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEnterBytecode()
1333 __ CompareObjectType(kInterpreterBytecodeArrayRegister, x1, x1, in Generate_InterpreterEnterBytecode()
1335 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry); in Generate_InterpreterEnterBytecode()
1339 __ Ldr(kInterpreterBytecodeOffsetRegister, in Generate_InterpreterEnterBytecode()
1341 __ SmiUntag(kInterpreterBytecodeOffsetRegister); in Generate_InterpreterEnterBytecode()
1344 __ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEnterBytecode()
1346 __ Mov(x1, Operand(x1, LSL, kPointerSizeLog2)); in Generate_InterpreterEnterBytecode()
1347 __ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1)); in Generate_InterpreterEnterBytecode()
1348 __ Jump(ip0); in Generate_InterpreterEnterBytecode()
1355 __ Ldr(x1, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp)); in Generate_InterpreterEnterBytecodeAdvance()
1356 __ Ldr(x2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp)); in Generate_InterpreterEnterBytecodeAdvance()
1357 __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); in Generate_InterpreterEnterBytecodeAdvance()
1360 __ Push(kInterpreterAccumulatorRegister, x1, x2); in Generate_InterpreterEnterBytecodeAdvance()
1361 __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset); in Generate_InterpreterEnterBytecodeAdvance()
1362 __ Mov(x2, x0); // Result is the new bytecode offset. in Generate_InterpreterEnterBytecodeAdvance()
1363 __ Pop(kInterpreterAccumulatorRegister); in Generate_InterpreterEnterBytecodeAdvance()
1365 __ Str(x2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp)); in Generate_InterpreterEnterBytecodeAdvance()
1390 __ Ldr(index, FieldMemOperand(closure, JSFunction::kFeedbackVectorOffset)); in Generate_CompileLazy()
1391 __ Ldr(index, FieldMemOperand(index, Cell::kValueOffset)); in Generate_CompileLazy()
1392 __ JumpIfRoot(index, Heap::kUndefinedValueRootIndex, &gotta_call_runtime); in Generate_CompileLazy()
1394 __ Ldr(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset)); in Generate_CompileLazy()
1395 __ Ldr(map, in Generate_CompileLazy()
1397 __ Ldrsw(index, UntagSmiFieldMemOperand(map, FixedArray::kLengthOffset)); in Generate_CompileLazy()
1398 __ Cmp(index, Operand(2)); in Generate_CompileLazy()
1399 __ B(lt, &try_shared); in Generate_CompileLazy()
1407 __ Ldr(native_context, NativeContextMemOperand()); in Generate_CompileLazy()
1409 __ Bind(&loop_top); in Generate_CompileLazy()
1414 __ Add(array_pointer, map, Operand(index, LSL, kPointerSizeLog2)); in Generate_CompileLazy()
1415 __ Ldr(temp, FieldMemOperand(array_pointer, in Generate_CompileLazy()
1417 __ Ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset)); in Generate_CompileLazy()
1418 __ Cmp(temp, native_context); in Generate_CompileLazy()
1419 __ B(ne, &loop_bottom); in Generate_CompileLazy()
1423 __ Ldr(entry, in Generate_CompileLazy()
1426 __ Ldr(entry, FieldMemOperand(entry, WeakCell::kValueOffset)); in Generate_CompileLazy()
1427 __ JumpIfSmi(entry, &try_shared); in Generate_CompileLazy()
1430 __ Add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_CompileLazy()
1431 __ Str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset)); in Generate_CompileLazy()
1432 __ RecordWriteCodeEntryField(closure, entry, x5); in Generate_CompileLazy()
1438 __ Ldr(x8, in Generate_CompileLazy()
1440 __ Str(x8, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset)); in Generate_CompileLazy()
1441 __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, x8, x13, in Generate_CompileLazy()
1446 __ Str(closure, in Generate_CompileLazy()
1448 __ Mov(x5, closure); in Generate_CompileLazy()
1449 __ RecordWriteContextSlot(native_context, function_list_offset, x5, x13, in Generate_CompileLazy()
1451 __ Jump(entry); in Generate_CompileLazy()
1453 __ Bind(&loop_bottom); in Generate_CompileLazy()
1454 __ Sub(index, index, Operand(SharedFunctionInfo::kEntryLength)); in Generate_CompileLazy()
1455 __ Cmp(index, Operand(1)); in Generate_CompileLazy()
1456 __ B(gt, &loop_top); in Generate_CompileLazy()
1459 __ Bind(&try_shared); in Generate_CompileLazy()
1460 __ Ldr(entry, in Generate_CompileLazy()
1463 __ Ldrb(temp, FieldMemOperand( in Generate_CompileLazy()
1465 __ TestAndBranchIfAnySet( in Generate_CompileLazy()
1470 __ Ldr(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset)); in Generate_CompileLazy()
1471 __ Move(temp, masm->CodeObject()); in Generate_CompileLazy()
1472 __ Cmp(entry, temp); in Generate_CompileLazy()
1473 __ B(eq, &gotta_call_runtime); in Generate_CompileLazy()
1476 __ Add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_CompileLazy()
1477 __ Str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset)); in Generate_CompileLazy()
1478 __ RecordWriteCodeEntryField(closure, entry, x5); in Generate_CompileLazy()
1479 __ Jump(entry); in Generate_CompileLazy()
1481 __ Bind(&gotta_call_runtime); in Generate_CompileLazy()
1508 __ Move(x4, x0); in Generate_InstantiateAsmJs()
1510 __ SmiTag(x0); in Generate_InstantiateAsmJs()
1512 __ Push(x0, x1, x3, x1); in Generate_InstantiateAsmJs()
1519 __ cmp(x4, Operand(j)); in Generate_InstantiateAsmJs()
1520 __ B(ne, &over); in Generate_InstantiateAsmJs()
1523 __ ldr(x4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset + in Generate_InstantiateAsmJs()
1525 __ push(x4); in Generate_InstantiateAsmJs()
1528 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate_InstantiateAsmJs()
1531 __ jmp(&args_done); in Generate_InstantiateAsmJs()
1532 __ bind(&over); in Generate_InstantiateAsmJs()
1535 __ bind(&args_done); in Generate_InstantiateAsmJs()
1538 __ CallRuntime(Runtime::kInstantiateAsmJs, 4); in Generate_InstantiateAsmJs()
1540 __ JumpIfSmi(x0, &failed); in Generate_InstantiateAsmJs()
1542 __ Drop(2); in Generate_InstantiateAsmJs()
1543 __ pop(x4); in Generate_InstantiateAsmJs()
1544 __ SmiUntag(x4); in Generate_InstantiateAsmJs()
1547 __ add(x4, x4, Operand(1)); in Generate_InstantiateAsmJs()
1548 __ Drop(x4); in Generate_InstantiateAsmJs()
1549 __ Ret(); in Generate_InstantiateAsmJs()
1551 __ bind(&failed); in Generate_InstantiateAsmJs()
1553 __ Pop(x3, x1, x0); in Generate_InstantiateAsmJs()
1554 __ SmiUntag(x0); in Generate_InstantiateAsmJs()
1577 __ Push(x0, x1, x3, fp, lr); in GenerateMakeCodeYoungAgainCommon()
1578 __ Mov(x1, ExternalReference::isolate_address(masm->isolate())); in GenerateMakeCodeYoungAgainCommon()
1579 __ CallCFunction( in GenerateMakeCodeYoungAgainCommon()
1581 __ Pop(lr, fp, x3, x1, x0); in GenerateMakeCodeYoungAgainCommon()
1586 __ Br(x0); in GenerateMakeCodeYoungAgainCommon()
1612 __ Push(x0, x1, x3, fp, lr); in CODE_AGE_LIST()
1613 __ Mov(x1, ExternalReference::isolate_address(masm->isolate())); in CODE_AGE_LIST()
1614 __ CallCFunction( in CODE_AGE_LIST()
1617 __ Pop(lr, fp, x3, x1, x0); in CODE_AGE_LIST()
1620 __ EmitFrameSetupForCodeAgePatching(masm); in CODE_AGE_LIST()
1624 __ Add(x0, x0, kNoCodeAgeSequenceLength); in CODE_AGE_LIST()
1625 __ Br(x0); in CODE_AGE_LIST()
1647 __ PushXRegList(kSafepointSavedRegisters); in Generate_NotifyStubFailureHelper()
1649 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles); in Generate_NotifyStubFailureHelper()
1650 __ PopXRegList(kSafepointSavedRegisters); in Generate_NotifyStubFailureHelper()
1654 __ Drop(1); in Generate_NotifyStubFailureHelper()
1658 __ Br(lr); in Generate_NotifyStubFailureHelper()
1674 __ Mov(x0, Smi::FromInt(static_cast<int>(type))); in Generate_NotifyDeoptimizedHelper()
1675 __ Push(x0); in Generate_NotifyDeoptimizedHelper()
1676 __ CallRuntime(Runtime::kNotifyDeoptimized); in Generate_NotifyDeoptimizedHelper()
1681 __ Peek(state, 0); in Generate_NotifyDeoptimizedHelper()
1682 __ SmiUntag(state); in Generate_NotifyDeoptimizedHelper()
1686 __ CompareAndBranch(state, in Generate_NotifyDeoptimizedHelper()
1689 __ Drop(1); // Remove state. in Generate_NotifyDeoptimizedHelper()
1690 __ Ret(); in Generate_NotifyDeoptimizedHelper()
1692 __ Bind(&with_tos_register); in Generate_NotifyDeoptimizedHelper()
1695 __ Peek(x0, kPointerSize); in Generate_NotifyDeoptimizedHelper()
1696 __ CompareAndBranch(state, in Generate_NotifyDeoptimizedHelper()
1699 __ Drop(2); // Remove state and TOS. in Generate_NotifyDeoptimizedHelper()
1700 __ Ret(); in Generate_NotifyDeoptimizedHelper()
1702 __ Bind(&unknown_state); in Generate_NotifyDeoptimizedHelper()
1703 __ Abort(kInvalidFullCodegenState); in Generate_NotifyDeoptimizedHelper()
1728 __ Ldr(signature, FieldMemOperand(function_template_info, in CompatibleReceiverCheck()
1730 __ CompareRoot(signature, Heap::kUndefinedValueRootIndex); in CompatibleReceiverCheck()
1732 __ B(eq, &receiver_check_passed); in CompatibleReceiverCheck()
1735 __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); in CompatibleReceiverCheck()
1737 __ Bind(&prototype_loop_start); in CompatibleReceiverCheck()
1740 __ GetMapConstructor(constructor, map, x16, x16); in CompatibleReceiverCheck()
1741 __ cmp(x16, Operand(JS_FUNCTION_TYPE)); in CompatibleReceiverCheck()
1743 __ B(ne, &next_prototype); in CompatibleReceiverCheck()
1745 __ Ldr(type, in CompatibleReceiverCheck()
1747 __ Ldr(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset)); in CompatibleReceiverCheck()
1751 __ Bind(&function_template_loop); in CompatibleReceiverCheck()
1754 __ Cmp(signature, type); in CompatibleReceiverCheck()
1755 __ B(eq, &receiver_check_passed); in CompatibleReceiverCheck()
1759 __ JumpIfSmi(type, &next_prototype); in CompatibleReceiverCheck()
1760 __ CompareObjectType(type, x16, x17, FUNCTION_TEMPLATE_INFO_TYPE); in CompatibleReceiverCheck()
1761 __ B(ne, &next_prototype); in CompatibleReceiverCheck()
1764 __ Ldr(type, in CompatibleReceiverCheck()
1766 __ B(&function_template_loop); in CompatibleReceiverCheck()
1769 __ Bind(&next_prototype); in CompatibleReceiverCheck()
1770 __ Ldr(x16, FieldMemOperand(map, Map::kBitField3Offset)); in CompatibleReceiverCheck()
1771 __ Tst(x16, Operand(Map::HasHiddenPrototype::kMask)); in CompatibleReceiverCheck()
1772 __ B(eq, receiver_check_failed); in CompatibleReceiverCheck()
1773 __ Ldr(receiver, FieldMemOperand(map, Map::kPrototypeOffset)); in CompatibleReceiverCheck()
1774 __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); in CompatibleReceiverCheck()
1776 __ B(&prototype_loop_start); in CompatibleReceiverCheck()
1778 __ Bind(&receiver_check_passed); in CompatibleReceiverCheck()
1793 __ Ldr(x3, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); in Generate_HandleFastApiCall()
1794 __ Ldr(x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset)); in Generate_HandleFastApiCall()
1798 __ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2)); in Generate_HandleFastApiCall()
1803 __ Ldr(x4, FieldMemOperand(x3, FunctionTemplateInfo::kCallCodeOffset)); in Generate_HandleFastApiCall()
1804 __ Ldr(x4, FieldMemOperand(x4, CallHandlerInfo::kFastHandlerOffset)); in Generate_HandleFastApiCall()
1805 __ Add(x4, x4, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_HandleFastApiCall()
1806 __ Jump(x4); in Generate_HandleFastApiCall()
1809 __ Bind(&receiver_check_failed); in Generate_HandleFastApiCall()
1811 __ add(x0, x0, Operand(1)); in Generate_HandleFastApiCall()
1812 __ Drop(x0); in Generate_HandleFastApiCall()
1813 __ TailCallRuntime(Runtime::kThrowIllegalInvocation); in Generate_HandleFastApiCall()
1820 __ Ldr(x0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in Generate_OnStackReplacementHelper()
1821 __ Ldr(x0, MemOperand(x0, JavaScriptFrameConstants::kFunctionOffset)); in Generate_OnStackReplacementHelper()
1823 __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); in Generate_OnStackReplacementHelper()
1829 __ Push(x0); in Generate_OnStackReplacementHelper()
1830 __ CallRuntime(Runtime::kCompileForOnStackReplacement); in Generate_OnStackReplacementHelper()
1835 __ CompareAndBranch(x0, Smi::kZero, ne, &skip); in Generate_OnStackReplacementHelper()
1836 __ Ret(); in Generate_OnStackReplacementHelper()
1838 __ Bind(&skip); in Generate_OnStackReplacementHelper()
1843 __ LeaveFrame(StackFrame::STUB); in Generate_OnStackReplacementHelper()
1848 __ Ldr(x1, MemOperand(x0, Code::kDeoptimizationDataOffset - kHeapObjectTag)); in Generate_OnStackReplacementHelper()
1852 __ Ldrsw(w1, UntagSmiFieldMemOperand( in Generate_OnStackReplacementHelper()
1858 __ Add(x0, x0, x1); in Generate_OnStackReplacementHelper()
1859 __ Add(lr, x0, Code::kHeaderSize - kHeapObjectTag); in Generate_OnStackReplacementHelper()
1862 __ Ret(); in Generate_OnStackReplacementHelper()
1890 __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex); in Generate_FunctionPrototypeApply()
1891 __ LoadRoot(null_value, Heap::kNullValueRootIndex); in Generate_FunctionPrototypeApply()
1899 __ Claim(2); in Generate_FunctionPrototypeApply()
1900 __ Drop(argc); in Generate_FunctionPrototypeApply()
1908 __ Cmp(argc, 1); in Generate_FunctionPrototypeApply()
1909 __ Pop(arg_array, this_arg); // Overwrites argc. in Generate_FunctionPrototypeApply()
1910 __ CmovX(this_arg, undefined_value, lo); // undefined if argc == 0. in Generate_FunctionPrototypeApply()
1911 __ CmovX(arg_array, undefined_value, ls); // undefined if argc <= 1. in Generate_FunctionPrototypeApply()
1913 __ Peek(receiver, 0); in Generate_FunctionPrototypeApply()
1914 __ Poke(this_arg, 0); in Generate_FunctionPrototypeApply()
1926 __ JumpIfSmi(receiver, &receiver_not_callable); in Generate_FunctionPrototypeApply()
1927 __ Ldr(x10, FieldMemOperand(receiver, HeapObject::kMapOffset)); in Generate_FunctionPrototypeApply()
1928 __ Ldrb(w10, FieldMemOperand(x10, Map::kBitFieldOffset)); in Generate_FunctionPrototypeApply()
1929 __ TestAndBranchIfAllClear(x10, 1 << Map::kIsCallable, in Generate_FunctionPrototypeApply()
1934 __ Cmp(arg_array, null_value); in Generate_FunctionPrototypeApply()
1935 __ Ccmp(arg_array, undefined_value, ZFlag, ne); in Generate_FunctionPrototypeApply()
1936 __ B(eq, &no_arguments); in Generate_FunctionPrototypeApply()
1941 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); in Generate_FunctionPrototypeApply()
1945 __ Bind(&no_arguments); in Generate_FunctionPrototypeApply()
1947 __ Mov(x0, 0); in Generate_FunctionPrototypeApply()
1949 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); in Generate_FunctionPrototypeApply()
1953 __ Bind(&receiver_not_callable); in Generate_FunctionPrototypeApply()
1955 __ Poke(receiver, 0); in Generate_FunctionPrototypeApply()
1956 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); in Generate_FunctionPrototypeApply()
1972 __ Cbnz(argc, &done); in Generate_FunctionPrototypeCall()
1973 __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex); in Generate_FunctionPrototypeCall()
1974 __ Push(scratch1); in Generate_FunctionPrototypeCall()
1975 __ Mov(argc, 1); in Generate_FunctionPrototypeCall()
1976 __ Bind(&done); in Generate_FunctionPrototypeCall()
1980 __ Peek(function, Operand(argc, LSL, kXRegSizeLog2)); in Generate_FunctionPrototypeCall()
1988 __ Add(scratch2, jssp, Operand(argc, LSL, kPointerSizeLog2)); in Generate_FunctionPrototypeCall()
1989 __ Sub(scratch1, scratch2, kPointerSize); in Generate_FunctionPrototypeCall()
1991 __ Bind(&loop); in Generate_FunctionPrototypeCall()
1992 __ Ldr(x12, MemOperand(scratch1, -kPointerSize, PostIndex)); in Generate_FunctionPrototypeCall()
1993 __ Str(x12, MemOperand(scratch2, -kPointerSize, PostIndex)); in Generate_FunctionPrototypeCall()
1994 __ Cmp(scratch1, jssp); in Generate_FunctionPrototypeCall()
1995 __ B(ge, &loop); in Generate_FunctionPrototypeCall()
1998 __ Sub(argc, argc, 1); in Generate_FunctionPrototypeCall()
1999 __ Drop(1); in Generate_FunctionPrototypeCall()
2003 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); in Generate_FunctionPrototypeCall()
2022 __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex); in Generate_ReflectApply()
2030 __ Claim(3); in Generate_ReflectApply()
2031 __ Drop(argc); in Generate_ReflectApply()
2040 __ Adds(x10, argc, 0); // Preserve argc, and set the Z flag if it is zero. in Generate_ReflectApply()
2041 __ Pop(arguments_list, this_argument, target); // Overwrites argc. in Generate_ReflectApply()
2042 __ CmovX(target, undefined_value, eq); // undefined if argc == 0. in Generate_ReflectApply()
2043 __ Cmp(x10, 2); in Generate_ReflectApply()
2044 __ CmovX(this_argument, undefined_value, lo); // undefined if argc <= 1. in Generate_ReflectApply()
2045 __ CmovX(arguments_list, undefined_value, ls); // undefined if argc <= 2. in Generate_ReflectApply()
2047 __ Poke(this_argument, 0); // Overwrite receiver. in Generate_ReflectApply()
2058 __ JumpIfSmi(target, &target_not_callable); in Generate_ReflectApply()
2059 __ Ldr(x10, FieldMemOperand(target, HeapObject::kMapOffset)); in Generate_ReflectApply()
2060 __ Ldr(x10, FieldMemOperand(x10, Map::kBitFieldOffset)); in Generate_ReflectApply()
2061 __ TestAndBranchIfAllClear(x10, 1 << Map::kIsCallable, &target_not_callable); in Generate_ReflectApply()
2066 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); in Generate_ReflectApply()
2069 __ Bind(&target_not_callable); in Generate_ReflectApply()
2071 __ Poke(target, 0); in Generate_ReflectApply()
2072 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); in Generate_ReflectApply()
2092 __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex); in Generate_ReflectConstruct()
2101 __ Claim(3); in Generate_ReflectConstruct()
2102 __ Drop(argc); in Generate_ReflectConstruct()
2111 __ Adds(x10, argc, 0); // Preserve argc, and set the Z flag if it is zero. in Generate_ReflectConstruct()
2112 __ Pop(new_target, arguments_list, target); // Overwrites argc. in Generate_ReflectConstruct()
2113 __ CmovX(target, undefined_value, eq); // undefined if argc == 0. in Generate_ReflectConstruct()
2114 __ Cmp(x10, 2); in Generate_ReflectConstruct()
2115 __ CmovX(arguments_list, undefined_value, lo); // undefined if argc <= 1. in Generate_ReflectConstruct()
2116 __ CmovX(new_target, target, ls); // target if argc <= 2. in Generate_ReflectConstruct()
2118 __ Poke(undefined_value, 0); // Overwrite receiver. in Generate_ReflectConstruct()
2130 __ JumpIfSmi(target, &target_not_constructor); in Generate_ReflectConstruct()
2131 __ Ldr(x10, FieldMemOperand(target, HeapObject::kMapOffset)); in Generate_ReflectConstruct()
2132 __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset)); in Generate_ReflectConstruct()
2133 __ TestAndBranchIfAllClear(x10, 1 << Map::kIsConstructor, in Generate_ReflectConstruct()
2138 __ JumpIfSmi(new_target, &new_target_not_constructor); in Generate_ReflectConstruct()
2139 __ Ldr(x10, FieldMemOperand(new_target, HeapObject::kMapOffset)); in Generate_ReflectConstruct()
2140 __ Ldrb(x10, FieldMemOperand(x10, Map::kBitFieldOffset)); in Generate_ReflectConstruct()
2141 __ TestAndBranchIfAllClear(x10, 1 << Map::kIsConstructor, in Generate_ReflectConstruct()
2145 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); in Generate_ReflectConstruct()
2148 __ Bind(&target_not_constructor); in Generate_ReflectConstruct()
2150 __ Poke(target, 0); in Generate_ReflectConstruct()
2151 __ TailCallRuntime(Runtime::kThrowNotConstructor); in Generate_ReflectConstruct()
2155 __ Bind(&new_target_not_constructor); in Generate_ReflectConstruct()
2157 __ Poke(new_target, 0); in Generate_ReflectConstruct()
2158 __ TailCallRuntime(Runtime::kThrowNotConstructor); in Generate_ReflectConstruct()
2163 __ SmiTag(x10, x0); in EnterArgumentsAdaptorFrame()
2164 __ Mov(x11, StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)); in EnterArgumentsAdaptorFrame()
2165 __ Push(lr, fp); in EnterArgumentsAdaptorFrame()
2166 __ Push(x11, x1, x10); in EnterArgumentsAdaptorFrame()
2167 __ Add(fp, jssp, in EnterArgumentsAdaptorFrame()
2177 __ Ldr(x10, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + in LeaveArgumentsAdaptorFrame()
2179 __ Mov(jssp, fp); in LeaveArgumentsAdaptorFrame()
2180 __ Pop(fp, lr); in LeaveArgumentsAdaptorFrame()
2181 __ DropBySMI(x10, kXRegSize); in LeaveArgumentsAdaptorFrame()
2182 __ Drop(1); in LeaveArgumentsAdaptorFrame()
2205 __ JumpIfSmi(arguments_list, &create_runtime); in Generate_Apply()
2209 __ Ldr(native_context, NativeContextMemOperand()); in Generate_Apply()
2213 __ Ldr(arguments_list_map, in Generate_Apply()
2217 __ Ldr(x10, ContextMemOperand(native_context, in Generate_Apply()
2219 __ Ldr(x11, ContextMemOperand(native_context, in Generate_Apply()
2221 __ Cmp(arguments_list_map, x10); in Generate_Apply()
2222 __ Ccmp(arguments_list_map, x11, ZFlag, ne); in Generate_Apply()
2223 __ B(eq, &create_arguments); in Generate_Apply()
2226 __ CompareInstanceType(arguments_list_map, x10, JS_ARRAY_TYPE); in Generate_Apply()
2227 __ B(eq, &create_array); in Generate_Apply()
2230 __ Bind(&create_runtime); in Generate_Apply()
2233 __ Push(target, new_target, arguments_list); in Generate_Apply()
2234 __ CallRuntime(Runtime::kCreateListFromArrayLike); in Generate_Apply()
2235 __ Pop(new_target, target); in Generate_Apply()
2236 __ Ldrsw(len, UntagSmiFieldMemOperand(arguments_list, in Generate_Apply()
2239 __ B(&done_create); in Generate_Apply()
2242 __ Bind(&create_arguments); in Generate_Apply()
2243 __ Ldrsw(len, UntagSmiFieldMemOperand(arguments_list, in Generate_Apply()
2245 __ Ldr(x10, FieldMemOperand(arguments_list, JSObject::kElementsOffset)); in Generate_Apply()
2246 __ Ldrsw(x11, UntagSmiFieldMemOperand(x10, FixedArray::kLengthOffset)); in Generate_Apply()
2247 __ CompareAndBranch(len, x11, ne, &create_runtime); in Generate_Apply()
2248 __ Mov(args, x10); in Generate_Apply()
2249 __ B(&done_create); in Generate_Apply()
2253 __ Bind(&create_holey_array); in Generate_Apply()
2257 __ Ldr(arguments_list_prototype, in Generate_Apply()
2259 __ Ldr(x10, ContextMemOperand(native_context, in Generate_Apply()
2261 __ Cmp(arguments_list_prototype, x10); in Generate_Apply()
2262 __ B(ne, &create_runtime); in Generate_Apply()
2263 __ LoadRoot(x10, Heap::kArrayProtectorRootIndex); in Generate_Apply()
2264 __ Ldrsw(x11, UntagSmiFieldMemOperand(x10, PropertyCell::kValueOffset)); in Generate_Apply()
2265 __ Cmp(x11, Isolate::kProtectorValid); in Generate_Apply()
2266 __ B(ne, &create_runtime); in Generate_Apply()
2267 __ Ldrsw(len, in Generate_Apply()
2269 __ Ldr(args, FieldMemOperand(arguments_list, JSArray::kElementsOffset)); in Generate_Apply()
2270 __ B(&done_create); in Generate_Apply()
2273 __ Bind(&create_array); in Generate_Apply()
2274 __ Ldr(x10, FieldMemOperand(arguments_list_map, Map::kBitField2Offset)); in Generate_Apply()
2275 __ DecodeField<Map::ElementsKindBits>(x10); in Generate_Apply()
2282 __ Cmp(x10, FAST_HOLEY_ELEMENTS); in Generate_Apply()
2283 __ B(hi, &create_runtime); in Generate_Apply()
2285 __ Tbnz(x10, 0, &create_holey_array); in Generate_Apply()
2287 __ Ldrsw(len, in Generate_Apply()
2289 __ Ldr(args, FieldMemOperand(arguments_list, JSArray::kElementsOffset)); in Generate_Apply()
2291 __ Bind(&done_create); in Generate_Apply()
2299 __ LoadRoot(x10, Heap::kRealStackLimitRootIndex); in Generate_Apply()
2302 __ Sub(x10, masm->StackPointer(), x10); in Generate_Apply()
2304 __ Cmp(x10, Operand(len, LSL, kPointerSizeLog2)); in Generate_Apply()
2305 __ B(gt, &done); // Signed comparison. in Generate_Apply()
2306 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_Apply()
2307 __ Bind(&done); in Generate_Apply()
2323 __ Add(src, args, FixedArray::kHeaderSize - kHeapObjectTag); in Generate_Apply()
2324 __ Mov(x0, len); // The 'len' argument for Call() or Construct(). in Generate_Apply()
2325 __ Cbz(len, &done); in Generate_Apply()
2330 __ LoadRoot(the_hole_value, Heap::kTheHoleValueRootIndex); in Generate_Apply()
2331 __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex); in Generate_Apply()
2332 __ Claim(len); in Generate_Apply()
2333 __ Bind(&loop); in Generate_Apply()
2334 __ Sub(len, len, 1); in Generate_Apply()
2335 __ Ldr(x10, MemOperand(src, kPointerSize, PostIndex)); in Generate_Apply()
2336 __ Cmp(x10, the_hole_value); in Generate_Apply()
2337 __ Csel(x10, x10, undefined_value, ne); in Generate_Apply()
2338 __ Poke(x10, Operand(len, LSL, kPointerSizeLog2)); in Generate_Apply()
2339 __ Cbnz(len, &loop); in Generate_Apply()
2340 __ Bind(&done); in Generate_Apply()
2356 __ CompareRoot(new_target, Heap::kUndefinedValueRootIndex); in Generate_Apply()
2357 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET, eq); in Generate_Apply()
2358 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate_Apply()
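The Generate_Apply() lines above end with a stack-limit check, a copy loop that replaces array holes with undefined while pushing arguments, and a tail jump to either Call or Construct depending on whether new_target is undefined. A minimal, self-contained C++ sketch of the two pieces of logic visible in the matched lines follows; it is not V8 code, and names such as FitsOnStack, CopyArguments, kTheHole and kUndefined are invented stand-ins.

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    constexpr std::intptr_t kTheHole = -1;    // stand-in for the hole sentinel
    constexpr std::intptr_t kUndefined = 0;   // stand-in for undefined
    constexpr std::size_t kPointerSize = 8;

    // Models the Sub/Cmp/B(gt) sequence: the gap between the stack pointer and
    // the real stack limit must exceed len * kPointerSize (signed comparison),
    // otherwise Runtime::kThrowStackOverflow is reached.
    bool FitsOnStack(std::uintptr_t sp, std::uintptr_t real_stack_limit,
                     std::size_t len) {
      std::intptr_t gap = static_cast<std::intptr_t>(sp - real_stack_limit);
      return gap > static_cast<std::intptr_t>(len * kPointerSize);
    }

    // Models the Ldr/Cmp/Csel/Poke loop: each element read from the backing
    // store is pushed as-is unless it is the hole, which becomes undefined.
    std::vector<std::intptr_t> CopyArguments(
        const std::vector<std::intptr_t>& elements) {
      std::vector<std::intptr_t> out;
      out.reserve(elements.size());
      for (std::intptr_t value : elements)
        out.push_back(value == kTheHole ? kUndefined : value);
      return out;
    }

In this model the signed comparison matters: if the stack pointer is already below the limit, the subtraction goes negative and the check fails instead of wrapping around.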
2374 __ Ldr(x3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in Generate_CallForwardVarargs()
2375 __ Ldr(x4, MemOperand(x3, CommonFrameConstants::kContextOrFrameTypeOffset)); in Generate_CallForwardVarargs()
2376 __ Cmp(x4, StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR)); in Generate_CallForwardVarargs()
2377 __ B(eq, &arguments_adaptor); in Generate_CallForwardVarargs()
2379 __ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); in Generate_CallForwardVarargs()
2380 __ Ldr(x0, FieldMemOperand(x0, JSFunction::kSharedFunctionInfoOffset)); in Generate_CallForwardVarargs()
2381 __ Ldrsw(x0, FieldMemOperand( in Generate_CallForwardVarargs()
2383 __ Mov(x3, fp); in Generate_CallForwardVarargs()
2385 __ B(&arguments_done); in Generate_CallForwardVarargs()
2386 __ Bind(&arguments_adaptor); in Generate_CallForwardVarargs()
2389 __ Ldrsw(x0, UntagSmiMemOperand( in Generate_CallForwardVarargs()
2392 __ Bind(&arguments_done); in Generate_CallForwardVarargs()
2395 __ Subs(x0, x0, x2); in Generate_CallForwardVarargs()
2396 __ B(le, &stack_empty); in Generate_CallForwardVarargs()
2404 __ Add(x3, x3, kPointerSize); in Generate_CallForwardVarargs()
2405 __ Mov(x2, x0); in Generate_CallForwardVarargs()
2406 __ bind(&loop); in Generate_CallForwardVarargs()
2408 __ Ldr(x4, MemOperand(x3, x2, LSL, kPointerSizeLog2)); in Generate_CallForwardVarargs()
2409 __ Push(x4); in Generate_CallForwardVarargs()
2410 __ Subs(x2, x2, 1); in Generate_CallForwardVarargs()
2411 __ B(ne, &loop); in Generate_CallForwardVarargs()
2415 __ B(&stack_done); in Generate_CallForwardVarargs()
2416 __ Bind(&stack_overflow); in Generate_CallForwardVarargs()
2417 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_CallForwardVarargs()
2418 __ Bind(&stack_empty); in Generate_CallForwardVarargs()
2421 __ Mov(x0, 0); in Generate_CallForwardVarargs()
2423 __ Bind(&stack_done); in Generate_CallForwardVarargs()
2425 __ Jump(code, RelocInfo::CODE_TARGET); in Generate_CallForwardVarargs()
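Generate_CallForwardVarargs() first determines how many caller arguments exist (from the arguments-adaptor frame when one is present, otherwise from the callee's formal parameter count), subtracts the start index, and pushes the remaining arguments in a countdown loop before jumping to the target code. A small hedged sketch of that count computation follows, with invented names (ForwardVarargs, start_index); it is a simplification, not V8 code.

    #include <cstddef>
    #include <vector>

    // caller_args stands in for the arguments found in the caller's frame;
    // start_index stands in for the value arriving in x2.
    std::vector<int> ForwardVarargs(const std::vector<int>& caller_args,
                                    std::size_t start_index) {
      // Mirrors the Subs/B(le, &stack_empty) pair: nothing to forward.
      if (caller_args.size() <= start_index) return {};
      return std::vector<int>(
          caller_args.begin() + static_cast<std::ptrdiff_t>(start_index),
          caller_args.end());
    }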
2468 __ Mov(scratch1, Operand(is_tail_call_elimination_enabled)); in PrepareForTailCall()
2469 __ Ldrb(scratch1, MemOperand(scratch1)); in PrepareForTailCall()
2470 __ Cmp(scratch1, Operand(0)); in PrepareForTailCall()
2471 __ B(eq, &done); in PrepareForTailCall()
2476 __ Ldr(scratch3, in PrepareForTailCall()
2478 __ Cmp(scratch3, Operand(StackFrame::TypeToMarker(StackFrame::STUB))); in PrepareForTailCall()
2479 __ B(ne, &no_interpreter_frame); in PrepareForTailCall()
2480 __ Ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in PrepareForTailCall()
2481 __ bind(&no_interpreter_frame); in PrepareForTailCall()
2487 __ Ldr(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in PrepareForTailCall()
2488 __ Ldr(scratch3, in PrepareForTailCall()
2490 __ Cmp(scratch3, in PrepareForTailCall()
2492 __ B(ne, &no_arguments_adaptor); in PrepareForTailCall()
2495 __ mov(fp, scratch2); in PrepareForTailCall()
2496 __ Ldr(caller_args_count_reg, in PrepareForTailCall()
2498 __ SmiUntag(caller_args_count_reg); in PrepareForTailCall()
2499 __ B(&formal_parameter_count_loaded); in PrepareForTailCall()
2501 __ bind(&no_arguments_adaptor); in PrepareForTailCall()
2503 __ Ldr(scratch1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); in PrepareForTailCall()
2504 __ Ldr(scratch1, in PrepareForTailCall()
2506 __ Ldrsw(caller_args_count_reg, in PrepareForTailCall()
2509 __ bind(&formal_parameter_count_loaded); in PrepareForTailCall()
2512 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2, in PrepareForTailCall()
2514 __ bind(&done); in PrepareForTailCall()
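PrepareForTailCall() only does work when the tail-call-elimination flag is set; it then skips an interpreter STUB frame if one sits on top and picks the caller's argument count either from an arguments-adaptor frame or from the function's formal parameter count before shifting the frame. A tiny sketch of that selection, with an invented Frame type and field names; a simplification, not V8 code.

    struct Frame {
      bool is_arguments_adaptor;
      int adaptor_argument_count;    // adaptor frame length slot, untagged
      int formal_parameter_count;    // from the SharedFunctionInfo
    };

    // Mirrors the branch around &no_arguments_adaptor /
    // &formal_parameter_count_loaded in the listing above.
    int CallerArgumentCount(const Frame& caller) {
      return caller.is_arguments_adaptor ? caller.adaptor_argument_count
                                         : caller.formal_parameter_count;
    }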
2527 __ AssertFunction(x1); in Generate_CallFunction()
2532 __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); in Generate_CallFunction()
2533 __ Ldr(w3, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset)); in Generate_CallFunction()
2534 __ TestAndBranchIfAnySet(w3, FunctionKind::kClassConstructor in Generate_CallFunction()
2541 __ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset)); in Generate_CallFunction()
2544 __ TestAndBranchIfAnySet(w3, in Generate_CallFunction()
2558 __ LoadGlobalProxy(x3); in Generate_CallFunction()
2561 __ Peek(x3, Operand(x0, LSL, kXRegSizeLog2)); in Generate_CallFunction()
2562 __ JumpIfSmi(x3, &convert_to_object); in Generate_CallFunction()
2564 __ CompareObjectType(x3, x4, x4, FIRST_JS_RECEIVER_TYPE); in Generate_CallFunction()
2565 __ B(hs, &done_convert); in Generate_CallFunction()
2568 __ JumpIfRoot(x3, Heap::kUndefinedValueRootIndex, in Generate_CallFunction()
2570 __ JumpIfNotRoot(x3, Heap::kNullValueRootIndex, &convert_to_object); in Generate_CallFunction()
2571 __ Bind(&convert_global_proxy); in Generate_CallFunction()
2574 __ LoadGlobalProxy(x3); in Generate_CallFunction()
2576 __ B(&convert_receiver); in Generate_CallFunction()
2578 __ Bind(&convert_to_object); in Generate_CallFunction()
2584 __ SmiTag(x0); in Generate_CallFunction()
2585 __ Push(x0, x1); in Generate_CallFunction()
2586 __ Mov(x0, x3); in Generate_CallFunction()
2587 __ Push(cp); in Generate_CallFunction()
2588 __ Call(masm->isolate()->builtins()->ToObject(), in Generate_CallFunction()
2590 __ Pop(cp); in Generate_CallFunction()
2591 __ Mov(x3, x0); in Generate_CallFunction()
2592 __ Pop(x1, x0); in Generate_CallFunction()
2593 __ SmiUntag(x0); in Generate_CallFunction()
2595 __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); in Generate_CallFunction()
2596 __ Bind(&convert_receiver); in Generate_CallFunction()
2598 __ Poke(x3, Operand(x0, LSL, kXRegSizeLog2)); in Generate_CallFunction()
2600 __ Bind(&done_convert); in Generate_CallFunction()
2613 __ Ldrsw( in Generate_CallFunction()
2617 __ InvokeFunctionCode(x1, no_reg, expected, actual, JUMP_FUNCTION, in Generate_CallFunction()
2621 __ bind(&class_constructor); in Generate_CallFunction()
2624 __ Push(x1); in Generate_CallFunction()
2625 __ CallRuntime(Runtime::kThrowConstructorNonCallableError); in Generate_CallFunction()
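Generate_CallFunction() rejects class constructors (the &class_constructor path throws) and, for callees that need it, converts the receiver before invoking: JS receivers pass through, undefined and null are replaced by the global proxy, and anything else goes through ToObject. A hedged model of that branch structure, using invented enum names (ReceiverKind, ConvertedReceiver); a sketch of the semantics, not V8 code.

    enum class ReceiverKind { kUndefined, kNull, kSmiOrPrimitive, kJSReceiver };
    enum class ConvertedReceiver { kUnchanged, kGlobalProxy, kWrappedObject };

    ConvertedReceiver ConvertReceiver(ReceiverKind receiver) {
      if (receiver == ReceiverKind::kJSReceiver)
        return ConvertedReceiver::kUnchanged;        // &done_convert
      if (receiver == ReceiverKind::kUndefined ||
          receiver == ReceiverKind::kNull)
        return ConvertedReceiver::kGlobalProxy;      // &convert_global_proxy
      return ConvertedReceiver::kWrappedObject;      // &convert_to_object: ToObject
    }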
2640 __ Ldr(x2, FieldMemOperand(x1, JSBoundFunction::kBoundArgumentsOffset)); in Generate_PushBoundArguments()
2641 __ Ldrsw(x4, UntagSmiFieldMemOperand(x2, FixedArray::kLengthOffset)); in Generate_PushBoundArguments()
2642 __ Cmp(x4, 0); in Generate_PushBoundArguments()
2643 __ B(eq, &no_bound_arguments); in Generate_PushBoundArguments()
2656 __ Claim(x4); in Generate_PushBoundArguments()
2660 __ CompareRoot(jssp, Heap::kRealStackLimitRootIndex); in Generate_PushBoundArguments()
2661 __ B(gt, &done); // Signed comparison. in Generate_PushBoundArguments()
2663 __ Drop(x4); in Generate_PushBoundArguments()
2666 __ EnterFrame(StackFrame::INTERNAL); in Generate_PushBoundArguments()
2667 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_PushBoundArguments()
2669 __ Bind(&done); in Generate_PushBoundArguments()
2675 __ Mov(x5, 0); in Generate_PushBoundArguments()
2676 __ Bind(&loop); in Generate_PushBoundArguments()
2677 __ Cmp(x5, x0); in Generate_PushBoundArguments()
2678 __ B(gt, &done_loop); in Generate_PushBoundArguments()
2679 __ Peek(x10, Operand(x4, LSL, kPointerSizeLog2)); in Generate_PushBoundArguments()
2680 __ Poke(x10, Operand(x5, LSL, kPointerSizeLog2)); in Generate_PushBoundArguments()
2681 __ Add(x4, x4, 1); in Generate_PushBoundArguments()
2682 __ Add(x5, x5, 1); in Generate_PushBoundArguments()
2683 __ B(&loop); in Generate_PushBoundArguments()
2684 __ Bind(&done_loop); in Generate_PushBoundArguments()
2690 __ Ldrsw(x4, UntagSmiFieldMemOperand(x2, FixedArray::kLengthOffset)); in Generate_PushBoundArguments()
2691 __ Add(x2, x2, FixedArray::kHeaderSize - kHeapObjectTag); in Generate_PushBoundArguments()
2692 __ Bind(&loop); in Generate_PushBoundArguments()
2693 __ Sub(x4, x4, 1); in Generate_PushBoundArguments()
2694 __ Ldr(x10, MemOperand(x2, x4, LSL, kPointerSizeLog2)); in Generate_PushBoundArguments()
2695 __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2)); in Generate_PushBoundArguments()
2696 __ Add(x0, x0, 1); in Generate_PushBoundArguments()
2697 __ Cmp(x4, 0); in Generate_PushBoundArguments()
2698 __ B(gt, &loop); in Generate_PushBoundArguments()
2701 __ Bind(&no_bound_arguments); in Generate_PushBoundArguments()
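Generate_PushBoundArguments() grows the stack by the number of bound arguments (with a stack-limit check), moves the receiver and the existing arguments into the new space, and then copies the bound arguments out of the FixedArray. The net effect, under the usual Function.prototype.bind semantics, is that the bound arguments end up ahead of the call-site arguments. A minimal sketch of that effect only (invented name PushBoundArguments); the in-place stack shuffling is not modeled.

    #include <vector>

    std::vector<int> PushBoundArguments(const std::vector<int>& bound_args,
                                        const std::vector<int>& call_args) {
      std::vector<int> combined(bound_args);
      combined.insert(combined.end(), call_args.begin(), call_args.end());
      return combined;  // what the target function ultimately receives
    }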
2713 __ AssertBoundFunction(x1); in Generate_CallBoundFunctionImpl()
2720 __ Ldr(x10, FieldMemOperand(x1, JSBoundFunction::kBoundThisOffset)); in Generate_CallBoundFunctionImpl()
2721 __ Poke(x10, Operand(x0, LSL, kPointerSizeLog2)); in Generate_CallBoundFunctionImpl()
2727 __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset)); in Generate_CallBoundFunctionImpl()
2728 __ Mov(x10, in Generate_CallBoundFunctionImpl()
2730 __ Ldr(x11, MemOperand(x10)); in Generate_CallBoundFunctionImpl()
2731 __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag); in Generate_CallBoundFunctionImpl()
2732 __ Br(x12); in Generate_CallBoundFunctionImpl()
2744 __ JumpIfSmi(x1, &non_callable); in Generate_Call()
2745 __ Bind(&non_smi); in Generate_Call()
2746 __ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE); in Generate_Call()
2747 __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode), in Generate_Call()
2749 __ Cmp(x5, JS_BOUND_FUNCTION_TYPE); in Generate_Call()
2750 __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode), in Generate_Call()
2754 __ Ldrb(x4, FieldMemOperand(x4, Map::kBitFieldOffset)); in Generate_Call()
2755 __ TestAndBranchIfAllClear(x4, 1 << Map::kIsCallable, &non_callable); in Generate_Call()
2757 __ Cmp(x5, JS_PROXY_TYPE); in Generate_Call()
2758 __ B(ne, &non_function); in Generate_Call()
2766 __ Push(x1); in Generate_Call()
2769 __ Add(x0, x0, Operand(2)); in Generate_Call()
2771 __ JumpToExternalReference( in Generate_Call()
2776 __ Bind(&non_function); in Generate_Call()
2778 __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2)); in Generate_Call()
2780 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, x1); in Generate_Call()
2781 __ Jump(masm->isolate()->builtins()->CallFunction( in Generate_Call()
2786 __ bind(&non_callable); in Generate_Call()
2789 __ Push(x1); in Generate_Call()
2790 __ CallRuntime(Runtime::kThrowCalledNonCallable); in Generate_Call()
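Generate_Call() is a type dispatch: Smis and non-callable objects throw, JSFunctions and JSBoundFunctions go to their dedicated builtins, JSProxies fall through to a runtime call, and other callable objects have the target patched in as receiver and are routed through the call-as-function delegate. A compact model of the dispatch visible in the matched lines, with invented enum names; not V8 code.

    enum class TargetKind {
      kSmi, kJSFunction, kJSBoundFunction, kJSProxy, kCallableObject, kNonCallable
    };
    enum class CallDispatch {
      kCallFunction, kCallBoundFunction, kProxyRuntimeCall,
      kCallAsFunctionDelegate, kThrowCalledNonCallable
    };

    CallDispatch DispatchCall(TargetKind kind) {
      switch (kind) {
        case TargetKind::kJSFunction:      return CallDispatch::kCallFunction;
        case TargetKind::kJSBoundFunction: return CallDispatch::kCallBoundFunction;
        case TargetKind::kJSProxy:         return CallDispatch::kProxyRuntimeCall;
        case TargetKind::kCallableObject:  return CallDispatch::kCallAsFunctionDelegate;
        case TargetKind::kSmi:
        case TargetKind::kNonCallable:     return CallDispatch::kThrowCalledNonCallable;
      }
      return CallDispatch::kThrowCalledNonCallable;
    }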
2808 __ Peek(spread, 0); in CheckSpreadAndPushToStack()
2809 __ JumpIfSmi(spread, &runtime_call); in CheckSpreadAndPushToStack()
2810 __ Ldr(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset)); in CheckSpreadAndPushToStack()
2813 __ CompareInstanceType(spread_map, scratch, JS_ARRAY_TYPE); in CheckSpreadAndPushToStack()
2814 __ B(ne, &runtime_call); in CheckSpreadAndPushToStack()
2817 __ Ldr(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset)); in CheckSpreadAndPushToStack()
2818 __ Ldr(scratch2, NativeContextMemOperand()); in CheckSpreadAndPushToStack()
2819 __ Ldr(scratch2, in CheckSpreadAndPushToStack()
2821 __ Cmp(scratch, scratch2); in CheckSpreadAndPushToStack()
2822 __ B(ne, &runtime_call); in CheckSpreadAndPushToStack()
2826 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex); in CheckSpreadAndPushToStack()
2827 __ Ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); in CheckSpreadAndPushToStack()
2828 __ Cmp(scratch, Smi::FromInt(Isolate::kProtectorValid)); in CheckSpreadAndPushToStack()
2829 __ B(ne, &runtime_call); in CheckSpreadAndPushToStack()
2832 __ Ldr(scratch2, NativeContextMemOperand()); in CheckSpreadAndPushToStack()
2833 __ Ldr(scratch, in CheckSpreadAndPushToStack()
2836 __ Ldr(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); in CheckSpreadAndPushToStack()
2837 __ Ldr(scratch2, in CheckSpreadAndPushToStack()
2840 __ Cmp(scratch, scratch2); in CheckSpreadAndPushToStack()
2841 __ B(ne, &runtime_call); in CheckSpreadAndPushToStack()
2846 __ Ldr(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset)); in CheckSpreadAndPushToStack()
2847 __ DecodeField<Map::ElementsKindBits>(scratch); in CheckSpreadAndPushToStack()
2848 __ Cmp(scratch, FAST_HOLEY_ELEMENTS); in CheckSpreadAndPushToStack()
2849 __ B(hi, &runtime_call); in CheckSpreadAndPushToStack()
2851 __ Cmp(scratch, FAST_SMI_ELEMENTS); in CheckSpreadAndPushToStack()
2852 __ B(eq, &no_protector_check); in CheckSpreadAndPushToStack()
2853 __ Cmp(scratch, FAST_ELEMENTS); in CheckSpreadAndPushToStack()
2854 __ B(eq, &no_protector_check); in CheckSpreadAndPushToStack()
2856 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); in CheckSpreadAndPushToStack()
2857 __ Ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); in CheckSpreadAndPushToStack()
2858 __ Cmp(scratch, Smi::FromInt(Isolate::kProtectorValid)); in CheckSpreadAndPushToStack()
2859 __ B(ne, &runtime_call); in CheckSpreadAndPushToStack()
2861 __ Bind(&no_protector_check); in CheckSpreadAndPushToStack()
2863 __ Ldrsw(spread_len, UntagSmiFieldMemOperand(spread, JSArray::kLengthOffset)); in CheckSpreadAndPushToStack()
2864 __ Ldr(spread, FieldMemOperand(spread, JSArray::kElementsOffset)); in CheckSpreadAndPushToStack()
2865 __ B(&push_args); in CheckSpreadAndPushToStack()
2867 __ Bind(&runtime_call); in CheckSpreadAndPushToStack()
2871 __ SmiTag(argc); in CheckSpreadAndPushToStack()
2872 __ Push(constructor, new_target, argc, spread); in CheckSpreadAndPushToStack()
2873 __ CallRuntime(Runtime::kSpreadIterableFixed); in CheckSpreadAndPushToStack()
2874 __ Mov(spread, x0); in CheckSpreadAndPushToStack()
2875 __ Pop(argc, new_target, constructor); in CheckSpreadAndPushToStack()
2876 __ SmiUntag(argc); in CheckSpreadAndPushToStack()
2881 __ Ldrsw(spread_len, in CheckSpreadAndPushToStack()
2884 __ Bind(&push_args); in CheckSpreadAndPushToStack()
2886 __ Add(argc, argc, spread_len); in CheckSpreadAndPushToStack()
2887 __ Sub(argc, argc, 1); in CheckSpreadAndPushToStack()
2890 __ Pop(scratch); in CheckSpreadAndPushToStack()
2898 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex); in CheckSpreadAndPushToStack()
2901 __ Sub(scratch, masm->StackPointer(), scratch); in CheckSpreadAndPushToStack()
2903 __ Cmp(scratch, Operand(spread_len, LSL, kPointerSizeLog2)); in CheckSpreadAndPushToStack()
2904 __ B(gt, &done); // Signed comparison. in CheckSpreadAndPushToStack()
2905 __ TailCallRuntime(Runtime::kThrowStackOverflow); in CheckSpreadAndPushToStack()
2906 __ Bind(&done); in CheckSpreadAndPushToStack()
2911 __ Mov(scratch, 0); in CheckSpreadAndPushToStack()
2913 __ Bind(&loop); in CheckSpreadAndPushToStack()
2914 __ Cmp(scratch, spread_len); in CheckSpreadAndPushToStack()
2915 __ B(eq, &done); in CheckSpreadAndPushToStack()
2916 __ Add(scratch2, spread, Operand(scratch, LSL, kPointerSizeLog2)); in CheckSpreadAndPushToStack()
2917 __ Ldr(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize)); in CheckSpreadAndPushToStack()
2918 __ JumpIfNotRoot(scratch2, Heap::kTheHoleValueRootIndex, &push); in CheckSpreadAndPushToStack()
2919 __ LoadRoot(scratch2, Heap::kUndefinedValueRootIndex); in CheckSpreadAndPushToStack()
2920 __ bind(&push); in CheckSpreadAndPushToStack()
2921 __ Push(scratch2); in CheckSpreadAndPushToStack()
2922 __ Add(scratch, scratch, Operand(1)); in CheckSpreadAndPushToStack()
2923 __ B(&loop); in CheckSpreadAndPushToStack()
2924 __ Bind(&done); in CheckSpreadAndPushToStack()
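CheckSpreadAndPushToStack() only takes the fast path when the spread is a JSArray whose prototype is still the initial array prototype, the array-iterator protector cell is valid and the iterator machinery appears unmodified, the elements kind is fast, and, for holey kinds, the array protector is also intact; everything else goes through Runtime::kSpreadIterableFixed. It then re-checks the stack limit and pushes the elements, turning holes into undefined. A hedged model of the fast-path predicate only, with invented names (SpreadInfo, CanUseFastSpread); the numeric elements-kind constants are deliberately abstracted away.

    struct SpreadInfo {
      bool is_js_array;
      bool has_initial_array_prototype;
      bool iterator_protector_valid;
      bool fast_elements_kind;     // packed or holey smi/object elements
      bool holey_elements_kind;
      bool array_protector_valid;
    };

    bool CanUseFastSpread(const SpreadInfo& s) {
      if (!s.is_js_array || !s.has_initial_array_prototype) return false;
      if (!s.iterator_protector_valid) return false;
      if (!s.fast_elements_kind) return false;
      if (s.holey_elements_kind && !s.array_protector_valid) return false;
      return true;  // otherwise: &runtime_call
    }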
2936 __ LoadRoot(x3, Heap::kUndefinedValueRootIndex); in Generate_CallWithSpread()
2938 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, in Generate_CallWithSpread()
2950 __ AssertFunction(x1); in Generate_ConstructFunction()
2954 __ LoadRoot(x2, Heap::kUndefinedValueRootIndex); in Generate_ConstructFunction()
2958 __ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset)); in Generate_ConstructFunction()
2959 __ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset)); in Generate_ConstructFunction()
2960 __ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag); in Generate_ConstructFunction()
2961 __ Br(x4); in Generate_ConstructFunction()
2971 __ AssertBoundFunction(x1); in Generate_ConstructBoundFunction()
2979 __ Cmp(x1, x3); in Generate_ConstructBoundFunction()
2980 __ B(ne, &done); in Generate_ConstructBoundFunction()
2981 __ Ldr(x3, in Generate_ConstructBoundFunction()
2983 __ Bind(&done); in Generate_ConstructBoundFunction()
2987 __ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset)); in Generate_ConstructBoundFunction()
2988 __ Mov(x10, ExternalReference(Builtins::kConstruct, masm->isolate())); in Generate_ConstructBoundFunction()
2989 __ Ldr(x11, MemOperand(x10)); in Generate_ConstructBoundFunction()
2990 __ Add(x12, x11, Code::kHeaderSize - kHeapObjectTag); in Generate_ConstructBoundFunction()
2991 __ Br(x12); in Generate_ConstructBoundFunction()
3004 __ Push(x1); in Generate_ConstructProxy()
3005 __ Push(x3); in Generate_ConstructProxy()
3007 __ Add(x0, x0, 3); in Generate_ConstructProxy()
3009 __ JumpToExternalReference( in Generate_ConstructProxy()
3024 __ JumpIfSmi(x1, &non_constructor); in Generate_Construct()
3027 __ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE); in Generate_Construct()
3028 __ Jump(masm->isolate()->builtins()->ConstructFunction(), in Generate_Construct()
3032 __ Ldrb(x2, FieldMemOperand(x4, Map::kBitFieldOffset)); in Generate_Construct()
3033 __ TestAndBranchIfAllClear(x2, 1 << Map::kIsConstructor, &non_constructor); in Generate_Construct()
3037 __ Cmp(x5, JS_BOUND_FUNCTION_TYPE); in Generate_Construct()
3038 __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(), in Generate_Construct()
3042 __ Cmp(x5, JS_PROXY_TYPE); in Generate_Construct()
3043 __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET, in Generate_Construct()
3049 __ Poke(x1, Operand(x0, LSL, kXRegSizeLog2)); in Generate_Construct()
3051 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, x1); in Generate_Construct()
3052 __ Jump(masm->isolate()->builtins()->CallFunction(), in Generate_Construct()
3058 __ bind(&non_constructor); in Generate_Construct()
3059 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), in Generate_Construct()
3073 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate_ConstructWithSpread()
3083 __ SmiTag(x1); in Generate_AllocateInNewSpace()
3084 __ Push(x1); in Generate_AllocateInNewSpace()
3085 __ Move(cp, Smi::kZero); in Generate_AllocateInNewSpace()
3086 __ TailCallRuntime(Runtime::kAllocateInNewSpace); in Generate_AllocateInNewSpace()
3096 __ SmiTag(x1); in Generate_AllocateInOldSpace()
3097 __ Move(x2, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE))); in Generate_AllocateInOldSpace()
3098 __ Push(x1, x2); in Generate_AllocateInOldSpace()
3099 __ Move(cp, Smi::kZero); in Generate_AllocateInOldSpace()
3100 __ TailCallRuntime(Runtime::kAllocateInTargetSpace); in Generate_AllocateInOldSpace()
3111 __ Push(x1); in Generate_Abort()
3112 __ Move(cp, Smi::kZero); in Generate_Abort()
3113 __ TailCallRuntime(Runtime::kAbort); in Generate_Abort()
3133 __ Cmp(argc_actual, argc_expected); in Generate_ArgumentsAdaptorTrampoline()
3134 __ B(lt, &too_few); in Generate_ArgumentsAdaptorTrampoline()
3135 __ Cmp(argc_expected, SharedFunctionInfo::kDontAdaptArgumentsSentinel); in Generate_ArgumentsAdaptorTrampoline()
3136 __ B(eq, &dont_adapt_arguments); in Generate_ArgumentsAdaptorTrampoline()
3147 __ Lsl(scratch2, argc_expected, kPointerSizeLog2); in Generate_ArgumentsAdaptorTrampoline()
3150 __ Add(copy_start, fp, 3 * kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
3151 __ Add(copy_start, copy_start, Operand(argc_actual, LSL, kPointerSizeLog2)); in Generate_ArgumentsAdaptorTrampoline()
3152 __ Sub(copy_end, copy_start, scratch2); in Generate_ArgumentsAdaptorTrampoline()
3153 __ Sub(copy_end, copy_end, kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
3154 __ Mov(copy_to, jssp); in Generate_ArgumentsAdaptorTrampoline()
3159 __ Add(scratch1, scratch2, 2 * kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
3160 __ Claim(scratch1, 1); in Generate_ArgumentsAdaptorTrampoline()
3164 __ Bind(&copy_2_by_2); in Generate_ArgumentsAdaptorTrampoline()
3165 __ Ldp(scratch1, scratch2, in Generate_ArgumentsAdaptorTrampoline()
3167 __ Stp(scratch1, scratch2, in Generate_ArgumentsAdaptorTrampoline()
3169 __ Cmp(copy_start, copy_end); in Generate_ArgumentsAdaptorTrampoline()
3170 __ B(hi, &copy_2_by_2); in Generate_ArgumentsAdaptorTrampoline()
3173 __ Drop(1); in Generate_ArgumentsAdaptorTrampoline()
3175 __ B(&invoke); in Generate_ArgumentsAdaptorTrampoline()
3179 __ Bind(&too_few); in Generate_ArgumentsAdaptorTrampoline()
3189 __ Lsl(scratch2, argc_expected, kPointerSizeLog2); in Generate_ArgumentsAdaptorTrampoline()
3190 __ Lsl(argc_actual, argc_actual, kPointerSizeLog2); in Generate_ArgumentsAdaptorTrampoline()
3193 __ Add(copy_from, fp, 3 * kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
3194 __ Add(copy_from, copy_from, argc_actual); in Generate_ArgumentsAdaptorTrampoline()
3195 __ Mov(copy_to, jssp); in Generate_ArgumentsAdaptorTrampoline()
3196 __ Sub(copy_end, copy_to, 1 * kPointerSize); // Adjust for the receiver. in Generate_ArgumentsAdaptorTrampoline()
3197 __ Sub(copy_end, copy_end, argc_actual); in Generate_ArgumentsAdaptorTrampoline()
3202 __ Add(scratch1, scratch2, 2 * kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
3203 __ Claim(scratch1, 1); in Generate_ArgumentsAdaptorTrampoline()
3207 __ Bind(&copy_2_by_2); in Generate_ArgumentsAdaptorTrampoline()
3208 __ Ldp(scratch1, scratch2, in Generate_ArgumentsAdaptorTrampoline()
3210 __ Stp(scratch1, scratch2, in Generate_ArgumentsAdaptorTrampoline()
3212 __ Cmp(copy_to, copy_end); in Generate_ArgumentsAdaptorTrampoline()
3213 __ B(hi, &copy_2_by_2); in Generate_ArgumentsAdaptorTrampoline()
3215 __ Mov(copy_to, copy_end); in Generate_ArgumentsAdaptorTrampoline()
3218 __ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex); in Generate_ArgumentsAdaptorTrampoline()
3219 __ Add(copy_end, jssp, kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
3222 __ Bind(&fill); in Generate_ArgumentsAdaptorTrampoline()
3223 __ Stp(scratch1, scratch1, in Generate_ArgumentsAdaptorTrampoline()
3225 __ Cmp(copy_to, copy_end); in Generate_ArgumentsAdaptorTrampoline()
3226 __ B(hi, &fill); in Generate_ArgumentsAdaptorTrampoline()
3229 __ Drop(1); in Generate_ArgumentsAdaptorTrampoline()
3233 __ Bind(&invoke); in Generate_ArgumentsAdaptorTrampoline()
3234 __ Mov(argc_actual, argc_expected); in Generate_ArgumentsAdaptorTrampoline()
3238 __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset)); in Generate_ArgumentsAdaptorTrampoline()
3239 __ Call(code_entry); in Generate_ArgumentsAdaptorTrampoline()
3246 __ Ret(); in Generate_ArgumentsAdaptorTrampoline()
3249 __ Bind(&dont_adapt_arguments); in Generate_ArgumentsAdaptorTrampoline()
3250 __ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeEntryOffset)); in Generate_ArgumentsAdaptorTrampoline()
3251 __ Jump(code_entry); in Generate_ArgumentsAdaptorTrampoline()
3253 __ Bind(&stack_overflow); in Generate_ArgumentsAdaptorTrampoline()
3256 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_ArgumentsAdaptorTrampoline()
3257 __ Unreachable(); in Generate_ArgumentsAdaptorTrampoline()
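Generate_ArgumentsAdaptorTrampoline() reconciles the actual and expected argument counts: when enough arguments are passed, the expected number are copied into the new frame; when too few, the actual ones are copied and the remaining slots are filled with undefined; and the kDontAdaptArgumentsSentinel value bypasses adaptation and jumps straight to the code entry. A minimal sketch of what the callee ends up seeing, with an invented sentinel value and a stand-in for undefined; not V8 code.

    #include <cstddef>
    #include <vector>

    constexpr int kDontAdaptArgumentsSentinel = -1;  // stand-in value only
    constexpr int kUndefinedValue = 0;               // stand-in for undefined

    std::vector<int> AdaptArguments(const std::vector<int>& actual, int expected) {
      if (expected == kDontAdaptArgumentsSentinel)
        return actual;                               // &dont_adapt_arguments
      std::vector<int> adapted;
      adapted.reserve(static_cast<std::size_t>(expected));
      for (int i = 0; i < expected; ++i)
        adapted.push_back(i < static_cast<int>(actual.size()) ? actual[i]
                                                              : kUndefinedValue);
      return adapted;
    }

Excess arguments are simply dropped from the callee's parameter view in this sketch; the real trampoline still records the actual count in the adaptor frame, which is what the adaptor-frame length slot read elsewhere in this listing refers to.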
3261 #undef __