Lines matching full:__ — every use of the ACCESS_MASM shorthand in V8's MIPS64 builtins generator (builtins-mips64.cc). The left-hand number is the line's position in that source file; only lines containing __ are listed, so statements that span several source lines appear truncated at their first line.

16 #define __ ACCESS_MASM(masm)  macro
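A note on the shorthand: in V8, the companion macro ACCESS_MASM(masm) expands to masm->, so every __ line below is a call on the current MacroAssembler. A minimal sketch of the convention (Generate_Example is a made-up name for illustration; MacroAssembler and the register names come from V8's assembler headers):

    #define ACCESS_MASM(masm) masm->
    #define __ ACCESS_MASM(masm)

    void Generate_Example(MacroAssembler* masm) {
      __ SmiTag(a0);        // expands to masm->SmiTag(a0);
      __ Push(a0, a1, a3);  // expands to masm->Push(a0, a1, a3);
    }

    #undef __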
29 __ AssertFunction(a1); in Generate_Adaptor()
35 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); in Generate_Adaptor()
40 __ Daddu(a0, a0, num_extra_args + 1); in Generate_Adaptor()
43 __ SmiTag(a0); in Generate_Adaptor()
44 __ Push(a0, a1, a3); in Generate_Adaptor()
45 __ SmiUntag(a0); in Generate_Adaptor()
47 __ JumpToExternalReference(ExternalReference(address, masm->isolate()), in Generate_Adaptor()
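The SmiTag/SmiUntag pair around the Push above round-trips the argument count through V8's tagged Smi encoding. On MIPS64 with the default 32-bit Smi layout, tagging is a 32-bit left shift (compare the dsll32 at source line 2140 below). A model in plain C++:

    #include <cstdint>

    // The Smi payload lives in the upper 32 bits of the 64-bit word,
    // leaving tag bit 0 clear (heap pointers carry tag bit 1).
    inline int64_t SmiTag(int32_t value) {
      return static_cast<int64_t>(value) << 32;  // dsll32 reg, reg, 0
    }
    inline int32_t SmiUntag(int64_t smi) {
      return static_cast<int32_t>(smi >> 32);
    }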
55 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result); in GenerateLoadInternalArrayFunction()
61 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result); in GenerateLoadArrayFunction()
77 __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate_InternalArrayCode()
78 __ SmiTst(a2, a4); in Generate_InternalArrayCode()
79 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, a4, in Generate_InternalArrayCode()
81 __ GetObjectType(a2, a3, a4); in Generate_InternalArrayCode()
82 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction, a4, in Generate_InternalArrayCode()
90 __ TailCallStub(&stub); in Generate_InternalArrayCode()
106 __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate_ArrayCode()
107 __ SmiTst(a2, a4); in Generate_ArrayCode()
108 __ Assert(ne, kUnexpectedInitialMapForArrayFunction1, a4, in Generate_ArrayCode()
110 __ GetObjectType(a2, a3, a4); in Generate_ArrayCode()
111 __ Assert(eq, kUnexpectedInitialMapForArrayFunction2, a4, in Generate_ArrayCode()
117 __ mov(a3, a1); in Generate_ArrayCode()
118 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); in Generate_ArrayCode()
120 __ TailCallStub(&stub); in Generate_ArrayCode()
139 __ LoadRoot(t1, root_index); in Generate_MathMaxMin()
140 __ ldc1(f0, FieldMemOperand(t1, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
143 __ mov(a3, a0); in Generate_MathMaxMin()
144 __ bind(&loop); in Generate_MathMaxMin()
147 __ Dsubu(a3, a3, Operand(1)); in Generate_MathMaxMin()
148 __ Branch(&done_loop, lt, a3, Operand(zero_reg)); in Generate_MathMaxMin()
151 __ Dlsa(at, sp, a3, kPointerSizeLog2); in Generate_MathMaxMin()
152 __ ld(a2, MemOperand(at)); in Generate_MathMaxMin()
157 __ bind(&convert); in Generate_MathMaxMin()
158 __ JumpIfSmi(a2, &convert_smi); in Generate_MathMaxMin()
159 __ ld(a4, FieldMemOperand(a2, HeapObject::kMapOffset)); in Generate_MathMaxMin()
160 __ JumpIfRoot(a4, Heap::kHeapNumberMapRootIndex, &convert_number); in Generate_MathMaxMin()
164 __ SmiTag(a0); in Generate_MathMaxMin()
165 __ SmiTag(a3); in Generate_MathMaxMin()
166 __ EnterBuiltinFrame(cp, a1, a0); in Generate_MathMaxMin()
167 __ Push(t1, a3); in Generate_MathMaxMin()
168 __ mov(a0, a2); in Generate_MathMaxMin()
169 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); in Generate_MathMaxMin()
170 __ mov(a2, v0); in Generate_MathMaxMin()
171 __ Pop(t1, a3); in Generate_MathMaxMin()
172 __ LeaveBuiltinFrame(cp, a1, a0); in Generate_MathMaxMin()
173 __ SmiUntag(a3); in Generate_MathMaxMin()
174 __ SmiUntag(a0); in Generate_MathMaxMin()
178 __ JumpIfSmi(t1, &restore_smi); in Generate_MathMaxMin()
179 __ ldc1(f0, FieldMemOperand(t1, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
180 __ jmp(&done_restore); in Generate_MathMaxMin()
181 __ bind(&restore_smi); in Generate_MathMaxMin()
182 __ SmiToDoubleFPURegister(t1, f0, a4); in Generate_MathMaxMin()
183 __ bind(&done_restore); in Generate_MathMaxMin()
186 __ jmp(&convert); in Generate_MathMaxMin()
187 __ bind(&convert_number); in Generate_MathMaxMin()
188 __ ldc1(f2, FieldMemOperand(a2, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
189 __ jmp(&done_convert); in Generate_MathMaxMin()
190 __ bind(&convert_smi); in Generate_MathMaxMin()
191 __ SmiToDoubleFPURegister(a2, f2, a4); in Generate_MathMaxMin()
192 __ bind(&done_convert); in Generate_MathMaxMin()
199 __ BranchF(nullptr, &compare_nan, eq, f0, f2); in Generate_MathMaxMin()
200 __ Move(a4, f0); in Generate_MathMaxMin()
202 __ Float64Min(f0, f0, f2, &ool_min); in Generate_MathMaxMin()
205 __ Float64Max(f0, f0, f2, &ool_max); in Generate_MathMaxMin()
207 __ jmp(&done); in Generate_MathMaxMin()
209 __ bind(&ool_min); in Generate_MathMaxMin()
210 __ Float64MinOutOfLine(f0, f0, f2); in Generate_MathMaxMin()
211 __ jmp(&done); in Generate_MathMaxMin()
213 __ bind(&ool_max); in Generate_MathMaxMin()
214 __ Float64MaxOutOfLine(f0, f0, f2); in Generate_MathMaxMin()
216 __ bind(&done); in Generate_MathMaxMin()
217 __ Move(at, f0); in Generate_MathMaxMin()
218 __ Branch(&loop, eq, a4, Operand(at)); in Generate_MathMaxMin()
219 __ mov(t1, a2); in Generate_MathMaxMin()
220 __ jmp(&loop); in Generate_MathMaxMin()
223 __ bind(&compare_nan); in Generate_MathMaxMin()
224 __ LoadRoot(t1, Heap::kNanValueRootIndex); in Generate_MathMaxMin()
225 __ ldc1(f0, FieldMemOperand(t1, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
226 __ jmp(&loop); in Generate_MathMaxMin()
229 __ bind(&done_loop); in Generate_MathMaxMin()
231 __ Daddu(a0, a0, Operand(1)); in Generate_MathMaxMin()
232 __ Dlsa(sp, sp, a0, kPointerSizeLog2); in Generate_MathMaxMin()
233 __ Ret(USE_DELAY_SLOT); in Generate_MathMaxMin()
234 __ mov(v0, t1); // In delay slot. in Generate_MathMaxMin()
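Taken together, Generate_MathMaxMin walks the arguments from last to first, coerces each to a number (calling the ToNumber builtin for anything that is not already a Smi or HeapNumber), and folds them with Float64Min/Float64Max, switching the accumulator to NaN whenever a comparison is unordered. A semantic sketch of the max case, assuming the arguments have already been coerced to doubles (the min case is symmetric with a +Infinity seed):

    #include <cmath>
    #include <limits>
    #include <vector>

    double MathMax(const std::vector<double>& args) {
      double acc = -std::numeric_limits<double>::infinity();   // LoadRoot seed
      for (auto it = args.rbegin(); it != args.rend(); ++it) {
        if (std::isnan(*it))                                   // compare_nan path
          return std::numeric_limits<double>::quiet_NaN();
        acc = std::fmax(acc, *it);  // Float64Max; the out-of-line slow path
                                    // settles -0 vs +0, which fmax may not
      }
      return acc;
    }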
252 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg)); in Generate_NumberConstructor()
253 __ Dsubu(t1, a0, Operand(1)); // In delay slot. in Generate_NumberConstructor()
254 __ mov(t0, a0); // Store argc in t0. in Generate_NumberConstructor()
255 __ Dlsa(at, sp, t1, kPointerSizeLog2); in Generate_NumberConstructor()
256 __ ld(a0, MemOperand(at)); in Generate_NumberConstructor()
262 __ SmiTag(t0); in Generate_NumberConstructor()
263 __ EnterBuiltinFrame(cp, a1, t0); in Generate_NumberConstructor()
264 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); in Generate_NumberConstructor()
265 __ LeaveBuiltinFrame(cp, a1, t0); in Generate_NumberConstructor()
266 __ SmiUntag(t0); in Generate_NumberConstructor()
271 __ Dlsa(sp, sp, t0, kPointerSizeLog2); in Generate_NumberConstructor()
272 __ DropAndRet(1); in Generate_NumberConstructor()
276 __ bind(&no_arguments); in Generate_NumberConstructor()
277 __ Move(v0, Smi::kZero); in Generate_NumberConstructor()
278 __ DropAndRet(1); in Generate_NumberConstructor()
293 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); in Generate_NumberConstructor_ConstructStub()
299 __ mov(t0, a0); // Store argc in t0. in Generate_NumberConstructor_ConstructStub()
300 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg)); in Generate_NumberConstructor_ConstructStub()
301 __ Dsubu(a0, a0, Operand(1)); // In delay slot. in Generate_NumberConstructor_ConstructStub()
302 __ Dlsa(at, sp, a0, kPointerSizeLog2); in Generate_NumberConstructor_ConstructStub()
303 __ ld(a0, MemOperand(at)); in Generate_NumberConstructor_ConstructStub()
304 __ jmp(&done); in Generate_NumberConstructor_ConstructStub()
305 __ bind(&no_arguments); in Generate_NumberConstructor_ConstructStub()
306 __ Move(a0, Smi::kZero); in Generate_NumberConstructor_ConstructStub()
307 __ bind(&done); in Generate_NumberConstructor_ConstructStub()
313 __ JumpIfSmi(a0, &done_convert); in Generate_NumberConstructor_ConstructStub()
314 __ GetObjectType(a0, a2, a2); in Generate_NumberConstructor_ConstructStub()
315 __ Branch(&done_convert, eq, a2, Operand(HEAP_NUMBER_TYPE)); in Generate_NumberConstructor_ConstructStub()
318 __ SmiTag(t0); in Generate_NumberConstructor_ConstructStub()
319 __ EnterBuiltinFrame(cp, a1, t0); in Generate_NumberConstructor_ConstructStub()
320 __ Push(a3); in Generate_NumberConstructor_ConstructStub()
321 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); in Generate_NumberConstructor_ConstructStub()
322 __ Move(a0, v0); in Generate_NumberConstructor_ConstructStub()
323 __ Pop(a3); in Generate_NumberConstructor_ConstructStub()
324 __ LeaveBuiltinFrame(cp, a1, t0); in Generate_NumberConstructor_ConstructStub()
325 __ SmiUntag(t0); in Generate_NumberConstructor_ConstructStub()
327 __ bind(&done_convert); in Generate_NumberConstructor_ConstructStub()
332 __ Branch(&new_object, ne, a1, Operand(a3)); in Generate_NumberConstructor_ConstructStub()
335 __ AllocateJSValue(v0, a1, a0, a2, t1, &new_object); in Generate_NumberConstructor_ConstructStub()
336 __ jmp(&drop_frame_and_ret); in Generate_NumberConstructor_ConstructStub()
339 __ bind(&new_object); in Generate_NumberConstructor_ConstructStub()
342 __ SmiTag(t0); in Generate_NumberConstructor_ConstructStub()
343 __ EnterBuiltinFrame(cp, a1, t0); in Generate_NumberConstructor_ConstructStub()
344 __ Push(a0); in Generate_NumberConstructor_ConstructStub()
345 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(), in Generate_NumberConstructor_ConstructStub()
347 __ Pop(a0); in Generate_NumberConstructor_ConstructStub()
348 __ LeaveBuiltinFrame(cp, a1, t0); in Generate_NumberConstructor_ConstructStub()
349 __ SmiUntag(t0); in Generate_NumberConstructor_ConstructStub()
351 __ sd(a0, FieldMemOperand(v0, JSValue::kValueOffset)); in Generate_NumberConstructor_ConstructStub()
353 __ bind(&drop_frame_and_ret); in Generate_NumberConstructor_ConstructStub()
355 __ Dlsa(sp, sp, t0, kPointerSizeLog2); in Generate_NumberConstructor_ConstructStub()
356 __ DropAndRet(1); in Generate_NumberConstructor_ConstructStub()
375 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg)); in Generate_StringConstructor()
376 __ Dsubu(t1, a0, Operand(1)); // In delay slot. in Generate_StringConstructor()
377 __ mov(t0, a0); // Store argc in t0. in Generate_StringConstructor()
378 __ Dlsa(at, sp, t1, kPointerSizeLog2); in Generate_StringConstructor()
379 __ ld(a0, MemOperand(at)); in Generate_StringConstructor()
386 __ JumpIfSmi(a0, &to_string); in Generate_StringConstructor()
387 __ GetObjectType(a0, t1, t1); in Generate_StringConstructor()
389 __ Subu(t1, t1, Operand(FIRST_NONSTRING_TYPE)); in Generate_StringConstructor()
390 __ Branch(&symbol_descriptive_string, eq, t1, Operand(zero_reg)); in Generate_StringConstructor()
391 __ Branch(&to_string, gt, t1, Operand(zero_reg)); in Generate_StringConstructor()
392 __ mov(v0, a0); in Generate_StringConstructor()
393 __ jmp(&drop_frame_and_ret); in Generate_StringConstructor()
397 __ bind(&no_arguments); in Generate_StringConstructor()
399 __ LoadRoot(v0, Heap::kempty_stringRootIndex); in Generate_StringConstructor()
400 __ DropAndRet(1); in Generate_StringConstructor()
404 __ bind(&to_string); in Generate_StringConstructor()
407 __ SmiTag(t0); in Generate_StringConstructor()
408 __ EnterBuiltinFrame(cp, a1, t0); in Generate_StringConstructor()
409 __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET); in Generate_StringConstructor()
410 __ LeaveBuiltinFrame(cp, a1, t0); in Generate_StringConstructor()
411 __ SmiUntag(t0); in Generate_StringConstructor()
413 __ jmp(&drop_frame_and_ret); in Generate_StringConstructor()
416 __ bind(&symbol_descriptive_string); in Generate_StringConstructor()
418 __ Dlsa(sp, sp, t0, kPointerSizeLog2); in Generate_StringConstructor()
419 __ Drop(1); in Generate_StringConstructor()
420 __ Push(a0); in Generate_StringConstructor()
421 __ TailCallRuntime(Runtime::kSymbolDescriptiveString); in Generate_StringConstructor()
424 __ bind(&drop_frame_and_ret); in Generate_StringConstructor()
426 __ Dlsa(sp, sp, t0, kPointerSizeLog2); in Generate_StringConstructor()
427 __ DropAndRet(1); in Generate_StringConstructor()
443 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); in Generate_StringConstructor_ConstructStub()
449 __ mov(t0, a0); // Store argc in t0. in Generate_StringConstructor_ConstructStub()
450 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg)); in Generate_StringConstructor_ConstructStub()
451 __ Dsubu(a0, a0, Operand(1)); in Generate_StringConstructor_ConstructStub()
452 __ Dlsa(at, sp, a0, kPointerSizeLog2); in Generate_StringConstructor_ConstructStub()
453 __ ld(a0, MemOperand(at)); in Generate_StringConstructor_ConstructStub()
454 __ jmp(&done); in Generate_StringConstructor_ConstructStub()
455 __ bind(&no_arguments); in Generate_StringConstructor_ConstructStub()
456 __ LoadRoot(a0, Heap::kempty_stringRootIndex); in Generate_StringConstructor_ConstructStub()
457 __ bind(&done); in Generate_StringConstructor_ConstructStub()
463 __ JumpIfSmi(a0, &convert); in Generate_StringConstructor_ConstructStub()
464 __ GetObjectType(a0, a2, a2); in Generate_StringConstructor_ConstructStub()
465 __ And(t1, a2, Operand(kIsNotStringMask)); in Generate_StringConstructor_ConstructStub()
466 __ Branch(&done_convert, eq, t1, Operand(zero_reg)); in Generate_StringConstructor_ConstructStub()
467 __ bind(&convert); in Generate_StringConstructor_ConstructStub()
470 __ SmiTag(t0); in Generate_StringConstructor_ConstructStub()
471 __ EnterBuiltinFrame(cp, a1, t0); in Generate_StringConstructor_ConstructStub()
472 __ Push(a3); in Generate_StringConstructor_ConstructStub()
473 __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET); in Generate_StringConstructor_ConstructStub()
474 __ Move(a0, v0); in Generate_StringConstructor_ConstructStub()
475 __ Pop(a3); in Generate_StringConstructor_ConstructStub()
476 __ LeaveBuiltinFrame(cp, a1, t0); in Generate_StringConstructor_ConstructStub()
477 __ SmiUntag(t0); in Generate_StringConstructor_ConstructStub()
479 __ bind(&done_convert); in Generate_StringConstructor_ConstructStub()
484 __ Branch(&new_object, ne, a1, Operand(a3)); in Generate_StringConstructor_ConstructStub()
487 __ AllocateJSValue(v0, a1, a0, a2, t1, &new_object); in Generate_StringConstructor_ConstructStub()
488 __ jmp(&drop_frame_and_ret); in Generate_StringConstructor_ConstructStub()
491 __ bind(&new_object); in Generate_StringConstructor_ConstructStub()
494 __ SmiTag(t0); in Generate_StringConstructor_ConstructStub()
495 __ EnterBuiltinFrame(cp, a1, t0); in Generate_StringConstructor_ConstructStub()
496 __ Push(a0); in Generate_StringConstructor_ConstructStub()
497 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(), in Generate_StringConstructor_ConstructStub()
499 __ Pop(a0); in Generate_StringConstructor_ConstructStub()
500 __ LeaveBuiltinFrame(cp, a1, t0); in Generate_StringConstructor_ConstructStub()
501 __ SmiUntag(t0); in Generate_StringConstructor_ConstructStub()
503 __ sd(a0, FieldMemOperand(v0, JSValue::kValueOffset)); in Generate_StringConstructor_ConstructStub()
505 __ bind(&drop_frame_and_ret); in Generate_StringConstructor_ConstructStub()
507 __ Dlsa(sp, sp, t0, kPointerSizeLog2); in Generate_StringConstructor_ConstructStub()
508 __ DropAndRet(1); in Generate_StringConstructor_ConstructStub()
513 __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in GenerateTailCallToSharedCode()
514 __ ld(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset)); in GenerateTailCallToSharedCode()
515 __ Daddu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag)); in GenerateTailCallToSharedCode()
516 __ Jump(at); in GenerateTailCallToSharedCode()
530 __ SmiTag(a0); in GenerateTailCallToReturnedCode()
531 __ Push(a0, a1, a3, a1); in GenerateTailCallToReturnedCode()
533 __ CallRuntime(function_id, 1); in GenerateTailCallToReturnedCode()
535 __ Pop(a0, a1, a3); in GenerateTailCallToReturnedCode()
536 __ SmiUntag(a0); in GenerateTailCallToReturnedCode()
539 __ Daddu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); in GenerateTailCallToReturnedCode()
540 __ Jump(at); in GenerateTailCallToReturnedCode()
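GenerateTailCallToSharedCode and GenerateTailCallToReturnedCode both finish with an idiom that recurs throughout this file: a Code object is a tagged heap pointer, so its first instruction sits at Code::kHeaderSize minus the heap-object tag past the pointer. As a sketch (kHeapObjectTag is 1 in V8; the header size is taken as a parameter here since it is architecture-dependent):

    #include <cstdint>

    using CodeEntry = void (*)();

    CodeEntry EntryOf(uintptr_t tagged_code_object, uintptr_t code_header_size) {
      const uintptr_t kHeapObjectTag = 1;
      // Daddu(at, code, Code::kHeaderSize - kHeapObjectTag); Jump(at);
      return reinterpret_cast<CodeEntry>(tagged_code_object + code_header_size -
                                         kHeapObjectTag);
    }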
550 __ LoadRoot(a4, Heap::kStackLimitRootIndex); in Generate_InOptimizationQueue()
551 __ Branch(&ok, hs, sp, Operand(a4)); in Generate_InOptimizationQueue()
555 __ bind(&ok); in Generate_InOptimizationQueue()
582 __ SmiTag(a0); in Generate_JSConstructStubHelper()
583 __ Push(cp, a0); in Generate_JSConstructStubHelper()
586 __ Push(a1, a3); in Generate_JSConstructStubHelper()
587 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(), in Generate_JSConstructStubHelper()
589 __ mov(t0, v0); in Generate_JSConstructStubHelper()
590 __ Pop(a1, a3); in Generate_JSConstructStubHelper()
597 __ ld(a0, MemOperand(sp)); in Generate_JSConstructStubHelper()
599 __ SmiUntag(a0); in Generate_JSConstructStubHelper()
605 __ Push(t0, t0); in Generate_JSConstructStubHelper()
607 __ PushRoot(Heap::kTheHoleValueRootIndex); in Generate_JSConstructStubHelper()
611 __ bind(&post_instantiation_deopt_entry); in Generate_JSConstructStubHelper()
614 __ Daddu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate_JSConstructStubHelper()
626 __ mov(t0, a0); in Generate_JSConstructStubHelper()
627 __ jmp(&entry); in Generate_JSConstructStubHelper()
628 __ bind(&loop); in Generate_JSConstructStubHelper()
629 __ Dlsa(a4, a2, t0, kPointerSizeLog2); in Generate_JSConstructStubHelper()
630 __ ld(a5, MemOperand(a4)); in Generate_JSConstructStubHelper()
631 __ push(a5); in Generate_JSConstructStubHelper()
632 __ bind(&entry); in Generate_JSConstructStubHelper()
633 __ Daddu(t0, t0, Operand(-1)); in Generate_JSConstructStubHelper()
634 __ Branch(&loop, greater_equal, t0, Operand(zero_reg)); in Generate_JSConstructStubHelper()
641 __ InvokeFunction(a1, a3, actual, CALL_FUNCTION, in Generate_JSConstructStubHelper()
651 __ ld(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset)); in Generate_JSConstructStubHelper()
663 __ JumpIfSmi(v0, &use_receiver); in Generate_JSConstructStubHelper()
667 __ GetObjectType(v0, a1, a3); in Generate_JSConstructStubHelper()
668 __ Branch(&exit, greater_equal, a3, Operand(FIRST_JS_RECEIVER_TYPE)); in Generate_JSConstructStubHelper()
672 __ bind(&use_receiver); in Generate_JSConstructStubHelper()
673 __ ld(v0, MemOperand(sp)); in Generate_JSConstructStubHelper()
677 __ bind(&exit); in Generate_JSConstructStubHelper()
681 __ ld(a1, MemOperand(sp, 1 * kPointerSize)); in Generate_JSConstructStubHelper()
683 __ ld(a1, MemOperand(sp)); in Generate_JSConstructStubHelper()
694 __ JumpIfNotSmi(v0, &dont_throw); in Generate_JSConstructStubHelper()
697 __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject); in Generate_JSConstructStubHelper()
699 __ bind(&dont_throw); in Generate_JSConstructStubHelper()
702 __ SmiScale(a4, a1, kPointerSizeLog2); in Generate_JSConstructStubHelper()
703 __ Daddu(sp, sp, a4); in Generate_JSConstructStubHelper()
704 __ Daddu(sp, sp, kPointerSize); in Generate_JSConstructStubHelper()
706 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2); in Generate_JSConstructStubHelper()
708 __ Ret(); in Generate_JSConstructStubHelper()
722 __ Pop(a1); in Generate_JSConstructStubHelper()
723 __ Push(a0, a0); in Generate_JSConstructStubHelper()
726 __ ld(a0, MemOperand(fp, ConstructFrameConstants::kLengthOffset)); in Generate_JSConstructStubHelper()
727 __ SmiUntag(a0); in Generate_JSConstructStubHelper()
731 __ Daddu(a3, fp, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate_JSConstructStubHelper()
732 __ Dlsa(a3, a3, a0, kPointerSizeLog2); in Generate_JSConstructStubHelper()
733 __ ld(a3, MemOperand(a3)); in Generate_JSConstructStubHelper()
736 __ jmp(&post_instantiation_deopt_entry); in Generate_JSConstructStubHelper()
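The loop at source lines 628-634 above re-pushes the constructor's arguments from the caller frame (a2 points at the first caller argument). Equivalent pointer logic, as a sketch: counting down from argc - 1 pushes the last argument first, so the copies land on the stack in their original order.

    #include <cstdint>

    void CopyCallerArgs(uint64_t*& sp, const uint64_t* caller_args, int64_t argc) {
      for (int64_t i = argc - 1; i >= 0; --i) {  // Daddu(t0, t0, -1) / Branch ge
        *--sp = caller_args[i];                  // Dlsa + ld + push
      }
    }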
767 __ AssertGeneratorObject(a1); in Generate_ResumeGeneratorTrampoline()
770 __ sd(v0, FieldMemOperand(a1, JSGeneratorObject::kInputOrDebugPosOffset)); in Generate_ResumeGeneratorTrampoline()
771 __ RecordWriteField(a1, JSGeneratorObject::kInputOrDebugPosOffset, v0, a3, in Generate_ResumeGeneratorTrampoline()
775 __ sd(a2, FieldMemOperand(a1, JSGeneratorObject::kResumeModeOffset)); in Generate_ResumeGeneratorTrampoline()
778 __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset)); in Generate_ResumeGeneratorTrampoline()
779 __ ld(cp, FieldMemOperand(a4, JSFunction::kContextOffset)); in Generate_ResumeGeneratorTrampoline()
786 __ li(a5, Operand(debug_hook)); in Generate_ResumeGeneratorTrampoline()
787 __ lb(a5, MemOperand(a5)); in Generate_ResumeGeneratorTrampoline()
788 __ Branch(&prepare_step_in_if_stepping, ne, a5, Operand(zero_reg)); in Generate_ResumeGeneratorTrampoline()
793 __ li(a5, Operand(debug_suspended_generator)); in Generate_ResumeGeneratorTrampoline()
794 __ ld(a5, MemOperand(a5)); in Generate_ResumeGeneratorTrampoline()
795 __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(a5)); in Generate_ResumeGeneratorTrampoline()
796 __ bind(&stepping_prepared); in Generate_ResumeGeneratorTrampoline()
799 __ ld(a5, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset)); in Generate_ResumeGeneratorTrampoline()
800 __ Push(a5); in Generate_ResumeGeneratorTrampoline()
815 __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset)); in Generate_ResumeGeneratorTrampoline()
816 __ lw(a3, in Generate_ResumeGeneratorTrampoline()
820 __ bind(&loop); in Generate_ResumeGeneratorTrampoline()
821 __ Dsubu(a3, a3, Operand(1)); in Generate_ResumeGeneratorTrampoline()
822 __ Branch(&done_loop, lt, a3, Operand(zero_reg)); in Generate_ResumeGeneratorTrampoline()
823 __ PushRoot(Heap::kTheHoleValueRootIndex); in Generate_ResumeGeneratorTrampoline()
824 __ Branch(&loop); in Generate_ResumeGeneratorTrampoline()
825 __ bind(&done_loop); in Generate_ResumeGeneratorTrampoline()
830 __ ld(a3, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset)); in Generate_ResumeGeneratorTrampoline()
831 __ ld(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset)); in Generate_ResumeGeneratorTrampoline()
832 __ GetObjectType(a3, a3, a3); in Generate_ResumeGeneratorTrampoline()
833 __ Assert(eq, kMissingBytecodeArray, a3, Operand(BYTECODE_ARRAY_TYPE)); in Generate_ResumeGeneratorTrampoline()
838 __ ld(a0, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset)); in Generate_ResumeGeneratorTrampoline()
839 __ lw(a0, in Generate_ResumeGeneratorTrampoline()
844 __ Move(a3, a1); in Generate_ResumeGeneratorTrampoline()
845 __ Move(a1, a4); in Generate_ResumeGeneratorTrampoline()
846 __ ld(a2, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); in Generate_ResumeGeneratorTrampoline()
847 __ Jump(a2); in Generate_ResumeGeneratorTrampoline()
850 __ bind(&prepare_step_in_if_stepping); in Generate_ResumeGeneratorTrampoline()
853 __ Push(a1, a2, a4); in Generate_ResumeGeneratorTrampoline()
854 __ CallRuntime(Runtime::kDebugOnFunctionCall); in Generate_ResumeGeneratorTrampoline()
855 __ Pop(a1, a2); in Generate_ResumeGeneratorTrampoline()
857 __ Branch(USE_DELAY_SLOT, &stepping_prepared); in Generate_ResumeGeneratorTrampoline()
858 __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset)); in Generate_ResumeGeneratorTrampoline()
860 __ bind(&prepare_step_in_suspended_generator); in Generate_ResumeGeneratorTrampoline()
863 __ Push(a1, a2); in Generate_ResumeGeneratorTrampoline()
864 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator); in Generate_ResumeGeneratorTrampoline()
865 __ Pop(a1, a2); in Generate_ResumeGeneratorTrampoline()
867 __ Branch(USE_DELAY_SLOT, &stepping_prepared); in Generate_ResumeGeneratorTrampoline()
868 __ ld(a4, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset)); in Generate_ResumeGeneratorTrampoline()
873 __ Push(a1); in Generate_ConstructedNonConstructable()
874 __ CallRuntime(Runtime::kThrowConstructedNonConstructable); in Generate_ConstructedNonConstructable()
886 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); in Generate_CheckStackOverflow()
889 __ dsubu(a2, sp, a2); in Generate_CheckStackOverflow()
892 __ SmiScale(a7, v0, kPointerSizeLog2); in Generate_CheckStackOverflow()
895 __ dsll(a7, argc, kPointerSizeLog2); in Generate_CheckStackOverflow()
897 __ Branch(&okay, gt, a2, Operand(a7)); // Signed comparison. in Generate_CheckStackOverflow()
900 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_CheckStackOverflow()
902 __ bind(&okay); in Generate_CheckStackOverflow()
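Generate_CheckStackOverflow (and the near-identical Generate_StackOverflowCheck further down) compares the headroom between sp and the real stack limit against the space the incoming arguments will occupy. In outline:

    #include <cstdint>

    bool EnoughStackRoom(uintptr_t sp, uintptr_t real_stack_limit, uint64_t argc) {
      const unsigned kPointerSizeLog2 = 3;         // 8-byte slots on MIPS64
      uint64_t room   = sp - real_stack_limit;     // dsubu a2, sp, a2
      uint64_t needed = argc << kPointerSizeLog2;  // dsll  a7, argc, 3
      return room > needed;  // Branch(&okay, gt, ...); the asm compare is
                             // signed, so an already-blown sp also fails
    }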
925 __ li(cp, Operand(context_address)); in Generate_JSEntryTrampolineHelper()
926 __ ld(cp, MemOperand(cp)); in Generate_JSEntryTrampolineHelper()
929 __ Push(a1, a2); in Generate_JSEntryTrampolineHelper()
936 __ mov(a5, a0); in Generate_JSEntryTrampolineHelper()
942 __ Dlsa(a6, s0, a3, kPointerSizeLog2); in Generate_JSEntryTrampolineHelper()
943 __ b(&entry); in Generate_JSEntryTrampolineHelper()
944 __ nop(); // Branch delay slot nop. in Generate_JSEntryTrampolineHelper()
946 __ bind(&loop); in Generate_JSEntryTrampolineHelper()
947 __ ld(a4, MemOperand(s0)); // Read next parameter. in Generate_JSEntryTrampolineHelper()
948 __ daddiu(s0, s0, kPointerSize); in Generate_JSEntryTrampolineHelper()
949 __ ld(a4, MemOperand(a4)); // Dereference handle. in Generate_JSEntryTrampolineHelper()
950 __ push(a4); // Push parameter. in Generate_JSEntryTrampolineHelper()
951 __ bind(&entry); in Generate_JSEntryTrampolineHelper()
952 __ Branch(&loop, ne, s0, Operand(a6)); in Generate_JSEntryTrampolineHelper()
955 __ mov(a0, a3); in Generate_JSEntryTrampolineHelper()
956 __ mov(a3, a5); in Generate_JSEntryTrampolineHelper()
960 __ LoadRoot(a4, Heap::kUndefinedValueRootIndex); in Generate_JSEntryTrampolineHelper()
961 __ mov(s1, a4); in Generate_JSEntryTrampolineHelper()
962 __ mov(s2, a4); in Generate_JSEntryTrampolineHelper()
963 __ mov(s3, a4); in Generate_JSEntryTrampolineHelper()
964 __ mov(s4, a4); in Generate_JSEntryTrampolineHelper()
965 __ mov(s5, a4); in Generate_JSEntryTrampolineHelper()
973 __ Call(builtin, RelocInfo::CODE_TARGET); in Generate_JSEntryTrampolineHelper()
977 __ Jump(ra); in Generate_JSEntryTrampolineHelper()
992 __ ld(args_count, in LeaveInterpreterFrame()
994 __ lw(t0, FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset)); in LeaveInterpreterFrame()
997 __ LeaveFrame(StackFrame::JAVA_SCRIPT); in LeaveInterpreterFrame()
1000 __ Daddu(sp, sp, args_count); in LeaveInterpreterFrame()
1025 __ PushStandardFrame(a1); in Generate_InterpreterEntryTrampoline()
1029 __ ld(a0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_InterpreterEntryTrampoline()
1033 __ ld(debug_info, FieldMemOperand(a0, SharedFunctionInfo::kDebugInfoOffset)); in Generate_InterpreterEntryTrampoline()
1034 __ JumpIfNotSmi(debug_info, &load_debug_bytecode_array); in Generate_InterpreterEntryTrampoline()
1035 __ ld(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1037 __ bind(&bytecode_array_loaded); in Generate_InterpreterEntryTrampoline()
1041 __ ld(a0, FieldMemOperand(a0, SharedFunctionInfo::kCodeOffset)); in Generate_InterpreterEntryTrampoline()
1042 __ Branch(&switch_to_different_code_kind, ne, a0, in Generate_InterpreterEntryTrampoline()
1046 __ ld(a0, FieldMemOperand(a1, JSFunction::kFeedbackVectorOffset)); in Generate_InterpreterEntryTrampoline()
1047 __ ld(a0, FieldMemOperand(a0, Cell::kValueOffset)); in Generate_InterpreterEntryTrampoline()
1048 __ ld(a4, FieldMemOperand( in Generate_InterpreterEntryTrampoline()
1051 __ Daddu(a4, a4, Operand(Smi::FromInt(1))); in Generate_InterpreterEntryTrampoline()
1052 __ sd(a4, FieldMemOperand( in Generate_InterpreterEntryTrampoline()
1058 __ SmiTst(kInterpreterBytecodeArrayRegister, a4); in Generate_InterpreterEntryTrampoline()
1059 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a4, in Generate_InterpreterEntryTrampoline()
1061 __ GetObjectType(kInterpreterBytecodeArrayRegister, a4, a4); in Generate_InterpreterEntryTrampoline()
1062 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a4, in Generate_InterpreterEntryTrampoline()
1068 __ sb(zero_reg, FieldMemOperand(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1072 __ li(kInterpreterBytecodeOffsetRegister, in Generate_InterpreterEntryTrampoline()
1076 __ SmiTag(a4, kInterpreterBytecodeOffsetRegister); in Generate_InterpreterEntryTrampoline()
1077 __ Push(a3, kInterpreterBytecodeArrayRegister, a4); in Generate_InterpreterEntryTrampoline()
1082 __ lw(a4, FieldMemOperand(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1087 __ Dsubu(a5, sp, Operand(a4)); in Generate_InterpreterEntryTrampoline()
1088 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); in Generate_InterpreterEntryTrampoline()
1089 __ Branch(&ok, hs, a5, Operand(a2)); in Generate_InterpreterEntryTrampoline()
1090 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterEntryTrampoline()
1091 __ bind(&ok); in Generate_InterpreterEntryTrampoline()
1096 __ LoadRoot(a5, Heap::kUndefinedValueRootIndex); in Generate_InterpreterEntryTrampoline()
1097 __ Branch(&loop_check); in Generate_InterpreterEntryTrampoline()
1098 __ bind(&loop_header); in Generate_InterpreterEntryTrampoline()
1100 __ push(a5); in Generate_InterpreterEntryTrampoline()
1102 __ bind(&loop_check); in Generate_InterpreterEntryTrampoline()
1103 __ Dsubu(a4, a4, Operand(kPointerSize)); in Generate_InterpreterEntryTrampoline()
1104 __ Branch(&loop_header, ge, a4, Operand(zero_reg)); in Generate_InterpreterEntryTrampoline()
1108 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex); in Generate_InterpreterEntryTrampoline()
1109 __ li(kInterpreterDispatchTableRegister, in Generate_InterpreterEntryTrampoline()
1114 __ Daddu(a0, kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1116 __ lbu(a0, MemOperand(a0)); in Generate_InterpreterEntryTrampoline()
1117 __ Dlsa(at, kInterpreterDispatchTableRegister, a0, kPointerSizeLog2); in Generate_InterpreterEntryTrampoline()
1118 __ ld(at, MemOperand(at)); in Generate_InterpreterEntryTrampoline()
1119 __ Call(at); in Generate_InterpreterEntryTrampoline()
1124 __ Jump(ra); in Generate_InterpreterEntryTrampoline()
1127 __ bind(&load_debug_bytecode_array); in Generate_InterpreterEntryTrampoline()
1128 __ ld(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1130 __ Branch(&bytecode_array_loaded); in Generate_InterpreterEntryTrampoline()
1135 __ bind(&switch_to_different_code_kind); in Generate_InterpreterEntryTrampoline()
1136 __ LeaveFrame(StackFrame::JAVA_SCRIPT); in Generate_InterpreterEntryTrampoline()
1137 __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_InterpreterEntryTrampoline()
1138 __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kCodeOffset)); in Generate_InterpreterEntryTrampoline()
1139 __ Daddu(a4, a4, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_InterpreterEntryTrampoline()
1140 __ sd(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); in Generate_InterpreterEntryTrampoline()
1141 __ RecordWriteCodeEntryField(a1, a4, a5); in Generate_InterpreterEntryTrampoline()
1142 __ Jump(a4); in Generate_InterpreterEntryTrampoline()
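The heart of Generate_InterpreterEntryTrampoline is the dispatch at source lines 1114-1119: read the current bytecode, scale it into the dispatch table, and call the handler. A sketch of that step:

    #include <cstdint>

    using BytecodeHandler = void (*)();

    void DispatchFirstBytecode(const uint8_t* bytecode_array, intptr_t offset,
                               BytecodeHandler const* dispatch_table) {
      uint8_t bytecode = bytecode_array[offset];  // lbu a0, MemOperand(a0)
      dispatch_table[bytecode]();                 // Dlsa + ld + Call(at)
    }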
1151 __ LoadRoot(scratch1, Heap::kRealStackLimitRootIndex); in Generate_StackOverflowCheck()
1154 __ dsubu(scratch1, sp, scratch1); in Generate_StackOverflowCheck()
1156 __ dsll(scratch2, num_args, kPointerSizeLog2); in Generate_StackOverflowCheck()
1158 __ Branch(stack_overflow, le, scratch1, Operand(scratch2)); in Generate_StackOverflowCheck()
1169 __ mov(scratch2, num_args); in Generate_InterpreterPushArgs()
1170 __ dsll(scratch2, scratch2, kPointerSizeLog2); in Generate_InterpreterPushArgs()
1171 __ Dsubu(scratch2, index, Operand(scratch2)); in Generate_InterpreterPushArgs()
1175 __ Branch(&loop_check); in Generate_InterpreterPushArgs()
1176 __ bind(&loop_header); in Generate_InterpreterPushArgs()
1177 __ ld(scratch, MemOperand(index)); in Generate_InterpreterPushArgs()
1178 __ Daddu(index, index, Operand(-kPointerSize)); in Generate_InterpreterPushArgs()
1179 __ push(scratch); in Generate_InterpreterPushArgs()
1180 __ bind(&loop_check); in Generate_InterpreterPushArgs()
1181 __ Branch(&loop_header, gt, index, Operand(scratch2)); in Generate_InterpreterPushArgs()
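Generate_InterpreterPushArgs copies num_args slots onto the machine stack, with index walking down toward the precomputed end pointer in scratch2. Equivalent pointer logic:

    #include <cstdint>

    void PushArgs(uint64_t*& sp, const uint64_t* index, uint64_t num_args) {
      const uint64_t* end = index - num_args;  // scratch2 = index - num_args * 8
      while (index > end) {                    // Branch(&loop_header, gt, index, scratch2)
        *--sp = *index--;                      // ld scratch, 0(index); push scratch
      }
    }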
1197 __ Daddu(a3, a0, Operand(1)); // Add one for receiver. in Generate_InterpreterPushArgsAndCallImpl()
1204 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny, in Generate_InterpreterPushArgsAndCallImpl()
1208 __ Jump(masm->isolate()->builtins()->CallWithSpread(), in Generate_InterpreterPushArgsAndCallImpl()
1211 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, in Generate_InterpreterPushArgsAndCallImpl()
1216 __ bind(&stack_overflow); in Generate_InterpreterPushArgsAndCallImpl()
1218 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterPushArgsAndCallImpl()
1220 __ break_(0xCC); in Generate_InterpreterPushArgsAndCallImpl()
1237 __ push(zero_reg); in Generate_InterpreterPushArgsAndConstructImpl()
1242 __ AssertUndefinedOrAllocationSite(a2, t0); in Generate_InterpreterPushArgsAndConstructImpl()
1244 __ AssertFunction(a1); in Generate_InterpreterPushArgsAndConstructImpl()
1248 __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_InterpreterPushArgsAndConstructImpl()
1249 __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset)); in Generate_InterpreterPushArgsAndConstructImpl()
1250 __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_InterpreterPushArgsAndConstructImpl()
1251 __ Jump(at); in Generate_InterpreterPushArgsAndConstructImpl()
1254 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(), in Generate_InterpreterPushArgsAndConstructImpl()
1259 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate_InterpreterPushArgsAndConstructImpl()
1262 __ bind(&stack_overflow); in Generate_InterpreterPushArgsAndConstructImpl()
1264 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterPushArgsAndConstructImpl()
1266 __ break_(0xCC); in Generate_InterpreterPushArgsAndConstructImpl()
1283 __ Daddu(a4, a0, Operand(1)); // Add one for receiver. in Generate_InterpreterPushArgsAndConstructArray()
1289 __ mov(a3, a1); in Generate_InterpreterPushArgsAndConstructArray()
1292 __ TailCallStub(&stub); in Generate_InterpreterPushArgsAndConstructArray()
1294 __ bind(&stack_overflow); in Generate_InterpreterPushArgsAndConstructArray()
1296 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterPushArgsAndConstructArray()
1298 __ break_(0xCC); in Generate_InterpreterPushArgsAndConstructArray()
1308 __ li(t0, Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline())); in Generate_InterpreterEnterBytecode()
1309 __ Daddu(ra, t0, Operand(interpreter_entry_return_pc_offset->value() + in Generate_InterpreterEnterBytecode()
1313 __ li(kInterpreterDispatchTableRegister, in Generate_InterpreterEnterBytecode()
1318 __ ld(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEnterBytecode()
1323 __ SmiTst(kInterpreterBytecodeArrayRegister, at); in Generate_InterpreterEnterBytecode()
1324 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at, in Generate_InterpreterEnterBytecode()
1326 __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1); in Generate_InterpreterEnterBytecode()
1327 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1, in Generate_InterpreterEnterBytecode()
1332 __ lw( in Generate_InterpreterEnterBytecode()
1337 __ Daddu(a1, kInterpreterBytecodeArrayRegister, in Generate_InterpreterEnterBytecode()
1339 __ lbu(a1, MemOperand(a1)); in Generate_InterpreterEnterBytecode()
1340 __ Dlsa(a1, kInterpreterDispatchTableRegister, a1, kPointerSizeLog2); in Generate_InterpreterEnterBytecode()
1341 __ ld(a1, MemOperand(a1)); in Generate_InterpreterEnterBytecode()
1342 __ Jump(a1); in Generate_InterpreterEnterBytecode()
1349 __ ld(a1, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp)); in Generate_InterpreterEnterBytecodeAdvance()
1350 __ ld(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp)); in Generate_InterpreterEnterBytecodeAdvance()
1351 __ ld(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); in Generate_InterpreterEnterBytecodeAdvance()
1354 __ Push(kInterpreterAccumulatorRegister, a1, a2); in Generate_InterpreterEnterBytecodeAdvance()
1355 __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset); in Generate_InterpreterEnterBytecodeAdvance()
1356 __ mov(a2, v0); // Result is the new bytecode offset. in Generate_InterpreterEnterBytecodeAdvance()
1357 __ Pop(kInterpreterAccumulatorRegister); in Generate_InterpreterEnterBytecodeAdvance()
1359 __ sd(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp)); in Generate_InterpreterEnterBytecodeAdvance()
1386 __ ld(index, FieldMemOperand(closure, JSFunction::kFeedbackVectorOffset)); in Generate_CompileLazy()
1387 __ ld(index, FieldMemOperand(index, Cell::kValueOffset)); in Generate_CompileLazy()
1388 __ JumpIfRoot(index, Heap::kUndefinedValueRootIndex, in Generate_CompileLazy()
1391 __ push(argument_count); in Generate_CompileLazy()
1392 __ push(new_target); in Generate_CompileLazy()
1393 __ push(closure); in Generate_CompileLazy()
1395 __ ld(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset)); in Generate_CompileLazy()
1396 __ ld(map, FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset)); in Generate_CompileLazy()
1397 __ ld(index, FieldMemOperand(map, FixedArray::kLengthOffset)); in Generate_CompileLazy()
1398 __ Branch(&try_shared, lt, index, Operand(Smi::FromInt(2))); in Generate_CompileLazy()
1406 __ ld(native_context, NativeContextMemOperand()); in Generate_CompileLazy()
1408 __ bind(&loop_top); in Generate_CompileLazy()
1413 __ SmiScale(at, index, kPointerSizeLog2); in Generate_CompileLazy()
1414 __ Daddu(array_pointer, map, Operand(at)); in Generate_CompileLazy()
1415 __ ld(temp, FieldMemOperand(array_pointer, in Generate_CompileLazy()
1417 __ ld(temp, FieldMemOperand(temp, WeakCell::kValueOffset)); in Generate_CompileLazy()
1418 __ Branch(&loop_bottom, ne, temp, Operand(native_context)); in Generate_CompileLazy()
1422 __ ld(entry, in Generate_CompileLazy()
1425 __ ld(entry, FieldMemOperand(entry, WeakCell::kValueOffset)); in Generate_CompileLazy()
1426 __ JumpIfSmi(entry, &try_shared); in Generate_CompileLazy()
1429 __ pop(closure); in Generate_CompileLazy()
1431 __ Daddu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_CompileLazy()
1432 __ sd(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset)); in Generate_CompileLazy()
1433 __ RecordWriteCodeEntryField(closure, entry, a5); in Generate_CompileLazy()
1439 __ ld(a5, in Generate_CompileLazy()
1441 __ sd(a5, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset)); in Generate_CompileLazy()
1442 __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, a5, a0, in Generate_CompileLazy()
1447 __ sd(closure, in Generate_CompileLazy()
1450 __ mov(a5, closure); in Generate_CompileLazy()
1451 __ RecordWriteContextSlot(native_context, function_list_offset, closure, a0, in Generate_CompileLazy()
1453 __ mov(closure, a5); in Generate_CompileLazy()
1454 __ pop(new_target); in Generate_CompileLazy()
1455 __ pop(argument_count); in Generate_CompileLazy()
1456 __ Jump(entry); in Generate_CompileLazy()
1458 __ bind(&loop_bottom); in Generate_CompileLazy()
1459 __ Dsubu(index, index, in Generate_CompileLazy()
1461 __ Branch(&loop_top, gt, index, Operand(Smi::FromInt(1))); in Generate_CompileLazy()
1464 __ bind(&try_shared); in Generate_CompileLazy()
1465 __ pop(closure); in Generate_CompileLazy()
1466 __ pop(new_target); in Generate_CompileLazy()
1467 __ pop(argument_count); in Generate_CompileLazy()
1468 __ ld(entry, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset)); in Generate_CompileLazy()
1470 __ lbu(a5, FieldMemOperand(entry, in Generate_CompileLazy()
1472 __ And(a5, a5, in Generate_CompileLazy()
1474 __ Branch(&gotta_call_runtime_no_stack, ne, a5, Operand(zero_reg)); in Generate_CompileLazy()
1477 __ ld(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset)); in Generate_CompileLazy()
1478 __ Move(t1, masm->CodeObject()); in Generate_CompileLazy()
1479 __ Branch(&gotta_call_runtime_no_stack, eq, entry, Operand(t1)); in Generate_CompileLazy()
1482 __ Daddu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_CompileLazy()
1483 __ sd(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset)); in Generate_CompileLazy()
1484 __ RecordWriteCodeEntryField(closure, entry, a5); in Generate_CompileLazy()
1485 __ Jump(entry); in Generate_CompileLazy()
1487 __ bind(&gotta_call_runtime); in Generate_CompileLazy()
1488 __ pop(closure); in Generate_CompileLazy()
1489 __ pop(new_target); in Generate_CompileLazy()
1490 __ pop(argument_count); in Generate_CompileLazy()
1491 __ bind(&gotta_call_runtime_no_stack); in Generate_CompileLazy()
1519 __ Move(t2, a0); in Generate_InstantiateAsmJs()
1520 __ SmiTag(a0); in Generate_InstantiateAsmJs()
1521 __ Push(a0, a1, a3, a1); in Generate_InstantiateAsmJs()
1528 __ Branch(&over, ne, t2, Operand(j)); in Generate_InstantiateAsmJs()
1531 __ ld(t2, MemOperand(fp, StandardFrameConstants::kCallerSPOffset + in Generate_InstantiateAsmJs()
1533 __ push(t2); in Generate_InstantiateAsmJs()
1536 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate_InstantiateAsmJs()
1539 __ jmp(&args_done); in Generate_InstantiateAsmJs()
1540 __ bind(&over); in Generate_InstantiateAsmJs()
1543 __ bind(&args_done); in Generate_InstantiateAsmJs()
1546 __ CallRuntime(Runtime::kInstantiateAsmJs, 4); in Generate_InstantiateAsmJs()
1548 __ JumpIfSmi(v0, &failed); in Generate_InstantiateAsmJs()
1550 __ Drop(2); in Generate_InstantiateAsmJs()
1551 __ pop(t2); in Generate_InstantiateAsmJs()
1552 __ SmiUntag(t2); in Generate_InstantiateAsmJs()
1555 __ Daddu(t2, t2, Operand(1)); in Generate_InstantiateAsmJs()
1556 __ Dlsa(sp, sp, t2, kPointerSizeLog2); in Generate_InstantiateAsmJs()
1557 __ Ret(); in Generate_InstantiateAsmJs()
1559 __ bind(&failed); in Generate_InstantiateAsmJs()
1561 __ Pop(a0, a1, a3); in Generate_InstantiateAsmJs()
1562 __ SmiUntag(a0); in Generate_InstantiateAsmJs()
1576 __ Dsubu(a0, a0, Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize)); in GenerateMakeCodeYoungAgainCommon()
1586 __ MultiPush(saved_regs); in GenerateMakeCodeYoungAgainCommon()
1587 __ PrepareCallCFunction(2, 0, a2); in GenerateMakeCodeYoungAgainCommon()
1588 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate()))); in GenerateMakeCodeYoungAgainCommon()
1589 __ CallCFunction( in GenerateMakeCodeYoungAgainCommon()
1591 __ MultiPop(saved_regs); in GenerateMakeCodeYoungAgainCommon()
1592 __ Jump(a0); in GenerateMakeCodeYoungAgainCommon()
1609 __ Dsubu(a0, a0, Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize)); in CODE_AGE_LIST()
1619 __ MultiPush(saved_regs); in CODE_AGE_LIST()
1620 __ PrepareCallCFunction(2, 0, a2); in CODE_AGE_LIST()
1621 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate()))); in CODE_AGE_LIST()
1622 __ CallCFunction( in CODE_AGE_LIST()
1625 __ MultiPop(saved_regs); in CODE_AGE_LIST()
1628 __ PushStandardFrame(a1); in CODE_AGE_LIST()
1631 __ Daddu(a0, a0, Operand((kNoCodeAgeSequenceLength))); in CODE_AGE_LIST()
1632 __ Jump(a0); in CODE_AGE_LIST()
1651 __ MultiPush(kJSCallerSaved | kCalleeSaved); in Generate_NotifyStubFailureHelper()
1653 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles); in Generate_NotifyStubFailureHelper()
1654 __ MultiPop(kJSCallerSaved | kCalleeSaved); in Generate_NotifyStubFailureHelper()
1657 __ Daddu(sp, sp, Operand(kPointerSize)); // Ignore state in Generate_NotifyStubFailureHelper()
1658 __ Jump(ra); // Jump to miss handler in Generate_NotifyStubFailureHelper()
1674 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type)))); in Generate_NotifyDeoptimizedHelper()
1675 __ push(a0); in Generate_NotifyDeoptimizedHelper()
1676 __ CallRuntime(Runtime::kNotifyDeoptimized); in Generate_NotifyDeoptimizedHelper()
1680 __ lw(a6, UntagSmiMemOperand(sp, 0 * kPointerSize)); in Generate_NotifyDeoptimizedHelper()
1683 __ Branch( in Generate_NotifyDeoptimizedHelper()
1686 __ Ret(USE_DELAY_SLOT); in Generate_NotifyDeoptimizedHelper()
1688 __ Daddu(sp, sp, Operand(1 * kPointerSize)); // Remove state. in Generate_NotifyDeoptimizedHelper()
1690 __ bind(&with_tos_register); in Generate_NotifyDeoptimizedHelper()
1692 __ ld(v0, MemOperand(sp, 1 * kPointerSize)); in Generate_NotifyDeoptimizedHelper()
1693 __ Branch( in Generate_NotifyDeoptimizedHelper()
1697 __ Ret(USE_DELAY_SLOT); in Generate_NotifyDeoptimizedHelper()
1699 __ Daddu(sp, sp, Operand(2 * kPointerSize)); // Remove state. in Generate_NotifyDeoptimizedHelper()
1701 __ bind(&unknown_state); in Generate_NotifyDeoptimizedHelper()
1702 __ stop("no cases left"); in Generate_NotifyDeoptimizedHelper()
1727 __ ld(signature, FieldMemOperand(function_template_info, in CompatibleReceiverCheck()
1730 __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex, in CompatibleReceiverCheck()
1734 __ ld(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); in CompatibleReceiverCheck()
1736 __ bind(&prototype_loop_start); in CompatibleReceiverCheck()
1739 __ GetMapConstructor(constructor, map, scratch, scratch); in CompatibleReceiverCheck()
1741 __ Branch(&next_prototype, ne, scratch, Operand(JS_FUNCTION_TYPE)); in CompatibleReceiverCheck()
1743 __ ld(type, in CompatibleReceiverCheck()
1745 __ ld(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset)); in CompatibleReceiverCheck()
1749 __ bind(&function_template_loop); in CompatibleReceiverCheck()
1752 __ Branch(&receiver_check_passed, eq, signature, Operand(type), in CompatibleReceiverCheck()
1757 __ JumpIfSmi(type, &next_prototype); in CompatibleReceiverCheck()
1758 __ GetObjectType(type, scratch, scratch); in CompatibleReceiverCheck()
1759 __ Branch(&next_prototype, ne, scratch, Operand(FUNCTION_TEMPLATE_INFO_TYPE)); in CompatibleReceiverCheck()
1762 __ ld(type, in CompatibleReceiverCheck()
1764 __ Branch(&function_template_loop); in CompatibleReceiverCheck()
1767 __ bind(&next_prototype); in CompatibleReceiverCheck()
1768 __ lwu(scratch, FieldMemOperand(map, Map::kBitField3Offset)); in CompatibleReceiverCheck()
1769 __ DecodeField<Map::HasHiddenPrototype>(scratch); in CompatibleReceiverCheck()
1770 __ Branch(receiver_check_failed, eq, scratch, Operand(zero_reg)); in CompatibleReceiverCheck()
1772 __ ld(receiver, FieldMemOperand(map, Map::kPrototypeOffset)); in CompatibleReceiverCheck()
1773 __ ld(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); in CompatibleReceiverCheck()
1775 __ Branch(&prototype_loop_start); in CompatibleReceiverCheck()
1777 __ bind(&receiver_check_passed); in CompatibleReceiverCheck()
1792 __ ld(t1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_HandleFastApiCall()
1793 __ ld(t1, FieldMemOperand(t1, SharedFunctionInfo::kFunctionDataOffset)); in Generate_HandleFastApiCall()
1797 __ Dlsa(t8, sp, a0, kPointerSizeLog2); in Generate_HandleFastApiCall()
1798 __ ld(t0, MemOperand(t8)); in Generate_HandleFastApiCall()
1803 __ ld(t2, FieldMemOperand(t1, FunctionTemplateInfo::kCallCodeOffset)); in Generate_HandleFastApiCall()
1804 __ ld(t2, FieldMemOperand(t2, CallHandlerInfo::kFastHandlerOffset)); in Generate_HandleFastApiCall()
1805 __ Daddu(t2, t2, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_HandleFastApiCall()
1806 __ Jump(t2); in Generate_HandleFastApiCall()
1809 __ bind(&receiver_check_failed); in Generate_HandleFastApiCall()
1811 __ Daddu(t8, t8, Operand(kPointerSize)); in Generate_HandleFastApiCall()
1812 __ daddu(sp, t8, zero_reg); in Generate_HandleFastApiCall()
1813 __ TailCallRuntime(Runtime::kThrowIllegalInvocation); in Generate_HandleFastApiCall()
1820 __ ld(a0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in Generate_OnStackReplacementHelper()
1821 __ ld(a0, MemOperand(a0, JavaScriptFrameConstants::kFunctionOffset)); in Generate_OnStackReplacementHelper()
1823 __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); in Generate_OnStackReplacementHelper()
1829 __ push(a0); in Generate_OnStackReplacementHelper()
1830 __ CallRuntime(Runtime::kCompileForOnStackReplacement); in Generate_OnStackReplacementHelper()
1834 __ Ret(eq, v0, Operand(Smi::kZero)); in Generate_OnStackReplacementHelper()
1839 __ LeaveFrame(StackFrame::STUB); in Generate_OnStackReplacementHelper()
1844 __ ld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag)); in Generate_OnStackReplacementHelper()
1848 __ lw(a1, in Generate_OnStackReplacementHelper()
1855 __ daddu(v0, v0, a1); in Generate_OnStackReplacementHelper()
1856 __ daddiu(ra, v0, Code::kHeaderSize - kHeapObjectTag); in Generate_OnStackReplacementHelper()
1859 __ Ret(); in Generate_OnStackReplacementHelper()
1886 __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex); in Generate_FunctionPrototypeApply()
1894 __ Dsubu(sp, sp, Operand(2 * kPointerSize)); in Generate_FunctionPrototypeApply()
1895 __ Dlsa(sp, sp, argc, kPointerSizeLog2); in Generate_FunctionPrototypeApply()
1896 __ mov(scratch, argc); in Generate_FunctionPrototypeApply()
1897 __ Pop(this_arg, arg_array); // Overwrite argc in Generate_FunctionPrototypeApply()
1898 __ Movz(arg_array, undefined_value, scratch); // if argc == 0 in Generate_FunctionPrototypeApply()
1899 __ Movz(this_arg, undefined_value, scratch); // if argc == 0 in Generate_FunctionPrototypeApply()
1900 __ Dsubu(scratch, scratch, Operand(1)); in Generate_FunctionPrototypeApply()
1901 __ Movz(arg_array, undefined_value, scratch); // if argc == 1 in Generate_FunctionPrototypeApply()
1902 __ ld(receiver, MemOperand(sp)); in Generate_FunctionPrototypeApply()
1903 __ sd(this_arg, MemOperand(sp)); in Generate_FunctionPrototypeApply()
1915 __ JumpIfSmi(receiver, &receiver_not_callable); in Generate_FunctionPrototypeApply()
1916 __ ld(a4, FieldMemOperand(receiver, HeapObject::kMapOffset)); in Generate_FunctionPrototypeApply()
1917 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset)); in Generate_FunctionPrototypeApply()
1918 __ And(a4, a4, Operand(1 << Map::kIsCallable)); in Generate_FunctionPrototypeApply()
1919 __ Branch(&receiver_not_callable, eq, a4, Operand(zero_reg)); in Generate_FunctionPrototypeApply()
1923 __ JumpIfRoot(arg_array, Heap::kNullValueRootIndex, &no_arguments); in Generate_FunctionPrototypeApply()
1924 __ Branch(&no_arguments, eq, arg_array, Operand(undefined_value)); in Generate_FunctionPrototypeApply()
1929 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); in Generate_FunctionPrototypeApply()
1933 __ bind(&no_arguments); in Generate_FunctionPrototypeApply()
1935 __ mov(a0, zero_reg); in Generate_FunctionPrototypeApply()
1937 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); in Generate_FunctionPrototypeApply()
1941 __ bind(&receiver_not_callable); in Generate_FunctionPrototypeApply()
1943 __ sd(receiver, MemOperand(sp)); in Generate_FunctionPrototypeApply()
1944 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); in Generate_FunctionPrototypeApply()
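The Movz sequence near the top of Generate_FunctionPrototypeApply is a branch-free way to default missing arguments: Movz(dst, src, cond) moves src into dst only when cond is zero. The same scalar logic, written out:

    #include <cstdint>

    // Movz(dst, src, cond): if (cond == 0) dst = src, with no branch taken.
    void DefaultApplyArgs(uint64_t& this_arg, uint64_t& arg_array,
                          uint64_t undefined_value, uint64_t argc) {
      uint64_t scratch = argc;
      if (scratch == 0) {                 // argc == 0
        arg_array = undefined_value;
        this_arg  = undefined_value;
      }
      scratch -= 1;                       // Dsubu(scratch, scratch, 1)
      if (scratch == 0) {                 // argc == 1
        arg_array = undefined_value;
      }
    }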
1954 __ Branch(&done, ne, a0, Operand(zero_reg)); in Generate_FunctionPrototypeCall()
1955 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate_FunctionPrototypeCall()
1956 __ Daddu(a0, a0, Operand(1)); in Generate_FunctionPrototypeCall()
1957 __ bind(&done); in Generate_FunctionPrototypeCall()
1962 __ Dlsa(at, sp, a0, kPointerSizeLog2); in Generate_FunctionPrototypeCall()
1963 __ ld(a1, MemOperand(at)); in Generate_FunctionPrototypeCall()
1973 __ Dlsa(a2, sp, a0, kPointerSizeLog2); in Generate_FunctionPrototypeCall()
1975 __ bind(&loop); in Generate_FunctionPrototypeCall()
1976 __ ld(at, MemOperand(a2, -kPointerSize)); in Generate_FunctionPrototypeCall()
1977 __ sd(at, MemOperand(a2)); in Generate_FunctionPrototypeCall()
1978 __ Dsubu(a2, a2, Operand(kPointerSize)); in Generate_FunctionPrototypeCall()
1979 __ Branch(&loop, ne, a2, Operand(sp)); in Generate_FunctionPrototypeCall()
1982 __ Dsubu(a0, a0, Operand(1)); in Generate_FunctionPrototypeCall()
1983 __ Pop(); in Generate_FunctionPrototypeCall()
1987 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); in Generate_FunctionPrototypeCall()
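Generate_FunctionPrototypeCall removes its own function object from the stack by sliding every argument one slot toward higher addresses (source lines 1975-1979), then shrinking argc and popping the freed bottom slot. As a sketch:

    #include <cstdint>

    void SlideArgsOverReceiver(uint64_t* sp, uint64_t& argc) {
      uint64_t* p = sp + argc;  // Dlsa(a2, sp, a0, kPointerSizeLog2)
      while (p != sp) {         // Branch(&loop, ne, a2, sp)
        *p = *(p - 1);          // ld at, -8(a2); sd at, 0(a2)
        --p;                    // Dsubu(a2, a2, kPointerSize)
      }
      argc -= 1;                // Dsubu(a0, a0, 1); Pop() then drops one slot
    }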
2006 __ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex); in Generate_ReflectApply()
2014 __ Dsubu(sp, sp, Operand(3 * kPointerSize)); in Generate_ReflectApply()
2015 __ Dlsa(sp, sp, argc, kPointerSizeLog2); in Generate_ReflectApply()
2016 __ mov(scratch, argc); in Generate_ReflectApply()
2017 __ Pop(target, this_argument, arguments_list); in Generate_ReflectApply()
2018 __ Movz(arguments_list, undefined_value, scratch); // if argc == 0 in Generate_ReflectApply()
2019 __ Movz(this_argument, undefined_value, scratch); // if argc == 0 in Generate_ReflectApply()
2020 __ Movz(target, undefined_value, scratch); // if argc == 0 in Generate_ReflectApply()
2021 __ Dsubu(scratch, scratch, Operand(1)); in Generate_ReflectApply()
2022 __ Movz(arguments_list, undefined_value, scratch); // if argc == 1 in Generate_ReflectApply()
2023 __ Movz(this_argument, undefined_value, scratch); // if argc == 1 in Generate_ReflectApply()
2024 __ Dsubu(scratch, scratch, Operand(1)); in Generate_ReflectApply()
2025 __ Movz(arguments_list, undefined_value, scratch); // if argc == 2 in Generate_ReflectApply()
2027 __ sd(this_argument, MemOperand(sp, 0)); // Overwrite receiver in Generate_ReflectApply()
2039 __ JumpIfSmi(target, &target_not_callable); in Generate_ReflectApply()
2040 __ ld(a4, FieldMemOperand(target, HeapObject::kMapOffset)); in Generate_ReflectApply()
2041 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset)); in Generate_ReflectApply()
2042 __ And(a4, a4, Operand(1 << Map::kIsCallable)); in Generate_ReflectApply()
2043 __ Branch(&target_not_callable, eq, a4, Operand(zero_reg)); in Generate_ReflectApply()
2048 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); in Generate_ReflectApply()
2051 __ bind(&target_not_callable); in Generate_ReflectApply()
2053 __ sd(target, MemOperand(sp)); in Generate_ReflectApply()
2054 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); in Generate_ReflectApply()
2081 __ Dsubu(sp, sp, Operand(3 * kPointerSize)); in Generate_ReflectConstruct()
2082 __ Dlsa(sp, sp, argc, kPointerSizeLog2); in Generate_ReflectConstruct()
2083 __ mov(scratch, argc); in Generate_ReflectConstruct()
2084 __ Pop(target, arguments_list, new_target); in Generate_ReflectConstruct()
2085 __ Movz(arguments_list, undefined_value, scratch); // if argc == 0 in Generate_ReflectConstruct()
2086 __ Movz(new_target, undefined_value, scratch); // if argc == 0 in Generate_ReflectConstruct()
2087 __ Movz(target, undefined_value, scratch); // if argc == 0 in Generate_ReflectConstruct()
2088 __ Dsubu(scratch, scratch, Operand(1)); in Generate_ReflectConstruct()
2089 __ Movz(arguments_list, undefined_value, scratch); // if argc == 1 in Generate_ReflectConstruct()
2090 __ Movz(new_target, target, scratch); // if argc == 1 in Generate_ReflectConstruct()
2091 __ Dsubu(scratch, scratch, Operand(1)); in Generate_ReflectConstruct()
2092 __ Movz(new_target, target, scratch); // if argc == 2 in Generate_ReflectConstruct()
2094 __ sd(undefined_value, MemOperand(sp, 0)); // Overwrite receiver in Generate_ReflectConstruct()
2106 __ JumpIfSmi(target, &target_not_constructor); in Generate_ReflectConstruct()
2107 __ ld(a4, FieldMemOperand(target, HeapObject::kMapOffset)); in Generate_ReflectConstruct()
2108 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset)); in Generate_ReflectConstruct()
2109 __ And(a4, a4, Operand(1 << Map::kIsConstructor)); in Generate_ReflectConstruct()
2110 __ Branch(&target_not_constructor, eq, a4, Operand(zero_reg)); in Generate_ReflectConstruct()
2114 __ JumpIfSmi(new_target, &new_target_not_constructor); in Generate_ReflectConstruct()
2115 __ ld(a4, FieldMemOperand(new_target, HeapObject::kMapOffset)); in Generate_ReflectConstruct()
2116 __ lbu(a4, FieldMemOperand(a4, Map::kBitFieldOffset)); in Generate_ReflectConstruct()
2117 __ And(a4, a4, Operand(1 << Map::kIsConstructor)); in Generate_ReflectConstruct()
2118 __ Branch(&new_target_not_constructor, eq, a4, Operand(zero_reg)); in Generate_ReflectConstruct()
2121 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); in Generate_ReflectConstruct()
2124 __ bind(&target_not_constructor); in Generate_ReflectConstruct()
2126 __ sd(target, MemOperand(sp)); in Generate_ReflectConstruct()
2127 __ TailCallRuntime(Runtime::kThrowNotConstructor); in Generate_ReflectConstruct()
2131 __ bind(&new_target_not_constructor); in Generate_ReflectConstruct()
2133 __ sd(new_target, MemOperand(sp)); in Generate_ReflectConstruct()
2134 __ TailCallRuntime(Runtime::kThrowNotConstructor); in Generate_ReflectConstruct()
2139 // __ sll(a0, a0, kSmiTagSize); in EnterArgumentsAdaptorFrame()
2140 __ dsll32(a0, a0, 0); in EnterArgumentsAdaptorFrame()
2141 __ li(a4, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); in EnterArgumentsAdaptorFrame()
2142 __ MultiPush(a0.bit() | a1.bit() | a4.bit() | fp.bit() | ra.bit()); in EnterArgumentsAdaptorFrame()
2143 __ Daddu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + in EnterArgumentsAdaptorFrame()
2153 __ ld(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + in LeaveArgumentsAdaptorFrame()
2155 __ mov(sp, fp); in LeaveArgumentsAdaptorFrame()
2156 __ MultiPop(fp.bit() | ra.bit()); in LeaveArgumentsAdaptorFrame()
2157 __ SmiScale(a4, a1, kPointerSizeLog2); in LeaveArgumentsAdaptorFrame()
2158 __ Daddu(sp, sp, a4); in LeaveArgumentsAdaptorFrame()
2160 __ Daddu(sp, sp, Operand(kPointerSize)); in LeaveArgumentsAdaptorFrame()
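LeaveArgumentsAdaptorFrame reverses the enter sequence: it reloads the Smi-tagged actual argument count saved in the frame, pops fp/ra, and drops the arguments plus the receiver slot. A C++ sketch of the sp arithmetic, assuming the 64-bit Smi layout with the value in the upper 32 bits:

  #include <cstdint>

  // Returns the new stack pointer after tearing down an adaptor frame.
  intptr_t LeaveAdaptorFrame(intptr_t fp, int64_t smi_tagged_argc) {
    const int kPointerSize = 8, kPointerSizeLog2 = 3, kSmiShift = 32;
    int64_t argc = smi_tagged_argc >> kSmiShift;  // untag (cf. SmiScale)
    intptr_t sp = fp;                             // mov(sp, fp)
    sp += 2 * kPointerSize;                       // MultiPop(fp | ra)
    sp += argc << kPointerSizeLog2;               // drop the arguments
    sp += kPointerSize;                           // drop the receiver
    return sp;
  }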
2183 __ JumpIfSmi(arguments_list, &create_runtime); in Generate_Apply()
2187 __ ld(arguments_list_map, in Generate_Apply()
2192 __ ld(native_context, NativeContextMemOperand()); in Generate_Apply()
2195 __ ld(at, ContextMemOperand(native_context, in Generate_Apply()
2197 __ Branch(&create_arguments, eq, arguments_list_map, Operand(at)); in Generate_Apply()
2198 __ ld(at, ContextMemOperand(native_context, in Generate_Apply()
2200 __ Branch(&create_arguments, eq, arguments_list_map, Operand(at)); in Generate_Apply()
2203 __ lbu(v0, FieldMemOperand(a2, Map::kInstanceTypeOffset)); in Generate_Apply()
2204 __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE)); in Generate_Apply()
2207 __ bind(&create_runtime); in Generate_Apply()
2210 __ Push(target, new_target, arguments_list); in Generate_Apply()
2211 __ CallRuntime(Runtime::kCreateListFromArrayLike); in Generate_Apply()
2212 __ mov(arguments_list, v0); in Generate_Apply()
2213 __ Pop(target, new_target); in Generate_Apply()
2214 __ lw(len, UntagSmiFieldMemOperand(v0, FixedArray::kLengthOffset)); in Generate_Apply()
2216 __ Branch(&done_create); in Generate_Apply()
2219 __ bind(&create_arguments); in Generate_Apply()
2220 __ lw(len, UntagSmiFieldMemOperand(arguments_list, in Generate_Apply()
2222 __ ld(a4, FieldMemOperand(arguments_list, JSObject::kElementsOffset)); in Generate_Apply()
2223 __ lw(at, UntagSmiFieldMemOperand(a4, FixedArray::kLengthOffset)); in Generate_Apply()
2224 __ Branch(&create_runtime, ne, len, Operand(at)); in Generate_Apply()
2225 __ mov(args, a4); in Generate_Apply()
2227 __ Branch(&done_create); in Generate_Apply()
2231 __ bind(&create_holey_array); in Generate_Apply()
2232 __ ld(a2, FieldMemOperand(a2, Map::kPrototypeOffset)); in Generate_Apply()
2233 __ ld(at, ContextMemOperand(native_context, in Generate_Apply()
2235 __ Branch(&create_runtime, ne, a2, Operand(at)); in Generate_Apply()
2236 __ LoadRoot(at, Heap::kArrayProtectorRootIndex); in Generate_Apply()
2237 __ lw(a2, FieldMemOperand(at, PropertyCell::kValueOffset)); in Generate_Apply()
2238 __ Branch(&create_runtime, ne, a2, in Generate_Apply()
2240 __ lw(a2, UntagSmiFieldMemOperand(a0, JSArray::kLengthOffset)); in Generate_Apply()
2241 __ ld(a0, FieldMemOperand(a0, JSArray::kElementsOffset)); in Generate_Apply()
2242 __ Branch(&done_create); in Generate_Apply()
2245 __ bind(&create_array); in Generate_Apply()
2246 __ lbu(t1, FieldMemOperand(a2, Map::kBitField2Offset)); in Generate_Apply()
2247 __ DecodeField<Map::ElementsKindBits>(t1); in Generate_Apply()
2251 __ Branch(&create_holey_array, eq, t1, Operand(FAST_HOLEY_SMI_ELEMENTS)); in Generate_Apply()
2252 __ Branch(&create_holey_array, eq, t1, Operand(FAST_HOLEY_ELEMENTS)); in Generate_Apply()
2253 __ Branch(&create_runtime, hi, t1, Operand(FAST_ELEMENTS)); in Generate_Apply()
2254 __ lw(a2, UntagSmiFieldMemOperand(arguments_list, JSArray::kLengthOffset)); in Generate_Apply()
2255 __ ld(a0, FieldMemOperand(arguments_list, JSArray::kElementsOffset)); in Generate_Apply()
2257 __ bind(&done_create); in Generate_Apply()
2265 __ LoadRoot(a4, Heap::kRealStackLimitRootIndex); in Generate_Apply()
2268 __ Dsubu(a4, sp, a4); in Generate_Apply()
2270 __ dsll(at, len, kPointerSizeLog2); in Generate_Apply()
2271 __ Branch(&done, gt, a4, Operand(at)); // Signed comparison. in Generate_Apply()
2272 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_Apply()
2273 __ bind(&done); in Generate_Apply()
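The check above guards the argument copy: it computes the headroom between sp and the real stack limit and bails out to Runtime::kThrowStackOverflow unless len pointer-sized slots fit. A C++ sketch of the comparison (signed, exactly as the Branch comment notes):

  #include <cstdint>

  // True when len additional stack slots fit below sp.
  bool FitsOnStack(intptr_t sp, intptr_t real_stack_limit, int64_t len) {
    const int kPointerSizeLog2 = 3;
    intptr_t headroom = sp - real_stack_limit;     // Dsubu(a4, sp, a4)
    return headroom > (len << kPointerSizeLog2);   // Branch(&done, gt, ...)
  }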
2290 __ daddiu(src, args, FixedArray::kHeaderSize - kHeapObjectTag); in Generate_Apply()
2291 __ Branch(&done, eq, len, Operand(zero_reg), i::USE_DELAY_SLOT); in Generate_Apply()
2292 __ mov(a0, len); // The 'len' argument for Call() or Construct(). in Generate_Apply()
2293 __ dsll(scratch, len, kPointerSizeLog2); in Generate_Apply()
2294 __ Dsubu(scratch, sp, Operand(scratch)); in Generate_Apply()
2295 __ LoadRoot(t1, Heap::kTheHoleValueRootIndex); in Generate_Apply()
2296 __ bind(&loop); in Generate_Apply()
2297 __ ld(a5, MemOperand(src)); in Generate_Apply()
2298 __ Branch(&push, ne, a5, Operand(t1)); in Generate_Apply()
2299 __ LoadRoot(a5, Heap::kUndefinedValueRootIndex); in Generate_Apply()
2300 __ bind(&push); in Generate_Apply()
2301 __ daddiu(src, src, kPointerSize); in Generate_Apply()
2302 __ Push(a5); in Generate_Apply()
2303 __ Branch(&loop, ne, scratch, Operand(sp)); in Generate_Apply()
2304 __ bind(&done); in Generate_Apply()
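The loop above pushes the unpacked arguments from the backing FixedArray, substituting undefined for the_hole so a holey array spreads as if its holes were undefined elements. A container-level C++ sketch of the same copy:

  #include <cstdint>
  #include <vector>

  // Copies elements onto a (simulated) stack, mapping holes to undefined.
  void PushSpreadArguments(const std::vector<uint64_t>& elements,
                           std::vector<uint64_t>* stack, uint64_t the_hole,
                           uint64_t undefined) {
    for (uint64_t value : elements) {
      stack->push_back(value == the_hole ? undefined : value);
    }
  }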
2321 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); in Generate_Apply()
2322 __ Branch(&construct, ne, a3, Operand(at)); in Generate_Apply()
2323 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); in Generate_Apply()
2324 __ bind(&construct); in Generate_Apply()
2325 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate_Apply()
2341 __ ld(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in Generate_CallForwardVarargs()
2342 __ ld(a0, MemOperand(a3, CommonFrameConstants::kContextOrFrameTypeOffset)); in Generate_CallForwardVarargs()
2343 __ Branch(&arguments_adaptor, eq, a0, in Generate_CallForwardVarargs()
2346 __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); in Generate_CallForwardVarargs()
2347 __ ld(a0, FieldMemOperand(a0, JSFunction::kSharedFunctionInfoOffset)); in Generate_CallForwardVarargs()
2348 __ lw(a0, in Generate_CallForwardVarargs()
2350 __ mov(a3, fp); in Generate_CallForwardVarargs()
2352 __ Branch(&arguments_done); in Generate_CallForwardVarargs()
2353 __ bind(&arguments_adaptor); in Generate_CallForwardVarargs()
2356 __ lw(a0, UntagSmiMemOperand( in Generate_CallForwardVarargs()
2359 __ bind(&arguments_done); in Generate_CallForwardVarargs()
2362 __ Subu(a0, a0, a2); in Generate_CallForwardVarargs()
2363 __ Branch(&stack_empty, le, a0, Operand(zero_reg)); in Generate_CallForwardVarargs()
2371 __ mov(a2, a0); in Generate_CallForwardVarargs()
2372 __ bind(&loop); in Generate_CallForwardVarargs()
2374 __ Dlsa(at, a3, a2, kPointerSizeLog2); in Generate_CallForwardVarargs()
2375 __ ld(at, MemOperand(at, 1 * kPointerSize)); in Generate_CallForwardVarargs()
2376 __ push(at); in Generate_CallForwardVarargs()
2377 __ Subu(a2, a2, Operand(1)); in Generate_CallForwardVarargs()
2378 __ Branch(&loop, ne, a2, Operand(zero_reg)); in Generate_CallForwardVarargs()
2382 __ Branch(&stack_done); in Generate_CallForwardVarargs()
2383 __ bind(&stack_overflow); in Generate_CallForwardVarargs()
2384 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_CallForwardVarargs()
2385 __ bind(&stack_empty); in Generate_CallForwardVarargs()
2388 __ mov(a0, zero_reg); in Generate_CallForwardVarargs()
2390 __ bind(&stack_done); in Generate_CallForwardVarargs()
2392 __ Jump(code, RelocInfo::CODE_TARGET); in Generate_CallForwardVarargs()
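Generate_CallForwardVarargs re-pushes the caller's remaining arguments: a2 holds the start index (the formals already consumed), and the Subu/Branch pair above clamps the forwarded count at zero via the &stack_empty path. A C++ sketch of that count:

  #include <cstdint>

  // Number of caller arguments re-pushed for the callee.
  int64_t ForwardedArgCount(int64_t caller_arg_count, int64_t start_index) {
    int64_t rest = caller_arg_count - start_index;  // Subu(a0, a0, a2)
    return rest > 0 ? rest : 0;                     // &stack_empty: a0 = 0
  }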
2435 __ li(at, Operand(is_tail_call_elimination_enabled)); in PrepareForTailCall()
2436 __ lb(scratch1, MemOperand(at)); in PrepareForTailCall()
2437 __ Branch(&done, eq, scratch1, Operand(zero_reg)); in PrepareForTailCall()
2442 __ ld(scratch3, in PrepareForTailCall()
2444 __ Branch(&no_interpreter_frame, ne, scratch3, in PrepareForTailCall()
2446 __ ld(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in PrepareForTailCall()
2447 __ bind(&no_interpreter_frame); in PrepareForTailCall()
2453 __ ld(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in PrepareForTailCall()
2454 __ ld(scratch3, in PrepareForTailCall()
2456 __ Branch(&no_arguments_adaptor, ne, scratch3, in PrepareForTailCall()
2460 __ mov(fp, scratch2); in PrepareForTailCall()
2461 __ lw(caller_args_count_reg, in PrepareForTailCall()
2463 __ Branch(&formal_parameter_count_loaded); in PrepareForTailCall()
2465 __ bind(&no_arguments_adaptor); in PrepareForTailCall()
2467 __ ld(scratch1, in PrepareForTailCall()
2469 __ ld(scratch1, in PrepareForTailCall()
2471 __ lw(caller_args_count_reg, in PrepareForTailCall()
2475 __ bind(&formal_parameter_count_loaded); in PrepareForTailCall()
2478 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2, in PrepareForTailCall()
2480 __ bind(&done); in PrepareForTailCall()
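PrepareForTailCall needs the caller's argument count to collapse the frame: if an arguments adaptor frame sits below, its saved length wins; otherwise the caller's formal parameter count is read via the SharedFunctionInfo. A trivial C++ sketch of the selection:

  #include <cstdint>

  // Caller argument count used when relocating frames for a tail call.
  int64_t CallerArgsCount(bool has_adaptor_frame, int64_t adaptor_length,
                          int64_t formal_parameter_count) {
    return has_adaptor_frame ? adaptor_length : formal_parameter_count;
  }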
2492 __ AssertFunction(a1); in Generate_CallFunction()
2497 __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_CallFunction()
2498 __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFunctionKindByteOffset)); in Generate_CallFunction()
2499 __ And(at, a3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte)); in Generate_CallFunction()
2500 __ Branch(&class_constructor, ne, at, Operand(zero_reg)); in Generate_CallFunction()
2507 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); in Generate_CallFunction()
2510 __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset)); in Generate_CallFunction()
2511 __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) | in Generate_CallFunction()
2513 __ Branch(&done_convert, ne, at, Operand(zero_reg)); in Generate_CallFunction()
2524 __ LoadGlobalProxy(a3); in Generate_CallFunction()
2527 __ Dlsa(at, sp, a0, kPointerSizeLog2); in Generate_CallFunction()
2528 __ ld(a3, MemOperand(at)); in Generate_CallFunction()
2529 __ JumpIfSmi(a3, &convert_to_object); in Generate_CallFunction()
2531 __ GetObjectType(a3, a4, a4); in Generate_CallFunction()
2532 __ Branch(&done_convert, hs, a4, Operand(FIRST_JS_RECEIVER_TYPE)); in Generate_CallFunction()
2535 __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex, in Generate_CallFunction()
2537 __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object); in Generate_CallFunction()
2538 __ bind(&convert_global_proxy); in Generate_CallFunction()
2541 __ LoadGlobalProxy(a3); in Generate_CallFunction()
2543 __ Branch(&convert_receiver); in Generate_CallFunction()
2545 __ bind(&convert_to_object); in Generate_CallFunction()
2551 __ SmiTag(a0); in Generate_CallFunction()
2552 __ Push(a0, a1); in Generate_CallFunction()
2553 __ mov(a0, a3); in Generate_CallFunction()
2554 __ Push(cp); in Generate_CallFunction()
2555 __ Call(masm->isolate()->builtins()->ToObject(), in Generate_CallFunction()
2557 __ Pop(cp); in Generate_CallFunction()
2558 __ mov(a3, v0); in Generate_CallFunction()
2559 __ Pop(a0, a1); in Generate_CallFunction()
2560 __ SmiUntag(a0); in Generate_CallFunction()
2562 __ ld(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_CallFunction()
2563 __ bind(&convert_receiver); in Generate_CallFunction()
2565 __ Dlsa(at, sp, a0, kPointerSizeLog2); in Generate_CallFunction()
2566 __ sd(a3, MemOperand(at)); in Generate_CallFunction()
2568 __ bind(&done_convert); in Generate_CallFunction()
2581 __ lw(a2, in Generate_CallFunction()
2585 __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION, in Generate_CallFunction()
2589 __ bind(&class_constructor); in Generate_CallFunction()
2592 __ Push(a1); in Generate_CallFunction()
2593 __ CallRuntime(Runtime::kThrowConstructorNonCallableError); in Generate_CallFunction()
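The conversion block above implements the sloppy-mode receiver rules: native and strict functions take the receiver untouched, undefined and null become the global proxy, other primitives go through ToObject, and JSReceivers pass through. A C++ sketch of the decision table (enumerators and return strings are illustrative):

  enum class ReceiverKind { kUndefinedOrNull, kOtherPrimitive, kJSReceiver };

  // What Generate_CallFunction does to the receiver before invoking.
  const char* ReceiverAction(bool strict_or_native, ReceiverKind kind) {
    if (strict_or_native) return "leave unchanged";
    switch (kind) {
      case ReceiverKind::kUndefinedOrNull: return "use global proxy";
      case ReceiverKind::kOtherPrimitive:  return "call ToObject";
      case ReceiverKind::kJSReceiver:      return "leave unchanged";
    }
    return "unreachable";
  }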
2604 __ AssertBoundFunction(a1); in Generate_CallBoundFunctionImpl()
2612 __ ld(at, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset)); in Generate_CallBoundFunctionImpl()
2613 __ Dlsa(a4, sp, a0, kPointerSizeLog2); in Generate_CallBoundFunctionImpl()
2614 __ sd(at, MemOperand(a4)); in Generate_CallBoundFunctionImpl()
2618 __ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset)); in Generate_CallBoundFunctionImpl()
2619 __ lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset)); in Generate_CallBoundFunctionImpl()
2631 __ dsll(a5, a4, kPointerSizeLog2); in Generate_CallBoundFunctionImpl()
2632 __ Dsubu(sp, sp, Operand(a5)); in Generate_CallBoundFunctionImpl()
2635 __ LoadRoot(at, Heap::kRealStackLimitRootIndex); in Generate_CallBoundFunctionImpl()
2636 __ Branch(&done, gt, sp, Operand(at)); // Signed comparison. in Generate_CallBoundFunctionImpl()
2638 __ Daddu(sp, sp, Operand(a5)); in Generate_CallBoundFunctionImpl()
2641 __ EnterFrame(StackFrame::INTERNAL); in Generate_CallBoundFunctionImpl()
2642 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_CallBoundFunctionImpl()
2644 __ bind(&done); in Generate_CallBoundFunctionImpl()
2650 __ mov(a5, zero_reg); in Generate_CallBoundFunctionImpl()
2651 __ bind(&loop); in Generate_CallBoundFunctionImpl()
2652 __ Branch(&done_loop, gt, a5, Operand(a0)); in Generate_CallBoundFunctionImpl()
2653 __ Dlsa(a6, sp, a4, kPointerSizeLog2); in Generate_CallBoundFunctionImpl()
2654 __ ld(at, MemOperand(a6)); in Generate_CallBoundFunctionImpl()
2655 __ Dlsa(a6, sp, a5, kPointerSizeLog2); in Generate_CallBoundFunctionImpl()
2656 __ sd(at, MemOperand(a6)); in Generate_CallBoundFunctionImpl()
2657 __ Daddu(a4, a4, Operand(1)); in Generate_CallBoundFunctionImpl()
2658 __ Daddu(a5, a5, Operand(1)); in Generate_CallBoundFunctionImpl()
2659 __ Branch(&loop); in Generate_CallBoundFunctionImpl()
2660 __ bind(&done_loop); in Generate_CallBoundFunctionImpl()
2666 __ lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset)); in Generate_CallBoundFunctionImpl()
2667 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in Generate_CallBoundFunctionImpl()
2668 __ bind(&loop); in Generate_CallBoundFunctionImpl()
2669 __ Dsubu(a4, a4, Operand(1)); in Generate_CallBoundFunctionImpl()
2670 __ Branch(&done_loop, lt, a4, Operand(zero_reg)); in Generate_CallBoundFunctionImpl()
2671 __ Dlsa(a5, a2, a4, kPointerSizeLog2); in Generate_CallBoundFunctionImpl()
2672 __ ld(at, MemOperand(a5)); in Generate_CallBoundFunctionImpl()
2673 __ Dlsa(a5, sp, a0, kPointerSizeLog2); in Generate_CallBoundFunctionImpl()
2674 __ sd(at, MemOperand(a5)); in Generate_CallBoundFunctionImpl()
2675 __ Daddu(a0, a0, Operand(1)); in Generate_CallBoundFunctionImpl()
2676 __ Branch(&loop); in Generate_CallBoundFunctionImpl()
2677 __ bind(&done_loop); in Generate_CallBoundFunctionImpl()
2681 __ ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset)); in Generate_CallBoundFunctionImpl()
2682 __ li(at, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny, in Generate_CallBoundFunctionImpl()
2684 __ ld(at, MemOperand(at)); in Generate_CallBoundFunctionImpl()
2685 __ Daddu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_CallBoundFunctionImpl()
2686 __ Jump(at); in Generate_CallBoundFunctionImpl()
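The two loops above splice the [[BoundArguments]] into the stack: the first relocates the existing arguments down by the bound count, and the second walks the bound array from the end so the bound arguments precede the call-site arguments in call order, with [[BoundThis]] already patched into the receiver slot by the sd above them. A container-level C++ sketch of the net effect:

  #include <cstdint>
  #include <vector>

  // args[0] is the receiver (boundThis); bound arguments land after it.
  std::vector<uint64_t> SpliceBoundArguments(
      std::vector<uint64_t> args, const std::vector<uint64_t>& bound) {
    args.insert(args.begin() + 1, bound.begin(), bound.end());
    return args;
  }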
2698 __ JumpIfSmi(a1, &non_callable); in Generate_Call()
2699 __ bind(&non_smi); in Generate_Call()
2700 __ GetObjectType(a1, t1, t2); in Generate_Call()
2701 __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode), in Generate_Call()
2703 __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode), in Generate_Call()
2707 __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset)); in Generate_Call()
2708 __ And(t1, t1, Operand(1 << Map::kIsCallable)); in Generate_Call()
2709 __ Branch(&non_callable, eq, t1, Operand(zero_reg)); in Generate_Call()
2711 __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE)); in Generate_Call()
2719 __ Push(a1); in Generate_Call()
2722 __ Daddu(a0, a0, 2); in Generate_Call()
2724 __ JumpToExternalReference( in Generate_Call()
2729 __ bind(&non_function); in Generate_Call()
2731 __ Dlsa(at, sp, a0, kPointerSizeLog2); in Generate_Call()
2732 __ sd(a1, MemOperand(at)); in Generate_Call()
2734 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1); in Generate_Call()
2735 __ Jump(masm->isolate()->builtins()->CallFunction( in Generate_Call()
2740 __ bind(&non_callable); in Generate_Call()
2743 __ Push(a1); in Generate_Call()
2744 __ CallRuntime(Runtime::kThrowCalledNonCallable); in Generate_Call()
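Generate_Call is the generic dispatcher: smis throw, JSFunction and JSBoundFunction go straight to their specialized builtins, anything whose map is not callable throws, proxies tail into the runtime with the proxy pushed and the argument count adjusted, and remaining exotic callables are re-dispatched through the CALL_AS_FUNCTION_DELEGATE with the object patched in as receiver. A C++ sketch of the routing (strings stand in for the jump targets):

  enum class Type { kJSFunction, kJSBoundFunction, kJSProxy, kOther };

  // Where Generate_Call sends a callee of the given shape.
  const char* RouteCall(bool is_smi, Type type, bool map_is_callable) {
    if (is_smi) return "Runtime::kThrowCalledNonCallable";
    if (type == Type::kJSFunction) return "CallFunction";
    if (type == Type::kJSBoundFunction) return "CallBoundFunction";
    if (!map_is_callable) return "Runtime::kThrowCalledNonCallable";
    if (type == Type::kJSProxy) return "Runtime::kJSProxyCall";
    return "CallFunction via CALL_AS_FUNCTION_DELEGATE";
  }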
2764 __ ld(spread, MemOperand(sp, 0)); in CheckSpreadAndPushToStack()
2765 __ JumpIfSmi(spread, &runtime_call); in CheckSpreadAndPushToStack()
2766 __ ld(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset)); in CheckSpreadAndPushToStack()
2767 __ ld(native_context, NativeContextMemOperand()); in CheckSpreadAndPushToStack()
2770 __ lbu(scratch, FieldMemOperand(spread_map, Map::kInstanceTypeOffset)); in CheckSpreadAndPushToStack()
2771 __ Branch(&runtime_call, ne, scratch, Operand(JS_ARRAY_TYPE)); in CheckSpreadAndPushToStack()
2774 __ ld(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset)); in CheckSpreadAndPushToStack()
2775 __ ld(scratch2, ContextMemOperand(native_context, in CheckSpreadAndPushToStack()
2777 __ Branch(&runtime_call, ne, scratch, Operand(scratch2)); in CheckSpreadAndPushToStack()
2781 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex); in CheckSpreadAndPushToStack()
2782 __ ld(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); in CheckSpreadAndPushToStack()
2783 __ Branch(&runtime_call, ne, scratch, in CheckSpreadAndPushToStack()
2787 __ ld(scratch, in CheckSpreadAndPushToStack()
2790 __ ld(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); in CheckSpreadAndPushToStack()
2791 __ ld(scratch2, in CheckSpreadAndPushToStack()
2794 __ Branch(&runtime_call, ne, scratch, Operand(scratch2)); in CheckSpreadAndPushToStack()
2799 __ lbu(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset)); in CheckSpreadAndPushToStack()
2800 __ DecodeField<Map::ElementsKindBits>(scratch); in CheckSpreadAndPushToStack()
2801 __ Branch(&runtime_call, hi, scratch, Operand(FAST_HOLEY_ELEMENTS)); in CheckSpreadAndPushToStack()
2803 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_SMI_ELEMENTS)); in CheckSpreadAndPushToStack()
2804 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_ELEMENTS)); in CheckSpreadAndPushToStack()
2806 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); in CheckSpreadAndPushToStack()
2807 __ ld(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); in CheckSpreadAndPushToStack()
2808 __ Branch(&runtime_call, ne, scratch, in CheckSpreadAndPushToStack()
2811 __ bind(&no_protector_check); in CheckSpreadAndPushToStack()
2813 __ lw(spread_len, UntagSmiFieldMemOperand(spread, JSArray::kLengthOffset)); in CheckSpreadAndPushToStack()
2814 __ ld(spread, FieldMemOperand(spread, JSArray::kElementsOffset)); in CheckSpreadAndPushToStack()
2815 __ Branch(&push_args); in CheckSpreadAndPushToStack()
2817 __ bind(&runtime_call); in CheckSpreadAndPushToStack()
2821 __ SmiTag(argc); in CheckSpreadAndPushToStack()
2822 __ Push(constructor, new_target, argc, spread); in CheckSpreadAndPushToStack()
2823 __ CallRuntime(Runtime::kSpreadIterableFixed); in CheckSpreadAndPushToStack()
2824 __ mov(spread, v0); in CheckSpreadAndPushToStack()
2825 __ Pop(constructor, new_target, argc); in CheckSpreadAndPushToStack()
2826 __ SmiUntag(argc); in CheckSpreadAndPushToStack()
2831 __ lw(spread_len, in CheckSpreadAndPushToStack()
2834 __ bind(&push_args); in CheckSpreadAndPushToStack()
2836 __ Daddu(argc, argc, spread_len); in CheckSpreadAndPushToStack()
2837 __ Dsubu(argc, argc, Operand(1)); in CheckSpreadAndPushToStack()
2840 __ Pop(scratch); in CheckSpreadAndPushToStack()
2848 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex); in CheckSpreadAndPushToStack()
2851 __ Dsubu(scratch, sp, scratch); in CheckSpreadAndPushToStack()
2853 __ dsll(at, spread_len, kPointerSizeLog2); in CheckSpreadAndPushToStack()
2854 __ Branch(&done, gt, scratch, Operand(at)); // Signed comparison. in CheckSpreadAndPushToStack()
2855 __ TailCallRuntime(Runtime::kThrowStackOverflow); in CheckSpreadAndPushToStack()
2856 __ bind(&done); in CheckSpreadAndPushToStack()
2861 __ mov(scratch, zero_reg); in CheckSpreadAndPushToStack()
2863 __ bind(&loop); in CheckSpreadAndPushToStack()
2864 __ Branch(&done, eq, scratch, Operand(spread_len)); in CheckSpreadAndPushToStack()
2865 __ Dlsa(scratch2, spread, scratch, kPointerSizeLog2); in CheckSpreadAndPushToStack()
2866 __ ld(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize)); in CheckSpreadAndPushToStack()
2867 __ JumpIfNotRoot(scratch2, Heap::kTheHoleValueRootIndex, &push); in CheckSpreadAndPushToStack()
2868 __ LoadRoot(scratch2, Heap::kUndefinedValueRootIndex); in CheckSpreadAndPushToStack()
2869 __ bind(&push); in CheckSpreadAndPushToStack()
2870 __ Push(scratch2); in CheckSpreadAndPushToStack()
2871 __ Daddu(scratch, scratch, Operand(1)); in CheckSpreadAndPushToStack()
2872 __ Branch(&loop); in CheckSpreadAndPushToStack()
2873 __ bind(&done); in CheckSpreadAndPushToStack()
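CheckSpreadAndPushToStack only unpacks the spread inline when it is provably a plain array: a JSArray with the unmodified initial prototype, an intact array-iterator protector and initial iterator map, a fast elements kind no higher than FAST_HOLEY_ELEMENTS, and, for the holey kinds, a valid array protector. Anything else falls back to Runtime::kSpreadIterableFixed. A C++ sketch of the predicate, with each flag standing for one of the map/protector tests above:

  // True when the spread argument may bypass the iterator protocol.
  bool SpreadFastPath(bool is_js_array, bool has_initial_array_prototype,
                      bool iterator_protectors_intact,
                      bool fast_elements_kind, bool holey_kind,
                      bool array_protector_intact) {
    if (!is_js_array || !has_initial_array_prototype) return false;
    if (!iterator_protectors_intact) return false;
    if (!fast_elements_kind) return false;  // kind <= FAST_HOLEY_ELEMENTS
    if (holey_kind && !array_protector_intact) return false;
    return true;
  }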
2885 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); in Generate_CallWithSpread()
2887 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, in Generate_CallWithSpread()
2898 __ AssertFunction(a1); in Generate_ConstructFunction()
2902 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); in Generate_ConstructFunction()
2906 __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_ConstructFunction()
2907 __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset)); in Generate_ConstructFunction()
2908 __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_ConstructFunction()
2909 __ Jump(at); in Generate_ConstructFunction()
2919 __ AssertBoundFunction(a1); in Generate_ConstructBoundFunction()
2922 __ ld(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset)); in Generate_ConstructBoundFunction()
2923 __ lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset)); in Generate_ConstructBoundFunction()
2936 __ dsll(a5, a4, kPointerSizeLog2); in Generate_ConstructBoundFunction()
2937 __ Dsubu(sp, sp, Operand(a5)); in Generate_ConstructBoundFunction()
2940 __ LoadRoot(at, Heap::kRealStackLimitRootIndex); in Generate_ConstructBoundFunction()
2941 __ Branch(&done, gt, sp, Operand(at)); // Signed comparison. in Generate_ConstructBoundFunction()
2943 __ Daddu(sp, sp, Operand(a5)); in Generate_ConstructBoundFunction()
2946 __ EnterFrame(StackFrame::INTERNAL); in Generate_ConstructBoundFunction()
2947 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_ConstructBoundFunction()
2949 __ bind(&done); in Generate_ConstructBoundFunction()
2955 __ mov(a5, zero_reg); in Generate_ConstructBoundFunction()
2956 __ bind(&loop); in Generate_ConstructBoundFunction()
2957 __ Branch(&done_loop, ge, a5, Operand(a0)); in Generate_ConstructBoundFunction()
2958 __ Dlsa(a6, sp, a4, kPointerSizeLog2); in Generate_ConstructBoundFunction()
2959 __ ld(at, MemOperand(a6)); in Generate_ConstructBoundFunction()
2960 __ Dlsa(a6, sp, a5, kPointerSizeLog2); in Generate_ConstructBoundFunction()
2961 __ sd(at, MemOperand(a6)); in Generate_ConstructBoundFunction()
2962 __ Daddu(a4, a4, Operand(1)); in Generate_ConstructBoundFunction()
2963 __ Daddu(a5, a5, Operand(1)); in Generate_ConstructBoundFunction()
2964 __ Branch(&loop); in Generate_ConstructBoundFunction()
2965 __ bind(&done_loop); in Generate_ConstructBoundFunction()
2971 __ lw(a4, UntagSmiFieldMemOperand(a2, FixedArray::kLengthOffset)); in Generate_ConstructBoundFunction()
2972 __ Daddu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in Generate_ConstructBoundFunction()
2973 __ bind(&loop); in Generate_ConstructBoundFunction()
2974 __ Dsubu(a4, a4, Operand(1)); in Generate_ConstructBoundFunction()
2975 __ Branch(&done_loop, lt, a4, Operand(zero_reg)); in Generate_ConstructBoundFunction()
2976 __ Dlsa(a5, a2, a4, kPointerSizeLog2); in Generate_ConstructBoundFunction()
2977 __ ld(at, MemOperand(a5)); in Generate_ConstructBoundFunction()
2978 __ Dlsa(a5, sp, a0, kPointerSizeLog2); in Generate_ConstructBoundFunction()
2979 __ sd(at, MemOperand(a5)); in Generate_ConstructBoundFunction()
2980 __ Daddu(a0, a0, Operand(1)); in Generate_ConstructBoundFunction()
2981 __ Branch(&loop); in Generate_ConstructBoundFunction()
2982 __ bind(&done_loop); in Generate_ConstructBoundFunction()
2988 __ Branch(&skip_load, ne, a1, Operand(a3)); in Generate_ConstructBoundFunction()
2989 __ ld(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset)); in Generate_ConstructBoundFunction()
2990 __ bind(&skip_load); in Generate_ConstructBoundFunction()
2994 __ ld(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset)); in Generate_ConstructBoundFunction()
2995 __ li(at, Operand(ExternalReference(Builtins::kConstruct, masm->isolate()))); in Generate_ConstructBoundFunction()
2996 __ ld(at, MemOperand(at)); in Generate_ConstructBoundFunction()
2997 __ Daddu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_ConstructBoundFunction()
2998 __ Jump(at); in Generate_ConstructBoundFunction()
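The &skip_load pair above handles new.target: when it still equals the bound function itself, it is swapped for the bound target function so construction observes the unwrapped constructor; an explicitly different new.target is left alone. Sketched in C++:

  #include <cstdint>

  // new.target fixup performed before tail-calling Construct.
  uint64_t FixupNewTarget(uint64_t bound_function, uint64_t new_target,
                          uint64_t bound_target_function) {
    return new_target == bound_function ? bound_target_function : new_target;
  }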
3011 __ Push(a1, a3); in Generate_ConstructProxy()
3013 __ Daddu(a0, a0, Operand(3)); in Generate_ConstructProxy()
3015 __ JumpToExternalReference( in Generate_ConstructProxy()
3030 __ JumpIfSmi(a1, &non_constructor); in Generate_Construct()
3033 __ ld(t1, FieldMemOperand(a1, HeapObject::kMapOffset)); in Generate_Construct()
3034 __ lbu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset)); in Generate_Construct()
3035 __ Jump(masm->isolate()->builtins()->ConstructFunction(), in Generate_Construct()
3039 __ lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset)); in Generate_Construct()
3040 __ And(t3, t3, Operand(1 << Map::kIsConstructor)); in Generate_Construct()
3041 __ Branch(&non_constructor, eq, t3, Operand(zero_reg)); in Generate_Construct()
3045 __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(), in Generate_Construct()
3049 __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET, in Generate_Construct()
3055 __ Dlsa(at, sp, a0, kPointerSizeLog2); in Generate_Construct()
3056 __ sd(a1, MemOperand(at)); in Generate_Construct()
3058 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1); in Generate_Construct()
3059 __ Jump(masm->isolate()->builtins()->CallFunction(), in Generate_Construct()
3065 __ bind(&non_constructor); in Generate_Construct()
3066 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), in Generate_Construct()
3080 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate_ConstructWithSpread()
3089 __ SmiTag(a0); in Generate_AllocateInNewSpace()
3090 __ Push(a0); in Generate_AllocateInNewSpace()
3091 __ Move(cp, Smi::kZero); in Generate_AllocateInNewSpace()
3092 __ TailCallRuntime(Runtime::kAllocateInNewSpace); in Generate_AllocateInNewSpace()
3101 __ SmiTag(a0); in Generate_AllocateInOldSpace()
3102 __ Move(a1, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE))); in Generate_AllocateInOldSpace()
3103 __ Push(a0, a1); in Generate_AllocateInOldSpace()
3104 __ Move(cp, Smi::kZero); in Generate_AllocateInOldSpace()
3105 __ TailCallRuntime(Runtime::kAllocateInTargetSpace); in Generate_AllocateInOldSpace()
3114 __ Push(a0); in Generate_Abort()
3115 __ Move(cp, Smi::kZero); in Generate_Abort()
3116 __ TailCallRuntime(Runtime::kAbort); in Generate_Abort()
3131 __ Branch(&dont_adapt_arguments, eq, a2, in Generate_ArgumentsAdaptorTrampoline()
3134 __ Branch(&too_few, Uless, a0, Operand(a2)); in Generate_ArgumentsAdaptorTrampoline()
3141 __ bind(&enough); in Generate_ArgumentsAdaptorTrampoline()
3146 __ SmiScale(a0, a0, kPointerSizeLog2); in Generate_ArgumentsAdaptorTrampoline()
3147 __ Daddu(a0, fp, a0); in Generate_ArgumentsAdaptorTrampoline()
3149 __ Daddu(a0, a0, Operand(2 * kPointerSize)); in Generate_ArgumentsAdaptorTrampoline()
3151 __ dsll(a4, a2, kPointerSizeLog2); in Generate_ArgumentsAdaptorTrampoline()
3152 __ dsubu(a4, a0, a4); in Generate_ArgumentsAdaptorTrampoline()
3162 __ bind(&copy); in Generate_ArgumentsAdaptorTrampoline()
3163 __ ld(a5, MemOperand(a0)); in Generate_ArgumentsAdaptorTrampoline()
3164 __ push(a5); in Generate_ArgumentsAdaptorTrampoline()
3165 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a4)); in Generate_ArgumentsAdaptorTrampoline()
3166 __ daddiu(a0, a0, -kPointerSize); // In delay slot. in Generate_ArgumentsAdaptorTrampoline()
3168 __ jmp(&invoke); in Generate_ArgumentsAdaptorTrampoline()
3172 __ bind(&too_few); in Generate_ArgumentsAdaptorTrampoline()
3181 __ SmiScale(a0, a0, kPointerSizeLog2); in Generate_ArgumentsAdaptorTrampoline()
3182 __ Daddu(a0, fp, a0); in Generate_ArgumentsAdaptorTrampoline()
3184 __ Daddu(a0, a0, Operand(2 * kPointerSize)); in Generate_ArgumentsAdaptorTrampoline()
3186 __ Daddu(a7, fp, kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
3195 __ bind(&copy); in Generate_ArgumentsAdaptorTrampoline()
3196 __ ld(a4, MemOperand(a0)); // Adjusted above for return addr and receiver. in Generate_ArgumentsAdaptorTrampoline()
3197 __ Dsubu(sp, sp, kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
3198 __ Dsubu(a0, a0, kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
3199 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a7)); in Generate_ArgumentsAdaptorTrampoline()
3200 __ sd(a4, MemOperand(sp)); // In the delay slot. in Generate_ArgumentsAdaptorTrampoline()
3206 __ LoadRoot(a5, Heap::kUndefinedValueRootIndex); in Generate_ArgumentsAdaptorTrampoline()
3207 __ dsll(a6, a2, kPointerSizeLog2); in Generate_ArgumentsAdaptorTrampoline()
3208 __ Dsubu(a4, fp, Operand(a6)); in Generate_ArgumentsAdaptorTrampoline()
3210 __ Dsubu(a4, a4, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + in Generate_ArgumentsAdaptorTrampoline()
3214 __ bind(&fill); in Generate_ArgumentsAdaptorTrampoline()
3215 __ Dsubu(sp, sp, kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
3216 __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a4)); in Generate_ArgumentsAdaptorTrampoline()
3217 __ sd(a5, MemOperand(sp)); in Generate_ArgumentsAdaptorTrampoline()
3221 __ bind(&invoke); in Generate_ArgumentsAdaptorTrampoline()
3222 __ mov(a0, a2); in Generate_ArgumentsAdaptorTrampoline()
3226 __ ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); in Generate_ArgumentsAdaptorTrampoline()
3227 __ Call(a4); in Generate_ArgumentsAdaptorTrampoline()
3234 __ Ret(); in Generate_ArgumentsAdaptorTrampoline()
3239 __ bind(&dont_adapt_arguments); in Generate_ArgumentsAdaptorTrampoline()
3240 __ ld(a4, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); in Generate_ArgumentsAdaptorTrampoline()
3241 __ Jump(a4); in Generate_ArgumentsAdaptorTrampoline()
3243 __ bind(&stack_overflow); in Generate_ArgumentsAdaptorTrampoline()
3246 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_ArgumentsAdaptorTrampoline()
3247 __ break_(0xCC); in Generate_ArgumentsAdaptorTrampoline()
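The adaptor trampoline reconciles actual and expected argument counts: with enough arguments it copies the first expected actuals into the new frame, and with too few it copies what exists and pads the remainder with undefined (the &fill loop). At the level of the callee's declared parameters, a C++ sketch of the effect:

  #include <algorithm>
  #include <cstddef>
  #include <cstdint>
  #include <vector>

  // Arguments as seen by the callee's formals after adaptation.
  std::vector<uint64_t> AdaptArguments(const std::vector<uint64_t>& actual,
                                       std::size_t expected,
                                       uint64_t undefined) {
    std::size_t copied = std::min(actual.size(), expected);
    std::vector<uint64_t> adapted(actual.begin(), actual.begin() + copied);
    adapted.resize(expected, undefined);  // pad missing parameters
    return adapted;
  }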
3251 #undef __