
Lines matching the query full:__ (every use of the __ assembler shorthand, listed with its source line number and the enclosing generator function):

16 #define __ ACCESS_MASM(masm)  macro
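Every match below goes through this shorthand. In V8's codegen files ACCESS_MASM expands to a member access on the MacroAssembler, so each "__ Foo(...)" line is an ordinary C++ call that emits MIPS instructions into the code buffer. A minimal sketch of the expansion:

    #define ACCESS_MASM(masm) masm->
    #define __ ACCESS_MASM(masm)
    // so a line such as
    //   __ Push(a0, a1, a3);
    // compiles as
    //   masm->Push(a0, a1, a3);  // appends instructions to the buffer
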
29 __ AssertFunction(a1); in Generate_Adaptor()
35 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); in Generate_Adaptor()
40 __ Addu(a0, a0, num_extra_args + 1); in Generate_Adaptor()
43 __ SmiTag(a0); in Generate_Adaptor()
44 __ Push(a0, a1, a3); in Generate_Adaptor()
45 __ SmiUntag(a0); in Generate_Adaptor()
47 __ JumpToExternalReference(ExternalReference(address, masm->isolate()), in Generate_Adaptor()
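Generate_Adaptor tags the argument count as a Smi before pushing it (line 43) and untags it after (line 45). On 32-bit targets a Smi is the integer shifted left one bit with a zero tag bit, so both operations are single shifts. A sketch of the encoding, with the helper names invented here:

    #include <cstdint>
    // 32-bit Smi encoding (kSmiTag == 0, kSmiTagSize == 1); illustrative only.
    int32_t SmiTag(int32_t value) { return value << 1; }  // low bit 0 marks a Smi
    int32_t SmiUntag(int32_t smi) { return smi >> 1; }    // arithmetic shift restores it
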
55 __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result); in GenerateLoadInternalArrayFunction()
61 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result); in GenerateLoadArrayFunction()
77 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate_InternalArrayCode()
78 __ SmiTst(a2, t0); in Generate_InternalArrayCode()
79 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, t0, in Generate_InternalArrayCode()
81 __ GetObjectType(a2, a3, t0); in Generate_InternalArrayCode()
82 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction, t0, in Generate_InternalArrayCode()
90 __ TailCallStub(&stub); in Generate_InternalArrayCode()
106 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate_ArrayCode()
107 __ SmiTst(a2, t0); in Generate_ArrayCode()
108 __ Assert(ne, kUnexpectedInitialMapForArrayFunction1, t0, in Generate_ArrayCode()
110 __ GetObjectType(a2, a3, t0); in Generate_ArrayCode()
111 __ Assert(eq, kUnexpectedInitialMapForArrayFunction2, t0, in Generate_ArrayCode()
117 __ mov(a3, a1); in Generate_ArrayCode()
118 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); in Generate_ArrayCode()
120 __ TailCallStub(&stub); in Generate_ArrayCode()
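Both array code generators open with the same debug-mode sanity check: load the function's initial map, assert it is not a Smi (SmiTst plus Assert(ne, ...)), then assert its instance type (GetObjectType plus Assert(eq, ...)). SmiTst just masks the tag bit; an illustrative rendering:

    #include <cstdint>
    // SmiTst(reg, scratch) computes scratch = reg & kSmiTagMask (mask 1 on
    // 32-bit); a zero result means "reg holds a Smi", so Assert(ne, ..., t0,
    // Operand(zero_reg)) enforces that the field holds a heap object.
    bool IsSmi(int32_t value) { return (value & 1) == 0; }  // illustrative only
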
139 __ LoadRoot(t2, root_index); in Generate_MathMaxMin()
140 __ ldc1(f0, FieldMemOperand(t2, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
143 __ mov(a3, a0); in Generate_MathMaxMin()
144 __ bind(&loop); in Generate_MathMaxMin()
147 __ Subu(a3, a3, Operand(1)); in Generate_MathMaxMin()
148 __ Branch(&done_loop, lt, a3, Operand(zero_reg)); in Generate_MathMaxMin()
151 __ Lsa(at, sp, a3, kPointerSizeLog2); in Generate_MathMaxMin()
152 __ lw(a2, MemOperand(at)); in Generate_MathMaxMin()
157 __ bind(&convert); in Generate_MathMaxMin()
158 __ JumpIfSmi(a2, &convert_smi); in Generate_MathMaxMin()
159 __ lw(t0, FieldMemOperand(a2, HeapObject::kMapOffset)); in Generate_MathMaxMin()
160 __ JumpIfRoot(t0, Heap::kHeapNumberMapRootIndex, &convert_number); in Generate_MathMaxMin()
164 __ SmiTag(a0); in Generate_MathMaxMin()
165 __ SmiTag(a3); in Generate_MathMaxMin()
166 __ EnterBuiltinFrame(cp, a1, a0); in Generate_MathMaxMin()
167 __ Push(t2, a3); in Generate_MathMaxMin()
168 __ mov(a0, a2); in Generate_MathMaxMin()
169 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); in Generate_MathMaxMin()
170 __ mov(a2, v0); in Generate_MathMaxMin()
171 __ Pop(t2, a3); in Generate_MathMaxMin()
172 __ LeaveBuiltinFrame(cp, a1, a0); in Generate_MathMaxMin()
173 __ SmiUntag(a3); in Generate_MathMaxMin()
174 __ SmiUntag(a0); in Generate_MathMaxMin()
178 __ JumpIfSmi(t2, &restore_smi); in Generate_MathMaxMin()
179 __ ldc1(f0, FieldMemOperand(t2, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
180 __ jmp(&done_restore); in Generate_MathMaxMin()
181 __ bind(&restore_smi); in Generate_MathMaxMin()
182 __ SmiToDoubleFPURegister(t2, f0, t0); in Generate_MathMaxMin()
183 __ bind(&done_restore); in Generate_MathMaxMin()
186 __ jmp(&convert); in Generate_MathMaxMin()
187 __ bind(&convert_number); in Generate_MathMaxMin()
188 __ ldc1(f2, FieldMemOperand(a2, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
189 __ jmp(&done_convert); in Generate_MathMaxMin()
190 __ bind(&convert_smi); in Generate_MathMaxMin()
191 __ SmiToDoubleFPURegister(a2, f2, t0); in Generate_MathMaxMin()
192 __ bind(&done_convert); in Generate_MathMaxMin()
199 __ BranchF(nullptr, &compare_nan, eq, f0, f2); in Generate_MathMaxMin()
200 __ Move(t0, t1, f0); in Generate_MathMaxMin()
202 __ Float64Min(f0, f0, f2, &ool_min); in Generate_MathMaxMin()
205 __ Float64Max(f0, f0, f2, &ool_max); in Generate_MathMaxMin()
207 __ jmp(&done); in Generate_MathMaxMin()
209 __ bind(&ool_min); in Generate_MathMaxMin()
210 __ Float64MinOutOfLine(f0, f0, f2); in Generate_MathMaxMin()
211 __ jmp(&done); in Generate_MathMaxMin()
213 __ bind(&ool_max); in Generate_MathMaxMin()
214 __ Float64MaxOutOfLine(f0, f0, f2); in Generate_MathMaxMin()
216 __ bind(&done); in Generate_MathMaxMin()
217 __ Move(at, t8, f0); in Generate_MathMaxMin()
218 __ Branch(&set_value, ne, t0, Operand(at)); in Generate_MathMaxMin()
219 __ Branch(&set_value, ne, t1, Operand(t8)); in Generate_MathMaxMin()
220 __ jmp(&loop); in Generate_MathMaxMin()
221 __ bind(&set_value); in Generate_MathMaxMin()
222 __ mov(t2, a2); in Generate_MathMaxMin()
223 __ jmp(&loop); in Generate_MathMaxMin()
226 __ bind(&compare_nan); in Generate_MathMaxMin()
227 __ LoadRoot(t2, Heap::kNanValueRootIndex); in Generate_MathMaxMin()
228 __ ldc1(f0, FieldMemOperand(t2, HeapNumber::kValueOffset)); in Generate_MathMaxMin()
229 __ jmp(&loop); in Generate_MathMaxMin()
232 __ bind(&done_loop); in Generate_MathMaxMin()
234 __ Addu(a0, a0, Operand(1)); in Generate_MathMaxMin()
235 __ Lsa(sp, sp, a0, kPointerSizeLog2); in Generate_MathMaxMin()
236 __ Ret(USE_DELAY_SLOT); in Generate_MathMaxMin()
237 __ mov(v0, t2); // In delay slot. in Generate_MathMaxMin()
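The loop above scans the arguments from last to first (a3 counts down), converts each to a double (inline for Smis and HeapNumbers, via the ToNumber builtin otherwise, saving and restoring the accumulator around the call), and keeps the running result in f0 alongside the boxed value in t2; unordered comparisons bail to compare_nan, and equal operands take the out-of-line Float64Min/Float64Max paths that resolve signed-zero ties. A plain C++ sketch of the same algorithm, assuming every argument has already passed through ToNumber:

    #include <cmath>
    #include <limits>

    // Illustrative: acc starts from the -Infinity/+Infinity root loaded at
    // line 139; zero ties mirror the Float64{Min,Max}OutOfLine results.
    double MathMaxMinLoop(const double* args, int argc, bool is_max) {
      double acc = is_max ? -std::numeric_limits<double>::infinity()
                          : std::numeric_limits<double>::infinity();
      for (int i = argc - 1; i >= 0; --i) {        // Subu/Branch on a3
        double x = args[i];
        if (std::isnan(x) || std::isnan(acc))      // BranchF to &compare_nan
          return std::numeric_limits<double>::quiet_NaN();
        if (x == 0.0 && acc == 0.0) {              // &ool_min / &ool_max
          if (is_max ? !std::signbit(x) : std::signbit(x)) acc = x;
        } else if (is_max ? (x > acc) : (x < acc)) {
          acc = x;
        }
      }
      return acc;                                  // boxed result returned in v0
    }
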
254 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg)); in Generate_NumberConstructor()
255 __ Subu(t1, a0, Operand(1)); // In delay slot. in Generate_NumberConstructor()
256 __ mov(t0, a0); // Store argc in t0. in Generate_NumberConstructor()
257 __ Lsa(at, sp, t1, kPointerSizeLog2); in Generate_NumberConstructor()
258 __ lw(a0, MemOperand(at)); in Generate_NumberConstructor()
264 __ SmiTag(t0); in Generate_NumberConstructor()
265 __ EnterBuiltinFrame(cp, a1, t0); in Generate_NumberConstructor()
266 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); in Generate_NumberConstructor()
267 __ LeaveBuiltinFrame(cp, a1, t0); in Generate_NumberConstructor()
268 __ SmiUntag(t0); in Generate_NumberConstructor()
273 __ Lsa(sp, sp, t0, kPointerSizeLog2); in Generate_NumberConstructor()
274 __ DropAndRet(1); in Generate_NumberConstructor()
278 __ bind(&no_arguments); in Generate_NumberConstructor()
279 __ Move(v0, Smi::kZero); in Generate_NumberConstructor()
280 __ DropAndRet(1); in Generate_NumberConstructor()
296 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); in Generate_NumberConstructor_ConstructStub()
301 __ mov(t0, a0); // Store argc in t0. in Generate_NumberConstructor_ConstructStub()
302 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg)); in Generate_NumberConstructor_ConstructStub()
303 __ Subu(t1, a0, Operand(1)); // In delay slot. in Generate_NumberConstructor_ConstructStub()
304 __ Lsa(at, sp, t1, kPointerSizeLog2); in Generate_NumberConstructor_ConstructStub()
305 __ lw(a0, MemOperand(at)); in Generate_NumberConstructor_ConstructStub()
306 __ jmp(&done); in Generate_NumberConstructor_ConstructStub()
307 __ bind(&no_arguments); in Generate_NumberConstructor_ConstructStub()
308 __ Move(a0, Smi::kZero); in Generate_NumberConstructor_ConstructStub()
309 __ bind(&done); in Generate_NumberConstructor_ConstructStub()
315 __ JumpIfSmi(a0, &done_convert); in Generate_NumberConstructor_ConstructStub()
316 __ GetObjectType(a0, a2, a2); in Generate_NumberConstructor_ConstructStub()
317 __ Branch(&done_convert, eq, a2, Operand(HEAP_NUMBER_TYPE)); in Generate_NumberConstructor_ConstructStub()
320 __ SmiTag(t0); in Generate_NumberConstructor_ConstructStub()
321 __ EnterBuiltinFrame(cp, a1, t0); in Generate_NumberConstructor_ConstructStub()
322 __ Push(a3); in Generate_NumberConstructor_ConstructStub()
323 __ Call(masm->isolate()->builtins()->ToNumber(), RelocInfo::CODE_TARGET); in Generate_NumberConstructor_ConstructStub()
324 __ Move(a0, v0); in Generate_NumberConstructor_ConstructStub()
325 __ Pop(a3); in Generate_NumberConstructor_ConstructStub()
326 __ LeaveBuiltinFrame(cp, a1, t0); in Generate_NumberConstructor_ConstructStub()
327 __ SmiUntag(t0); in Generate_NumberConstructor_ConstructStub()
329 __ bind(&done_convert); in Generate_NumberConstructor_ConstructStub()
334 __ Branch(&new_object, ne, a1, Operand(a3)); in Generate_NumberConstructor_ConstructStub()
337 __ AllocateJSValue(v0, a1, a0, a2, t1, &new_object); in Generate_NumberConstructor_ConstructStub()
338 __ jmp(&drop_frame_and_ret); in Generate_NumberConstructor_ConstructStub()
341 __ bind(&new_object); in Generate_NumberConstructor_ConstructStub()
344 __ SmiTag(t0); in Generate_NumberConstructor_ConstructStub()
345 __ EnterBuiltinFrame(cp, a1, t0); in Generate_NumberConstructor_ConstructStub()
346 __ Push(a0); // first argument in Generate_NumberConstructor_ConstructStub()
347 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(), in Generate_NumberConstructor_ConstructStub()
349 __ Pop(a0); in Generate_NumberConstructor_ConstructStub()
350 __ LeaveBuiltinFrame(cp, a1, t0); in Generate_NumberConstructor_ConstructStub()
351 __ SmiUntag(t0); in Generate_NumberConstructor_ConstructStub()
353 __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset)); in Generate_NumberConstructor_ConstructStub()
355 __ bind(&drop_frame_and_ret); in Generate_NumberConstructor_ConstructStub()
357 __ Lsa(sp, sp, t0, kPointerSizeLog2); in Generate_NumberConstructor_ConstructStub()
358 __ DropAndRet(1); in Generate_NumberConstructor_ConstructStub()
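When Number is called as a constructor, the argument is converted with ToNumber (lines 315-329); if new.target equals the function itself, a JSValue wrapper is allocated (line 337) and the number stored at JSValue::kValueOffset (line 353), otherwise FastNewObject builds the subclass instance first. An illustrative 32-bit layout of the wrapper:

    #include <cstdint>
    // Sketch only: AllocateJSValue fills the first three fields; the sw at
    // line 353 writes the converted number into the last one.
    struct JSValue {
      intptr_t map;         // from the target function's initial map
      intptr_t properties;  // empty fixed array
      intptr_t elements;    // empty fixed array
      intptr_t value;       // JSValue::kValueOffset: holds ToNumber(x)
    };
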
376 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg)); in Generate_StringConstructor()
377 __ Subu(t1, a0, Operand(1)); in Generate_StringConstructor()
378 __ mov(t0, a0); // Store argc in t0. in Generate_StringConstructor()
379 __ Lsa(at, sp, t1, kPointerSizeLog2); in Generate_StringConstructor()
380 __ lw(a0, MemOperand(at)); in Generate_StringConstructor()
387 __ JumpIfSmi(a0, &to_string); in Generate_StringConstructor()
388 __ GetObjectType(a0, t1, t1); in Generate_StringConstructor()
390 __ Subu(t1, t1, Operand(FIRST_NONSTRING_TYPE)); in Generate_StringConstructor()
391 __ Branch(&symbol_descriptive_string, eq, t1, Operand(zero_reg)); in Generate_StringConstructor()
392 __ Branch(&to_string, gt, t1, Operand(zero_reg)); in Generate_StringConstructor()
393 __ mov(v0, a0); in Generate_StringConstructor()
394 __ jmp(&drop_frame_and_ret); in Generate_StringConstructor()
398 __ bind(&no_arguments); in Generate_StringConstructor()
400 __ LoadRoot(v0, Heap::kempty_stringRootIndex); in Generate_StringConstructor()
401 __ DropAndRet(1); in Generate_StringConstructor()
405 __ bind(&to_string); in Generate_StringConstructor()
408 __ SmiTag(t0); in Generate_StringConstructor()
409 __ EnterBuiltinFrame(cp, a1, t0); in Generate_StringConstructor()
410 __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET); in Generate_StringConstructor()
411 __ LeaveBuiltinFrame(cp, a1, t0); in Generate_StringConstructor()
412 __ SmiUntag(t0); in Generate_StringConstructor()
414 __ jmp(&drop_frame_and_ret); in Generate_StringConstructor()
417 __ bind(&symbol_descriptive_string); in Generate_StringConstructor()
419 __ Lsa(sp, sp, t0, kPointerSizeLog2); in Generate_StringConstructor()
420 __ Drop(1); in Generate_StringConstructor()
421 __ Push(a0); in Generate_StringConstructor()
422 __ TailCallRuntime(Runtime::kSymbolDescriptiveString); in Generate_StringConstructor()
425 __ bind(&drop_frame_and_ret); in Generate_StringConstructor()
427 __ Lsa(sp, sp, t0, kPointerSizeLog2); in Generate_StringConstructor()
428 __ DropAndRet(1); in Generate_StringConstructor()
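The three-way branch at lines 390-392 leans on the instance-type ordering: all string types sort below FIRST_NONSTRING_TYPE, and FIRST_NONSTRING_TYPE is the Symbol type, so one subtraction classifies the argument. Sketched:

    enum Classification { kString, kSymbol, kOther };
    // Illustrative: first_nonstring_type is the Symbol instance type.
    Classification Classify(int instance_type, int first_nonstring_type) {
      int t = instance_type - first_nonstring_type;  // Subu, line 390
      if (t == 0) return kSymbol;  // Runtime::kSymbolDescriptiveString (line 422)
      if (t > 0) return kOther;    // ToString builtin (line 410)
      return kString;              // already a string: returned as-is (line 393)
    }
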
445 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); in Generate_StringConstructor_ConstructStub()
450 __ mov(t0, a0); // Store argc in t0. in Generate_StringConstructor_ConstructStub()
451 __ Branch(USE_DELAY_SLOT, &no_arguments, eq, a0, Operand(zero_reg)); in Generate_StringConstructor_ConstructStub()
452 __ Subu(t1, a0, Operand(1)); in Generate_StringConstructor_ConstructStub()
453 __ Lsa(at, sp, t1, kPointerSizeLog2); in Generate_StringConstructor_ConstructStub()
454 __ lw(a0, MemOperand(at)); in Generate_StringConstructor_ConstructStub()
455 __ jmp(&done); in Generate_StringConstructor_ConstructStub()
456 __ bind(&no_arguments); in Generate_StringConstructor_ConstructStub()
457 __ LoadRoot(a0, Heap::kempty_stringRootIndex); in Generate_StringConstructor_ConstructStub()
458 __ bind(&done); in Generate_StringConstructor_ConstructStub()
464 __ JumpIfSmi(a0, &convert); in Generate_StringConstructor_ConstructStub()
465 __ GetObjectType(a0, a2, a2); in Generate_StringConstructor_ConstructStub()
466 __ And(t1, a2, Operand(kIsNotStringMask)); in Generate_StringConstructor_ConstructStub()
467 __ Branch(&done_convert, eq, t1, Operand(zero_reg)); in Generate_StringConstructor_ConstructStub()
468 __ bind(&convert); in Generate_StringConstructor_ConstructStub()
471 __ SmiTag(t0); in Generate_StringConstructor_ConstructStub()
472 __ EnterBuiltinFrame(cp, a1, t0); in Generate_StringConstructor_ConstructStub()
473 __ Push(a3); in Generate_StringConstructor_ConstructStub()
474 __ Call(masm->isolate()->builtins()->ToString(), RelocInfo::CODE_TARGET); in Generate_StringConstructor_ConstructStub()
475 __ Move(a0, v0); in Generate_StringConstructor_ConstructStub()
476 __ Pop(a3); in Generate_StringConstructor_ConstructStub()
477 __ LeaveBuiltinFrame(cp, a1, t0); in Generate_StringConstructor_ConstructStub()
478 __ SmiUntag(t0); in Generate_StringConstructor_ConstructStub()
480 __ bind(&done_convert); in Generate_StringConstructor_ConstructStub()
485 __ Branch(&new_object, ne, a1, Operand(a3)); in Generate_StringConstructor_ConstructStub()
488 __ AllocateJSValue(v0, a1, a0, a2, t1, &new_object); in Generate_StringConstructor_ConstructStub()
489 __ jmp(&drop_frame_and_ret); in Generate_StringConstructor_ConstructStub()
492 __ bind(&new_object); in Generate_StringConstructor_ConstructStub()
495 __ SmiTag(t0); in Generate_StringConstructor_ConstructStub()
496 __ EnterBuiltinFrame(cp, a1, t0); in Generate_StringConstructor_ConstructStub()
497 __ Push(a0); // first argument in Generate_StringConstructor_ConstructStub()
498 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(), in Generate_StringConstructor_ConstructStub()
500 __ Pop(a0); in Generate_StringConstructor_ConstructStub()
501 __ LeaveBuiltinFrame(cp, a1, t0); in Generate_StringConstructor_ConstructStub()
502 __ SmiUntag(t0); in Generate_StringConstructor_ConstructStub()
504 __ sw(a0, FieldMemOperand(v0, JSValue::kValueOffset)); in Generate_StringConstructor_ConstructStub()
506 __ bind(&drop_frame_and_ret); in Generate_StringConstructor_ConstructStub()
508 __ Lsa(sp, sp, t0, kPointerSizeLog2); in Generate_StringConstructor_ConstructStub()
509 __ DropAndRet(1); in Generate_StringConstructor_ConstructStub()
514 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in GenerateTailCallToSharedCode()
515 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset)); in GenerateTailCallToSharedCode()
516 __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag)); in GenerateTailCallToSharedCode()
517 __ Jump(at); in GenerateTailCallToSharedCode()
531 __ SmiTag(a0); in GenerateTailCallToReturnedCode()
532 __ Push(a0, a1, a3, a1); in GenerateTailCallToReturnedCode()
534 __ CallRuntime(function_id, 1); in GenerateTailCallToReturnedCode()
537 __ Pop(a0, a1, a3); in GenerateTailCallToReturnedCode()
538 __ SmiUntag(a0); in GenerateTailCallToReturnedCode()
541 __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); in GenerateTailCallToReturnedCode()
542 __ Jump(at); in GenerateTailCallToReturnedCode()
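Both tail-call helpers end with the same address computation: a Code object is a tagged heap pointer, so its first instruction sits kHeaderSize bytes past the object start, minus the kHeapObjectTag the pointer carries. A sketch of the arithmetic, assuming the usual tag of 1:

    #include <cstddef>
    #include <cstdint>
    // entry = code_object + Code::kHeaderSize - kHeapObjectTag
    uintptr_t EntryAddress(uintptr_t tagged_code_ptr, size_t header_size) {
      const uintptr_t kHeapObjectTag = 1;  // low-bit tag on heap pointers
      return tagged_code_ptr + header_size - kHeapObjectTag;
    }
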
552 __ LoadRoot(t0, Heap::kStackLimitRootIndex); in Generate_InOptimizationQueue()
553 __ Branch(&ok, hs, sp, Operand(t0)); in Generate_InOptimizationQueue()
557 __ bind(&ok); in Generate_InOptimizationQueue()
584 __ SmiTag(a0); in Generate_JSConstructStubHelper()
585 __ Push(cp, a0); in Generate_JSConstructStubHelper()
589 __ Push(a1, a3); in Generate_JSConstructStubHelper()
590 __ Call(CodeFactory::FastNewObject(masm->isolate()).code(), in Generate_JSConstructStubHelper()
592 __ mov(t4, v0); in Generate_JSConstructStubHelper()
593 __ Pop(a1, a3); in Generate_JSConstructStubHelper()
602 __ lw(a0, MemOperand(sp)); in Generate_JSConstructStubHelper()
605 __ SmiUntag(a0); in Generate_JSConstructStubHelper()
611 __ Push(t4, t4); in Generate_JSConstructStubHelper()
613 __ PushRoot(Heap::kTheHoleValueRootIndex); in Generate_JSConstructStubHelper()
617 __ bind(&post_instantiation_deopt_entry); in Generate_JSConstructStubHelper()
620 __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate_JSConstructStubHelper()
632 __ SmiTag(t4, a0); in Generate_JSConstructStubHelper()
633 __ jmp(&entry); in Generate_JSConstructStubHelper()
634 __ bind(&loop); in Generate_JSConstructStubHelper()
635 __ Lsa(t0, a2, t4, kPointerSizeLog2 - kSmiTagSize); in Generate_JSConstructStubHelper()
636 __ lw(t1, MemOperand(t0)); in Generate_JSConstructStubHelper()
637 __ push(t1); in Generate_JSConstructStubHelper()
638 __ bind(&entry); in Generate_JSConstructStubHelper()
639 __ Addu(t4, t4, Operand(-2)); in Generate_JSConstructStubHelper()
640 __ Branch(&loop, greater_equal, t4, Operand(zero_reg)); in Generate_JSConstructStubHelper()
647 __ InvokeFunction(a1, a3, actual, CALL_FUNCTION, in Generate_JSConstructStubHelper()
657 __ lw(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset)); in Generate_JSConstructStubHelper()
669 __ JumpIfSmi(v0, &use_receiver); in Generate_JSConstructStubHelper()
673 __ GetObjectType(v0, a1, a3); in Generate_JSConstructStubHelper()
674 __ Branch(&exit, greater_equal, a3, Operand(FIRST_JS_RECEIVER_TYPE)); in Generate_JSConstructStubHelper()
678 __ bind(&use_receiver); in Generate_JSConstructStubHelper()
679 __ lw(v0, MemOperand(sp)); in Generate_JSConstructStubHelper()
683 __ bind(&exit); in Generate_JSConstructStubHelper()
687 __ lw(a1, MemOperand(sp, 1 * kPointerSize)); in Generate_JSConstructStubHelper()
689 __ lw(a1, MemOperand(sp)); in Generate_JSConstructStubHelper()
700 __ JumpIfNotSmi(v0, &dont_throw); in Generate_JSConstructStubHelper()
703 __ CallRuntime(Runtime::kThrowDerivedConstructorReturnedNonObject); in Generate_JSConstructStubHelper()
705 __ bind(&dont_throw); in Generate_JSConstructStubHelper()
708 __ Lsa(sp, sp, a1, kPointerSizeLog2 - 1); in Generate_JSConstructStubHelper()
709 __ Addu(sp, sp, kPointerSize); in Generate_JSConstructStubHelper()
711 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2); in Generate_JSConstructStubHelper()
713 __ Ret(); in Generate_JSConstructStubHelper()
727 __ Pop(a1); in Generate_JSConstructStubHelper()
728 __ Push(a0, a0); in Generate_JSConstructStubHelper()
731 __ lw(a0, MemOperand(fp, ConstructFrameConstants::kLengthOffset)); in Generate_JSConstructStubHelper()
732 __ SmiUntag(a0); in Generate_JSConstructStubHelper()
736 __ Addu(a3, fp, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate_JSConstructStubHelper()
737 __ Lsa(a3, a3, a0, kPointerSizeLog2); in Generate_JSConstructStubHelper()
738 __ lw(a3, MemOperand(a3)); in Generate_JSConstructStubHelper()
741 __ jmp(&post_instantiation_deopt_entry); in Generate_JSConstructStubHelper()
766 __ Push(a1); in Generate_ConstructedNonConstructable()
767 __ CallRuntime(Runtime::kThrowConstructedNonConstructable); in Generate_ConstructedNonConstructable()
779 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); in Generate_CheckStackOverflow()
782 __ Subu(a2, sp, a2); in Generate_CheckStackOverflow()
785 __ sll(t3, argc, kPointerSizeLog2 - kSmiTagSize); in Generate_CheckStackOverflow()
788 __ sll(t3, argc, kPointerSizeLog2); in Generate_CheckStackOverflow()
791 __ Branch(&okay, gt, a2, Operand(t3)); in Generate_CheckStackOverflow()
794 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_CheckStackOverflow()
796 __ bind(&okay); in Generate_CheckStackOverflow()
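The check computes the headroom above the real stack limit and compares it with the space the arguments need (argc slots; line 785 shifts one bit less to undo the Smi tag when argc arrives tagged). Roughly:

    #include <cstddef>
    #include <cstdint>
    // Illustrative: the caller branches to &okay when this holds, and calls
    // Runtime::kThrowStackOverflow otherwise.
    bool EnoughStack(uintptr_t sp, uintptr_t real_stack_limit, uint32_t argc,
                     size_t pointer_size) {
      uintptr_t headroom = sp - real_stack_limit;  // Subu, line 782
      return headroom > argc * pointer_size;       // Branch(gt, ...), line 791
    }
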
819 __ li(cp, Operand(context_address)); in Generate_JSEntryTrampolineHelper()
820 __ lw(cp, MemOperand(cp)); in Generate_JSEntryTrampolineHelper()
823 __ Push(a1, a2); in Generate_JSEntryTrampolineHelper()
830 __ mov(t1, a0); in Generate_JSEntryTrampolineHelper()
836 __ Lsa(t2, s0, a3, kPointerSizeLog2); in Generate_JSEntryTrampolineHelper()
837 __ b(&entry); in Generate_JSEntryTrampolineHelper()
838 __ nop(); // Branch delay slot nop. in Generate_JSEntryTrampolineHelper()
840 __ bind(&loop); in Generate_JSEntryTrampolineHelper()
841 __ lw(t0, MemOperand(s0)); // Read next parameter. in Generate_JSEntryTrampolineHelper()
842 __ addiu(s0, s0, kPointerSize); in Generate_JSEntryTrampolineHelper()
843 __ lw(t0, MemOperand(t0)); // Dereference handle. in Generate_JSEntryTrampolineHelper()
844 __ push(t0); // Push parameter. in Generate_JSEntryTrampolineHelper()
845 __ bind(&entry); in Generate_JSEntryTrampolineHelper()
846 __ Branch(&loop, ne, s0, Operand(t2)); in Generate_JSEntryTrampolineHelper()
849 __ mov(a0, a3); in Generate_JSEntryTrampolineHelper()
850 __ mov(a3, t1); in Generate_JSEntryTrampolineHelper()
854 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex); in Generate_JSEntryTrampolineHelper()
855 __ mov(s1, t0); in Generate_JSEntryTrampolineHelper()
856 __ mov(s2, t0); in Generate_JSEntryTrampolineHelper()
857 __ mov(s3, t0); in Generate_JSEntryTrampolineHelper()
858 __ mov(s4, t0); in Generate_JSEntryTrampolineHelper()
859 __ mov(s5, t0); in Generate_JSEntryTrampolineHelper()
867 __ Call(builtin, RelocInfo::CODE_TARGET); in Generate_JSEntryTrampolineHelper()
872 __ Jump(ra); in Generate_JSEntryTrampolineHelper()
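The copy loop (lines 840-846) walks an array of handles rather than raw values: each slot holds a pointer to the value, hence the two loads at lines 841 and 843 ("read next parameter", then "dereference handle"). An illustrative rendering:

    typedef void Object;  // opaque heap value, for illustration
    // s0 = next parameter slot, t2 = one past the last slot.
    void PushParameters(Object*** next, Object*** end, void (*push)(Object*)) {
      while (next != end) {         // Branch(&loop, ne, s0, Operand(t2))
        Object** handle = *next++;  // lw t0, MemOperand(s0): read next parameter
        push(*handle);              // lw t0, MemOperand(t0): dereference handle
      }
    }
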
891 __ AssertGeneratorObject(a1); in Generate_ResumeGeneratorTrampoline()
894 __ sw(v0, FieldMemOperand(a1, JSGeneratorObject::kInputOrDebugPosOffset)); in Generate_ResumeGeneratorTrampoline()
895 __ RecordWriteField(a1, JSGeneratorObject::kInputOrDebugPosOffset, v0, a3, in Generate_ResumeGeneratorTrampoline()
899 __ sw(a2, FieldMemOperand(a1, JSGeneratorObject::kResumeModeOffset)); in Generate_ResumeGeneratorTrampoline()
902 __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset)); in Generate_ResumeGeneratorTrampoline()
903 __ lw(cp, FieldMemOperand(t0, JSFunction::kContextOffset)); in Generate_ResumeGeneratorTrampoline()
910 __ li(t1, Operand(debug_hook)); in Generate_ResumeGeneratorTrampoline()
911 __ lb(t1, MemOperand(t1)); in Generate_ResumeGeneratorTrampoline()
912 __ Branch(&prepare_step_in_if_stepping, ne, t1, Operand(zero_reg)); in Generate_ResumeGeneratorTrampoline()
917 __ li(t1, Operand(debug_suspended_generator)); in Generate_ResumeGeneratorTrampoline()
918 __ lw(t1, MemOperand(t1)); in Generate_ResumeGeneratorTrampoline()
919 __ Branch(&prepare_step_in_suspended_generator, eq, a1, Operand(t1)); in Generate_ResumeGeneratorTrampoline()
920 __ bind(&stepping_prepared); in Generate_ResumeGeneratorTrampoline()
923 __ lw(t1, FieldMemOperand(a1, JSGeneratorObject::kReceiverOffset)); in Generate_ResumeGeneratorTrampoline()
924 __ Push(t1); in Generate_ResumeGeneratorTrampoline()
939 __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset)); in Generate_ResumeGeneratorTrampoline()
940 __ lw(a3, in Generate_ResumeGeneratorTrampoline()
944 __ bind(&loop); in Generate_ResumeGeneratorTrampoline()
945 __ Subu(a3, a3, Operand(Smi::FromInt(1))); in Generate_ResumeGeneratorTrampoline()
946 __ Branch(&done_loop, lt, a3, Operand(zero_reg)); in Generate_ResumeGeneratorTrampoline()
947 __ PushRoot(Heap::kTheHoleValueRootIndex); in Generate_ResumeGeneratorTrampoline()
948 __ Branch(&loop); in Generate_ResumeGeneratorTrampoline()
949 __ bind(&done_loop); in Generate_ResumeGeneratorTrampoline()
954 __ lw(a3, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset)); in Generate_ResumeGeneratorTrampoline()
955 __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kFunctionDataOffset)); in Generate_ResumeGeneratorTrampoline()
956 __ GetObjectType(a3, a3, a3); in Generate_ResumeGeneratorTrampoline()
957 __ Assert(eq, kMissingBytecodeArray, a3, Operand(BYTECODE_ARRAY_TYPE)); in Generate_ResumeGeneratorTrampoline()
962 __ lw(a0, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset)); in Generate_ResumeGeneratorTrampoline()
963 __ lw(a0, in Generate_ResumeGeneratorTrampoline()
965 __ SmiUntag(a0); in Generate_ResumeGeneratorTrampoline()
969 __ Move(a3, a1); in Generate_ResumeGeneratorTrampoline()
970 __ Move(a1, t0); in Generate_ResumeGeneratorTrampoline()
971 __ lw(a2, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); in Generate_ResumeGeneratorTrampoline()
972 __ Jump(a2); in Generate_ResumeGeneratorTrampoline()
975 __ bind(&prepare_step_in_if_stepping); in Generate_ResumeGeneratorTrampoline()
978 __ Push(a1, a2, t0); in Generate_ResumeGeneratorTrampoline()
979 __ CallRuntime(Runtime::kDebugOnFunctionCall); in Generate_ResumeGeneratorTrampoline()
980 __ Pop(a1, a2); in Generate_ResumeGeneratorTrampoline()
982 __ Branch(USE_DELAY_SLOT, &stepping_prepared); in Generate_ResumeGeneratorTrampoline()
983 __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset)); in Generate_ResumeGeneratorTrampoline()
985 __ bind(&prepare_step_in_suspended_generator); in Generate_ResumeGeneratorTrampoline()
988 __ Push(a1, a2); in Generate_ResumeGeneratorTrampoline()
989 __ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator); in Generate_ResumeGeneratorTrampoline()
990 __ Pop(a1, a2); in Generate_ResumeGeneratorTrampoline()
992 __ Branch(USE_DELAY_SLOT, &stepping_prepared); in Generate_ResumeGeneratorTrampoline()
993 __ lw(t0, FieldMemOperand(a1, JSGeneratorObject::kFunctionOffset)); in Generate_ResumeGeneratorTrampoline()
1000 __ lw(args_count, in LeaveInterpreterFrame()
1002 __ lw(args_count, in LeaveInterpreterFrame()
1006 __ LeaveFrame(StackFrame::JAVA_SCRIPT); in LeaveInterpreterFrame()
1009 __ Addu(sp, sp, args_count); in LeaveInterpreterFrame()
1034 __ PushStandardFrame(a1); in Generate_InterpreterEntryTrampoline()
1038 __ lw(a0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_InterpreterEntryTrampoline()
1042 __ lw(debug_info, FieldMemOperand(a0, SharedFunctionInfo::kDebugInfoOffset)); in Generate_InterpreterEntryTrampoline()
1043 __ JumpIfNotSmi(debug_info, &load_debug_bytecode_array); in Generate_InterpreterEntryTrampoline()
1044 __ lw(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1046 __ bind(&bytecode_array_loaded); in Generate_InterpreterEntryTrampoline()
1050 __ lw(a0, FieldMemOperand(a0, SharedFunctionInfo::kCodeOffset)); in Generate_InterpreterEntryTrampoline()
1051 __ Branch(&switch_to_different_code_kind, ne, a0, in Generate_InterpreterEntryTrampoline()
1055 __ lw(a0, FieldMemOperand(a1, JSFunction::kFeedbackVectorOffset)); in Generate_InterpreterEntryTrampoline()
1056 __ lw(a0, FieldMemOperand(a0, Cell::kValueOffset)); in Generate_InterpreterEntryTrampoline()
1057 __ lw(t0, FieldMemOperand( in Generate_InterpreterEntryTrampoline()
1060 __ Addu(t0, t0, Operand(Smi::FromInt(1))); in Generate_InterpreterEntryTrampoline()
1061 __ sw(t0, FieldMemOperand( in Generate_InterpreterEntryTrampoline()
1067 __ SmiTst(kInterpreterBytecodeArrayRegister, t0); in Generate_InterpreterEntryTrampoline()
1068 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0, in Generate_InterpreterEntryTrampoline()
1070 __ GetObjectType(kInterpreterBytecodeArrayRegister, t0, t0); in Generate_InterpreterEntryTrampoline()
1071 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, t0, in Generate_InterpreterEntryTrampoline()
1077 __ sb(zero_reg, FieldMemOperand(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1081 __ li(kInterpreterBytecodeOffsetRegister, in Generate_InterpreterEntryTrampoline()
1085 __ SmiTag(t0, kInterpreterBytecodeOffsetRegister); in Generate_InterpreterEntryTrampoline()
1086 __ Push(a3, kInterpreterBytecodeArrayRegister, t0); in Generate_InterpreterEntryTrampoline()
1091 __ lw(t0, FieldMemOperand(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1096 __ Subu(t1, sp, Operand(t0)); in Generate_InterpreterEntryTrampoline()
1097 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); in Generate_InterpreterEntryTrampoline()
1098 __ Branch(&ok, hs, t1, Operand(a2)); in Generate_InterpreterEntryTrampoline()
1099 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterEntryTrampoline()
1100 __ bind(&ok); in Generate_InterpreterEntryTrampoline()
1105 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex); in Generate_InterpreterEntryTrampoline()
1106 __ Branch(&loop_check); in Generate_InterpreterEntryTrampoline()
1107 __ bind(&loop_header); in Generate_InterpreterEntryTrampoline()
1109 __ push(t1); in Generate_InterpreterEntryTrampoline()
1111 __ bind(&loop_check); in Generate_InterpreterEntryTrampoline()
1112 __ Subu(t0, t0, Operand(kPointerSize)); in Generate_InterpreterEntryTrampoline()
1113 __ Branch(&loop_header, ge, t0, Operand(zero_reg)); in Generate_InterpreterEntryTrampoline()
1117 __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex); in Generate_InterpreterEntryTrampoline()
1118 __ li(kInterpreterDispatchTableRegister, in Generate_InterpreterEntryTrampoline()
1123 __ Addu(a0, kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1125 __ lbu(a0, MemOperand(a0)); in Generate_InterpreterEntryTrampoline()
1126 __ Lsa(at, kInterpreterDispatchTableRegister, a0, kPointerSizeLog2); in Generate_InterpreterEntryTrampoline()
1127 __ lw(at, MemOperand(at)); in Generate_InterpreterEntryTrampoline()
1128 __ Call(at); in Generate_InterpreterEntryTrampoline()
1133 __ Jump(ra); in Generate_InterpreterEntryTrampoline()
1136 __ bind(&load_debug_bytecode_array); in Generate_InterpreterEntryTrampoline()
1137 __ lw(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEntryTrampoline()
1139 __ Branch(&bytecode_array_loaded); in Generate_InterpreterEntryTrampoline()
1144 __ bind(&switch_to_different_code_kind); in Generate_InterpreterEntryTrampoline()
1145 __ LeaveFrame(StackFrame::JAVA_SCRIPT); in Generate_InterpreterEntryTrampoline()
1146 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_InterpreterEntryTrampoline()
1147 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kCodeOffset)); in Generate_InterpreterEntryTrampoline()
1148 __ Addu(t0, t0, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_InterpreterEntryTrampoline()
1149 __ sw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); in Generate_InterpreterEntryTrampoline()
1150 __ RecordWriteCodeEntryField(a1, t0, t1); in Generate_InterpreterEntryTrampoline()
1151 __ Jump(t0); in Generate_InterpreterEntryTrampoline()
1160 __ LoadRoot(scratch1, Heap::kRealStackLimitRootIndex); in Generate_StackOverflowCheck()
1163 __ subu(scratch1, sp, scratch1); in Generate_StackOverflowCheck()
1165 __ sll(scratch2, num_args, kPointerSizeLog2); in Generate_StackOverflowCheck()
1167 __ Branch(stack_overflow, le, scratch1, Operand(scratch2)); in Generate_StackOverflowCheck()
1178 __ mov(scratch2, num_args); in Generate_InterpreterPushArgs()
1179 __ sll(scratch2, scratch2, kPointerSizeLog2); in Generate_InterpreterPushArgs()
1180 __ Subu(scratch2, index, Operand(scratch2)); in Generate_InterpreterPushArgs()
1184 __ Branch(&loop_check); in Generate_InterpreterPushArgs()
1185 __ bind(&loop_header); in Generate_InterpreterPushArgs()
1186 __ lw(scratch, MemOperand(index)); in Generate_InterpreterPushArgs()
1187 __ Addu(index, index, Operand(-kPointerSize)); in Generate_InterpreterPushArgs()
1188 __ push(scratch); in Generate_InterpreterPushArgs()
1189 __ bind(&loop_check); in Generate_InterpreterPushArgs()
1190 __ Branch(&loop_header, gt, index, Operand(scratch2)); in Generate_InterpreterPushArgs()
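This helper pushes num_args values sitting at descending addresses starting at index: scratch2 becomes the end pointer (index minus num_args slots, lines 1178-1180), and the loop loads and pushes until index reaches it. As plain C++:

    #include <cstdint>
    // Illustrative: 'index' points at the first (highest-address) argument.
    void PushArgs(intptr_t* index, int num_args, void (*push)(intptr_t)) {
      intptr_t* end = index - num_args;  // scratch2, lines 1178-1180
      while (index > end) {              // Branch(gt, ...), line 1190
        push(*index);                    // lw + push, lines 1186-1188
        --index;                         // Addu(index, index, -kPointerSize)
      }
    }
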
1206 __ Addu(t0, a0, Operand(1)); // Add one for receiver. in Generate_InterpreterPushArgsAndCallImpl()
1213 __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny, in Generate_InterpreterPushArgsAndCallImpl()
1217 __ Jump(masm->isolate()->builtins()->CallWithSpread(), in Generate_InterpreterPushArgsAndCallImpl()
1220 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, in Generate_InterpreterPushArgsAndCallImpl()
1225 __ bind(&stack_overflow); in Generate_InterpreterPushArgsAndCallImpl()
1227 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterPushArgsAndCallImpl()
1229 __ break_(0xCC); in Generate_InterpreterPushArgsAndCallImpl()
1246 __ push(zero_reg); in Generate_InterpreterPushArgsAndConstructImpl()
1251 __ AssertUndefinedOrAllocationSite(a2, t0); in Generate_InterpreterPushArgsAndConstructImpl()
1253 __ AssertFunction(a1); in Generate_InterpreterPushArgsAndConstructImpl()
1257 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_InterpreterPushArgsAndConstructImpl()
1258 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset)); in Generate_InterpreterPushArgsAndConstructImpl()
1259 __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_InterpreterPushArgsAndConstructImpl()
1260 __ Jump(at); in Generate_InterpreterPushArgsAndConstructImpl()
1263 __ Jump(masm->isolate()->builtins()->ConstructWithSpread(), in Generate_InterpreterPushArgsAndConstructImpl()
1268 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate_InterpreterPushArgsAndConstructImpl()
1271 __ bind(&stack_overflow); in Generate_InterpreterPushArgsAndConstructImpl()
1273 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterPushArgsAndConstructImpl()
1275 __ break_(0xCC); in Generate_InterpreterPushArgsAndConstructImpl()
1292 __ Addu(t0, a0, Operand(1)); // Add one for receiver. in Generate_InterpreterPushArgsAndConstructArray()
1298 __ mov(a3, a1); in Generate_InterpreterPushArgsAndConstructArray()
1301 __ TailCallStub(&stub); in Generate_InterpreterPushArgsAndConstructArray()
1303 __ bind(&stack_overflow); in Generate_InterpreterPushArgsAndConstructArray()
1305 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_InterpreterPushArgsAndConstructArray()
1307 __ break_(0xCC); in Generate_InterpreterPushArgsAndConstructArray()
1317 __ li(t0, Operand(masm->isolate()->builtins()->InterpreterEntryTrampoline())); in Generate_InterpreterEnterBytecode()
1318 __ Addu(ra, t0, Operand(interpreter_entry_return_pc_offset->value() + in Generate_InterpreterEnterBytecode()
1322 __ li(kInterpreterDispatchTableRegister, in Generate_InterpreterEnterBytecode()
1327 __ lw(kInterpreterBytecodeArrayRegister, in Generate_InterpreterEnterBytecode()
1332 __ SmiTst(kInterpreterBytecodeArrayRegister, at); in Generate_InterpreterEnterBytecode()
1333 __ Assert(ne, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, at, in Generate_InterpreterEnterBytecode()
1335 __ GetObjectType(kInterpreterBytecodeArrayRegister, a1, a1); in Generate_InterpreterEnterBytecode()
1336 __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry, a1, in Generate_InterpreterEnterBytecode()
1341 __ lw(kInterpreterBytecodeOffsetRegister, in Generate_InterpreterEnterBytecode()
1343 __ SmiUntag(kInterpreterBytecodeOffsetRegister); in Generate_InterpreterEnterBytecode()
1346 __ Addu(a1, kInterpreterBytecodeArrayRegister, in Generate_InterpreterEnterBytecode()
1348 __ lbu(a1, MemOperand(a1)); in Generate_InterpreterEnterBytecode()
1349 __ Lsa(a1, kInterpreterDispatchTableRegister, a1, kPointerSizeLog2); in Generate_InterpreterEnterBytecode()
1350 __ lw(a1, MemOperand(a1)); in Generate_InterpreterEnterBytecode()
1351 __ Jump(a1); in Generate_InterpreterEnterBytecode()
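The tail of the routine is the interpreter's core dispatch step: read the byte at bytecode_array + offset, index the dispatch table with it, and jump to the handler (lines 1346-1351; the entry trampoline does the same at lines 1123-1128). In miniature:

    #include <cstdint>
    // Token-threaded dispatch sketch; names are invented.
    typedef void (*BytecodeHandler)();
    void Dispatch(const uint8_t* bytecode_array, intptr_t offset,
                  BytecodeHandler* dispatch_table) {
      uint8_t opcode = bytecode_array[offset];  // lbu, line 1348
      dispatch_table[opcode]();                 // Lsa + lw + Jump, lines 1349-1351
    }
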
1358 __ lw(a1, MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp)); in Generate_InterpreterEnterBytecodeAdvance()
1359 __ lw(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp)); in Generate_InterpreterEnterBytecodeAdvance()
1360 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); in Generate_InterpreterEnterBytecodeAdvance()
1363 __ Push(kInterpreterAccumulatorRegister, a1, a2); in Generate_InterpreterEnterBytecodeAdvance()
1364 __ CallRuntime(Runtime::kInterpreterAdvanceBytecodeOffset); in Generate_InterpreterEnterBytecodeAdvance()
1365 __ mov(a2, v0); // Result is the new bytecode offset. in Generate_InterpreterEnterBytecodeAdvance()
1366 __ Pop(kInterpreterAccumulatorRegister); in Generate_InterpreterEnterBytecodeAdvance()
1368 __ sw(a2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp)); in Generate_InterpreterEnterBytecodeAdvance()
1395 __ lw(index, FieldMemOperand(closure, JSFunction::kFeedbackVectorOffset)); in Generate_CompileLazy()
1396 __ lw(index, FieldMemOperand(index, Cell::kValueOffset)); in Generate_CompileLazy()
1397 __ JumpIfRoot(index, Heap::kUndefinedValueRootIndex, in Generate_CompileLazy()
1400 __ push(argument_count); in Generate_CompileLazy()
1401 __ push(new_target); in Generate_CompileLazy()
1402 __ push(closure); in Generate_CompileLazy()
1404 __ lw(map, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset)); in Generate_CompileLazy()
1405 __ lw(map, FieldMemOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset)); in Generate_CompileLazy()
1406 __ lw(index, FieldMemOperand(map, FixedArray::kLengthOffset)); in Generate_CompileLazy()
1407 __ Branch(&try_shared, lt, index, Operand(Smi::FromInt(2))); in Generate_CompileLazy()
1415 __ lw(native_context, NativeContextMemOperand()); in Generate_CompileLazy()
1417 __ bind(&loop_top); in Generate_CompileLazy()
1422 __ sll(at, index, kPointerSizeLog2 - kSmiTagSize); in Generate_CompileLazy()
1423 __ Addu(array_pointer, map, Operand(at)); in Generate_CompileLazy()
1424 __ lw(temp, FieldMemOperand(array_pointer, in Generate_CompileLazy()
1426 __ lw(temp, FieldMemOperand(temp, WeakCell::kValueOffset)); in Generate_CompileLazy()
1427 __ Branch(&loop_bottom, ne, temp, Operand(native_context)); in Generate_CompileLazy()
1431 __ lw(entry, in Generate_CompileLazy()
1434 __ lw(entry, FieldMemOperand(entry, WeakCell::kValueOffset)); in Generate_CompileLazy()
1435 __ JumpIfSmi(entry, &try_shared); in Generate_CompileLazy()
1438 __ pop(closure); in Generate_CompileLazy()
1440 __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_CompileLazy()
1441 __ sw(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset)); in Generate_CompileLazy()
1442 __ RecordWriteCodeEntryField(closure, entry, t1); in Generate_CompileLazy()
1448 __ lw(t1, in Generate_CompileLazy()
1450 __ sw(t1, FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset)); in Generate_CompileLazy()
1451 __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, t1, a0, in Generate_CompileLazy()
1456 __ sw(closure, in Generate_CompileLazy()
1459 __ mov(t1, closure); in Generate_CompileLazy()
1460 __ RecordWriteContextSlot(native_context, function_list_offset, closure, a0, in Generate_CompileLazy()
1462 __ mov(closure, t1); in Generate_CompileLazy()
1463 __ pop(new_target); in Generate_CompileLazy()
1464 __ pop(argument_count); in Generate_CompileLazy()
1465 __ Jump(entry); in Generate_CompileLazy()
1467 __ bind(&loop_bottom); in Generate_CompileLazy()
1468 __ Subu(index, index, in Generate_CompileLazy()
1470 __ Branch(&loop_top, gt, index, Operand(Smi::FromInt(1))); in Generate_CompileLazy()
1473 __ bind(&try_shared); in Generate_CompileLazy()
1474 __ pop(closure); in Generate_CompileLazy()
1475 __ pop(new_target); in Generate_CompileLazy()
1476 __ pop(argument_count); in Generate_CompileLazy()
1477 __ lw(entry, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset)); in Generate_CompileLazy()
1479 __ lbu(t1, FieldMemOperand(entry, in Generate_CompileLazy()
1481 __ And(t1, t1, in Generate_CompileLazy()
1483 __ Branch(&gotta_call_runtime_no_stack, ne, t1, Operand(zero_reg)); in Generate_CompileLazy()
1486 __ lw(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset)); in Generate_CompileLazy()
1487 __ Move(t1, masm->CodeObject()); in Generate_CompileLazy()
1488 __ Branch(&gotta_call_runtime_no_stack, eq, entry, Operand(t1)); in Generate_CompileLazy()
1491 __ Addu(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_CompileLazy()
1492 __ sw(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset)); in Generate_CompileLazy()
1493 __ RecordWriteCodeEntryField(closure, entry, t1); in Generate_CompileLazy()
1494 __ Jump(entry); in Generate_CompileLazy()
1496 __ bind(&gotta_call_runtime); in Generate_CompileLazy()
1497 __ pop(closure); in Generate_CompileLazy()
1498 __ pop(new_target); in Generate_CompileLazy()
1499 __ pop(argument_count); in Generate_CompileLazy()
1500 __ bind(&gotta_call_runtime_no_stack); in Generate_CompileLazy()
1527 __ Move(t4, a0); in Generate_InstantiateAsmJs()
1530 __ SmiTag(a0); in Generate_InstantiateAsmJs()
1531 __ Push(a0, a1, a3, a1); in Generate_InstantiateAsmJs()
1538 __ Branch(&over, ne, t4, Operand(j)); in Generate_InstantiateAsmJs()
1541 __ lw(t4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset + in Generate_InstantiateAsmJs()
1543 __ push(t4); in Generate_InstantiateAsmJs()
1546 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate_InstantiateAsmJs()
1549 __ jmp(&args_done); in Generate_InstantiateAsmJs()
1550 __ bind(&over); in Generate_InstantiateAsmJs()
1553 __ bind(&args_done); in Generate_InstantiateAsmJs()
1556 __ CallRuntime(Runtime::kInstantiateAsmJs, 4); in Generate_InstantiateAsmJs()
1558 __ JumpIfSmi(v0, &failed); in Generate_InstantiateAsmJs()
1560 __ Drop(2); in Generate_InstantiateAsmJs()
1561 __ pop(t4); in Generate_InstantiateAsmJs()
1562 __ SmiUntag(t4); in Generate_InstantiateAsmJs()
1565 __ Addu(t4, t4, Operand(1)); in Generate_InstantiateAsmJs()
1566 __ Lsa(sp, sp, t4, kPointerSizeLog2); in Generate_InstantiateAsmJs()
1567 __ Ret(); in Generate_InstantiateAsmJs()
1569 __ bind(&failed); in Generate_InstantiateAsmJs()
1571 __ Pop(a0, a1, a3); in Generate_InstantiateAsmJs()
1572 __ SmiUntag(a0); in Generate_InstantiateAsmJs()
1586 __ Subu(a0, a0, Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize)); in GenerateMakeCodeYoungAgainCommon()
1596 __ MultiPush(saved_regs); in GenerateMakeCodeYoungAgainCommon()
1597 __ PrepareCallCFunction(2, 0, a2); in GenerateMakeCodeYoungAgainCommon()
1598 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate()))); in GenerateMakeCodeYoungAgainCommon()
1599 __ CallCFunction( in GenerateMakeCodeYoungAgainCommon()
1601 __ MultiPop(saved_regs); in GenerateMakeCodeYoungAgainCommon()
1602 __ Jump(a0); in GenerateMakeCodeYoungAgainCommon()
1619 __ Subu(a0, a0, Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize)); in CODE_AGE_LIST()
1629 __ MultiPush(saved_regs); in CODE_AGE_LIST()
1630 __ PrepareCallCFunction(2, 0, a2); in CODE_AGE_LIST()
1631 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate()))); in CODE_AGE_LIST()
1632 __ CallCFunction( in CODE_AGE_LIST()
1635 __ MultiPop(saved_regs); in CODE_AGE_LIST()
1638 __ PushStandardFrame(a1); in CODE_AGE_LIST()
1641 __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength)); in CODE_AGE_LIST()
1642 __ Jump(a0); in CODE_AGE_LIST()
1661 __ MultiPush(kJSCallerSaved | kCalleeSaved); in Generate_NotifyStubFailureHelper()
1663 __ CallRuntime(Runtime::kNotifyStubFailure, save_doubles); in Generate_NotifyStubFailureHelper()
1664 __ MultiPop(kJSCallerSaved | kCalleeSaved); in Generate_NotifyStubFailureHelper()
1667 __ Addu(sp, sp, Operand(kPointerSize)); // Ignore state in Generate_NotifyStubFailureHelper()
1668 __ Jump(ra); // Jump to miss handler in Generate_NotifyStubFailureHelper()
1684 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type)))); in Generate_NotifyDeoptimizedHelper()
1685 __ push(a0); in Generate_NotifyDeoptimizedHelper()
1686 __ CallRuntime(Runtime::kNotifyDeoptimized); in Generate_NotifyDeoptimizedHelper()
1690 __ lw(t2, MemOperand(sp, 0 * kPointerSize)); in Generate_NotifyDeoptimizedHelper()
1691 __ SmiUntag(t2); in Generate_NotifyDeoptimizedHelper()
1694 __ Branch(&with_tos_register, ne, t2, in Generate_NotifyDeoptimizedHelper()
1696 __ Ret(USE_DELAY_SLOT); in Generate_NotifyDeoptimizedHelper()
1698 __ Addu(sp, sp, Operand(1 * kPointerSize)); // Remove state. in Generate_NotifyDeoptimizedHelper()
1700 __ bind(&with_tos_register); in Generate_NotifyDeoptimizedHelper()
1702 __ lw(v0, MemOperand(sp, 1 * kPointerSize)); in Generate_NotifyDeoptimizedHelper()
1703 __ Branch(&unknown_state, ne, t2, in Generate_NotifyDeoptimizedHelper()
1706 __ Ret(USE_DELAY_SLOT); in Generate_NotifyDeoptimizedHelper()
1708 __ Addu(sp, sp, Operand(2 * kPointerSize)); // Remove state. in Generate_NotifyDeoptimizedHelper()
1710 __ bind(&unknown_state); in Generate_NotifyDeoptimizedHelper()
1711 __ stop("no cases left"); in Generate_NotifyDeoptimizedHelper()
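After Runtime::kNotifyDeoptimized returns, the stub inspects the bailout state it finds on top of the stack: one word to drop when no register was live, two words (reloading the saved accumulator into v0) when the top-of-stack register was live, and a hard stop otherwise. A sketch, assuming the state values follow Deoptimizer::BailoutState in this V8 vintage:

    #include <cstdint>
    #include <cstdlib>
    enum BailoutState { NO_REGISTERS = 0, TOS_REGISTER = 1 };  // assumed values
    intptr_t* PopBailoutState(intptr_t* sp, intptr_t* tos /* v0 */) {
      switch (sp[0] >> 1) {                        // SmiUntag, line 1691
        case NO_REGISTERS: return sp + 1;          // drop the state word
        case TOS_REGISTER: *tos = sp[1];           // reload the accumulator
                           return sp + 2;          // drop state + value
      }
      std::abort();                                // stop("no cases left")
    }
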
1736 __ lw(signature, FieldMemOperand(function_template_info, in CompatibleReceiverCheck()
1739 __ JumpIfRoot(signature, Heap::kUndefinedValueRootIndex, in CompatibleReceiverCheck()
1743 __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); in CompatibleReceiverCheck()
1745 __ bind(&prototype_loop_start); in CompatibleReceiverCheck()
1748 __ GetMapConstructor(constructor, map, scratch, scratch); in CompatibleReceiverCheck()
1750 __ Branch(&next_prototype, ne, scratch, Operand(JS_FUNCTION_TYPE)); in CompatibleReceiverCheck()
1752 __ lw(type, in CompatibleReceiverCheck()
1754 __ lw(type, FieldMemOperand(type, SharedFunctionInfo::kFunctionDataOffset)); in CompatibleReceiverCheck()
1758 __ bind(&function_template_loop); in CompatibleReceiverCheck()
1761 __ Branch(&receiver_check_passed, eq, signature, Operand(type), in CompatibleReceiverCheck()
1766 __ JumpIfSmi(type, &next_prototype); in CompatibleReceiverCheck()
1767 __ GetObjectType(type, scratch, scratch); in CompatibleReceiverCheck()
1768 __ Branch(&next_prototype, ne, scratch, Operand(FUNCTION_TEMPLATE_INFO_TYPE)); in CompatibleReceiverCheck()
1771 __ lw(type, in CompatibleReceiverCheck()
1773 __ Branch(&function_template_loop); in CompatibleReceiverCheck()
1776 __ bind(&next_prototype); in CompatibleReceiverCheck()
1777 __ lw(scratch, FieldMemOperand(map, Map::kBitField3Offset)); in CompatibleReceiverCheck()
1778 __ DecodeField<Map::HasHiddenPrototype>(scratch); in CompatibleReceiverCheck()
1779 __ Branch(receiver_check_failed, eq, scratch, Operand(zero_reg)); in CompatibleReceiverCheck()
1780 __ lw(receiver, FieldMemOperand(map, Map::kPrototypeOffset)); in CompatibleReceiverCheck()
1781 __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset)); in CompatibleReceiverCheck()
1783 __ Branch(&prototype_loop_start); in CompatibleReceiverCheck()
1785 __ bind(&receiver_check_passed); in CompatibleReceiverCheck()
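The check walks two chains at once: for each map on the receiver's hidden-prototype chain, it follows the parent chain of the constructor's FunctionTemplateInfo looking for the signature; a non-hidden prototype ends the walk with failure. A compressed rendering, where the helpers are invented stand-ins for the raw field loads above (and nullptr stands in for "no longer a FunctionTemplateInfo"):

    typedef void Object;  // opaque, for illustration
    Object* ConstructorTemplate(Object* map);  // constructor's function data
    Object* ParentTemplate(Object* info);      // parent template, or nullptr
    bool HasHiddenPrototype(Object* map);      // Map::HasHiddenPrototype bit
    Object* PrototypeMap(Object* map);         // map of Map::kPrototypeOffset

    bool CompatibleReceiver(Object* map, Object* signature) {
      for (;;) {                                       // &prototype_loop_start
        for (Object* type = ConstructorTemplate(map);  // walk template parents
             type != nullptr; type = ParentTemplate(type)) {
          if (type == signature) return true;          // &receiver_check_passed
        }
        if (!HasHiddenPrototype(map)) return false;    // receiver check failed
        map = PrototypeMap(map);                       // &next_prototype
      }
    }
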
1800 __ lw(t1, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_HandleFastApiCall()
1801 __ lw(t1, FieldMemOperand(t1, SharedFunctionInfo::kFunctionDataOffset)); in Generate_HandleFastApiCall()
1805 __ Lsa(t8, sp, a0, kPointerSizeLog2); in Generate_HandleFastApiCall()
1806 __ lw(t0, MemOperand(t8)); in Generate_HandleFastApiCall()
1811 __ lw(t2, FieldMemOperand(t1, FunctionTemplateInfo::kCallCodeOffset)); in Generate_HandleFastApiCall()
1812 __ lw(t2, FieldMemOperand(t2, CallHandlerInfo::kFastHandlerOffset)); in Generate_HandleFastApiCall()
1813 __ Addu(t2, t2, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_HandleFastApiCall()
1814 __ Jump(t2); in Generate_HandleFastApiCall()
1817 __ bind(&receiver_check_failed); in Generate_HandleFastApiCall()
1819 __ Addu(t8, t8, Operand(kPointerSize)); in Generate_HandleFastApiCall()
1820 __ addu(sp, t8, zero_reg); in Generate_HandleFastApiCall()
1821 __ TailCallRuntime(Runtime::kThrowIllegalInvocation); in Generate_HandleFastApiCall()
1828 __ lw(a0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in Generate_OnStackReplacementHelper()
1829 __ lw(a0, MemOperand(a0, JavaScriptFrameConstants::kFunctionOffset)); in Generate_OnStackReplacementHelper()
1831 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); in Generate_OnStackReplacementHelper()
1837 __ push(a0); in Generate_OnStackReplacementHelper()
1838 __ CallRuntime(Runtime::kCompileForOnStackReplacement); in Generate_OnStackReplacementHelper()
1842 __ Ret(eq, v0, Operand(Smi::kZero)); in Generate_OnStackReplacementHelper()
1847 __ LeaveFrame(StackFrame::STUB); in Generate_OnStackReplacementHelper()
1852 __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag)); in Generate_OnStackReplacementHelper()
1856 __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt( in Generate_OnStackReplacementHelper()
1859 __ SmiUntag(a1); in Generate_OnStackReplacementHelper()
1863 __ addu(v0, v0, a1); in Generate_OnStackReplacementHelper()
1864 __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag); in Generate_OnStackReplacementHelper()
1867 __ Ret(); in Generate_OnStackReplacementHelper()
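On success the helper computes the on-stack-replacement entry point directly: the osr-pc-offset Smi is read from the code object's deoptimization data, untagged, added to the code start, and the header-skip applied before Ret() transfers through ra (lines 1852-1867). The address arithmetic, sketched:

    #include <cstddef>
    #include <cstdint>
    // entry = code + osr_pc_offset + Code::kHeaderSize - kHeapObjectTag
    uintptr_t OsrEntry(uintptr_t tagged_code, size_t header_size,
                       intptr_t osr_pc_offset_smi) {
      const uintptr_t kHeapObjectTag = 1;
      return tagged_code + (osr_pc_offset_smi >> 1)  // SmiUntag, line 1859
             + header_size - kHeapObjectTag;         // addu/addiu, lines 1863-1864
    }
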
1893 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); in Generate_FunctionPrototypeApply()
1894 __ mov(a3, a2); in Generate_FunctionPrototypeApply()
1896 __ sll(scratch, a0, kPointerSizeLog2); in Generate_FunctionPrototypeApply()
1897 __ Addu(a0, sp, Operand(scratch)); in Generate_FunctionPrototypeApply()
1898 __ lw(a1, MemOperand(a0)); // receiver in Generate_FunctionPrototypeApply()
1899 __ Subu(a0, a0, Operand(kPointerSize)); in Generate_FunctionPrototypeApply()
1900 __ Branch(&no_arg, lt, a0, Operand(sp)); in Generate_FunctionPrototypeApply()
1901 __ lw(a2, MemOperand(a0)); // thisArg in Generate_FunctionPrototypeApply()
1902 __ Subu(a0, a0, Operand(kPointerSize)); in Generate_FunctionPrototypeApply()
1903 __ Branch(&no_arg, lt, a0, Operand(sp)); in Generate_FunctionPrototypeApply()
1904 __ lw(a3, MemOperand(a0)); // argArray in Generate_FunctionPrototypeApply()
1905 __ bind(&no_arg); in Generate_FunctionPrototypeApply()
1906 __ Addu(sp, sp, Operand(scratch)); in Generate_FunctionPrototypeApply()
1907 __ sw(a2, MemOperand(sp)); in Generate_FunctionPrototypeApply()
1908 __ mov(a0, a3); in Generate_FunctionPrototypeApply()
1919 __ JumpIfSmi(a1, &receiver_not_callable); in Generate_FunctionPrototypeApply()
1920 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset)); in Generate_FunctionPrototypeApply()
1921 __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset)); in Generate_FunctionPrototypeApply()
1922 __ And(t0, t0, Operand(1 << Map::kIsCallable)); in Generate_FunctionPrototypeApply()
1923 __ Branch(&receiver_not_callable, eq, t0, Operand(zero_reg)); in Generate_FunctionPrototypeApply()
1927 __ JumpIfRoot(a0, Heap::kNullValueRootIndex, &no_arguments); in Generate_FunctionPrototypeApply()
1928 __ JumpIfRoot(a0, Heap::kUndefinedValueRootIndex, &no_arguments); in Generate_FunctionPrototypeApply()
1932 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); in Generate_FunctionPrototypeApply()
1933 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); in Generate_FunctionPrototypeApply()
1937 __ bind(&no_arguments); in Generate_FunctionPrototypeApply()
1939 __ mov(a0, zero_reg); in Generate_FunctionPrototypeApply()
1940 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); in Generate_FunctionPrototypeApply()
1944 __ bind(&receiver_not_callable); in Generate_FunctionPrototypeApply()
1946 __ sw(a1, MemOperand(sp)); in Generate_FunctionPrototypeApply()
1947 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); in Generate_FunctionPrototypeApply()
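The prologue (lines 1896-1908) materializes the defaults: it reads the receiver, thisArg, and argArray off the stack, falling through to no_arg with undefined in place of anything missing, then collapses the frame so only the receiver slot remains (overwritten with thisArg) while a0 carries argArray. Equivalent selection logic, illustrative only:

    typedef void Object;  // opaque, for illustration
    struct ApplyArgs { Object* this_arg; Object* arg_array; };
    // stack[argc] holds the function being applied (lines 1896-1898).
    ApplyArgs SelectApplyArgs(Object** stack, int argc, Object* undefined) {
      ApplyArgs out = {undefined, undefined};          // LoadRoot, line 1893
      if (argc >= 1) out.this_arg = stack[argc - 1];   // lines 1899-1901
      if (argc >= 2) out.arg_array = stack[argc - 2];  // lines 1902-1904
      return out;
    }
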
1957 __ Branch(&done, ne, a0, Operand(zero_reg)); in Generate_FunctionPrototypeCall()
1958 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate_FunctionPrototypeCall()
1959 __ Addu(a0, a0, Operand(1)); in Generate_FunctionPrototypeCall()
1960 __ bind(&done); in Generate_FunctionPrototypeCall()
1965 __ Lsa(at, sp, a0, kPointerSizeLog2); in Generate_FunctionPrototypeCall()
1966 __ lw(a1, MemOperand(at)); in Generate_FunctionPrototypeCall()
1976 __ Lsa(a2, sp, a0, kPointerSizeLog2); in Generate_FunctionPrototypeCall()
1978 __ bind(&loop); in Generate_FunctionPrototypeCall()
1979 __ lw(at, MemOperand(a2, -kPointerSize)); in Generate_FunctionPrototypeCall()
1980 __ sw(at, MemOperand(a2)); in Generate_FunctionPrototypeCall()
1981 __ Subu(a2, a2, Operand(kPointerSize)); in Generate_FunctionPrototypeCall()
1982 __ Branch(&loop, ne, a2, Operand(sp)); in Generate_FunctionPrototypeCall()
1985 __ Subu(a0, a0, Operand(1)); in Generate_FunctionPrototypeCall()
1986 __ Pop(); in Generate_FunctionPrototypeCall()
1990 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); in Generate_FunctionPrototypeCall()
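The loop at lines 1978-1982 removes the function from the stack by sliding every argument up one slot (toward higher addresses) and dropping the vacated bottom slot, so Call sees thisArg as the receiver with argc - 1 arguments. As a memmove:

    #include <cstdint>
    // Illustrative: slots run from sp (low address) to sp + argc (the function).
    intptr_t* DropReceiverSlot(intptr_t* sp, int argc) {
      for (intptr_t* p = sp + argc; p != sp; --p)  // Branch(&loop, ne, a2, sp)
        p[0] = p[-1];                              // lw/sw, lines 1979-1980
      return sp + 1;                               // Pop(): argc becomes argc - 1
    }
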
2008 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); in Generate_ReflectApply()
2009 __ mov(a2, a1); in Generate_ReflectApply()
2010 __ mov(a3, a1); in Generate_ReflectApply()
2011 __ sll(scratch, a0, kPointerSizeLog2); in Generate_ReflectApply()
2012 __ mov(a0, scratch); in Generate_ReflectApply()
2013 __ Subu(a0, a0, Operand(kPointerSize)); in Generate_ReflectApply()
2014 __ Branch(&no_arg, lt, a0, Operand(zero_reg)); in Generate_ReflectApply()
2015 __ Addu(a0, sp, Operand(a0)); in Generate_ReflectApply()
2016 __ lw(a1, MemOperand(a0)); // target in Generate_ReflectApply()
2017 __ Subu(a0, a0, Operand(kPointerSize)); in Generate_ReflectApply()
2018 __ Branch(&no_arg, lt, a0, Operand(sp)); in Generate_ReflectApply()
2019 __ lw(a2, MemOperand(a0)); // thisArgument in Generate_ReflectApply()
2020 __ Subu(a0, a0, Operand(kPointerSize)); in Generate_ReflectApply()
2021 __ Branch(&no_arg, lt, a0, Operand(sp)); in Generate_ReflectApply()
2022 __ lw(a3, MemOperand(a0)); // argumentsList in Generate_ReflectApply()
2023 __ bind(&no_arg); in Generate_ReflectApply()
2024 __ Addu(sp, sp, Operand(scratch)); in Generate_ReflectApply()
2025 __ sw(a2, MemOperand(sp)); in Generate_ReflectApply()
2026 __ mov(a0, a3); in Generate_ReflectApply()
2037 __ JumpIfSmi(a1, &target_not_callable); in Generate_ReflectApply()
2038 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset)); in Generate_ReflectApply()
2039 __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset)); in Generate_ReflectApply()
2040 __ And(t0, t0, Operand(1 << Map::kIsCallable)); in Generate_ReflectApply()
2041 __ Branch(&target_not_callable, eq, t0, Operand(zero_reg)); in Generate_ReflectApply()
2045 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); in Generate_ReflectApply()
2046 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); in Generate_ReflectApply()
2049 __ bind(&target_not_callable); in Generate_ReflectApply()
2051 __ sw(a1, MemOperand(sp)); in Generate_ReflectApply()
2052 __ TailCallRuntime(Runtime::kThrowApplyNonFunction); in Generate_ReflectApply()
2072 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); in Generate_ReflectConstruct()
2073 __ mov(a2, a1); in Generate_ReflectConstruct()
2075 __ sll(scratch, a0, kPointerSizeLog2); in Generate_ReflectConstruct()
2076 __ Addu(a0, sp, Operand(scratch)); in Generate_ReflectConstruct()
2077 __ sw(a2, MemOperand(a0)); // receiver in Generate_ReflectConstruct()
2078 __ Subu(a0, a0, Operand(kPointerSize)); in Generate_ReflectConstruct()
2079 __ Branch(&no_arg, lt, a0, Operand(sp)); in Generate_ReflectConstruct()
2080 __ lw(a1, MemOperand(a0)); // target in Generate_ReflectConstruct()
2081 __ mov(a3, a1); // new.target defaults to target in Generate_ReflectConstruct()
2082 __ Subu(a0, a0, Operand(kPointerSize)); in Generate_ReflectConstruct()
2083 __ Branch(&no_arg, lt, a0, Operand(sp)); in Generate_ReflectConstruct()
2084 __ lw(a2, MemOperand(a0)); // argumentsList in Generate_ReflectConstruct()
2085 __ Subu(a0, a0, Operand(kPointerSize)); in Generate_ReflectConstruct()
2086 __ Branch(&no_arg, lt, a0, Operand(sp)); in Generate_ReflectConstruct()
2087 __ lw(a3, MemOperand(a0)); // new.target in Generate_ReflectConstruct()
2088 __ bind(&no_arg); in Generate_ReflectConstruct()
2089 __ Addu(sp, sp, Operand(scratch)); in Generate_ReflectConstruct()
2090 __ mov(a0, a2); in Generate_ReflectConstruct()
2102 __ JumpIfSmi(a1, &target_not_constructor); in Generate_ReflectConstruct()
2103 __ lw(t0, FieldMemOperand(a1, HeapObject::kMapOffset)); in Generate_ReflectConstruct()
2104 __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset)); in Generate_ReflectConstruct()
2105 __ And(t0, t0, Operand(1 << Map::kIsConstructor)); in Generate_ReflectConstruct()
2106 __ Branch(&target_not_constructor, eq, t0, Operand(zero_reg)); in Generate_ReflectConstruct()
2110 __ JumpIfSmi(a3, &new_target_not_constructor); in Generate_ReflectConstruct()
2111 __ lw(t0, FieldMemOperand(a3, HeapObject::kMapOffset)); in Generate_ReflectConstruct()
2112 __ lbu(t0, FieldMemOperand(t0, Map::kBitFieldOffset)); in Generate_ReflectConstruct()
2113 __ And(t0, t0, Operand(1 << Map::kIsConstructor)); in Generate_ReflectConstruct()
2114 __ Branch(&new_target_not_constructor, eq, t0, Operand(zero_reg)); in Generate_ReflectConstruct()
2117 __ Jump(masm->isolate()->builtins()->Apply(), RelocInfo::CODE_TARGET); in Generate_ReflectConstruct()
2120 __ bind(&target_not_constructor); in Generate_ReflectConstruct()
2122 __ sw(a1, MemOperand(sp)); in Generate_ReflectConstruct()
2123 __ TailCallRuntime(Runtime::kThrowNotConstructor); in Generate_ReflectConstruct()
2127 __ bind(&new_target_not_constructor); in Generate_ReflectConstruct()
2129 __ sw(a3, MemOperand(sp)); in Generate_ReflectConstruct()
2130 __ TailCallRuntime(Runtime::kThrowNotConstructor); in Generate_ReflectConstruct()
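// EnterArgumentsAdaptorFrame: saves ra/fp together with the
// ARGUMENTS_ADAPTOR frame marker, the function, and the Smi-tagged
// argument count, then points fp at the fixed part of the new frame.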
2135 __ sll(a0, a0, kSmiTagSize); in EnterArgumentsAdaptorFrame()
2136 __ li(t0, Operand(StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR))); in EnterArgumentsAdaptorFrame()
2137 __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit()); in EnterArgumentsAdaptorFrame()
2138 __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + in EnterArgumentsAdaptorFrame()
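// LeaveArgumentsAdaptorFrame: reloads the actual (Smi-tagged) argument
// count saved in the frame, tears the frame down, and drops the arguments
// plus the receiver from the stack.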
2148 __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp + in LeaveArgumentsAdaptorFrame()
2150 __ mov(sp, fp); in LeaveArgumentsAdaptorFrame()
2151 __ MultiPop(fp.bit() | ra.bit()); in LeaveArgumentsAdaptorFrame()
2152 __ Lsa(sp, sp, a1, kPointerSizeLog2 - kSmiTagSize); in LeaveArgumentsAdaptorFrame()
2154 __ Addu(sp, sp, Operand(kPointerSize)); in LeaveArgumentsAdaptorFrame()
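// Generate_Apply: turns the argumentsList in a0 into actual stack
// arguments for target a1 (and optional new.target a3). Fast paths cover
// sloppy/strict arguments objects and fast-elements JSArrays (including
// holey ones, guarded by the array protector cell); everything else goes
// through Runtime::kCreateListFromArrayLike.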
2170 __ JumpIfSmi(a0, &create_runtime); in Generate_Apply()
2173 __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset)); in Generate_Apply()
2176 __ lw(t0, NativeContextMemOperand()); in Generate_Apply()
2179 __ lw(at, ContextMemOperand(t0, Context::SLOPPY_ARGUMENTS_MAP_INDEX)); in Generate_Apply()
2180 __ Branch(&create_arguments, eq, a2, Operand(at)); in Generate_Apply()
2181 __ lw(at, ContextMemOperand(t0, Context::STRICT_ARGUMENTS_MAP_INDEX)); in Generate_Apply()
2182 __ Branch(&create_arguments, eq, a2, Operand(at)); in Generate_Apply()
2185 __ lbu(v0, FieldMemOperand(a2, Map::kInstanceTypeOffset)); in Generate_Apply()
2186 __ Branch(&create_array, eq, v0, Operand(JS_ARRAY_TYPE)); in Generate_Apply()
2189 __ bind(&create_runtime); in Generate_Apply()
2192 __ Push(a1, a3, a0); in Generate_Apply()
2193 __ CallRuntime(Runtime::kCreateListFromArrayLike); in Generate_Apply()
2194 __ mov(a0, v0); in Generate_Apply()
2195 __ Pop(a1, a3); in Generate_Apply()
2196 __ lw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset)); in Generate_Apply()
2197 __ SmiUntag(a2); in Generate_Apply()
2199 __ Branch(&done_create); in Generate_Apply()
2202 __ bind(&create_arguments); in Generate_Apply()
2203 __ lw(a2, FieldMemOperand(a0, JSArgumentsObject::kLengthOffset)); in Generate_Apply()
2204 __ lw(t0, FieldMemOperand(a0, JSObject::kElementsOffset)); in Generate_Apply()
2205 __ lw(at, FieldMemOperand(t0, FixedArray::kLengthOffset)); in Generate_Apply()
2206 __ Branch(&create_runtime, ne, a2, Operand(at)); in Generate_Apply()
2207 __ SmiUntag(a2); in Generate_Apply()
2208 __ mov(a0, t0); in Generate_Apply()
2209 __ Branch(&done_create); in Generate_Apply()
2213 __ bind(&create_holey_array); in Generate_Apply()
2214 __ lw(a2, FieldMemOperand(a2, Map::kPrototypeOffset)); in Generate_Apply()
2215 __ lw(at, ContextMemOperand(t0, Context::INITIAL_ARRAY_PROTOTYPE_INDEX)); in Generate_Apply()
2216 __ Branch(&create_runtime, ne, a2, Operand(at)); in Generate_Apply()
2217 __ LoadRoot(at, Heap::kArrayProtectorRootIndex); in Generate_Apply()
2218 __ lw(a2, FieldMemOperand(at, PropertyCell::kValueOffset)); in Generate_Apply()
2219 __ Branch(&create_runtime, ne, a2, in Generate_Apply()
2221 __ lw(a2, FieldMemOperand(a0, JSArray::kLengthOffset)); in Generate_Apply()
2222 __ lw(a0, FieldMemOperand(a0, JSArray::kElementsOffset)); in Generate_Apply()
2223 __ SmiUntag(a2); in Generate_Apply()
2224 __ Branch(&done_create); in Generate_Apply()
2227 __ bind(&create_array); in Generate_Apply()
2228 __ lbu(t1, FieldMemOperand(a2, Map::kBitField2Offset)); in Generate_Apply()
2229 __ DecodeField<Map::ElementsKindBits>(t1); in Generate_Apply()
2234 __ Branch(&create_holey_array, eq, t1, Operand(FAST_HOLEY_SMI_ELEMENTS)); in Generate_Apply()
2235 __ Branch(&create_holey_array, eq, t1, Operand(FAST_HOLEY_ELEMENTS)); in Generate_Apply()
2236 __ Branch(&create_runtime, hi, t1, Operand(FAST_ELEMENTS)); in Generate_Apply()
2237 __ lw(a2, FieldMemOperand(a0, JSArray::kLengthOffset)); in Generate_Apply()
2238 __ lw(a0, FieldMemOperand(a0, JSArray::kElementsOffset)); in Generate_Apply()
2239 __ SmiUntag(a2); in Generate_Apply()
2241 __ bind(&done_create); in Generate_Apply()
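// Check for stack overflow before pushing: the distance between sp and the
// real stack limit must exceed the a2 argument slots about to be reserved.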
2249 __ LoadRoot(t0, Heap::kRealStackLimitRootIndex); in Generate_Apply()
2252 __ Subu(t0, sp, t0); in Generate_Apply()
2254 __ sll(at, a2, kPointerSizeLog2); in Generate_Apply()
2255 __ Branch(&done, gt, t0, Operand(at)); // Signed comparison. in Generate_Apply()
2256 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_Apply()
2257 __ bind(&done); in Generate_Apply()
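// Push the a2 elements onto the stack, replacing each hole read from the
// backing store with undefined so holey arrays spread correctly.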
2270 __ mov(t0, zero_reg); in Generate_Apply()
2272 __ LoadRoot(t1, Heap::kTheHoleValueRootIndex); in Generate_Apply()
2273 __ bind(&loop); in Generate_Apply()
2274 __ Branch(&done, eq, t0, Operand(a2)); in Generate_Apply()
2275 __ Lsa(at, a0, t0, kPointerSizeLog2); in Generate_Apply()
2276 __ lw(at, FieldMemOperand(at, FixedArray::kHeaderSize)); in Generate_Apply()
2277 __ Branch(&push, ne, t1, Operand(at)); in Generate_Apply()
2278 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); in Generate_Apply()
2279 __ bind(&push); in Generate_Apply()
2280 __ Push(at); in Generate_Apply()
2281 __ Addu(t0, t0, Operand(1)); in Generate_Apply()
2282 __ Branch(&loop); in Generate_Apply()
2283 __ bind(&done); in Generate_Apply()
2284 __ Move(a0, t0); in Generate_Apply()
2290 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); in Generate_Apply()
2291 __ Branch(&construct, ne, a3, Operand(at)); in Generate_Apply()
2292 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); in Generate_Apply()
2293 __ bind(&construct); in Generate_Apply()
2294 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate_Apply()
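// Generate_CallForwardVarargs: forwards the caller's arguments from index
// a2 onward to the given code object, reading the argument count either
// from the caller's JavaScript frame or from an arguments-adaptor frame
// sitting above it.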
2310 __ lw(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in Generate_CallForwardVarargs()
2311 __ lw(a0, MemOperand(a3, CommonFrameConstants::kContextOrFrameTypeOffset)); in Generate_CallForwardVarargs()
2312 __ Branch(&arguments_adaptor, eq, a0, in Generate_CallForwardVarargs()
2315 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); in Generate_CallForwardVarargs()
2316 __ lw(a0, FieldMemOperand(a0, JSFunction::kSharedFunctionInfoOffset)); in Generate_CallForwardVarargs()
2317 __ lw(a0, in Generate_CallForwardVarargs()
2319 __ mov(a3, fp); in Generate_CallForwardVarargs()
2321 __ Branch(&arguments_done); in Generate_CallForwardVarargs()
2322 __ bind(&arguments_adaptor); in Generate_CallForwardVarargs()
2325 __ lw(a0, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset)); in Generate_CallForwardVarargs()
2327 __ bind(&arguments_done); in Generate_CallForwardVarargs()
2330 __ SmiUntag(a0); in Generate_CallForwardVarargs()
2331 __ Subu(a0, a0, a2); in Generate_CallForwardVarargs()
2332 __ Branch(&stack_empty, le, a0, Operand(zero_reg)); in Generate_CallForwardVarargs()
2340 __ mov(a2, a0); in Generate_CallForwardVarargs()
2341 __ bind(&loop); in Generate_CallForwardVarargs()
2343 __ Lsa(at, a3, a2, kPointerSizeLog2); in Generate_CallForwardVarargs()
2344 __ lw(at, MemOperand(at, 1 * kPointerSize)); in Generate_CallForwardVarargs()
2345 __ push(at); in Generate_CallForwardVarargs()
2346 __ Subu(a2, a2, Operand(1)); in Generate_CallForwardVarargs()
2347 __ Branch(&loop, ne, a2, Operand(zero_reg)); in Generate_CallForwardVarargs()
2351 __ Branch(&stack_done); in Generate_CallForwardVarargs()
2352 __ bind(&stack_overflow); in Generate_CallForwardVarargs()
2353 __ TailCallRuntime(Runtime::kThrowStackOverflow); in Generate_CallForwardVarargs()
2354 __ bind(&stack_empty); in Generate_CallForwardVarargs()
2357 __ li(a0, Operand(0)); in Generate_CallForwardVarargs()
2359 __ bind(&stack_done); in Generate_CallForwardVarargs()
2361 __ Jump(code, RelocInfo::CODE_TARGET); in Generate_CallForwardVarargs()
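// PrepareForTailCall: if tail-call elimination is enabled, drops the
// caller's frame before the tail call, skipping over an interpreter frame
// when present and preferring the argument count recorded in an
// arguments-adaptor frame over the callee's formal parameter count.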
2404 __ li(at, Operand(is_tail_call_elimination_enabled)); in PrepareForTailCall()
2405 __ lb(scratch1, MemOperand(at)); in PrepareForTailCall()
2406 __ Branch(&done, eq, scratch1, Operand(zero_reg)); in PrepareForTailCall()
2411 __ lw(scratch3, in PrepareForTailCall()
2413 __ Branch(&no_interpreter_frame, ne, scratch3, in PrepareForTailCall()
2415 __ lw(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in PrepareForTailCall()
2416 __ bind(&no_interpreter_frame); in PrepareForTailCall()
2422 __ lw(scratch2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in PrepareForTailCall()
2423 __ lw(scratch3, in PrepareForTailCall()
2425 __ Branch(&no_arguments_adaptor, ne, scratch3, in PrepareForTailCall()
2429 __ mov(fp, scratch2); in PrepareForTailCall()
2430 __ lw(caller_args_count_reg, in PrepareForTailCall()
2432 __ SmiUntag(caller_args_count_reg); in PrepareForTailCall()
2433 __ Branch(&formal_parameter_count_loaded); in PrepareForTailCall()
2435 __ bind(&no_arguments_adaptor); in PrepareForTailCall()
2437 __ lw(scratch1, in PrepareForTailCall()
2439 __ lw(scratch1, in PrepareForTailCall()
2441 __ lw(caller_args_count_reg, in PrepareForTailCall()
2444 __ SmiUntag(caller_args_count_reg); in PrepareForTailCall()
2446 __ bind(&formal_parameter_count_loaded); in PrepareForTailCall()
2449 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2, in PrepareForTailCall()
2451 __ bind(&done); in PrepareForTailCall()
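// Generate_CallFunction: calls the JSFunction in a1 with a0 arguments.
// Class constructors are rejected with a TypeError; for sloppy-mode,
// non-native functions the receiver is converted to an object first (the
// global proxy stands in for undefined and null).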
2463 __ AssertFunction(a1); in Generate_CallFunction()
2468 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_CallFunction()
2469 __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kFunctionKindByteOffset)); in Generate_CallFunction()
2470 __ And(at, a3, Operand(SharedFunctionInfo::kClassConstructorBitsWithinByte)); in Generate_CallFunction()
2471 __ Branch(&class_constructor, ne, at, Operand(zero_reg)); in Generate_CallFunction()
2478 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); in Generate_CallFunction()
2481 __ lbu(a3, FieldMemOperand(a2, SharedFunctionInfo::kNativeByteOffset)); in Generate_CallFunction()
2482 __ And(at, a3, Operand((1 << SharedFunctionInfo::kNativeBitWithinByte) | in Generate_CallFunction()
2484 __ Branch(&done_convert, ne, at, Operand(zero_reg)); in Generate_CallFunction()
2495 __ LoadGlobalProxy(a3); in Generate_CallFunction()
2498 __ Lsa(at, sp, a0, kPointerSizeLog2); in Generate_CallFunction()
2499 __ lw(a3, MemOperand(at)); in Generate_CallFunction()
2500 __ JumpIfSmi(a3, &convert_to_object); in Generate_CallFunction()
2502 __ GetObjectType(a3, t0, t0); in Generate_CallFunction()
2503 __ Branch(&done_convert, hs, t0, Operand(FIRST_JS_RECEIVER_TYPE)); in Generate_CallFunction()
2506 __ JumpIfRoot(a3, Heap::kUndefinedValueRootIndex, in Generate_CallFunction()
2508 __ JumpIfNotRoot(a3, Heap::kNullValueRootIndex, &convert_to_object); in Generate_CallFunction()
2509 __ bind(&convert_global_proxy); in Generate_CallFunction()
2512 __ LoadGlobalProxy(a3); in Generate_CallFunction()
2514 __ Branch(&convert_receiver); in Generate_CallFunction()
2516 __ bind(&convert_to_object); in Generate_CallFunction()
2522 __ sll(a0, a0, kSmiTagSize); // Smi tagged. in Generate_CallFunction()
2523 __ Push(a0, a1); in Generate_CallFunction()
2524 __ mov(a0, a3); in Generate_CallFunction()
2525 __ Push(cp); in Generate_CallFunction()
2526 __ Call(masm->isolate()->builtins()->ToObject(), in Generate_CallFunction()
2528 __ Pop(cp); in Generate_CallFunction()
2529 __ mov(a3, v0); in Generate_CallFunction()
2530 __ Pop(a0, a1); in Generate_CallFunction()
2531 __ sra(a0, a0, kSmiTagSize); // Un-tag. in Generate_CallFunction()
2533 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_CallFunction()
2534 __ bind(&convert_receiver); in Generate_CallFunction()
2536 __ Lsa(at, sp, a0, kPointerSizeLog2); in Generate_CallFunction()
2537 __ sw(a3, MemOperand(at)); in Generate_CallFunction()
2539 __ bind(&done_convert); in Generate_CallFunction()
2552 __ lw(a2, in Generate_CallFunction()
2554 __ sra(a2, a2, kSmiTagSize); // Un-tag. in Generate_CallFunction()
2557 __ InvokeFunctionCode(a1, no_reg, expected, actual, JUMP_FUNCTION, in Generate_CallFunction()
2561 __ bind(&class_constructor); in Generate_CallFunction()
2564 __ Push(a1); in Generate_CallFunction()
2565 __ CallRuntime(Runtime::kThrowConstructorNonCallableError); in Generate_CallFunction()
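// Generate_CallBoundFunctionImpl: calls the JSBoundFunction in a1.
// Patches the receiver with the bound this-value, splices the bound
// arguments in below the existing ones, and jumps to the generic Call
// builtin for the bound target function.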
2576 __ AssertBoundFunction(a1); in Generate_CallBoundFunctionImpl()
2584 __ lw(at, FieldMemOperand(a1, JSBoundFunction::kBoundThisOffset)); in Generate_CallBoundFunctionImpl()
2585 __ Lsa(t0, sp, a0, kPointerSizeLog2); in Generate_CallBoundFunctionImpl()
2586 __ sw(at, MemOperand(t0)); in Generate_CallBoundFunctionImpl()
2590 __ lw(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset)); in Generate_CallBoundFunctionImpl()
2591 __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset)); in Generate_CallBoundFunctionImpl()
2592 __ SmiUntag(t0); in Generate_CallBoundFunctionImpl()
2604 __ sll(t1, t0, kPointerSizeLog2); in Generate_CallBoundFunctionImpl()
2605 __ Subu(sp, sp, Operand(t1)); in Generate_CallBoundFunctionImpl()
2608 __ LoadRoot(at, Heap::kRealStackLimitRootIndex); in Generate_CallBoundFunctionImpl()
2609 __ Branch(&done, gt, sp, Operand(at)); // Signed comparison. in Generate_CallBoundFunctionImpl()
2611 __ Addu(sp, sp, Operand(t1)); in Generate_CallBoundFunctionImpl()
2614 __ EnterFrame(StackFrame::INTERNAL); in Generate_CallBoundFunctionImpl()
2615 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_CallBoundFunctionImpl()
2617 __ bind(&done); in Generate_CallBoundFunctionImpl()
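// Relocate the existing arguments down over the freshly reserved slots;
// the loop exits on 'gt' so that a0 + 1 entries move, the extra one
// presumably being the receiver.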
2623 __ mov(t1, zero_reg); in Generate_CallBoundFunctionImpl()
2624 __ bind(&loop); in Generate_CallBoundFunctionImpl()
2625 __ Branch(&done_loop, gt, t1, Operand(a0)); in Generate_CallBoundFunctionImpl()
2626 __ Lsa(t2, sp, t0, kPointerSizeLog2); in Generate_CallBoundFunctionImpl()
2627 __ lw(at, MemOperand(t2)); in Generate_CallBoundFunctionImpl()
2628 __ Lsa(t2, sp, t1, kPointerSizeLog2); in Generate_CallBoundFunctionImpl()
2629 __ sw(at, MemOperand(t2)); in Generate_CallBoundFunctionImpl()
2630 __ Addu(t0, t0, Operand(1)); in Generate_CallBoundFunctionImpl()
2631 __ Addu(t1, t1, Operand(1)); in Generate_CallBoundFunctionImpl()
2632 __ Branch(&loop); in Generate_CallBoundFunctionImpl()
2633 __ bind(&done_loop); in Generate_CallBoundFunctionImpl()
2639 __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset)); in Generate_CallBoundFunctionImpl()
2640 __ SmiUntag(t0); in Generate_CallBoundFunctionImpl()
2641 __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in Generate_CallBoundFunctionImpl()
2642 __ bind(&loop); in Generate_CallBoundFunctionImpl()
2643 __ Subu(t0, t0, Operand(1)); in Generate_CallBoundFunctionImpl()
2644 __ Branch(&done_loop, lt, t0, Operand(zero_reg)); in Generate_CallBoundFunctionImpl()
2645 __ Lsa(t1, a2, t0, kPointerSizeLog2); in Generate_CallBoundFunctionImpl()
2646 __ lw(at, MemOperand(t1)); in Generate_CallBoundFunctionImpl()
2647 __ Lsa(t1, sp, a0, kPointerSizeLog2); in Generate_CallBoundFunctionImpl()
2648 __ sw(at, MemOperand(t1)); in Generate_CallBoundFunctionImpl()
2649 __ Addu(a0, a0, Operand(1)); in Generate_CallBoundFunctionImpl()
2650 __ Branch(&loop); in Generate_CallBoundFunctionImpl()
2651 __ bind(&done_loop); in Generate_CallBoundFunctionImpl()
2655 __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset)); in Generate_CallBoundFunctionImpl()
2656 __ li(at, Operand(ExternalReference(Builtins::kCall_ReceiverIsAny, in Generate_CallBoundFunctionImpl()
2658 __ lw(at, MemOperand(at)); in Generate_CallBoundFunctionImpl()
2659 __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_CallBoundFunctionImpl()
2660 __ Jump(at); in Generate_CallBoundFunctionImpl()
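// Generate_Call: the generic call entry point. Dispatches on the target's
// instance type: JSFunction and JSBoundFunction go to their specialized
// builtins, JSProxy enters the runtime, any other callable is wrapped via
// the CALL_AS_FUNCTION_DELEGATE, and non-callables throw.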
2672 __ JumpIfSmi(a1, &non_callable); in Generate_Call()
2673 __ bind(&non_smi); in Generate_Call()
2674 __ GetObjectType(a1, t1, t2); in Generate_Call()
2675 __ Jump(masm->isolate()->builtins()->CallFunction(mode, tail_call_mode), in Generate_Call()
2677 __ Jump(masm->isolate()->builtins()->CallBoundFunction(tail_call_mode), in Generate_Call()
2681 __ lbu(t1, FieldMemOperand(t1, Map::kBitFieldOffset)); in Generate_Call()
2682 __ And(t1, t1, Operand(1 << Map::kIsCallable)); in Generate_Call()
2683 __ Branch(&non_callable, eq, t1, Operand(zero_reg)); in Generate_Call()
2685 __ Branch(&non_function, ne, t2, Operand(JS_PROXY_TYPE)); in Generate_Call()
2693 __ Push(a1); in Generate_Call()
2696 __ Addu(a0, a0, 2); in Generate_Call()
2698 __ JumpToExternalReference( in Generate_Call()
2703 __ bind(&non_function); in Generate_Call()
2705 __ Lsa(at, sp, a0, kPointerSizeLog2); in Generate_Call()
2706 __ sw(a1, MemOperand(at)); in Generate_Call()
2708 __ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, a1); in Generate_Call()
2709 __ Jump(masm->isolate()->builtins()->CallFunction( in Generate_Call()
2714 __ bind(&non_callable); in Generate_Call()
2717 __ Push(a1); in Generate_Call()
2718 __ CallRuntime(Runtime::kThrowCalledNonCallable); in Generate_Call()
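// CheckSpreadAndPushToStack: expands the spread operand on top of the
// stack. A fast-elements JSArray with the untouched initial array
// prototype and intact array-iterator/array protector cells is pushed
// element by element (holes become undefined); anything else is first
// flattened by Runtime::kSpreadIterableFixed.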
2738 __ lw(spread, MemOperand(sp, 0)); in CheckSpreadAndPushToStack()
2739 __ JumpIfSmi(spread, &runtime_call); in CheckSpreadAndPushToStack()
2740 __ lw(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset)); in CheckSpreadAndPushToStack()
2741 __ lw(native_context, NativeContextMemOperand()); in CheckSpreadAndPushToStack()
2744 __ lbu(scratch, FieldMemOperand(spread_map, Map::kInstanceTypeOffset)); in CheckSpreadAndPushToStack()
2745 __ Branch(&runtime_call, ne, scratch, Operand(JS_ARRAY_TYPE)); in CheckSpreadAndPushToStack()
2748 __ lw(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset)); in CheckSpreadAndPushToStack()
2749 __ lw(scratch2, ContextMemOperand(native_context, in CheckSpreadAndPushToStack()
2751 __ Branch(&runtime_call, ne, scratch, Operand(scratch2)); in CheckSpreadAndPushToStack()
2755 __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex); in CheckSpreadAndPushToStack()
2756 __ lw(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); in CheckSpreadAndPushToStack()
2757 __ Branch(&runtime_call, ne, scratch, in CheckSpreadAndPushToStack()
2761 __ lw(scratch, in CheckSpreadAndPushToStack()
2764 __ lw(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); in CheckSpreadAndPushToStack()
2765 __ lw(scratch2, in CheckSpreadAndPushToStack()
2768 __ Branch(&runtime_call, ne, scratch, Operand(scratch2)); in CheckSpreadAndPushToStack()
2773 __ lbu(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset)); in CheckSpreadAndPushToStack()
2774 __ DecodeField<Map::ElementsKindBits>(scratch); in CheckSpreadAndPushToStack()
2775 __ Branch(&runtime_call, hi, scratch, Operand(FAST_HOLEY_ELEMENTS)); in CheckSpreadAndPushToStack()
2777 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_SMI_ELEMENTS)); in CheckSpreadAndPushToStack()
2778 __ Branch(&no_protector_check, eq, scratch, Operand(FAST_ELEMENTS)); in CheckSpreadAndPushToStack()
2780 __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex); in CheckSpreadAndPushToStack()
2781 __ lw(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset)); in CheckSpreadAndPushToStack()
2782 __ Branch(&runtime_call, ne, scratch, in CheckSpreadAndPushToStack()
2785 __ bind(&no_protector_check); in CheckSpreadAndPushToStack()
2787 __ lw(spread_len, FieldMemOperand(spread, JSArray::kLengthOffset)); in CheckSpreadAndPushToStack()
2788 __ SmiUntag(spread_len); in CheckSpreadAndPushToStack()
2789 __ lw(spread, FieldMemOperand(spread, JSArray::kElementsOffset)); in CheckSpreadAndPushToStack()
2790 __ Branch(&push_args); in CheckSpreadAndPushToStack()
2792 __ bind(&runtime_call); in CheckSpreadAndPushToStack()
2796 __ SmiTag(argc); in CheckSpreadAndPushToStack()
2797 __ Push(constructor, new_target, argc, spread); in CheckSpreadAndPushToStack()
2798 __ CallRuntime(Runtime::kSpreadIterableFixed); in CheckSpreadAndPushToStack()
2799 __ mov(spread, v0); in CheckSpreadAndPushToStack()
2800 __ Pop(constructor, new_target, argc); in CheckSpreadAndPushToStack()
2801 __ SmiUntag(argc); in CheckSpreadAndPushToStack()
2806 __ lw(spread_len, FieldMemOperand(spread, FixedArray::kLengthOffset)); in CheckSpreadAndPushToStack()
2807 __ SmiUntag(spread_len); in CheckSpreadAndPushToStack()
2809 __ bind(&push_args); in CheckSpreadAndPushToStack()
2811 __ Addu(argc, argc, spread_len); in CheckSpreadAndPushToStack()
2812 __ Subu(argc, argc, Operand(1)); in CheckSpreadAndPushToStack()
2815 __ Pop(scratch); in CheckSpreadAndPushToStack()
2823 __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex); in CheckSpreadAndPushToStack()
2826 __ Subu(scratch, sp, scratch); in CheckSpreadAndPushToStack()
2828 __ sll(at, spread_len, kPointerSizeLog2); in CheckSpreadAndPushToStack()
2829 __ Branch(&done, gt, scratch, Operand(at)); // Signed comparison. in CheckSpreadAndPushToStack()
2830 __ TailCallRuntime(Runtime::kThrowStackOverflow); in CheckSpreadAndPushToStack()
2831 __ bind(&done); in CheckSpreadAndPushToStack()
2836 __ mov(scratch, zero_reg); in CheckSpreadAndPushToStack()
2838 __ bind(&loop); in CheckSpreadAndPushToStack()
2839 __ Branch(&done, eq, scratch, Operand(spread_len)); in CheckSpreadAndPushToStack()
2840 __ Lsa(scratch2, spread, scratch, kPointerSizeLog2); in CheckSpreadAndPushToStack()
2841 __ lw(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize)); in CheckSpreadAndPushToStack()
2842 __ JumpIfNotRoot(scratch2, Heap::kTheHoleValueRootIndex, &push); in CheckSpreadAndPushToStack()
2843 __ LoadRoot(scratch2, Heap::kUndefinedValueRootIndex); in CheckSpreadAndPushToStack()
2844 __ bind(&push); in CheckSpreadAndPushToStack()
2845 __ Push(scratch2); in CheckSpreadAndPushToStack()
2846 __ Addu(scratch, scratch, Operand(1)); in CheckSpreadAndPushToStack()
2847 __ Branch(&loop); in CheckSpreadAndPushToStack()
2848 __ bind(&done); in CheckSpreadAndPushToStack()
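// Generate_CallWithSpread: expands the spread, then tail-calls the generic
// Call builtin with ConvertReceiverMode::kAny.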
2860 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); in Generate_CallWithSpread()
2862 __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny, in Generate_CallWithSpread()
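// Generate_ConstructFunction: constructs the JSFunction in a1 by jumping
// to the construct stub recorded in its SharedFunctionInfo, with a2 set to
// undefined in place of an AllocationSite.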
2874 __ AssertFunction(a1); in Generate_ConstructFunction()
2878 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); in Generate_ConstructFunction()
2882 __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_ConstructFunction()
2883 __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset)); in Generate_ConstructFunction()
2884 __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_ConstructFunction()
2885 __ Jump(at); in Generate_ConstructFunction()
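// Generate_ConstructBoundFunction: mirrors the bound-function call path:
// splices the bound arguments into the stack, patches new.target when it
// equals the bound function itself, and jumps to the Construct builtin for
// the bound target function.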
2895 __ AssertBoundFunction(a1); in Generate_ConstructBoundFunction()
2898 __ lw(a2, FieldMemOperand(a1, JSBoundFunction::kBoundArgumentsOffset)); in Generate_ConstructBoundFunction()
2899 __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset)); in Generate_ConstructBoundFunction()
2900 __ SmiUntag(t0); in Generate_ConstructBoundFunction()
2913 __ sll(t1, t0, kPointerSizeLog2); in Generate_ConstructBoundFunction()
2914 __ Subu(sp, sp, Operand(t1)); in Generate_ConstructBoundFunction()
2917 __ LoadRoot(at, Heap::kRealStackLimitRootIndex); in Generate_ConstructBoundFunction()
2918 __ Branch(&done, gt, sp, Operand(at)); // Signed comparison. in Generate_ConstructBoundFunction()
2920 __ Addu(sp, sp, Operand(t1)); in Generate_ConstructBoundFunction()
2923 __ EnterFrame(StackFrame::INTERNAL); in Generate_ConstructBoundFunction()
2924 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_ConstructBoundFunction()
2926 __ bind(&done); in Generate_ConstructBoundFunction()
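// Relocate the existing arguments down over the reserved slots; here the
// loop exits on 'ge', moving exactly a0 entries, since there is no
// receiver on the stack in the construct case.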
2932 __ mov(t1, zero_reg); in Generate_ConstructBoundFunction()
2933 __ bind(&loop); in Generate_ConstructBoundFunction()
2934 __ Branch(&done_loop, ge, t1, Operand(a0)); in Generate_ConstructBoundFunction()
2935 __ Lsa(t2, sp, t0, kPointerSizeLog2); in Generate_ConstructBoundFunction()
2936 __ lw(at, MemOperand(t2)); in Generate_ConstructBoundFunction()
2937 __ Lsa(t2, sp, t1, kPointerSizeLog2); in Generate_ConstructBoundFunction()
2938 __ sw(at, MemOperand(t2)); in Generate_ConstructBoundFunction()
2939 __ Addu(t0, t0, Operand(1)); in Generate_ConstructBoundFunction()
2940 __ Addu(t1, t1, Operand(1)); in Generate_ConstructBoundFunction()
2941 __ Branch(&loop); in Generate_ConstructBoundFunction()
2942 __ bind(&done_loop); in Generate_ConstructBoundFunction()
2948 __ lw(t0, FieldMemOperand(a2, FixedArray::kLengthOffset)); in Generate_ConstructBoundFunction()
2949 __ SmiUntag(t0); in Generate_ConstructBoundFunction()
2950 __ Addu(a2, a2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in Generate_ConstructBoundFunction()
2951 __ bind(&loop); in Generate_ConstructBoundFunction()
2952 __ Subu(t0, t0, Operand(1)); in Generate_ConstructBoundFunction()
2953 __ Branch(&done_loop, lt, t0, Operand(zero_reg)); in Generate_ConstructBoundFunction()
2954 __ Lsa(t1, a2, t0, kPointerSizeLog2); in Generate_ConstructBoundFunction()
2955 __ lw(at, MemOperand(t1)); in Generate_ConstructBoundFunction()
2956 __ Lsa(t1, sp, a0, kPointerSizeLog2); in Generate_ConstructBoundFunction()
2957 __ sw(at, MemOperand(t1)); in Generate_ConstructBoundFunction()
2958 __ Addu(a0, a0, Operand(1)); in Generate_ConstructBoundFunction()
2959 __ Branch(&loop); in Generate_ConstructBoundFunction()
2960 __ bind(&done_loop); in Generate_ConstructBoundFunction()
2966 __ Branch(&skip_load, ne, a1, Operand(a3)); in Generate_ConstructBoundFunction()
2967 __ lw(a3, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset)); in Generate_ConstructBoundFunction()
2968 __ bind(&skip_load); in Generate_ConstructBoundFunction()
2972 __ lw(a1, FieldMemOperand(a1, JSBoundFunction::kBoundTargetFunctionOffset)); in Generate_ConstructBoundFunction()
2973 __ li(at, Operand(ExternalReference(Builtins::kConstruct, masm->isolate()))); in Generate_ConstructBoundFunction()
2974 __ lw(at, MemOperand(at)); in Generate_ConstructBoundFunction()
2975 __ Addu(at, at, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_ConstructBoundFunction()
2976 __ Jump(at); in Generate_ConstructBoundFunction()
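// Generate_ConstructProxy: pushes target and new.target, grows the
// argument count by three to cover them plus the receiver, and tail-calls
// into the runtime to run the proxy [[Construct]] behaviour.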
2989 __ Push(a1, a3); in Generate_ConstructProxy()
2991 __ Addu(a0, a0, Operand(3)); in Generate_ConstructProxy()
2993 __ JumpToExternalReference( in Generate_ConstructProxy()
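// Generate_Construct: the generic construct entry point, mirroring
// Generate_Call: dispatch on instance type to ConstructFunction,
// ConstructBoundFunction or ConstructProxy, wrap any other constructor via
// the CALL_AS_CONSTRUCTOR_DELEGATE, and throw for non-constructors.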
3008 __ JumpIfSmi(a1, &non_constructor); in Generate_Construct()
3011 __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset)); in Generate_Construct()
3012 __ lbu(t2, FieldMemOperand(t1, Map::kInstanceTypeOffset)); in Generate_Construct()
3013 __ Jump(masm->isolate()->builtins()->ConstructFunction(), in Generate_Construct()
3017 __ lbu(t3, FieldMemOperand(t1, Map::kBitFieldOffset)); in Generate_Construct()
3018 __ And(t3, t3, Operand(1 << Map::kIsConstructor)); in Generate_Construct()
3019 __ Branch(&non_constructor, eq, t3, Operand(zero_reg)); in Generate_Construct()
3023 __ Jump(masm->isolate()->builtins()->ConstructBoundFunction(), in Generate_Construct()
3027 __ Jump(masm->isolate()->builtins()->ConstructProxy(), RelocInfo::CODE_TARGET, in Generate_Construct()
3033 __ Lsa(at, sp, a0, kPointerSizeLog2); in Generate_Construct()
3034 __ sw(a1, MemOperand(at)); in Generate_Construct()
3036 __ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, a1); in Generate_Construct()
3037 __ Jump(masm->isolate()->builtins()->CallFunction(), in Generate_Construct()
3043 __ bind(&non_constructor); in Generate_Construct()
3044 __ Jump(masm->isolate()->builtins()->ConstructedNonConstructable(), in Generate_Construct()
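// Generate_ConstructWithSpread: expands the spread, then tail-calls the
// generic Construct builtin.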
3058 __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate_ConstructWithSpread()
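// Generate_AllocateInNewSpace: Smi-tags the requested size in a0, clears
// cp (there is no context), and tail-calls the allocation runtime.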
3067 __ SmiTag(a0); in Generate_AllocateInNewSpace()
3068 __ Push(a0); in Generate_AllocateInNewSpace()
3069 __ Move(cp, Smi::kZero); in Generate_AllocateInNewSpace()
3070 __ TailCallRuntime(Runtime::kAllocateInNewSpace); in Generate_AllocateInNewSpace()
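// Generate_AllocateInOldSpace: as above, but additionally passes the
// encoded OLD_SPACE flag to Runtime::kAllocateInTargetSpace.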
3079 __ SmiTag(a0); in Generate_AllocateInOldSpace()
3080 __ Move(a1, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE))); in Generate_AllocateInOldSpace()
3081 __ Push(a0, a1); in Generate_AllocateInOldSpace()
3082 __ Move(cp, Smi::kZero); in Generate_AllocateInOldSpace()
3083 __ TailCallRuntime(Runtime::kAllocateInTargetSpace); in Generate_AllocateInOldSpace()
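// Generate_Abort: forwards the Smi-encoded abort reason in a0 to
// Runtime::kAbort with a cleared context.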
3092 __ Push(a0); in Generate_Abort()
3093 __ Move(cp, Smi::kZero); in Generate_Abort()
3094 __ TailCallRuntime(Runtime::kAbort); in Generate_Abort()
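// Generate_ArgumentsAdaptorTrampoline: adapts a0 actual arguments to the
// a2 expected by the callee. Surplus actuals are copied into a fresh
// adaptor frame; missing ones are filled with undefined. The copy and fill
// loops use the MIPS branch delay slot (USE_DELAY_SLOT) to fold the
// pointer update or store into the branch.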
3109 __ Branch(&dont_adapt_arguments, eq, a2, in Generate_ArgumentsAdaptorTrampoline()
3112 __ Branch(&too_few, Uless, a0, Operand(a2)); in Generate_ArgumentsAdaptorTrampoline()
3119 __ bind(&enough); in Generate_ArgumentsAdaptorTrampoline()
3124 __ Lsa(a0, fp, a0, kPointerSizeLog2 - kSmiTagSize); in Generate_ArgumentsAdaptorTrampoline()
3126 __ Addu(a0, a0, Operand(2 * kPointerSize)); in Generate_ArgumentsAdaptorTrampoline()
3128 __ sll(t1, a2, kPointerSizeLog2); in Generate_ArgumentsAdaptorTrampoline()
3129 __ subu(t1, a0, t1); in Generate_ArgumentsAdaptorTrampoline()
3139 __ bind(&copy); in Generate_ArgumentsAdaptorTrampoline()
3140 __ lw(t0, MemOperand(a0)); in Generate_ArgumentsAdaptorTrampoline()
3141 __ push(t0); in Generate_ArgumentsAdaptorTrampoline()
3142 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t1)); in Generate_ArgumentsAdaptorTrampoline()
3143 __ addiu(a0, a0, -kPointerSize); // In delay slot. in Generate_ArgumentsAdaptorTrampoline()
3145 __ jmp(&invoke); in Generate_ArgumentsAdaptorTrampoline()
3149 __ bind(&too_few); in Generate_ArgumentsAdaptorTrampoline()
3158 __ Lsa(a0, fp, a0, kPointerSizeLog2 - kSmiTagSize); in Generate_ArgumentsAdaptorTrampoline()
3160 __ Addu(a0, a0, Operand(2 * kPointerSize)); in Generate_ArgumentsAdaptorTrampoline()
3162 __ Addu(t3, fp, kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
3171 __ bind(&copy); in Generate_ArgumentsAdaptorTrampoline()
3172 __ lw(t0, MemOperand(a0)); // Adjusted above for return addr and receiver. in Generate_ArgumentsAdaptorTrampoline()
3173 __ Subu(sp, sp, kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
3174 __ Subu(a0, a0, kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
3175 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3)); in Generate_ArgumentsAdaptorTrampoline()
3176 __ sw(t0, MemOperand(sp)); // In the delay slot. in Generate_ArgumentsAdaptorTrampoline()
3182 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex); in Generate_ArgumentsAdaptorTrampoline()
3183 __ sll(t2, a2, kPointerSizeLog2); in Generate_ArgumentsAdaptorTrampoline()
3184 __ Subu(t1, fp, Operand(t2)); in Generate_ArgumentsAdaptorTrampoline()
3186 __ Subu(t1, t1, Operand(StandardFrameConstants::kFixedFrameSizeFromFp + in Generate_ArgumentsAdaptorTrampoline()
3190 __ bind(&fill); in Generate_ArgumentsAdaptorTrampoline()
3191 __ Subu(sp, sp, kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
3192 __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(t1)); in Generate_ArgumentsAdaptorTrampoline()
3193 __ sw(t0, MemOperand(sp)); in Generate_ArgumentsAdaptorTrampoline()
3197 __ bind(&invoke); in Generate_ArgumentsAdaptorTrampoline()
3198 __ mov(a0, a2); in Generate_ArgumentsAdaptorTrampoline()
3202 __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); in Generate_ArgumentsAdaptorTrampoline()
3203 __ Call(t0); in Generate_ArgumentsAdaptorTrampoline()
3210 __ Ret(); in Generate_ArgumentsAdaptorTrampoline()
3215 __ bind(&dont_adapt_arguments); in Generate_ArgumentsAdaptorTrampoline()
3216 __ lw(t0, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); in Generate_ArgumentsAdaptorTrampoline()
3217 __ Jump(t0); in Generate_ArgumentsAdaptorTrampoline()
3219 __ bind(&stack_overflow); in Generate_ArgumentsAdaptorTrampoline()
3222 __ CallRuntime(Runtime::kThrowStackOverflow); in Generate_ArgumentsAdaptorTrampoline()
3223 __ break_(0xCC); in Generate_ArgumentsAdaptorTrampoline()
3227 #undef __