Lines matching refs:__ (each entry shows the source line number, the matching code, and the enclosing function)

44 #define __ ACCESS_MASM(masm)  macro
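
The "__" shorthand defined at line 44 is V8's usual macro-assembler convention: every listed "__ foo(...)" line is a call on the MacroAssembler held in "masm". As a hedged sketch (the exact definition lives in the macro-assembler headers, not in this listing), release builds conventionally expand it to a plain member access:

    #define ACCESS_MASM(masm) masm->  // assumed release-build definition
    // A listed line such as:   __ push(a1);
    // then expands to:         masm->push(a1);
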
65 __ push(a1); in Generate_Adaptor()
72 __ Addu(s0, a0, num_extra_args + 1); in Generate_Adaptor()
73 __ sll(s1, s0, kPointerSizeLog2); in Generate_Adaptor()
74 __ Subu(s1, s1, kPointerSize); in Generate_Adaptor()
75 __ JumpToExternalReference(ExternalReference(id, masm->isolate())); in Generate_Adaptor()
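
Lines 72-74 set up the two registers JumpToExternalReference expects: s0 becomes a word count and s1 a byte size derived from it. A worked C++ sketch of the same arithmetic, assuming MIPS32 constants (kPointerSize == 4, kPointerSizeLog2 == 2) and an illustrative argument count; variable names mirror the registers:

    #include <cstdio>

    int main() {
      const int kPointerSize = 4, kPointerSizeLog2 = 2;
      int a0 = 3;              // incoming argument count (illustrative)
      int num_extra_args = 1;  // one extra argument was pushed (line 65)
      int s0 = a0 + num_extra_args + 1;                  // Addu(s0, a0, ...)
      int s1 = (s0 << kPointerSizeLog2) - kPointerSize;  // sll + Subu
      std::printf("s0 = %d words, s1 = %d bytes\n", s0, s1);  // 5 words, 16 bytes
      return 0;
    }
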
84 __ lw(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); in GenerateLoadInternalArrayFunction()
85 __ lw(result, in GenerateLoadInternalArrayFunction()
88 __ lw(result, in GenerateLoadInternalArrayFunction()
99 __ lw(result, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); in GenerateLoadArrayFunction()
100 __ lw(result, in GenerateLoadArrayFunction()
103 __ lw(result, in GenerateLoadArrayFunction()
121 __ LoadInitialArrayMap(array_function, scratch2, scratch1); in AllocateEmptyJSArray()
129 __ AllocateInNewSpace(size, in AllocateEmptyJSArray()
140 __ sw(scratch1, FieldMemOperand(result, JSObject::kMapOffset)); in AllocateEmptyJSArray()
141 __ LoadRoot(scratch1, Heap::kEmptyFixedArrayRootIndex); in AllocateEmptyJSArray()
142 __ sw(scratch1, FieldMemOperand(result, JSArray::kPropertiesOffset)); in AllocateEmptyJSArray()
144 __ mov(scratch3, zero_reg); in AllocateEmptyJSArray()
145 __ sw(scratch3, FieldMemOperand(result, JSArray::kLengthOffset)); in AllocateEmptyJSArray()
148 __ sw(scratch1, FieldMemOperand(result, JSArray::kElementsOffset)); in AllocateEmptyJSArray()
156 __ Addu(scratch1, result, Operand(JSArray::kSize)); in AllocateEmptyJSArray()
157 __ sw(scratch1, FieldMemOperand(result, JSArray::kElementsOffset)); in AllocateEmptyJSArray()
160 __ And(scratch1, scratch1, Operand(~kHeapObjectTagMask)); in AllocateEmptyJSArray()
167 __ LoadRoot(scratch3, Heap::kFixedArrayMapRootIndex); in AllocateEmptyJSArray()
169 __ sw(scratch3, MemOperand(scratch1)); in AllocateEmptyJSArray()
170 __ Addu(scratch1, scratch1, kPointerSize); in AllocateEmptyJSArray()
171 __ li(scratch3, Operand(Smi::FromInt(initial_capacity))); in AllocateEmptyJSArray()
173 __ sw(scratch3, MemOperand(scratch1)); in AllocateEmptyJSArray()
174 __ Addu(scratch1, scratch1, kPointerSize); in AllocateEmptyJSArray()
178 __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex); in AllocateEmptyJSArray()
182 __ sw(scratch3, MemOperand(scratch1, i * kPointerSize)); in AllocateEmptyJSArray()
186 __ Addu(scratch2, scratch1, Operand(initial_capacity * kPointerSize)); in AllocateEmptyJSArray()
187 __ Branch(&entry); in AllocateEmptyJSArray()
188 __ bind(&loop); in AllocateEmptyJSArray()
189 __ sw(scratch3, MemOperand(scratch1)); in AllocateEmptyJSArray()
190 __ Addu(scratch1, scratch1, kPointerSize); in AllocateEmptyJSArray()
191 __ bind(&entry); in AllocateEmptyJSArray()
192 __ Branch(&loop, lt, scratch1, Operand(scratch2)); in AllocateEmptyJSArray()
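
Lines 186-192 are a test-first fill loop: jump to the check, then store the-hole and advance until the cursor reaches the precomputed end (the unrolled stores at line 182 handle the small-capacity case). A minimal C++ equivalent of the control flow, with illustrative names:

    #include <cstdint>

    // scratch1 = cursor, scratch2 = end, scratch3 = the-hole.
    void FillWithHole(intptr_t* cursor, intptr_t* end, intptr_t the_hole) {
      while (cursor < end) {  // Branch(&loop, lt, scratch1, Operand(scratch2))
        *cursor = the_hole;   // sw(scratch3, MemOperand(scratch1))
        ++cursor;             // Addu(scratch1, scratch1, kPointerSize)
      }
    }
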
217 __ LoadInitialArrayMap(array_function, scratch2, elements_array_storage); in AllocateJSArray()
220 __ Assert( in AllocateJSArray()
227 __ li(elements_array_end, in AllocateJSArray()
229 __ sra(scratch1, array_size, kSmiTagSize); in AllocateJSArray()
230 __ Addu(elements_array_end, elements_array_end, scratch1); in AllocateJSArray()
231 __ AllocateInNewSpace( in AllocateJSArray()
244 __ sw(elements_array_storage, FieldMemOperand(result, JSObject::kMapOffset)); in AllocateJSArray()
245 __ LoadRoot(elements_array_storage, Heap::kEmptyFixedArrayRootIndex); in AllocateJSArray()
246 __ sw(elements_array_storage, in AllocateJSArray()
249 __ sw(array_size, FieldMemOperand(result, JSArray::kLengthOffset)); in AllocateJSArray()
255 __ Addu(elements_array_storage, result, Operand(JSArray::kSize)); in AllocateJSArray()
256 __ sw(elements_array_storage, in AllocateJSArray()
260 __ And(elements_array_storage, in AllocateJSArray()
268 __ LoadRoot(scratch1, Heap::kFixedArrayMapRootIndex); in AllocateJSArray()
270 __ sw(scratch1, MemOperand(elements_array_storage)); in AllocateJSArray()
271 __ Addu(elements_array_storage, elements_array_storage, kPointerSize); in AllocateJSArray()
279 __ sw(array_size, MemOperand(elements_array_storage)); in AllocateJSArray()
280 __ Addu(elements_array_storage, elements_array_storage, kPointerSize); in AllocateJSArray()
287 __ sll(elements_array_end, array_size, kPointerSizeLog2 - kSmiTagSize); in AllocateJSArray()
288 __ Addu(elements_array_end, elements_array_storage, elements_array_end); in AllocateJSArray()
296 __ LoadRoot(scratch1, Heap::kTheHoleValueRootIndex); in AllocateJSArray()
297 __ Branch(&entry); in AllocateJSArray()
298 __ bind(&loop); in AllocateJSArray()
299 __ sw(scratch1, MemOperand(elements_array_storage)); in AllocateJSArray()
300 __ Addu(elements_array_storage, elements_array_storage, kPointerSize); in AllocateJSArray()
302 __ bind(&entry); in AllocateJSArray()
303 __ Branch(&loop, lt, elements_array_storage, Operand(elements_array_end)); in AllocateJSArray()
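
Line 287 is the standard smi-to-byte-offset trick on 32-bit ports: a smi keeps the integer shifted left by kSmiTagSize, so shifting the tagged value left by kPointerSizeLog2 - kSmiTagSize yields the byte count directly, with no separate untag step. A sketch with assumed 32-bit constants:

    const int kSmiTagSize = 1, kPointerSizeLog2 = 2;  // 32-bit assumptions
    int smi_len  = 5 << kSmiTagSize;                  // Smi::FromInt(5) == 10
    int byte_len = smi_len << (kPointerSizeLog2 - kSmiTagSize);
    // byte_len == 20 == 5 * kPointerSize
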
329 __ Branch(&argc_one_or_more, ne, a0, Operand(zero_reg)); in ArrayNativeCode()
331 __ bind(&empty_array); in ArrayNativeCode()
339 __ IncrementCounter(counters->array_function_native(), 1, a3, t0); in ArrayNativeCode()
341 __ mov(v0, a2); in ArrayNativeCode()
342 __ Addu(sp, sp, Operand(kPointerSize)); in ArrayNativeCode()
343 __ Ret(); in ArrayNativeCode()
347 __ bind(&argc_one_or_more); in ArrayNativeCode()
348 __ Branch(&argc_two_or_more, ne, a0, Operand(1)); in ArrayNativeCode()
351 __ lw(a2, MemOperand(sp)); // Get the argument from the stack. in ArrayNativeCode()
352 __ Branch(&not_empty_array, ne, a2, Operand(zero_reg)); in ArrayNativeCode()
353 __ Drop(1); // Adjust stack. in ArrayNativeCode()
354 __ mov(a0, zero_reg); // Treat this as a call with argc of zero. in ArrayNativeCode()
355 __ Branch(&empty_array); in ArrayNativeCode()
357 __ bind(&not_empty_array); in ArrayNativeCode()
358 __ And(a3, a2, Operand(kIntptrSignBit | kSmiTagMask)); in ArrayNativeCode()
359 __ Branch(call_generic_code, eq, a3, Operand(zero_reg)); in ArrayNativeCode()
364 __ Branch(call_generic_code, Ugreater_equal, a2, in ArrayNativeCode()
381 __ IncrementCounter(counters->array_function_native(), 1, a2, t0); in ArrayNativeCode()
384 __ mov(v0, a3); in ArrayNativeCode()
385 __ Addu(sp, sp, Operand(2 * kPointerSize)); in ArrayNativeCode()
386 __ Ret(); in ArrayNativeCode()
389 __ bind(&argc_two_or_more); in ArrayNativeCode()
390 __ sll(a2, a0, kSmiTagSize); // Convert argc to a smi. in ArrayNativeCode()
406 __ IncrementCounter(counters->array_function_native(), 1, a2, t2); in ArrayNativeCode()
418 __ Branch(USE_DELAY_SLOT, &entry); in ArrayNativeCode()
419 __ mov(t3, sp); in ArrayNativeCode()
420 __ bind(&loop); in ArrayNativeCode()
421 __ lw(a2, MemOperand(t3)); in ArrayNativeCode()
423 __ JumpIfNotSmi(a2, &has_non_smi_element); in ArrayNativeCode()
425 __ Addu(t3, t3, kPointerSize); in ArrayNativeCode()
426 __ Addu(t1, t1, -kPointerSize); in ArrayNativeCode()
427 __ sw(a2, MemOperand(t1)); in ArrayNativeCode()
428 __ bind(&entry); in ArrayNativeCode()
429 __ Branch(&loop, lt, t0, Operand(t1)); in ArrayNativeCode()
431 __ bind(&finish); in ArrayNativeCode()
432 __ mov(sp, t3); in ArrayNativeCode()
439 __ Addu(sp, sp, Operand(kPointerSize)); in ArrayNativeCode()
440 __ mov(v0, a3); in ArrayNativeCode()
441 __ Ret(); in ArrayNativeCode()
443 __ bind(&has_non_smi_element); in ArrayNativeCode()
445 __ CheckMap( in ArrayNativeCode()
447 __ bind(&cant_transition_map); in ArrayNativeCode()
448 __ UndoAllocationInNewSpace(a3, t0); in ArrayNativeCode()
449 __ Branch(call_generic_code); in ArrayNativeCode()
451 __ bind(&not_double); in ArrayNativeCode()
454 __ lw(a2, FieldMemOperand(a3, HeapObject::kMapOffset)); in ArrayNativeCode()
455 __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS, in ArrayNativeCode()
460 __ sw(a2, FieldMemOperand(a3, HeapObject::kMapOffset)); in ArrayNativeCode()
461 __ RecordWriteField(a3, in ArrayNativeCode()
470 __ bind(&loop2); in ArrayNativeCode()
471 __ lw(a2, MemOperand(t3)); in ArrayNativeCode()
472 __ Addu(t3, t3, kPointerSize); in ArrayNativeCode()
473 __ Subu(t1, t1, kPointerSize); in ArrayNativeCode()
474 __ sw(a2, MemOperand(t1)); in ArrayNativeCode()
475 __ Branch(&loop2, lt, t0, Operand(t1)); in ArrayNativeCode()
476 __ Branch(&finish); in ArrayNativeCode()
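
Both copy loops above (lines 418-429 and 470-476) fill the new elements store from its end backwards while reading arguments upwards from sp; the first loop additionally bails out to the transition path on a non-smi element (line 423), which this sketch omits. Illustrative C++:

    #include <cstdint>

    // src walks up from sp (t3); dst walks down from the end of the
    // elements area (t1) toward its start (t0).
    void CopyArgsBackToFront(const intptr_t* src, intptr_t* dst_start,
                             intptr_t* dst_end) {
      intptr_t* dst = dst_end;
      while (dst_start < dst) {  // Branch(&loop, lt, t0, Operand(t1))
        --dst;                   // Addu(t1, t1, -kPointerSize)
        *dst = *src++;           // lw from t3, sw to t1, t3 += kPointerSize
      }
    }
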
493 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate_InternalArrayCode()
494 __ And(t0, a2, Operand(kSmiTagMask)); in Generate_InternalArrayCode()
495 __ Assert(ne, "Unexpected initial map for InternalArray function", in Generate_InternalArrayCode()
497 __ GetObjectType(a2, a3, t0); in Generate_InternalArrayCode()
498 __ Assert(eq, "Unexpected initial map for InternalArray function", in Generate_InternalArrayCode()
508 __ bind(&generic_array_code); in Generate_InternalArrayCode()
512 __ Jump(array_code, RelocInfo::CODE_TARGET); in Generate_InternalArrayCode()
529 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate_ArrayCode()
530 __ And(t0, a2, Operand(kSmiTagMask)); in Generate_ArrayCode()
531 __ Assert(ne, "Unexpected initial map for Array function (1)", in Generate_ArrayCode()
533 __ GetObjectType(a2, a3, t0); in Generate_ArrayCode()
534 __ Assert(eq, "Unexpected initial map for Array function (2)", in Generate_ArrayCode()
543 __ bind(&generic_array_code); in Generate_ArrayCode()
547 __ Jump(array_code, RelocInfo::CODE_TARGET); in Generate_ArrayCode()
564 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate_ArrayConstructCode()
565 __ And(t0, a2, Operand(kSmiTagMask)); in Generate_ArrayConstructCode()
566 __ Assert(ne, "Unexpected initial map for Array function (3)", in Generate_ArrayConstructCode()
568 __ GetObjectType(a2, a3, t0); in Generate_ArrayConstructCode()
569 __ Assert(eq, "Unexpected initial map for Array function (4)", in Generate_ArrayConstructCode()
578 __ bind(&generic_constructor); in Generate_ArrayConstructCode()
582 __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET); in Generate_ArrayConstructCode()
595 __ IncrementCounter(counters->string_ctor_calls(), 1, a2, a3); in Generate_StringConstructCode()
599 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, a2); in Generate_StringConstructCode()
600 __ Assert(eq, "Unexpected String function", function, Operand(a2)); in Generate_StringConstructCode()
605 __ Branch(&no_arguments, eq, a0, Operand(zero_reg)); in Generate_StringConstructCode()
607 __ Subu(a0, a0, Operand(1)); in Generate_StringConstructCode()
608 __ sll(a0, a0, kPointerSizeLog2); in Generate_StringConstructCode()
609 __ Addu(sp, a0, sp); in Generate_StringConstructCode()
610 __ lw(a0, MemOperand(sp)); in Generate_StringConstructCode()
612 __ Drop(2); in Generate_StringConstructCode()
625 __ IncrementCounter(counters->string_ctor_cached_number(), 1, a3, t0); in Generate_StringConstructCode()
626 __ bind(&argument_is_string); in Generate_StringConstructCode()
635 __ AllocateInNewSpace(JSValue::kSize, in Generate_StringConstructCode()
644 __ LoadGlobalFunctionInitialMap(function, map, t0); in Generate_StringConstructCode()
646 __ lbu(t0, FieldMemOperand(map, Map::kInstanceSizeOffset)); in Generate_StringConstructCode()
647 __ Assert(eq, "Unexpected string wrapper instance size", in Generate_StringConstructCode()
649 __ lbu(t0, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset)); in Generate_StringConstructCode()
650 __ Assert(eq, "Unexpected unused properties of string wrapper", in Generate_StringConstructCode()
653 __ sw(map, FieldMemOperand(v0, HeapObject::kMapOffset)); in Generate_StringConstructCode()
655 __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex); in Generate_StringConstructCode()
656 __ sw(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset)); in Generate_StringConstructCode()
657 __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset)); in Generate_StringConstructCode()
659 __ sw(argument, FieldMemOperand(v0, JSValue::kValueOffset)); in Generate_StringConstructCode()
664 __ Ret(); in Generate_StringConstructCode()
669 __ bind(&not_cached); in Generate_StringConstructCode()
670 __ JumpIfSmi(a0, &convert_argument); in Generate_StringConstructCode()
673 __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset)); in Generate_StringConstructCode()
674 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset)); in Generate_StringConstructCode()
676 __ And(t0, a3, Operand(kIsNotStringMask)); in Generate_StringConstructCode()
677 __ Branch(&convert_argument, ne, t0, Operand(zero_reg)); in Generate_StringConstructCode()
678 __ mov(argument, a0); in Generate_StringConstructCode()
679 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0); in Generate_StringConstructCode()
680 __ Branch(&argument_is_string); in Generate_StringConstructCode()
683 __ bind(&convert_argument); in Generate_StringConstructCode()
684 __ push(function); // Preserve the function. in Generate_StringConstructCode()
685 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0); in Generate_StringConstructCode()
688 __ push(v0); in Generate_StringConstructCode()
689 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION); in Generate_StringConstructCode()
691 __ pop(function); in Generate_StringConstructCode()
692 __ mov(argument, v0); in Generate_StringConstructCode()
693 __ Branch(&argument_is_string); in Generate_StringConstructCode()
697 __ bind(&no_arguments); in Generate_StringConstructCode()
698 __ LoadRoot(argument, Heap::kEmptyStringRootIndex); in Generate_StringConstructCode()
699 __ Drop(1); in Generate_StringConstructCode()
700 __ Branch(&argument_is_string); in Generate_StringConstructCode()
704 __ bind(&gc_required); in Generate_StringConstructCode()
705 __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0); in Generate_StringConstructCode()
708 __ push(argument); in Generate_StringConstructCode()
709 __ CallRuntime(Runtime::kNewStringWrapper, 1); in Generate_StringConstructCode()
711 __ Ret(); in Generate_StringConstructCode()
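
Lines 644-659 build the string wrapper field by field: map first, then the empty fixed array for both properties and elements, then the wrapped string itself. As a layout sketch only, inferred from the stores above rather than from V8's actual class declaration:

    // Word-by-word picture of the JSValue being initialized in v0.
    struct JSValueSketch {
      void* map;         // sw(map, ... HeapObject::kMapOffset)
      void* properties;  // empty fixed array (a3)
      void* elements;    // empty fixed array (a3)
      void* value;       // the argument string (JSValue::kValueOffset)
    };  // the asserts at lines 646-650 check the expected instance size
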
742 __ sll(a0, a0, kSmiTagSize); // Tag arguments count. in Generate_JSConstructStubHelper()
743 __ MultiPushReversed(a0.bit() | a1.bit()); in Generate_JSConstructStubHelper()
746 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex); in Generate_JSConstructStubHelper()
756 __ li(a2, Operand(debug_step_in_fp)); in Generate_JSConstructStubHelper()
757 __ lw(a2, MemOperand(a2)); in Generate_JSConstructStubHelper()
758 __ Branch(&rt_call, ne, a2, Operand(zero_reg)); in Generate_JSConstructStubHelper()
763 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate_JSConstructStubHelper()
764 __ JumpIfSmi(a2, &rt_call); in Generate_JSConstructStubHelper()
765 __ GetObjectType(a2, a3, t4); in Generate_JSConstructStubHelper()
766 __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE)); in Generate_JSConstructStubHelper()
773 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset)); in Generate_JSConstructStubHelper()
774 __ Branch(&rt_call, eq, a3, Operand(JS_FUNCTION_TYPE)); in Generate_JSConstructStubHelper()
779 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_JSConstructStubHelper()
782 __ lbu(t0, constructor_count); in Generate_JSConstructStubHelper()
783 __ Subu(t0, t0, Operand(1)); in Generate_JSConstructStubHelper()
784 __ sb(t0, constructor_count); in Generate_JSConstructStubHelper()
785 __ Branch(&allocate, ne, t0, Operand(zero_reg)); in Generate_JSConstructStubHelper()
787 __ Push(a1, a2); in Generate_JSConstructStubHelper()
789 __ push(a1); // Constructor. in Generate_JSConstructStubHelper()
791 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1); in Generate_JSConstructStubHelper()
793 __ pop(a2); in Generate_JSConstructStubHelper()
794 __ pop(a1); in Generate_JSConstructStubHelper()
796 __ bind(&allocate); in Generate_JSConstructStubHelper()
802 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset)); in Generate_JSConstructStubHelper()
803 __ AllocateInNewSpace(a3, t4, t5, t6, &rt_call, SIZE_IN_WORDS); in Generate_JSConstructStubHelper()
811 __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex); in Generate_JSConstructStubHelper()
812 __ mov(t5, t4); in Generate_JSConstructStubHelper()
813 __ sw(a2, MemOperand(t5, JSObject::kMapOffset)); in Generate_JSConstructStubHelper()
814 __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset)); in Generate_JSConstructStubHelper()
815 __ sw(t6, MemOperand(t5, JSObject::kElementsOffset)); in Generate_JSConstructStubHelper()
816 __ Addu(t5, t5, Operand(3*kPointerSize)); in Generate_JSConstructStubHelper()
827 __ sll(t0, a3, kPointerSizeLog2); in Generate_JSConstructStubHelper()
828 __ addu(t6, t4, t0); // End of object. in Generate_JSConstructStubHelper()
830 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex); in Generate_JSConstructStubHelper()
832 __ lw(a0, FieldMemOperand(a2, Map::kInstanceSizesOffset)); in Generate_JSConstructStubHelper()
833 __ Ext(a0, a0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte, in Generate_JSConstructStubHelper()
835 __ sll(t0, a0, kPointerSizeLog2); in Generate_JSConstructStubHelper()
836 __ addu(a0, t5, t0); in Generate_JSConstructStubHelper()
839 __ Assert(le, "Unexpected number of pre-allocated property fields.", in Generate_JSConstructStubHelper()
842 __ InitializeFieldsWithFiller(t5, a0, t7); in Generate_JSConstructStubHelper()
844 __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex); in Generate_JSConstructStubHelper()
846 __ InitializeFieldsWithFiller(t5, t6, t7); in Generate_JSConstructStubHelper()
852 __ Addu(t4, t4, Operand(kHeapObjectTag)); in Generate_JSConstructStubHelper()
859 __ lbu(a3, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset)); in Generate_JSConstructStubHelper()
862 __ lw(a0, FieldMemOperand(a2, Map::kInstanceSizesOffset)); in Generate_JSConstructStubHelper()
863 __ Ext(t6, a0, Map::kPreAllocatedPropertyFieldsByte * kBitsPerByte, in Generate_JSConstructStubHelper()
865 __ Addu(a3, a3, Operand(t6)); in Generate_JSConstructStubHelper()
866 __ Ext(t6, a0, Map::kInObjectPropertiesByte * kBitsPerByte, in Generate_JSConstructStubHelper()
868 __ subu(a3, a3, t6); in Generate_JSConstructStubHelper()
871 __ Branch(&allocated, eq, a3, Operand(zero_reg)); in Generate_JSConstructStubHelper()
872 __ Assert(greater_equal, "Property allocation count failed.", in Generate_JSConstructStubHelper()
881 __ Addu(a0, a3, Operand(FixedArray::kHeaderSize / kPointerSize)); in Generate_JSConstructStubHelper()
882 __ AllocateInNewSpace( in Generate_JSConstructStubHelper()
895 __ LoadRoot(t6, Heap::kFixedArrayMapRootIndex); in Generate_JSConstructStubHelper()
896 __ mov(a2, t5); in Generate_JSConstructStubHelper()
897 __ sw(t6, MemOperand(a2, JSObject::kMapOffset)); in Generate_JSConstructStubHelper()
898 __ sll(a0, a3, kSmiTagSize); in Generate_JSConstructStubHelper()
899 __ sw(a0, MemOperand(a2, FixedArray::kLengthOffset)); in Generate_JSConstructStubHelper()
900 __ Addu(a2, a2, Operand(2 * kPointerSize)); in Generate_JSConstructStubHelper()
911 __ sll(t3, a3, kPointerSizeLog2); in Generate_JSConstructStubHelper()
912 __ addu(t6, a2, t3); // End of object. in Generate_JSConstructStubHelper()
916 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex); in Generate_JSConstructStubHelper()
918 __ LoadRoot(t8, Heap::kUndefinedValueRootIndex); in Generate_JSConstructStubHelper()
919 __ Assert(eq, "Undefined value not loaded.", t7, Operand(t8)); in Generate_JSConstructStubHelper()
921 __ jmp(&entry); in Generate_JSConstructStubHelper()
922 __ bind(&loop); in Generate_JSConstructStubHelper()
923 __ sw(t7, MemOperand(a2)); in Generate_JSConstructStubHelper()
924 __ addiu(a2, a2, kPointerSize); in Generate_JSConstructStubHelper()
925 __ bind(&entry); in Generate_JSConstructStubHelper()
926 __ Branch(&loop, less, a2, Operand(t6)); in Generate_JSConstructStubHelper()
934 __ Addu(t5, t5, Operand(kHeapObjectTag)); // Add the heap tag. in Generate_JSConstructStubHelper()
935 __ sw(t5, FieldMemOperand(t4, JSObject::kPropertiesOffset)); in Generate_JSConstructStubHelper()
940 __ jmp(&allocated); in Generate_JSConstructStubHelper()
946 __ bind(&undo_allocation); in Generate_JSConstructStubHelper()
947 __ UndoAllocationInNewSpace(t4, t5); in Generate_JSConstructStubHelper()
950 __ bind(&rt_call); in Generate_JSConstructStubHelper()
953 __ push(a1); // Argument for Runtime_NewObject. in Generate_JSConstructStubHelper()
954 __ CallRuntime(Runtime::kNewObject, 1); in Generate_JSConstructStubHelper()
955 __ mov(t4, v0); in Generate_JSConstructStubHelper()
959 __ bind(&allocated); in Generate_JSConstructStubHelper()
960 __ push(t4); in Generate_JSConstructStubHelper()
961 __ push(t4); in Generate_JSConstructStubHelper()
968 __ lw(a1, MemOperand(sp, 2 * kPointerSize)); in Generate_JSConstructStubHelper()
969 __ lw(a3, MemOperand(sp, 3 * kPointerSize)); in Generate_JSConstructStubHelper()
972 __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset)); in Generate_JSConstructStubHelper()
975 __ srl(a0, a3, kSmiTagSize); in Generate_JSConstructStubHelper()
987 __ jmp(&entry); in Generate_JSConstructStubHelper()
988 __ bind(&loop); in Generate_JSConstructStubHelper()
989 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); in Generate_JSConstructStubHelper()
990 __ Addu(t0, a2, Operand(t0)); in Generate_JSConstructStubHelper()
991 __ lw(t1, MemOperand(t0)); in Generate_JSConstructStubHelper()
992 __ push(t1); in Generate_JSConstructStubHelper()
993 __ bind(&entry); in Generate_JSConstructStubHelper()
994 __ Addu(a3, a3, Operand(-2)); in Generate_JSConstructStubHelper()
995 __ Branch(&loop, greater_equal, a3, Operand(zero_reg)); in Generate_JSConstructStubHelper()
1001 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); in Generate_JSConstructStubHelper()
1005 __ InvokeCode(code, expected, expected, in Generate_JSConstructStubHelper()
1009 __ InvokeFunction(a1, actual, CALL_FUNCTION, in Generate_JSConstructStubHelper()
1019 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); in Generate_JSConstructStubHelper()
1031 __ JumpIfSmi(v0, &use_receiver); in Generate_JSConstructStubHelper()
1035 __ GetObjectType(v0, a3, a3); in Generate_JSConstructStubHelper()
1036 __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE)); in Generate_JSConstructStubHelper()
1040 __ bind(&use_receiver); in Generate_JSConstructStubHelper()
1041 __ lw(v0, MemOperand(sp)); in Generate_JSConstructStubHelper()
1045 __ bind(&exit); in Generate_JSConstructStubHelper()
1050 __ lw(a1, MemOperand(sp, 2 * kPointerSize)); in Generate_JSConstructStubHelper()
1055 __ sll(t0, a1, kPointerSizeLog2 - 1); in Generate_JSConstructStubHelper()
1056 __ Addu(sp, sp, t0); in Generate_JSConstructStubHelper()
1057 __ Addu(sp, sp, kPointerSize); in Generate_JSConstructStubHelper()
1058 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2); in Generate_JSConstructStubHelper()
1059 __ Ret(); in Generate_JSConstructStubHelper()
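
The argument-copy loop at lines 987-995 is driven by a smi-tagged counter: a3 steps by -2 per iteration, which subtracts one in smi encoding, and the same tagged value is scaled to a byte offset with a single shift (line 989). Untagged C++ equivalent, where push stands in for the machine push:

    #include <cstdint>

    // caller_sp mirrors a2; count is the untagged argument count.
    void PushArgumentsLastToFirst(const intptr_t* caller_sp, int count,
                                  void (*push)(intptr_t)) {
      for (int i = count - 1; i >= 0; --i) {  // Addu(a3, a3, -2); Branch ge
        push(caller_sp[i]);                   // sll + Addu + lw + push
      }
    }
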
1091 __ mov(cp, zero_reg); in Generate_JSEntryTrampolineHelper()
1098 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); in Generate_JSEntryTrampolineHelper()
1101 __ Push(a1, a2); in Generate_JSEntryTrampolineHelper()
1107 __ sll(t0, a3, kPointerSizeLog2); in Generate_JSEntryTrampolineHelper()
1108 __ addu(t2, s0, t0); in Generate_JSEntryTrampolineHelper()
1109 __ b(&entry); in Generate_JSEntryTrampolineHelper()
1110 __ nop(); // Branch delay slot nop. in Generate_JSEntryTrampolineHelper()
1112 __ bind(&loop); in Generate_JSEntryTrampolineHelper()
1113 __ lw(t0, MemOperand(s0)); // Read next parameter. in Generate_JSEntryTrampolineHelper()
1114 __ addiu(s0, s0, kPointerSize); in Generate_JSEntryTrampolineHelper()
1115 __ lw(t0, MemOperand(t0)); // Dereference handle. in Generate_JSEntryTrampolineHelper()
1116 __ push(t0); // Push parameter. in Generate_JSEntryTrampolineHelper()
1117 __ bind(&entry); in Generate_JSEntryTrampolineHelper()
1118 __ Branch(&loop, ne, s0, Operand(t2)); in Generate_JSEntryTrampolineHelper()
1122 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex); in Generate_JSEntryTrampolineHelper()
1123 __ mov(s1, t0); in Generate_JSEntryTrampolineHelper()
1124 __ mov(s2, t0); in Generate_JSEntryTrampolineHelper()
1125 __ mov(s3, t0); in Generate_JSEntryTrampolineHelper()
1126 __ mov(s4, t0); in Generate_JSEntryTrampolineHelper()
1127 __ mov(s5, t0); in Generate_JSEntryTrampolineHelper()
1132 __ mov(a0, a3); in Generate_JSEntryTrampolineHelper()
1135 __ CallStub(&stub); in Generate_JSEntryTrampolineHelper()
1138 __ InvokeFunction(a1, actual, CALL_FUNCTION, in Generate_JSEntryTrampolineHelper()
1145 __ Jump(ra); in Generate_JSEntryTrampolineHelper()
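
The parameter loop at lines 1112-1118 performs the double load typical of entry trampolines: each argv slot holds a Handle, i.e. a pointer to a cell containing the actual object pointer, so one extra dereference happens before the push. Sketch:

    #include <cstdint>
    #include <vector>

    void PushParameters(intptr_t* const* argv, int argc,
                        std::vector<intptr_t>* js_stack) {
      for (int i = 0; i < argc; ++i) {
        const intptr_t* handle = argv[i];  // lw(t0, MemOperand(s0)): next parameter
        js_stack->push_back(*handle);      // lw(t0, MemOperand(t0)): deref handle
      }
    }
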
1165 __ push(a1); in Generate_LazyCompile()
1167 __ push(t1); in Generate_LazyCompile()
1170 __ push(a1); in Generate_LazyCompile()
1172 __ CallRuntime(Runtime::kLazyCompile, 1); in Generate_LazyCompile()
1174 __ addiu(t9, v0, Code::kHeaderSize - kHeapObjectTag); in Generate_LazyCompile()
1177 __ pop(t1); in Generate_LazyCompile()
1179 __ pop(a1); in Generate_LazyCompile()
1185 __ Jump(t9); in Generate_LazyCompile()
1195 __ push(a1); in Generate_LazyRecompile()
1197 __ push(t1); in Generate_LazyRecompile()
1200 __ push(a1); in Generate_LazyRecompile()
1201 __ CallRuntime(Runtime::kLazyRecompile, 1); in Generate_LazyRecompile()
1203 __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate_LazyRecompile()
1206 __ pop(t1); in Generate_LazyRecompile()
1208 __ pop(a1); in Generate_LazyRecompile()
1214 __ Jump(t9); in Generate_LazyRecompile()
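
Lines 1174 and 1203 compute the same entry point in both stubs: the returned Code object pointer is heap-tagged, and its first instruction lives kHeaderSize bytes into the object, hence code + Code::kHeaderSize - kHeapObjectTag. Sketch with the constants left as parameters:

    #include <cstdint>

    uintptr_t CodeEntry(uintptr_t tagged_code, int header_size, int heap_tag) {
      // addiu(t9, v0, Code::kHeaderSize - kHeapObjectTag)
      return tagged_code + header_size - heap_tag;
    }
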
1223 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type)))); in Generate_NotifyDeoptimizedHelper()
1224 __ push(a0); in Generate_NotifyDeoptimizedHelper()
1225 __ CallRuntime(Runtime::kNotifyDeoptimized, 1); in Generate_NotifyDeoptimizedHelper()
1229 __ lw(t2, MemOperand(sp, 0 * kPointerSize)); in Generate_NotifyDeoptimizedHelper()
1230 __ SmiUntag(t2); in Generate_NotifyDeoptimizedHelper()
1233 __ Branch(&with_tos_register, in Generate_NotifyDeoptimizedHelper()
1235 __ Addu(sp, sp, Operand(1 * kPointerSize)); // Remove state. in Generate_NotifyDeoptimizedHelper()
1236 __ Ret(); in Generate_NotifyDeoptimizedHelper()
1238 __ bind(&with_tos_register); in Generate_NotifyDeoptimizedHelper()
1239 __ lw(v0, MemOperand(sp, 1 * kPointerSize)); in Generate_NotifyDeoptimizedHelper()
1240 __ Branch(&unknown_state, ne, t2, Operand(FullCodeGenerator::TOS_REG)); in Generate_NotifyDeoptimizedHelper()
1242 __ Addu(sp, sp, Operand(2 * kPointerSize)); // Remove state. in Generate_NotifyDeoptimizedHelper()
1243 __ Ret(); in Generate_NotifyDeoptimizedHelper()
1245 __ bind(&unknown_state); in Generate_NotifyDeoptimizedHelper()
1246 __ stop("no cases left"); in Generate_NotifyDeoptimizedHelper()
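
Lines 1229-1246 dispatch on the deoptimizer state word saved at the stack top: NO_REGISTERS drops one word and returns, TOS_REG restores v0 from the second word and drops two, and anything else hits the stop. Control-flow sketch; the enum values stand in for FullCodeGenerator's:

    #include <cstdint>

    enum State { NO_REGISTERS, TOS_REG };  // stand-ins, not V8's declarations

    // Returns the number of stack words to remove; may restore the TOS value.
    int HandleDeoptState(State state, const intptr_t* sp, intptr_t* v0) {
      if (state == NO_REGISTERS) return 1;              // Addu(sp, sp, 1 word)
      if (state == TOS_REG) { *v0 = sp[1]; return 2; }  // lw(v0, sp[1]); 2 words
      __builtin_trap();                                 // stop("no cases left")
    }
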
1267 __ MultiPush(saved_regs); in Generate_NotifyOSR()
1270 __ CallRuntime(Runtime::kNotifyOSR, 0); in Generate_NotifyOSR()
1272 __ MultiPop(saved_regs); in Generate_NotifyOSR()
1273 __ Ret(); in Generate_NotifyOSR()
1280 __ Abort("Unreachable code: Cannot optimize without FPU support."); in Generate_OnStackReplacement()
1286 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); in Generate_OnStackReplacement()
1289 __ push(a0); in Generate_OnStackReplacement()
1290 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1); in Generate_OnStackReplacement()
1295 __ Ret(eq, v0, Operand(Smi::FromInt(-1))); in Generate_OnStackReplacement()
1298 __ SmiUntag(v0); in Generate_OnStackReplacement()
1299 __ push(v0); in Generate_OnStackReplacement()
1312 __ Branch(&done, ne, a0, Operand(zero_reg)); in Generate_FunctionCall()
1313 __ LoadRoot(t2, Heap::kUndefinedValueRootIndex); in Generate_FunctionCall()
1314 __ push(t2); in Generate_FunctionCall()
1315 __ Addu(a0, a0, Operand(1)); in Generate_FunctionCall()
1316 __ bind(&done); in Generate_FunctionCall()
1323 __ sll(at, a0, kPointerSizeLog2); in Generate_FunctionCall()
1324 __ addu(at, sp, at); in Generate_FunctionCall()
1325 __ lw(a1, MemOperand(at)); in Generate_FunctionCall()
1326 __ JumpIfSmi(a1, &non_function); in Generate_FunctionCall()
1327 __ GetObjectType(a1, a2, a2); in Generate_FunctionCall()
1328 __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE)); in Generate_FunctionCall()
1334 __ li(t0, Operand(0, RelocInfo::NONE)); // Indicate regular JS_FUNCTION. in Generate_FunctionCall()
1337 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); in Generate_FunctionCall()
1340 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_FunctionCall()
1341 __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset)); in Generate_FunctionCall()
1342 __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + in Generate_FunctionCall()
1344 __ Branch(&shift_arguments, ne, t3, Operand(zero_reg)); in Generate_FunctionCall()
1347 __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); in Generate_FunctionCall()
1348 __ Branch(&shift_arguments, ne, t3, Operand(zero_reg)); in Generate_FunctionCall()
1352 __ sll(at, a0, kPointerSizeLog2); in Generate_FunctionCall()
1353 __ addu(a2, sp, at); in Generate_FunctionCall()
1354 __ lw(a2, MemOperand(a2, -kPointerSize)); in Generate_FunctionCall()
1358 __ JumpIfSmi(a2, &convert_to_object, t2); in Generate_FunctionCall()
1360 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex); in Generate_FunctionCall()
1361 __ Branch(&use_global_receiver, eq, a2, Operand(a3)); in Generate_FunctionCall()
1362 __ LoadRoot(a3, Heap::kNullValueRootIndex); in Generate_FunctionCall()
1363 __ Branch(&use_global_receiver, eq, a2, Operand(a3)); in Generate_FunctionCall()
1366 __ GetObjectType(a2, a3, a3); in Generate_FunctionCall()
1367 __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE)); in Generate_FunctionCall()
1369 __ bind(&convert_to_object); in Generate_FunctionCall()
1373 __ sll(a0, a0, kSmiTagSize); // Smi tagged. in Generate_FunctionCall()
1374 __ push(a0); in Generate_FunctionCall()
1376 __ push(a2); in Generate_FunctionCall()
1377 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); in Generate_FunctionCall()
1378 __ mov(a2, v0); in Generate_FunctionCall()
1380 __ pop(a0); in Generate_FunctionCall()
1381 __ sra(a0, a0, kSmiTagSize); // Un-tag. in Generate_FunctionCall()
1385 __ sll(at, a0, kPointerSizeLog2); in Generate_FunctionCall()
1386 __ addu(at, sp, at); in Generate_FunctionCall()
1387 __ lw(a1, MemOperand(at)); in Generate_FunctionCall()
1388 __ li(t0, Operand(0, RelocInfo::NONE)); in Generate_FunctionCall()
1389 __ Branch(&patch_receiver); in Generate_FunctionCall()
1393 __ bind(&use_global_receiver); in Generate_FunctionCall()
1396 __ lw(a2, FieldMemOperand(cp, kGlobalIndex)); in Generate_FunctionCall()
1397 __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalContextOffset)); in Generate_FunctionCall()
1398 __ lw(a2, FieldMemOperand(a2, kGlobalIndex)); in Generate_FunctionCall()
1399 __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset)); in Generate_FunctionCall()
1401 __ bind(&patch_receiver); in Generate_FunctionCall()
1402 __ sll(at, a0, kPointerSizeLog2); in Generate_FunctionCall()
1403 __ addu(a3, sp, at); in Generate_FunctionCall()
1404 __ sw(a2, MemOperand(a3, -kPointerSize)); in Generate_FunctionCall()
1406 __ Branch(&shift_arguments); in Generate_FunctionCall()
1410 __ bind(&slow); in Generate_FunctionCall()
1411 __ li(t0, Operand(1, RelocInfo::NONE)); // Indicate function proxy. in Generate_FunctionCall()
1412 __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE)); in Generate_FunctionCall()
1414 __ bind(&non_function); in Generate_FunctionCall()
1415 __ li(t0, Operand(2, RelocInfo::NONE)); // Indicate non-function. in Generate_FunctionCall()
1424 __ sll(at, a0, kPointerSizeLog2); in Generate_FunctionCall()
1425 __ addu(a2, sp, at); in Generate_FunctionCall()
1426 __ sw(a1, MemOperand(a2, -kPointerSize)); in Generate_FunctionCall()
1434 __ bind(&shift_arguments); in Generate_FunctionCall()
1437 __ sll(at, a0, kPointerSizeLog2); in Generate_FunctionCall()
1438 __ addu(a2, sp, at); in Generate_FunctionCall()
1440 __ bind(&loop); in Generate_FunctionCall()
1441 __ lw(at, MemOperand(a2, -kPointerSize)); in Generate_FunctionCall()
1442 __ sw(at, MemOperand(a2)); in Generate_FunctionCall()
1443 __ Subu(a2, a2, Operand(kPointerSize)); in Generate_FunctionCall()
1444 __ Branch(&loop, ne, a2, Operand(sp)); in Generate_FunctionCall()
1447 __ Subu(a0, a0, Operand(1)); in Generate_FunctionCall()
1448 __ Pop(); in Generate_FunctionCall()
1457 __ Branch(&function, eq, t0, Operand(zero_reg)); in Generate_FunctionCall()
1459 __ mov(a2, zero_reg); in Generate_FunctionCall()
1460 __ SetCallKind(t1, CALL_AS_METHOD); in Generate_FunctionCall()
1461 __ Branch(&non_proxy, ne, t0, Operand(1)); in Generate_FunctionCall()
1463 __ push(a1); // Re-add proxy object as additional argument. in Generate_FunctionCall()
1464 __ Addu(a0, a0, Operand(1)); in Generate_FunctionCall()
1465 __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY); in Generate_FunctionCall()
1466 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), in Generate_FunctionCall()
1469 __ bind(&non_proxy); in Generate_FunctionCall()
1470 __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION); in Generate_FunctionCall()
1471 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), in Generate_FunctionCall()
1473 __ bind(&function); in Generate_FunctionCall()
1481 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_FunctionCall()
1482 __ lw(a2, in Generate_FunctionCall()
1484 __ sra(a2, a2, kSmiTagSize); in Generate_FunctionCall()
1485 __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset)); in Generate_FunctionCall()
1486 __ SetCallKind(t1, CALL_AS_METHOD); in Generate_FunctionCall()
1488 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), in Generate_FunctionCall()
1492 __ InvokeCode(a3, expected, expected, JUMP_FUNCTION, in Generate_FunctionCall()
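
The shift loop at lines 1440-1444 removes one word from the stack by sliding every argument a slot toward higher addresses, starting from sp + argc words and stopping at sp; lines 1447-1448 then drop the duplicated bottom word. Pointer-level sketch:

    #include <cstdint>

    void ShiftArgumentsUp(intptr_t* sp, int argc) {
      for (intptr_t* p = sp + argc; p != sp; --p) {  // Subu(a2, ...); Branch ne sp
        p[0] = p[-1];  // lw(at, MemOperand(a2, -kPointerSize)); sw(at, MemOperand(a2))
      }
      // Followed by Subu(a0, a0, 1) and Pop(): argc-- and discard sp[0].
    }
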
1506 __ lw(a0, MemOperand(fp, kFunctionOffset)); // Get the function. in Generate_FunctionApply()
1507 __ push(a0); in Generate_FunctionApply()
1508 __ lw(a0, MemOperand(fp, kArgsOffset)); // Get the args array. in Generate_FunctionApply()
1509 __ push(a0); in Generate_FunctionApply()
1511 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION); in Generate_FunctionApply()
1517 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex); in Generate_FunctionApply()
1520 __ subu(a2, sp, a2); in Generate_FunctionApply()
1522 __ sll(t3, v0, kPointerSizeLog2 - kSmiTagSize); in Generate_FunctionApply()
1523 __ Branch(&okay, gt, a2, Operand(t3)); // Signed comparison. in Generate_FunctionApply()
1526 __ lw(a1, MemOperand(fp, kFunctionOffset)); in Generate_FunctionApply()
1527 __ push(a1); in Generate_FunctionApply()
1528 __ push(v0); in Generate_FunctionApply()
1529 __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION); in Generate_FunctionApply()
1533 __ bind(&okay); in Generate_FunctionApply()
1534 __ push(v0); // Limit. in Generate_FunctionApply()
1535 __ mov(a1, zero_reg); // Initial index. in Generate_FunctionApply()
1536 __ push(a1); in Generate_FunctionApply()
1539 __ lw(a0, MemOperand(fp, kRecvOffset)); in Generate_FunctionApply()
1543 __ lw(a1, MemOperand(fp, kFunctionOffset)); in Generate_FunctionApply()
1544 __ GetObjectType(a1, a2, a2); in Generate_FunctionApply()
1545 __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE)); in Generate_FunctionApply()
1548 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); in Generate_FunctionApply()
1550 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate_FunctionApply()
1555 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset)); in Generate_FunctionApply()
1556 __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction + in Generate_FunctionApply()
1558 __ Branch(&push_receiver, ne, t3, Operand(zero_reg)); in Generate_FunctionApply()
1561 __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize))); in Generate_FunctionApply()
1562 __ Branch(&push_receiver, ne, t3, Operand(zero_reg)); in Generate_FunctionApply()
1565 __ JumpIfSmi(a0, &call_to_object); in Generate_FunctionApply()
1566 __ LoadRoot(a1, Heap::kNullValueRootIndex); in Generate_FunctionApply()
1567 __ Branch(&use_global_receiver, eq, a0, Operand(a1)); in Generate_FunctionApply()
1568 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); in Generate_FunctionApply()
1569 __ Branch(&use_global_receiver, eq, a0, Operand(a2)); in Generate_FunctionApply()
1574 __ GetObjectType(a0, a1, a1); in Generate_FunctionApply()
1575 __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE)); in Generate_FunctionApply()
1579 __ bind(&call_to_object); in Generate_FunctionApply()
1580 __ push(a0); in Generate_FunctionApply()
1581 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); in Generate_FunctionApply()
1582 __ mov(a0, v0); // Put object in a0 to match other paths to push_receiver. in Generate_FunctionApply()
1583 __ Branch(&push_receiver); in Generate_FunctionApply()
1586 __ bind(&use_global_receiver); in Generate_FunctionApply()
1589 __ lw(a0, FieldMemOperand(cp, kGlobalOffset)); in Generate_FunctionApply()
1590 __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalContextOffset)); in Generate_FunctionApply()
1591 __ lw(a0, FieldMemOperand(a0, kGlobalOffset)); in Generate_FunctionApply()
1592 __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset)); in Generate_FunctionApply()
1596 __ bind(&push_receiver); in Generate_FunctionApply()
1597 __ push(a0); in Generate_FunctionApply()
1601 __ lw(a0, MemOperand(fp, kIndexOffset)); in Generate_FunctionApply()
1602 __ Branch(&entry); in Generate_FunctionApply()
1607 __ bind(&loop); in Generate_FunctionApply()
1608 __ lw(a1, MemOperand(fp, kArgsOffset)); in Generate_FunctionApply()
1609 __ push(a1); in Generate_FunctionApply()
1610 __ push(a0); in Generate_FunctionApply()
1613 __ CallRuntime(Runtime::kGetProperty, 2); in Generate_FunctionApply()
1614 __ push(v0); in Generate_FunctionApply()
1617 __ lw(a0, MemOperand(fp, kIndexOffset)); in Generate_FunctionApply()
1618 __ Addu(a0, a0, Operand(1 << kSmiTagSize)); in Generate_FunctionApply()
1619 __ sw(a0, MemOperand(fp, kIndexOffset)); in Generate_FunctionApply()
1623 __ bind(&entry); in Generate_FunctionApply()
1624 __ lw(a1, MemOperand(fp, kLimitOffset)); in Generate_FunctionApply()
1625 __ Branch(&loop, ne, a0, Operand(a1)); in Generate_FunctionApply()
1630 __ sra(a0, a0, kSmiTagSize); in Generate_FunctionApply()
1631 __ lw(a1, MemOperand(fp, kFunctionOffset)); in Generate_FunctionApply()
1632 __ GetObjectType(a1, a2, a2); in Generate_FunctionApply()
1633 __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE)); in Generate_FunctionApply()
1635 __ InvokeFunction(a1, actual, CALL_FUNCTION, in Generate_FunctionApply()
1639 __ Ret(USE_DELAY_SLOT); in Generate_FunctionApply()
1640 __ Addu(sp, sp, Operand(3 * kPointerSize)); // In delay slot. in Generate_FunctionApply()
1643 __ bind(&call_proxy); in Generate_FunctionApply()
1644 __ push(a1); // Add function proxy as last argument. in Generate_FunctionApply()
1645 __ Addu(a0, a0, Operand(1)); in Generate_FunctionApply()
1646 __ li(a2, Operand(0, RelocInfo::NONE)); in Generate_FunctionApply()
1647 __ SetCallKind(t1, CALL_AS_METHOD); in Generate_FunctionApply()
1648 __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY); in Generate_FunctionApply()
1649 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), in Generate_FunctionApply()
1654 __ Ret(USE_DELAY_SLOT); in Generate_FunctionApply()
1655 __ Addu(sp, sp, Operand(3 * kPointerSize)); // In delay slot. in Generate_FunctionApply()
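
Line 1618 advances the loop index without leaving smi representation: adding 1 << kSmiTagSize to a tagged value adds exactly one to the integer it encodes, so the index can be compared against the smi limit at line 1624 directly. Sketch:

    const int kSmiTagSize = 1;     // 32-bit assumption
    int index = 4 << kSmiTagSize;  // Smi::FromInt(4)
    index += 1 << kSmiTagSize;     // Addu(a0, a0, Operand(1 << kSmiTagSize))
    // index now encodes 5: (index >> kSmiTagSize) == 5
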
1660 __ sll(a0, a0, kSmiTagSize); in EnterArgumentsAdaptorFrame()
1661 __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); in EnterArgumentsAdaptorFrame()
1662 __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit()); in EnterArgumentsAdaptorFrame()
1663 __ Addu(fp, sp, Operand(3 * kPointerSize)); in EnterArgumentsAdaptorFrame()
1673 __ lw(a1, MemOperand(fp, -3 * kPointerSize)); in LeaveArgumentsAdaptorFrame()
1674 __ mov(sp, fp); in LeaveArgumentsAdaptorFrame()
1675 __ MultiPop(fp.bit() | ra.bit()); in LeaveArgumentsAdaptorFrame()
1676 __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize); in LeaveArgumentsAdaptorFrame()
1677 __ Addu(sp, sp, t0); in LeaveArgumentsAdaptorFrame()
1679 __ Addu(sp, sp, Operand(kPointerSize)); in LeaveArgumentsAdaptorFrame()
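
Taken together, lines 1660-1663 and 1673-1679 imply the adaptor frame layout below: MultiPush stores higher-numbered registers at higher addresses, and fp is then planted three words above the new sp, which matches the argc reload at fp minus three words during teardown. A hedged reconstruction, one word per slot, derived from this code rather than from V8's frame-constants headers:

    //   fp + 1 word  : saved ra
    //   fp + 0       : saved caller fp
    //   fp - 1 word  : StackFrame::ARGUMENTS_ADAPTOR marker (smi, t0)
    //   fp - 2 words : function (a1)
    //   fp - 3 words : argument count as a smi (a0)  <- reloaded at line 1673
    //
    // Teardown: sp = fp; pop fp and ra; sp += untagged(argc) words, plus one
    // more word (line 1679) to discard the receiver.
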
1696 __ Branch(&dont_adapt_arguments, eq, in Generate_ArgumentsAdaptorTrampoline()
1699 __ Branch(&too_few, Uless, a0, Operand(a2)); in Generate_ArgumentsAdaptorTrampoline()
1706 __ bind(&enough); in Generate_ArgumentsAdaptorTrampoline()
1710 __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize); in Generate_ArgumentsAdaptorTrampoline()
1711 __ Addu(a0, fp, a0); in Generate_ArgumentsAdaptorTrampoline()
1713 __ Addu(a0, a0, Operand(2 * kPointerSize)); in Generate_ArgumentsAdaptorTrampoline()
1715 __ sll(a2, a2, kPointerSizeLog2); in Generate_ArgumentsAdaptorTrampoline()
1716 __ subu(a2, a0, a2); in Generate_ArgumentsAdaptorTrampoline()
1725 __ bind(&copy); in Generate_ArgumentsAdaptorTrampoline()
1726 __ lw(t0, MemOperand(a0)); in Generate_ArgumentsAdaptorTrampoline()
1727 __ push(t0); in Generate_ArgumentsAdaptorTrampoline()
1728 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a2)); in Generate_ArgumentsAdaptorTrampoline()
1729 __ addiu(a0, a0, -kPointerSize); // In delay slot. in Generate_ArgumentsAdaptorTrampoline()
1731 __ jmp(&invoke); in Generate_ArgumentsAdaptorTrampoline()
1735 __ bind(&too_few); in Generate_ArgumentsAdaptorTrampoline()
1743 __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize); in Generate_ArgumentsAdaptorTrampoline()
1744 __ Addu(a0, fp, a0); in Generate_ArgumentsAdaptorTrampoline()
1746 __ Addu(a0, a0, Operand(2 * kPointerSize)); in Generate_ArgumentsAdaptorTrampoline()
1748 __ Addu(t3, fp, kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
1757 __ bind(&copy); in Generate_ArgumentsAdaptorTrampoline()
1758 __ lw(t0, MemOperand(a0)); // Adjusted above for return addr and receiver. in Generate_ArgumentsAdaptorTrampoline()
1759 __ Subu(sp, sp, kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
1760 __ Subu(a0, a0, kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
1761 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3)); in Generate_ArgumentsAdaptorTrampoline()
1762 __ sw(t0, MemOperand(sp)); // In the delay slot. in Generate_ArgumentsAdaptorTrampoline()
1768 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex); in Generate_ArgumentsAdaptorTrampoline()
1769 __ sll(t2, a2, kPointerSizeLog2); in Generate_ArgumentsAdaptorTrampoline()
1770 __ Subu(a2, fp, Operand(t2)); in Generate_ArgumentsAdaptorTrampoline()
1771 __ Addu(a2, a2, Operand(-4 * kPointerSize)); // Adjust for frame. in Generate_ArgumentsAdaptorTrampoline()
1774 __ bind(&fill); in Generate_ArgumentsAdaptorTrampoline()
1775 __ Subu(sp, sp, kPointerSize); in Generate_ArgumentsAdaptorTrampoline()
1776 __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2)); in Generate_ArgumentsAdaptorTrampoline()
1777 __ sw(t0, MemOperand(sp)); in Generate_ArgumentsAdaptorTrampoline()
1781 __ bind(&invoke); in Generate_ArgumentsAdaptorTrampoline()
1783 __ Call(a3); in Generate_ArgumentsAdaptorTrampoline()
1790 __ Ret(); in Generate_ArgumentsAdaptorTrampoline()
1796 __ bind(&dont_adapt_arguments); in Generate_ArgumentsAdaptorTrampoline()
1797 __ Jump(a3); in Generate_ArgumentsAdaptorTrampoline()
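
The fill loop at lines 1774-1777 illustrates the delay-slot idiom used throughout this listing: with USE_DELAY_SLOT the instruction written after the Branch executes on every iteration, including the final fall-through one, so the store belongs to the loop body despite its position. Equivalent straight-line C++:

    #include <cstdint>

    // t0 = undefined; the loop runs until sp reaches the limit in a2.
    void FillWithUndefined(intptr_t*& sp, intptr_t* limit, intptr_t undef) {
      do {
        --sp;                  // Subu(sp, sp, kPointerSize)
        *sp = undef;           // sw(t0, MemOperand(sp)) -- in the delay slot
      } while (sp != limit);   // Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2))
    }
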
1801 #undef __