Lines matching refs: __

41 #define __ ACCESS_MASM(masm)  macro
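Every entry below goes through the __ shorthand defined at line 41. A minimal sketch of how it expands, assuming V8's usual definition of ACCESS_MASM (which resolves to masm->); the Emit function here is a hypothetical example, not part of the source:

    #define ACCESS_MASM(masm) masm->
    #define __ ACCESS_MASM(masm)

    void Emit(MacroAssembler* masm) {   // hypothetical helper
      __ mov(v0, a0);                   // preprocesses to: masm->mov(v0, a0);
    }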
63 __ lw(scratch1, FieldMemOperand(operand, HeapObject::kMapOffset)); in EmitCheckForHeapNumber()
64 __ LoadRoot(scratch2, Heap::kHeapNumberMapRootIndex); in EmitCheckForHeapNumber()
65 __ Branch(not_a_heap_number, ne, scratch1, Operand(scratch2)); in EmitCheckForHeapNumber()
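Lines 63-65 are the complete heap-number check: load the object's map, load the canonical HeapNumber map from the root list, and branch away on mismatch. A hedged reconstruction of the helper around them (the signature is inferred from the register and label names in the listing):

    static void EmitCheckForHeapNumber(MacroAssembler* masm,
                                       Register operand,
                                       Register scratch1,
                                       Register scratch2,
                                       Label* not_a_heap_number) {
      __ lw(scratch1, FieldMemOperand(operand, HeapObject::kMapOffset));
      __ LoadRoot(scratch2, Heap::kHeapNumberMapRootIndex);
      __ Branch(not_a_heap_number, ne, scratch1, Operand(scratch2));
    }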
72 __ JumpIfNotSmi(a0, &check_heap_number); in Generate()
73 __ Ret(USE_DELAY_SLOT); in Generate()
74 __ mov(v0, a0); in Generate()
76 __ bind(&check_heap_number); in Generate()
78 __ Ret(USE_DELAY_SLOT); in Generate()
79 __ mov(v0, a0); in Generate()
81 __ bind(&call_builtin); in Generate()
82 __ push(a0); in Generate()
83 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION); in Generate()
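Lines 73-74 and 78-79 use the MIPS branch-delay-slot idiom: Ret(USE_DELAY_SLOT) emits the return jump, and the next emitted instruction lands in its delay slot, so the mov into v0 executes before control actually leaves the stub. The two forms below are equivalent; the delay-slot form saves one instruction:

    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a0);   // runs in the jump's delay slot, before the return

    __ mov(v0, a0);   // same effect, one instruction longer
    __ Ret();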
93 __ pop(a3); in Generate()
96 __ AllocateInNewSpace(JSFunction::kSize, in Generate()
109 __ lw(a2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); in Generate()
110 __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalContextOffset)); in Generate()
111 __ lw(a2, MemOperand(a2, Context::SlotOffset(map_index))); in Generate()
112 __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset)); in Generate()
116 __ LoadRoot(a1, Heap::kEmptyFixedArrayRootIndex); in Generate()
117 __ LoadRoot(a2, Heap::kTheHoleValueRootIndex); in Generate()
118 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex); in Generate()
119 __ sw(a1, FieldMemOperand(v0, JSObject::kPropertiesOffset)); in Generate()
120 __ sw(a1, FieldMemOperand(v0, JSObject::kElementsOffset)); in Generate()
121 __ sw(a2, FieldMemOperand(v0, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
122 __ sw(a3, FieldMemOperand(v0, JSFunction::kSharedFunctionInfoOffset)); in Generate()
123 __ sw(cp, FieldMemOperand(v0, JSFunction::kContextOffset)); in Generate()
124 __ sw(a1, FieldMemOperand(v0, JSFunction::kLiteralsOffset)); in Generate()
125 __ sw(t0, FieldMemOperand(v0, JSFunction::kNextFunctionLinkOffset)); in Generate()
129 __ lw(a3, FieldMemOperand(a3, SharedFunctionInfo::kCodeOffset)); in Generate()
130 __ Addu(a3, a3, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
133 __ sw(a3, FieldMemOperand(v0, JSFunction::kCodeEntryOffset)); in Generate()
134 __ Ret(); in Generate()
137 __ bind(&gc); in Generate()
138 __ LoadRoot(t0, Heap::kFalseValueRootIndex); in Generate()
139 __ Push(cp, a3, t0); in Generate()
140 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); in Generate()
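TailCallRuntime on line 140 takes the runtime function id, the number of arguments just pushed, and the result size in words; the Push on line 139 supplies exactly those three arguments. A sketch, assuming the MacroAssembler signature of this V8 vintage:

    // void TailCallRuntime(Runtime::FunctionId fid,
    //                      int num_arguments, int result_size);
    __ Push(cp, a3, t0);                             // the 3 arguments
    __ TailCallRuntime(Runtime::kNewClosure, 3, 1);  // 3 args, 1 result word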
150 __ AllocateInNewSpace(FixedArray::SizeFor(length), in Generate()
158 __ lw(a3, MemOperand(sp, 0)); in Generate()
161 __ LoadRoot(a1, Heap::kFunctionContextMapRootIndex); in Generate()
162 __ li(a2, Operand(Smi::FromInt(length))); in Generate()
163 __ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset)); in Generate()
164 __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); in Generate()
167 __ lw(a2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); in Generate()
168 __ li(a1, Operand(Smi::FromInt(0))); in Generate()
169 __ sw(a3, MemOperand(v0, Context::SlotOffset(Context::CLOSURE_INDEX))); in Generate()
170 __ sw(cp, MemOperand(v0, Context::SlotOffset(Context::PREVIOUS_INDEX))); in Generate()
171 __ sw(a1, MemOperand(v0, Context::SlotOffset(Context::EXTENSION_INDEX))); in Generate()
172 __ sw(a2, MemOperand(v0, Context::SlotOffset(Context::GLOBAL_INDEX))); in Generate()
175 __ LoadRoot(a1, Heap::kUndefinedValueRootIndex); in Generate()
177 __ sw(a1, MemOperand(v0, Context::SlotOffset(i))); in Generate()
181 __ mov(cp, v0); in Generate()
182 __ DropAndRet(1); in Generate()
185 __ bind(&gc); in Generate()
186 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1); in Generate()
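The store on line 177 sits inside a compile-time loop that fills every remaining context slot with undefined; a hedged reconstruction of that loop (the bounds are an assumption based on the slot index i and the surrounding FastNewContext code):

    __ LoadRoot(a1, Heap::kUndefinedValueRootIndex);
    for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
      __ sw(a1, MemOperand(v0, Context::SlotOffset(i)));
    }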
199 __ AllocateInNewSpace(FixedArray::SizeFor(length), in Generate()
203 __ lw(a3, MemOperand(sp, 0)); in Generate()
206 __ lw(a1, MemOperand(sp, 1 * kPointerSize)); in Generate()
209 __ LoadRoot(a2, Heap::kBlockContextMapRootIndex); in Generate()
210 __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset)); in Generate()
211 __ li(a2, Operand(Smi::FromInt(length))); in Generate()
212 __ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset)); in Generate()
219 __ JumpIfNotSmi(a3, &after_sentinel); in Generate()
222 __ Assert(eq, message, a3, Operand(zero_reg)); in Generate()
224 __ lw(a3, GlobalObjectOperand()); in Generate()
225 __ lw(a3, FieldMemOperand(a3, GlobalObject::kGlobalContextOffset)); in Generate()
226 __ lw(a3, ContextOperand(a3, Context::CLOSURE_INDEX)); in Generate()
227 __ bind(&after_sentinel); in Generate()
230 __ lw(a2, ContextOperand(cp, Context::GLOBAL_INDEX)); in Generate()
231 __ sw(a3, ContextOperand(v0, Context::CLOSURE_INDEX)); in Generate()
232 __ sw(cp, ContextOperand(v0, Context::PREVIOUS_INDEX)); in Generate()
233 __ sw(a1, ContextOperand(v0, Context::EXTENSION_INDEX)); in Generate()
234 __ sw(a2, ContextOperand(v0, Context::GLOBAL_INDEX)); in Generate()
237 __ LoadRoot(a1, Heap::kTheHoleValueRootIndex); in Generate()
239 __ sw(a1, ContextOperand(v0, i + Context::MIN_CONTEXT_SLOTS)); in Generate()
243 __ mov(cp, v0); in Generate()
244 __ DropAndRet(2); in Generate()
247 __ bind(&gc); in Generate()
248 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); in Generate()
272 __ AllocateInNewSpace(size, in GenerateFastCloneShallowArrayCommon()
282 __ lw(a1, FieldMemOperand(a3, i)); in GenerateFastCloneShallowArrayCommon()
283 __ sw(a1, FieldMemOperand(v0, i)); in GenerateFastCloneShallowArrayCommon()
290 __ lw(a3, FieldMemOperand(a3, JSArray::kElementsOffset)); in GenerateFastCloneShallowArrayCommon()
291 __ Addu(a2, v0, Operand(JSArray::kSize)); in GenerateFastCloneShallowArrayCommon()
292 __ sw(a2, FieldMemOperand(v0, JSArray::kElementsOffset)); in GenerateFastCloneShallowArrayCommon()
296 __ CopyFields(a2, a3, a1.bit(), elements_size / kPointerSize); in GenerateFastCloneShallowArrayCommon()
310 __ lw(a3, MemOperand(sp, 2 * kPointerSize)); in Generate()
311 __ lw(a0, MemOperand(sp, 1 * kPointerSize)); in Generate()
312 __ Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in Generate()
313 __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize); in Generate()
314 __ Addu(t0, a3, t0); in Generate()
315 __ lw(a3, MemOperand(t0)); in Generate()
316 __ LoadRoot(t1, Heap::kUndefinedValueRootIndex); in Generate()
317 __ Branch(&slow_case, eq, a3, Operand(t1)); in Generate()
322 __ lw(v0, FieldMemOperand(a3, JSArray::kElementsOffset)); in Generate()
323 __ lw(v0, FieldMemOperand(v0, HeapObject::kMapOffset)); in Generate()
324 __ LoadRoot(t1, Heap::kFixedCOWArrayMapRootIndex); in Generate()
325 __ Branch(&check_fast_elements, ne, v0, Operand(t1)); in Generate()
329 __ DropAndRet(3); in Generate()
331 __ bind(&check_fast_elements); in Generate()
332 __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex); in Generate()
333 __ Branch(&double_elements, ne, v0, Operand(t1)); in Generate()
337 __ DropAndRet(3); in Generate()
339 __ bind(&double_elements); in Generate()
358 __ push(a3); in Generate()
359 __ lw(a3, FieldMemOperand(a3, JSArray::kElementsOffset)); in Generate()
360 __ lw(a3, FieldMemOperand(a3, HeapObject::kMapOffset)); in Generate()
361 __ LoadRoot(at, expected_map_index); in Generate()
362 __ Assert(eq, message, a3, Operand(at)); in Generate()
363 __ pop(a3); in Generate()
369 __ DropAndRet(3); in Generate()
371 __ bind(&slow_case); in Generate()
372 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); in Generate()
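Line 313, sll(t0, a0, kPointerSizeLog2 - kSmiTagSize), turns a tagged smi index directly into a byte offset: on 32-bit MIPS a smi is the value shifted left by 1 and a pointer is 4 bytes, so one extra left shift does the whole scaling. A standalone check of that arithmetic:

    #include <cassert>

    int main() {
      const int kSmiTagSize = 1, kPointerSizeLog2 = 2;  // 32-bit values
      int index = 7;
      int smi = index << kSmiTagSize;                   // tagged smi
      int byte_offset = smi << (kPointerSizeLog2 - kSmiTagSize);
      assert(byte_offset == index * 4);                 // index * kPointerSize
      return 0;
    }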
387 __ lw(a3, MemOperand(sp, 3 * kPointerSize)); in Generate()
388 __ lw(a0, MemOperand(sp, 2 * kPointerSize)); in Generate()
389 __ Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in Generate()
390 __ sll(t0, a0, kPointerSizeLog2 - kSmiTagSize); in Generate()
391 __ Addu(a3, t0, a3); in Generate()
392 __ lw(a3, MemOperand(a3)); in Generate()
393 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex); in Generate()
394 __ Branch(&slow_case, eq, a3, Operand(t0)); in Generate()
399 __ lw(a0, FieldMemOperand(a3, HeapObject::kMapOffset)); in Generate()
400 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceSizeOffset)); in Generate()
401 __ Branch(&slow_case, ne, a0, Operand(size >> kPointerSizeLog2)); in Generate()
405 __ AllocateInNewSpace(size, v0, a1, a2, &slow_case, TAG_OBJECT); in Generate()
407 __ lw(a1, FieldMemOperand(a3, i)); in Generate()
408 __ sw(a1, FieldMemOperand(v0, i)); in Generate()
412 __ DropAndRet(4); in Generate()
414 __ bind(&slow_case); in Generate()
415 __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1); in Generate()
468 __ sra(source_, source_, kSmiTagSize); in Generate()
473 __ And(exponent, source_, Operand(HeapNumber::kSignMask)); in Generate()
475 __ subu(at, zero_reg, source_); in Generate()
476 __ Movn(source_, at, exponent); in Generate()
481 __ Branch(&not_special, gt, source_, Operand(1)); in Generate()
487 __ Or(at, exponent, Operand(exponent_word_for_1)); in Generate()
488 __ Movn(exponent, at, source_); // Write exp when source not 0. in Generate()
490 __ Ret(USE_DELAY_SLOT); in Generate()
491 __ mov(mantissa, zero_reg); in Generate()
493 __ bind(&not_special); in Generate()
496 __ Clz(zeros_, source_); in Generate()
499 __ li(mantissa, Operand(31 + HeapNumber::kExponentBias)); in Generate()
500 __ subu(mantissa, mantissa, zeros_); in Generate()
501 __ sll(mantissa, mantissa, HeapNumber::kExponentShift); in Generate()
502 __ Or(exponent, exponent, mantissa); in Generate()
505 __ Addu(zeros_, zeros_, Operand(1)); in Generate()
507 __ sllv(source_, source_, zeros_); in Generate()
509 __ sll(mantissa, source_, HeapNumber::kMantissaBitsInTopWord); in Generate()
511 __ srl(source_, source_, 32 - HeapNumber::kMantissaBitsInTopWord); in Generate()
513 __ Ret(USE_DELAY_SLOT); in Generate()
514 __ or_(exponent, exponent, source_); in Generate()
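Lines 468-514 build an IEEE-754 double from an int32 entirely in integer registers: take the sign (473), negate a negative input (475-476), special-case 0 and 1 (481-491), then use the leading-zero count (496) to derive the exponent 31 - zeros + kExponentBias and shift the magnitude so the implicit leading 1 drops off (505-511). A standalone sketch of the same construction, assuming GCC/Clang for __builtin_clz (the stub special-cases 0, skipped here):

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    uint64_t Int32ToDoubleBits(int32_t v) {             // v != 0
      uint64_t sign = v < 0 ? (1ull << 63) : 0;         // kSignMask
      uint32_t mag = v < 0 ? 0u - static_cast<uint32_t>(v)
                           : static_cast<uint32_t>(v);
      int zeros = __builtin_clz(mag);                   // the Clz on line 496
      uint64_t exponent = (31 - zeros) + 1023;          // kExponentBias = 1023
      uint64_t frac =                                   // drop the leading 1,
          (static_cast<uint64_t>(mag) << (zeros + 1 + 20))  // top-align in 52
          & ((1ull << 52) - 1);
      return sign | (exponent << 52) | frac;
    }

    int main() {
      double d;
      uint64_t bits = Int32ToDoubleBits(-5);
      std::memcpy(&d, &bits, sizeof d);
      assert(d == -5.0);
      return 0;
    }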
524 __ sra(scratch1, a0, kSmiTagSize); in LoadSmis()
525 __ mtc1(scratch1, f14); in LoadSmis()
526 __ cvt_d_w(f14, f14); in LoadSmis()
527 __ sra(scratch1, a1, kSmiTagSize); in LoadSmis()
528 __ mtc1(scratch1, f12); in LoadSmis()
529 __ cvt_d_w(f12, f12); in LoadSmis()
531 __ Move(a2, a3, f14); in LoadSmis()
532 __ Move(a0, a1, f12); in LoadSmis()
537 __ mov(scratch1, a0); in LoadSmis()
539 __ push(ra); in LoadSmis()
540 __ Call(stub1.GetCode()); in LoadSmis()
542 __ mov(scratch1, a1); in LoadSmis()
544 __ Call(stub2.GetCode()); in LoadSmis()
545 __ pop(ra); in LoadSmis()
579 __ AbortIfNotRootValue(heap_number_map, in LoadNumber()
587 __ UntagAndJumpIfSmi(scratch1, object, &is_smi); in LoadNumber()
589 __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_number); in LoadNumber()
600 __ ldc1(dst, FieldMemOperand(object, HeapNumber::kValueOffset)); in LoadNumber()
604 __ lw(dst1, FieldMemOperand(object, HeapNumber::kValueOffset)); in LoadNumber()
605 __ lw(dst2, FieldMemOperand(object, in LoadNumber()
608 __ Branch(&done); in LoadNumber()
611 __ bind(&is_smi); in LoadNumber()
615 __ mtc1(scratch1, dst); in LoadNumber()
616 __ cvt_d_w(dst, dst); in LoadNumber()
619 __ Move(dst1, dst2, dst); in LoadNumber()
624 __ mov(scratch1, object); in LoadNumber()
626 __ push(ra); in LoadNumber()
627 __ Call(stub.GetCode()); in LoadNumber()
628 __ pop(ra); in LoadNumber()
631 __ bind(&done); in LoadNumber()
645 __ AbortIfNotRootValue(heap_number_map, in ConvertNumberToInt32()
652 __ UntagAndJumpIfSmi(dst, object, &done); in ConvertNumberToInt32()
653 __ lw(scratch1, FieldMemOperand(object, HeapNumber::kMapOffset)); in ConvertNumberToInt32()
654 __ Branch(not_number, ne, scratch1, Operand(heap_number_map)); in ConvertNumberToInt32()
655 __ ConvertToInt32(object, in ConvertNumberToInt32()
661 __ jmp(&done); in ConvertNumberToInt32()
663 __ bind(&not_in_int32_range); in ConvertNumberToInt32()
664 __ lw(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset)); in ConvertNumberToInt32()
665 __ lw(scratch2, FieldMemOperand(object, HeapNumber::kMantissaOffset)); in ConvertNumberToInt32()
667 __ EmitOutOfInt32RangeTruncate(dst, in ConvertNumberToInt32()
672 __ bind(&done); in ConvertNumberToInt32()
692 __ mtc1(int_scratch, single_scratch); in ConvertIntToDouble()
693 __ cvt_d_w(double_dst, single_scratch); in ConvertIntToDouble()
695 __ Move(dst1, dst2, double_dst); in ConvertIntToDouble()
704 __ mov(dst2, int_scratch); in ConvertIntToDouble()
705 __ mov(dst1, int_scratch); in ConvertIntToDouble()
706 __ Branch(&done, eq, int_scratch, Operand(zero_reg)); in ConvertIntToDouble()
709 __ And(dst2, int_scratch, Operand(HeapNumber::kSignMask)); in ConvertIntToDouble()
712 __ Branch(&skip_sub, ge, dst2, Operand(zero_reg)); in ConvertIntToDouble()
713 __ Subu(int_scratch, zero_reg, int_scratch); in ConvertIntToDouble()
714 __ bind(&skip_sub); in ConvertIntToDouble()
719 __ Clz(dst1, int_scratch); in ConvertIntToDouble()
720 __ li(scratch2, 31); in ConvertIntToDouble()
721 __ Subu(dst1, scratch2, dst1); in ConvertIntToDouble()
724 __ Addu(scratch2, dst1, Operand(HeapNumber::kExponentBias)); in ConvertIntToDouble()
725 __ Ins(dst2, scratch2, in ConvertIntToDouble()
729 __ li(scratch2, Operand(1)); in ConvertIntToDouble()
730 __ sllv(scratch2, scratch2, dst1); in ConvertIntToDouble()
731 __ li(at, -1); in ConvertIntToDouble()
732 __ Xor(scratch2, scratch2, at); in ConvertIntToDouble()
733 __ And(int_scratch, int_scratch, scratch2); in ConvertIntToDouble()
736 __ Subu(scratch2, dst1, Operand(HeapNumber::kMantissaBitsInTopWord)); in ConvertIntToDouble()
737 __ Branch(&fewer_than_20_useful_bits, lt, scratch2, Operand(zero_reg)); in ConvertIntToDouble()
739 __ srlv(at, int_scratch, scratch2); in ConvertIntToDouble()
740 __ or_(dst2, dst2, at); in ConvertIntToDouble()
741 __ li(at, 32); in ConvertIntToDouble()
742 __ subu(scratch2, at, scratch2); in ConvertIntToDouble()
743 __ sllv(dst1, int_scratch, scratch2); in ConvertIntToDouble()
744 __ Branch(&done); in ConvertIntToDouble()
746 __ bind(&fewer_than_20_useful_bits); in ConvertIntToDouble()
747 __ li(at, HeapNumber::kMantissaBitsInTopWord); in ConvertIntToDouble()
748 __ subu(scratch2, at, dst1); in ConvertIntToDouble()
749 __ sllv(scratch2, int_scratch, scratch2); in ConvertIntToDouble()
750 __ Or(dst2, dst2, scratch2); in ConvertIntToDouble()
752 __ mov(dst1, zero_reg); in ConvertIntToDouble()
754 __ bind(&done); in ConvertIntToDouble()
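ConvertIntToDouble (lines 692-752) performs the same construction when the FPU is unavailable, but splits the result across two integer registers: dst2 gets the sign, exponent, and top kMantissaBitsInTopWord (20) mantissa bits, dst1 the low 32 mantissa bits; the branch at line 737 picks between the two shift paths depending on how many useful bits remain. The word split, stated standalone:

    #include <cstdint>

    // dst2/dst1 in the listing correspond to hi/lo here.
    void SplitDoubleBits(uint64_t bits, uint32_t* hi, uint32_t* lo) {
      *hi = static_cast<uint32_t>(bits >> 32);  // sign | exponent | top 20
      *lo = static_cast<uint32_t>(bits);        // low 32 mantissa bits
    }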
777 __ JumpIfNotSmi(object, &obj_is_not_smi); in LoadNumberAsInt32Double()
778 __ SmiUntag(scratch1, object); in LoadNumberAsInt32Double()
781 __ Branch(&done); in LoadNumberAsInt32Double()
783 __ bind(&obj_is_not_smi); in LoadNumberAsInt32Double()
785 __ AbortIfNotRootValue(heap_number_map, in LoadNumberAsInt32Double()
789 __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32); in LoadNumberAsInt32Double()
795 __ ldc1(double_dst, FieldMemOperand(object, HeapNumber::kValueOffset)); in LoadNumberAsInt32Double()
798 __ EmitFPUTruncate(kRoundToZero, in LoadNumberAsInt32Double()
806 __ Branch(not_int32, ne, except_flag, Operand(zero_reg)); in LoadNumberAsInt32Double()
809 __ Move(dst1, dst2, double_dst); in LoadNumberAsInt32Double()
815 __ lw(dst2, FieldMemOperand(object, HeapNumber::kExponentOffset)); in LoadNumberAsInt32Double()
816 __ lw(dst1, FieldMemOperand(object, HeapNumber::kMantissaOffset)); in LoadNumberAsInt32Double()
819 __ And(scratch1, dst1, Operand(~HeapNumber::kSignMask)); in LoadNumberAsInt32Double()
820 __ Or(scratch1, scratch1, Operand(dst2)); in LoadNumberAsInt32Double()
821 __ Branch(&done, eq, scratch1, Operand(zero_reg)); in LoadNumberAsInt32Double()
828 __ lw(dst2, FieldMemOperand(object, HeapNumber::kExponentOffset)); in LoadNumberAsInt32Double()
829 __ lw(dst1, FieldMemOperand(object, HeapNumber::kMantissaOffset)); in LoadNumberAsInt32Double()
832 __ bind(&done); in LoadNumberAsInt32Double()
853 __ UntagAndJumpIfSmi(dst, object, &done); in LoadNumberAsInt32()
856 __ AbortIfNotRootValue(heap_number_map, in LoadNumberAsInt32()
860 __ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32); in LoadNumberAsInt32()
867 __ ldc1(double_scratch, FieldMemOperand(object, HeapNumber::kValueOffset)); in LoadNumberAsInt32()
871 __ EmitFPUTruncate(kRoundToZero, in LoadNumberAsInt32()
879 __ Branch(not_int32, ne, except_flag, Operand(zero_reg)); in LoadNumberAsInt32()
881 __ mfc1(dst, single_scratch); in LoadNumberAsInt32()
885 __ lw(scratch2, FieldMemOperand(object, HeapNumber::kExponentOffset)); in LoadNumberAsInt32()
886 __ lw(scratch1, FieldMemOperand(object, HeapNumber::kMantissaOffset)); in LoadNumberAsInt32()
889 __ And(dst, scratch1, Operand(~HeapNumber::kSignMask)); in LoadNumberAsInt32()
890 __ Or(dst, scratch2, Operand(dst)); in LoadNumberAsInt32()
891 __ Branch(&done, eq, dst, Operand(zero_reg)); in LoadNumberAsInt32()
900 __ srlv(dst, dst, scratch3); in LoadNumberAsInt32()
902 __ li(at, 32); in LoadNumberAsInt32()
903 __ subu(scratch3, at, scratch3); in LoadNumberAsInt32()
904 __ sllv(scratch2, scratch2, scratch3); in LoadNumberAsInt32()
905 __ Or(dst, dst, scratch2); in LoadNumberAsInt32()
907 __ lw(scratch1, FieldMemOperand(object, HeapNumber::kExponentOffset)); in LoadNumberAsInt32()
908 __ And(scratch1, scratch1, Operand(HeapNumber::kSignMask)); in LoadNumberAsInt32()
910 __ Branch(&skip_sub, ge, scratch1, Operand(zero_reg)); in LoadNumberAsInt32()
911 __ Subu(dst, zero_reg, dst); in LoadNumberAsInt32()
912 __ bind(&skip_sub); in LoadNumberAsInt32()
915 __ bind(&done); in LoadNumberAsInt32()
926 __ Ext(scratch, in DoubleIs32BitInteger()
932 __ Subu(scratch, scratch, Operand(HeapNumber::kExponentBias)); in DoubleIs32BitInteger()
940 __ Branch(not_int32, lt, scratch, Operand(zero_reg)); in DoubleIs32BitInteger()
947 __ srl(at, src1, 31); in DoubleIs32BitInteger()
948 __ subu(tmp, scratch, at); in DoubleIs32BitInteger()
949 __ Branch(not_int32, gt, tmp, Operand(30)); in DoubleIs32BitInteger()
951 __ And(tmp, src2, 0x3fffff); in DoubleIs32BitInteger()
952 __ Branch(not_int32, ne, tmp, Operand(zero_reg)); in DoubleIs32BitInteger()
961 __ Ext(dst, in DoubleIs32BitInteger()
965 __ sll(at, src1, HeapNumber::kNonMantissaBitsInTopWord); in DoubleIs32BitInteger()
966 __ or_(dst, dst, at); in DoubleIs32BitInteger()
969 __ li(at, 32); in DoubleIs32BitInteger()
970 __ subu(scratch, at, scratch); in DoubleIs32BitInteger()
971 __ li(src2, 1); in DoubleIs32BitInteger()
972 __ sllv(src1, src2, scratch); in DoubleIs32BitInteger()
973 __ Subu(src1, src1, Operand(1)); in DoubleIs32BitInteger()
974 __ And(src1, dst, src1); in DoubleIs32BitInteger()
975 __ Branch(not_int32, ne, src1, Operand(zero_reg)); in DoubleIs32BitInteger()
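DoubleIs32BitInteger (lines 926-975) works on the raw words: it extracts the exponent, un-biases it (932), rejects values below 1 (940) or beyond int32 range (947-949), then verifies every fraction bit below the binary point is zero (951-975). Its contract, restated as plain C++ (a behavioral sketch, not a transcription; zero inputs are filtered by the callers in the listing):

    #include <cstdint>

    bool DoubleIs32BitInteger(double d) {
      // Range guard keeps the int32 cast well-defined; NaN fails both tests.
      if (!(d >= -2147483648.0 && d <= 2147483647.0)) return false;
      int32_t i = static_cast<int32_t>(d);
      return static_cast<double>(i) == d;  // integral and exactly representable
    }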
995 __ push(ra); in CallCCodeForDoubleOperation()
996 __ PrepareCallCFunction(4, scratch); // Two doubles are 4 arguments. in CallCCodeForDoubleOperation()
1004 __ Move(f12, a0, a1); in CallCCodeForDoubleOperation()
1005 __ Move(f14, a2, a3); in CallCCodeForDoubleOperation()
1009 __ CallCFunction( in CallCCodeForDoubleOperation()
1016 __ sdc1(f0, FieldMemOperand(heap_number_result, HeapNumber::kValueOffset)); in CallCCodeForDoubleOperation()
1019 __ sw(v1, FieldMemOperand(heap_number_result, HeapNumber::kExponentOffset)); in CallCCodeForDoubleOperation()
1020 __ sw(v0, FieldMemOperand(heap_number_result, HeapNumber::kMantissaOffset)); in CallCCodeForDoubleOperation()
1023 __ pop(ra); in CallCCodeForDoubleOperation()
1024 __ Ret(USE_DELAY_SLOT); in CallCCodeForDoubleOperation()
1025 __ mov(v0, heap_number_result); in CallCCodeForDoubleOperation()
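Lines 1004-1005 and 1016-1020 show both halves of the MIPS O32 convention for double arguments and results: each double argument occupies an integer register pair, mirrored into an FPU register when hardware floats are used, and a double result comes back in f0 or in the v0/v1 pair. The marshalling, isolated (a hedged summary of what the listing shows):

    // arg 0: a0 (lo) / a1 (hi)  <->  f12
    // arg 1: a2 (lo) / a3 (hi)  <->  f14
    // result: f0, or v0 (lo) / v1 (hi) without the FPU
    __ Move(f12, a0, a1);
    __ Move(f14, a2, a3);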
1065 __ And(sign_, the_int_, Operand(0x80000000u)); in Generate()
1066 __ Branch(&max_negative_int, eq, the_int_, Operand(0x80000000u)); in Generate()
1072 __ li(scratch_, Operand(non_smi_exponent)); in Generate()
1074 __ or_(scratch_, scratch_, sign_); in Generate()
1076 __ subu(at, zero_reg, the_int_); in Generate()
1077 __ Movn(the_int_, at, sign_); in Generate()
1084 __ srl(at, the_int_, shift_distance); in Generate()
1085 __ or_(scratch_, scratch_, at); in Generate()
1086 __ sw(scratch_, FieldMemOperand(the_heap_number_, in Generate()
1088 __ sll(scratch_, the_int_, 32 - shift_distance); in Generate()
1089 __ sw(scratch_, FieldMemOperand(the_heap_number_, in Generate()
1091 __ Ret(); in Generate()
1093 __ bind(&max_negative_int); in Generate()
1099 __ li(scratch_, Operand(HeapNumber::kSignMask | non_smi_exponent)); in Generate()
1100 __ sw(scratch_, in Generate()
1102 __ mov(scratch_, zero_reg); in Generate()
1103 __ sw(scratch_, in Generate()
1105 __ Ret(); in Generate()
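The max_negative_int path (lines 1093-1105) exists because -2^31 cannot be negated in 32 bits, so the stub stores the known bit pattern for -2147483648.0 directly: sign bit plus a biased exponent of 31, zero mantissa. A standalone check of that pattern:

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    int main() {
      uint64_t bits = (1ull << 63)                               // kSignMask
                    | (static_cast<uint64_t>(31 + 1023) << 52);  // exponent 31
      double d;
      std::memcpy(&d, &bits, sizeof d);
      assert(d == -2147483648.0);   // -2^31, mantissa all zero
      return 0;
    }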
1120 __ Branch(&not_identical, ne, a0, Operand(a1)); in EmitIdenticalObjectComparison()
1125 __ li(exp_mask_reg, Operand(HeapNumber::kExponentMask)); in EmitIdenticalObjectComparison()
1132 __ GetObjectType(a0, t4, t4); in EmitIdenticalObjectComparison()
1133 __ Branch(slow, greater, t4, Operand(FIRST_SPEC_OBJECT_TYPE)); in EmitIdenticalObjectComparison()
1135 __ GetObjectType(a0, t4, t4); in EmitIdenticalObjectComparison()
1136 __ Branch(&heap_number, eq, t4, Operand(HEAP_NUMBER_TYPE)); in EmitIdenticalObjectComparison()
1139 __ Branch(slow, greater, t4, Operand(FIRST_SPEC_OBJECT_TYPE)); in EmitIdenticalObjectComparison()
1144 __ Branch(&return_equal, ne, t4, Operand(ODDBALL_TYPE)); in EmitIdenticalObjectComparison()
1145 __ LoadRoot(t2, Heap::kUndefinedValueRootIndex); in EmitIdenticalObjectComparison()
1146 __ Branch(&return_equal, ne, a0, Operand(t2)); in EmitIdenticalObjectComparison()
1149 __ li(v0, Operand(GREATER)); in EmitIdenticalObjectComparison()
1152 __ li(v0, Operand(LESS)); in EmitIdenticalObjectComparison()
1154 __ Ret(); in EmitIdenticalObjectComparison()
1160 __ bind(&return_equal); in EmitIdenticalObjectComparison()
1163 __ li(v0, Operand(GREATER)); // Things aren't less than themselves. in EmitIdenticalObjectComparison()
1165 __ li(v0, Operand(LESS)); // Things aren't greater than themselves. in EmitIdenticalObjectComparison()
1167 __ mov(v0, zero_reg); // Things are <=, >=, ==, === themselves. in EmitIdenticalObjectComparison()
1169 __ Ret(); in EmitIdenticalObjectComparison()
1176 __ bind(&heap_number); in EmitIdenticalObjectComparison()
1183 __ lw(t2, FieldMemOperand(a0, HeapNumber::kExponentOffset)); in EmitIdenticalObjectComparison()
1185 __ And(t3, t2, Operand(exp_mask_reg)); in EmitIdenticalObjectComparison()
1187 __ Branch(&return_equal, ne, t3, Operand(exp_mask_reg)); in EmitIdenticalObjectComparison()
1190 __ sll(t2, t2, HeapNumber::kNonMantissaBitsInTopWord); in EmitIdenticalObjectComparison()
1192 __ lw(t3, FieldMemOperand(a0, HeapNumber::kMantissaOffset)); in EmitIdenticalObjectComparison()
1193 __ Or(v0, t3, Operand(t2)); in EmitIdenticalObjectComparison()
1200 __ Ret(eq, v0, Operand(zero_reg)); in EmitIdenticalObjectComparison()
1202 __ li(v0, Operand(GREATER)); // NaN <= NaN should fail. in EmitIdenticalObjectComparison()
1204 __ li(v0, Operand(LESS)); // NaN >= NaN should fail. in EmitIdenticalObjectComparison()
1207 __ Ret(); in EmitIdenticalObjectComparison()
1212 __ bind(&not_identical); in EmitIdenticalObjectComparison()
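The NaN test at lines 1183-1200 uses the IEEE-754 encoding directly: a double is NaN iff its exponent bits are all ones and at least one mantissa bit is set (an all-ones exponent with a zero mantissa is +/-Infinity, which compares equal to itself). The same test standalone:

    #include <cstdint>
    #include <cstring>

    bool IsNaNBits(double d) {
      uint64_t b;
      std::memcpy(&b, &d, sizeof b);
      bool exp_all_ones = ((b >> 52) & 0x7FF) == 0x7FF;      // kExponentMask
      bool mantissa_nonzero = (b & ((1ull << 52) - 1)) != 0;
      return exp_all_ones && mantissa_nonzero;
    }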
1226 __ JumpIfSmi(lhs, &lhs_is_smi); in EmitSmiNonsmiComparison()
1229 __ GetObjectType(lhs, t4, t4); in EmitSmiNonsmiComparison()
1233 __ Ret(USE_DELAY_SLOT, ne, t4, Operand(HEAP_NUMBER_TYPE)); in EmitSmiNonsmiComparison()
1234 __ mov(v0, lhs); in EmitSmiNonsmiComparison()
1238 __ Branch(slow, ne, t4, Operand(HEAP_NUMBER_TYPE)); in EmitSmiNonsmiComparison()
1245 __ sra(at, rhs, kSmiTagSize); in EmitSmiNonsmiComparison()
1246 __ mtc1(at, f14); in EmitSmiNonsmiComparison()
1247 __ cvt_d_w(f14, f14); in EmitSmiNonsmiComparison()
1248 __ ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset)); in EmitSmiNonsmiComparison()
1251 __ lw(a3, FieldMemOperand(lhs, HeapNumber::kValueOffset + 4)); in EmitSmiNonsmiComparison()
1252 __ lw(a2, FieldMemOperand(lhs, HeapNumber::kValueOffset)); in EmitSmiNonsmiComparison()
1255 __ mov(t6, rhs); in EmitSmiNonsmiComparison()
1257 __ push(ra); in EmitSmiNonsmiComparison()
1258 __ Call(stub1.GetCode()); in EmitSmiNonsmiComparison()
1260 __ pop(ra); in EmitSmiNonsmiComparison()
1264 __ jmp(both_loaded_as_doubles); in EmitSmiNonsmiComparison()
1266 __ bind(&lhs_is_smi); in EmitSmiNonsmiComparison()
1268 __ GetObjectType(rhs, t4, t4); in EmitSmiNonsmiComparison()
1272 __ Ret(USE_DELAY_SLOT, ne, t4, Operand(HEAP_NUMBER_TYPE)); in EmitSmiNonsmiComparison()
1273 __ li(v0, Operand(1)); in EmitSmiNonsmiComparison()
1277 __ Branch(slow, ne, t4, Operand(HEAP_NUMBER_TYPE)); in EmitSmiNonsmiComparison()
1284 __ sra(at, lhs, kSmiTagSize); in EmitSmiNonsmiComparison()
1285 __ mtc1(at, f12); in EmitSmiNonsmiComparison()
1286 __ cvt_d_w(f12, f12); in EmitSmiNonsmiComparison()
1287 __ ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in EmitSmiNonsmiComparison()
1290 __ mov(t6, lhs); in EmitSmiNonsmiComparison()
1292 __ push(ra); in EmitSmiNonsmiComparison()
1293 __ Call(stub2.GetCode()); in EmitSmiNonsmiComparison()
1294 __ pop(ra); in EmitSmiNonsmiComparison()
1297 __ lw(a1, FieldMemOperand(rhs, HeapNumber::kValueOffset + 4)); in EmitSmiNonsmiComparison()
1298 __ lw(a0, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in EmitSmiNonsmiComparison()
1300 __ lw(a0, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in EmitSmiNonsmiComparison()
1301 __ lw(a1, FieldMemOperand(rhs, HeapNumber::kValueOffset + 4)); in EmitSmiNonsmiComparison()
1313 __ Move(t0, t1, f14); in EmitNanCheck()
1314 __ Move(t2, t3, f12); in EmitNanCheck()
1317 __ mov(t0, a0); // a0 has LS 32 bits of rhs. in EmitNanCheck()
1318 __ mov(t1, a1); // a1 has MS 32 bits of rhs. in EmitNanCheck()
1319 __ mov(t2, a2); // a2 has LS 32 bits of lhs. in EmitNanCheck()
1320 __ mov(t3, a3); // a3 has MS 32 bits of lhs. in EmitNanCheck()
1330 __ li(exp_mask_reg, HeapNumber::kExponentMask); in EmitNanCheck()
1331 __ and_(t5, lhs_exponent, exp_mask_reg); in EmitNanCheck()
1332 __ Branch(&lhs_not_nan_exp_mask_is_loaded, ne, t5, Operand(exp_mask_reg)); in EmitNanCheck()
1334 __ sll(t5, lhs_exponent, HeapNumber::kNonMantissaBitsInTopWord); in EmitNanCheck()
1335 __ Branch(&one_is_nan, ne, t5, Operand(zero_reg)); in EmitNanCheck()
1337 __ Branch(&one_is_nan, ne, lhs_mantissa, Operand(zero_reg)); in EmitNanCheck()
1339 __ li(exp_mask_reg, HeapNumber::kExponentMask); in EmitNanCheck()
1340 __ bind(&lhs_not_nan_exp_mask_is_loaded); in EmitNanCheck()
1341 __ and_(t5, rhs_exponent, exp_mask_reg); in EmitNanCheck()
1343 __ Branch(&neither_is_nan, ne, t5, Operand(exp_mask_reg)); in EmitNanCheck()
1345 __ sll(t5, rhs_exponent, HeapNumber::kNonMantissaBitsInTopWord); in EmitNanCheck()
1346 __ Branch(&one_is_nan, ne, t5, Operand(zero_reg)); in EmitNanCheck()
1348 __ Branch(&neither_is_nan, eq, rhs_mantissa, Operand(zero_reg)); in EmitNanCheck()
1350 __ bind(&one_is_nan); in EmitNanCheck()
1355 __ li(v0, Operand(GREATER)); in EmitNanCheck()
1357 __ li(v0, Operand(LESS)); in EmitNanCheck()
1359 __ Ret(); in EmitNanCheck()
1361 __ bind(&neither_is_nan); in EmitNanCheck()
1380 __ Move(t0, t1, f14); in EmitTwoNonNanDoubleComparison()
1381 __ Move(t2, t3, f12); in EmitTwoNonNanDoubleComparison()
1384 __ mov(t0, a0); // a0 has LS 32 bits of rhs. in EmitTwoNonNanDoubleComparison()
1385 __ mov(t1, a1); // a1 has MS 32 bits of rhs. in EmitTwoNonNanDoubleComparison()
1386 __ mov(t2, a2); // a2 has LS 32 bits of lhs. in EmitTwoNonNanDoubleComparison()
1387 __ mov(t3, a3); // a3 has MS 32 bits of lhs. in EmitTwoNonNanDoubleComparison()
1394 __ xor_(v0, rhs_mantissa, lhs_mantissa); in EmitTwoNonNanDoubleComparison()
1395 __ Branch(&return_result_not_equal, ne, v0, Operand(zero_reg)); in EmitTwoNonNanDoubleComparison()
1397 __ subu(v0, rhs_exponent, lhs_exponent); in EmitTwoNonNanDoubleComparison()
1398 __ Branch(&return_result_equal, eq, v0, Operand(zero_reg)); in EmitTwoNonNanDoubleComparison()
1400 __ sll(rhs_exponent, rhs_exponent, kSmiTagSize); in EmitTwoNonNanDoubleComparison()
1401 __ sll(lhs_exponent, lhs_exponent, kSmiTagSize); in EmitTwoNonNanDoubleComparison()
1402 __ or_(t4, rhs_exponent, lhs_exponent); in EmitTwoNonNanDoubleComparison()
1403 __ or_(t4, t4, rhs_mantissa); in EmitTwoNonNanDoubleComparison()
1405 __ Branch(&return_result_not_equal, ne, t4, Operand(zero_reg)); in EmitTwoNonNanDoubleComparison()
1407 __ bind(&return_result_equal); in EmitTwoNonNanDoubleComparison()
1409 __ li(v0, Operand(EQUAL)); in EmitTwoNonNanDoubleComparison()
1410 __ Ret(); in EmitTwoNonNanDoubleComparison()
1413 __ bind(&return_result_not_equal); in EmitTwoNonNanDoubleComparison()
1416 __ push(ra); in EmitTwoNonNanDoubleComparison()
1417 __ PrepareCallCFunction(0, 2, t4); in EmitTwoNonNanDoubleComparison()
1424 __ Move(f12, a0, a1); in EmitTwoNonNanDoubleComparison()
1425 __ Move(f14, a2, a3); in EmitTwoNonNanDoubleComparison()
1429 __ CallCFunction(ExternalReference::compare_doubles(masm->isolate()), in EmitTwoNonNanDoubleComparison()
1431 __ pop(ra); // Because this function returns int, result is in v0. in EmitTwoNonNanDoubleComparison()
1432 __ Ret(); in EmitTwoNonNanDoubleComparison()
1436 __ BranchF(&equal, NULL, eq, f12, f14); in EmitTwoNonNanDoubleComparison()
1437 __ BranchF(&less_than, NULL, lt, f12, f14); in EmitTwoNonNanDoubleComparison()
1441 __ li(v0, Operand(GREATER)); in EmitTwoNonNanDoubleComparison()
1442 __ Ret(); in EmitTwoNonNanDoubleComparison()
1444 __ bind(&equal); in EmitTwoNonNanDoubleComparison()
1445 __ li(v0, Operand(EQUAL)); in EmitTwoNonNanDoubleComparison()
1446 __ Ret(); in EmitTwoNonNanDoubleComparison()
1448 __ bind(&less_than); in EmitTwoNonNanDoubleComparison()
1449 __ li(v0, Operand(LESS)); in EmitTwoNonNanDoubleComparison()
1450 __ Ret(); in EmitTwoNonNanDoubleComparison()
1465 __ GetObjectType(lhs, a2, a2); in EmitStrictTwoHeapObjectCompare()
1466 __ Branch(&first_non_object, less, a2, Operand(FIRST_SPEC_OBJECT_TYPE)); in EmitStrictTwoHeapObjectCompare()
1470 __ bind(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
1471 __ Ret(USE_DELAY_SLOT); in EmitStrictTwoHeapObjectCompare()
1472 __ li(v0, Operand(1)); in EmitStrictTwoHeapObjectCompare()
1474 __ bind(&first_non_object); in EmitStrictTwoHeapObjectCompare()
1476 __ Branch(&return_not_equal, eq, a2, Operand(ODDBALL_TYPE)); in EmitStrictTwoHeapObjectCompare()
1478 __ GetObjectType(rhs, a3, a3); in EmitStrictTwoHeapObjectCompare()
1479 __ Branch(&return_not_equal, greater, a3, Operand(FIRST_SPEC_OBJECT_TYPE)); in EmitStrictTwoHeapObjectCompare()
1482 __ Branch(&return_not_equal, eq, a3, Operand(ODDBALL_TYPE)); in EmitStrictTwoHeapObjectCompare()
1488 __ And(t2, a2, Operand(a3)); in EmitStrictTwoHeapObjectCompare()
1489 __ And(t0, t2, Operand(kIsSymbolMask)); in EmitStrictTwoHeapObjectCompare()
1490 __ Branch(&return_not_equal, ne, t0, Operand(zero_reg)); in EmitStrictTwoHeapObjectCompare()
1500 __ GetObjectType(lhs, a3, a2); in EmitCheckForTwoHeapNumbers()
1501 __ Branch(not_heap_numbers, ne, a2, Operand(HEAP_NUMBER_TYPE)); in EmitCheckForTwoHeapNumbers()
1502 __ lw(a2, FieldMemOperand(rhs, HeapObject::kMapOffset)); in EmitCheckForTwoHeapNumbers()
1504 __ Branch(slow, ne, a3, Operand(a2)); in EmitCheckForTwoHeapNumbers()
1510 __ ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset)); in EmitCheckForTwoHeapNumbers()
1511 __ ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in EmitCheckForTwoHeapNumbers()
1513 __ lw(a2, FieldMemOperand(lhs, HeapNumber::kValueOffset)); in EmitCheckForTwoHeapNumbers()
1514 __ lw(a3, FieldMemOperand(lhs, HeapNumber::kValueOffset + 4)); in EmitCheckForTwoHeapNumbers()
1516 __ lw(a1, FieldMemOperand(rhs, HeapNumber::kValueOffset + 4)); in EmitCheckForTwoHeapNumbers()
1517 __ lw(a0, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in EmitCheckForTwoHeapNumbers()
1519 __ lw(a0, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in EmitCheckForTwoHeapNumbers()
1520 __ lw(a1, FieldMemOperand(rhs, HeapNumber::kValueOffset + 4)); in EmitCheckForTwoHeapNumbers()
1523 __ jmp(both_loaded_as_doubles); in EmitCheckForTwoHeapNumbers()
1540 __ And(at, a2, Operand(kIsNotStringMask)); in EmitCheckForSymbolsOrObjects()
1541 __ Branch(&object_test, ne, at, Operand(zero_reg)); in EmitCheckForSymbolsOrObjects()
1542 __ And(at, a2, Operand(kIsSymbolMask)); in EmitCheckForSymbolsOrObjects()
1543 __ Branch(possible_strings, eq, at, Operand(zero_reg)); in EmitCheckForSymbolsOrObjects()
1544 __ GetObjectType(rhs, a3, a3); in EmitCheckForSymbolsOrObjects()
1545 __ Branch(not_both_strings, ge, a3, Operand(FIRST_NONSTRING_TYPE)); in EmitCheckForSymbolsOrObjects()
1546 __ And(at, a3, Operand(kIsSymbolMask)); in EmitCheckForSymbolsOrObjects()
1547 __ Branch(possible_strings, eq, at, Operand(zero_reg)); in EmitCheckForSymbolsOrObjects()
1551 __ Ret(USE_DELAY_SLOT); in EmitCheckForSymbolsOrObjects()
1552 __ li(v0, Operand(1)); // Non-zero indicates not equal. in EmitCheckForSymbolsOrObjects()
1554 __ bind(&object_test); in EmitCheckForSymbolsOrObjects()
1555 __ Branch(not_both_strings, lt, a2, Operand(FIRST_SPEC_OBJECT_TYPE)); in EmitCheckForSymbolsOrObjects()
1556 __ GetObjectType(rhs, a2, a3); in EmitCheckForSymbolsOrObjects()
1557 __ Branch(not_both_strings, lt, a3, Operand(FIRST_SPEC_OBJECT_TYPE)); in EmitCheckForSymbolsOrObjects()
1562 __ lw(a3, FieldMemOperand(lhs, HeapObject::kMapOffset)); in EmitCheckForSymbolsOrObjects()
1563 __ lbu(a2, FieldMemOperand(a2, Map::kBitFieldOffset)); in EmitCheckForSymbolsOrObjects()
1564 __ lbu(a3, FieldMemOperand(a3, Map::kBitFieldOffset)); in EmitCheckForSymbolsOrObjects()
1565 __ and_(a0, a2, a3); in EmitCheckForSymbolsOrObjects()
1566 __ And(a0, a0, Operand(1 << Map::kIsUndetectable)); in EmitCheckForSymbolsOrObjects()
1567 __ Ret(USE_DELAY_SLOT); in EmitCheckForSymbolsOrObjects()
1568 __ xori(v0, a0, 1 << Map::kIsUndetectable); in EmitCheckForSymbolsOrObjects()
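Lines 1562-1568 finish the object-vs-object case without branching: AND the two maps' bit fields, keep only the kIsUndetectable bit, and XOR that bit back out. If both objects are undetectable the AND preserves the bit and the XORI clears it, so v0 becomes 0 (equal); in every other case v0 is non-zero (not equal). The same instructions with that reading spelled out:

    __ and_(a0, a2, a3);                           // combine both bit fields
    __ And(a0, a0, Operand(1 << Map::kIsUndetectable));
    __ Ret(USE_DELAY_SLOT);
    __ xori(v0, a0, 1 << Map::kIsUndetectable);    // 0 iff both undetectable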
1585 __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex); in GenerateLookupNumberStringCache()
1589 __ lw(mask, FieldMemOperand(number_string_cache, FixedArray::kLengthOffset)); in GenerateLookupNumberStringCache()
1591 __ sra(mask, mask, kSmiTagSize + 1); in GenerateLookupNumberStringCache()
1592 __ Addu(mask, mask, -1); // Make mask. in GenerateLookupNumberStringCache()
1602 __ JumpIfSmi(object, &is_smi); in GenerateLookupNumberStringCache()
1605 __ CheckMap(object, in GenerateLookupNumberStringCache()
1612 __ Addu(scratch1, in GenerateLookupNumberStringCache()
1615 __ lw(scratch2, MemOperand(scratch1, kPointerSize)); in GenerateLookupNumberStringCache()
1616 __ lw(scratch1, MemOperand(scratch1, 0)); in GenerateLookupNumberStringCache()
1617 __ Xor(scratch1, scratch1, Operand(scratch2)); in GenerateLookupNumberStringCache()
1618 __ And(scratch1, scratch1, Operand(mask)); in GenerateLookupNumberStringCache()
1622 __ sll(scratch1, scratch1, kPointerSizeLog2 + 1); in GenerateLookupNumberStringCache()
1623 __ Addu(scratch1, number_string_cache, scratch1); in GenerateLookupNumberStringCache()
1626 __ lw(probe, in GenerateLookupNumberStringCache()
1628 __ JumpIfSmi(probe, not_found); in GenerateLookupNumberStringCache()
1629 __ ldc1(f12, FieldMemOperand(object, HeapNumber::kValueOffset)); in GenerateLookupNumberStringCache()
1630 __ ldc1(f14, FieldMemOperand(probe, HeapNumber::kValueOffset)); in GenerateLookupNumberStringCache()
1631 __ BranchF(&load_result_from_cache, NULL, eq, f12, f14); in GenerateLookupNumberStringCache()
1632 __ Branch(not_found); in GenerateLookupNumberStringCache()
1637 __ Branch(not_found); in GenerateLookupNumberStringCache()
1641 __ bind(&is_smi); in GenerateLookupNumberStringCache()
1643 __ sra(scratch, object, 1); // Shift away the tag. in GenerateLookupNumberStringCache()
1644 __ And(scratch, mask, Operand(scratch)); in GenerateLookupNumberStringCache()
1648 __ sll(scratch, scratch, kPointerSizeLog2 + 1); in GenerateLookupNumberStringCache()
1649 __ Addu(scratch, number_string_cache, scratch); in GenerateLookupNumberStringCache()
1653 __ lw(probe, FieldMemOperand(scratch, FixedArray::kHeaderSize)); in GenerateLookupNumberStringCache()
1654 __ Branch(not_found, ne, object, Operand(probe)); in GenerateLookupNumberStringCache()
1657 __ bind(&load_result_from_cache); in GenerateLookupNumberStringCache()
1658 __ lw(result, in GenerateLookupNumberStringCache()
1661 __ IncrementCounter(isolate->counters()->number_to_string_native(), in GenerateLookupNumberStringCache()
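The number-to-string cache lookup hashes a heap number by XOR-ing its two words (lines 1615-1617) or a smi by its untagged value (1643), masks with entry_count - 1 (the array length is divided by two at line 1591 because each entry is a key/value pair), then scales by two pointers per entry (1622). The index arithmetic standalone:

    #include <cstdint>

    // mask = entry_count - 1, where entry_count = array_length / 2.
    uint32_t CacheSlotByteOffset(uint32_t hash, uint32_t mask) {
      const int kPointerSizeLog2 = 2;           // 32-bit MIPS
      uint32_t index = hash & mask;
      return index << (kPointerSizeLog2 + 1);   // 2 words per entry
    }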
1671 __ lw(a1, MemOperand(sp, 0)); in Generate()
1675 __ DropAndRet(1); in Generate()
1677 __ bind(&runtime); in Generate()
1679 __ TailCallRuntime(Runtime::kNumberToString, 1, 1); in Generate()
1693 __ Or(a2, a1, a0); in Generate()
1694 __ JumpIfNotSmi(a2, &not_two_smis); in Generate()
1695 __ sra(a1, a1, 1); in Generate()
1696 __ sra(a0, a0, 1); in Generate()
1697 __ Ret(USE_DELAY_SLOT); in Generate()
1698 __ subu(v0, a1, a0); in Generate()
1699 __ bind(&not_two_smis); in Generate()
1701 __ Or(a2, a1, a0); in Generate()
1702 __ And(a2, a2, kSmiTagMask); in Generate()
1703 __ Assert(ne, "CompareStub: unexpected smi operands.", in Generate()
1719 __ And(t2, lhs_, Operand(rhs_)); in Generate()
1720 __ JumpIfNotSmi(t2, &not_smis, t0); in Generate()
1732 __ bind(&both_loaded_as_doubles); in Generate()
1741 __ li(t0, Operand(LESS)); in Generate()
1742 __ li(t1, Operand(GREATER)); in Generate()
1743 __ li(t2, Operand(EQUAL)); in Generate()
1746 __ BranchF(NULL, &nan, eq, f12, f14); in Generate()
1750 __ c(OLT, D, f12, f14); in Generate()
1751 __ Movt(v0, t0); in Generate()
1755 __ Movf(v0, t1); in Generate()
1758 __ c(EQ, D, f12, f14); in Generate()
1759 __ Movt(v0, t2); in Generate()
1761 __ Ret(); in Generate()
1763 __ bind(&nan); in Generate()
1767 __ li(v0, Operand(GREATER)); in Generate()
1769 __ li(v0, Operand(LESS)); in Generate()
1771 __ Ret(); in Generate()
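Lines 1750-1759 compare the loaded doubles without branches: each c(cond, D, ...) writes the FPU condition flag, and Movt/Movf conditionally move one of the preloaded constants (lines 1741-1743) into v0 depending on that flag. The sequence expanded with comments:

    __ li(t0, Operand(LESS));
    __ li(t1, Operand(GREATER));
    __ li(t2, Operand(EQUAL));
    __ c(OLT, D, f12, f14);   // flag := lhs < rhs (ordered)
    __ Movt(v0, t0);          // v0 = LESS     if flag set
    __ Movf(v0, t1);          // v0 = GREATER  if flag clear
    __ c(EQ, D, f12, f14);    // flag := lhs == rhs
    __ Movt(v0, t2);          // v0 = EQUAL    if flag set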
1782 __ bind(&not_smis); in Generate()
1804 __ bind(&check_for_symbols); in Generate()
1814 __ bind(&flat_string_check); in Generate()
1816 __ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs_, rhs_, a2, a3, &slow); in Generate()
1818 __ IncrementCounter(isolate->counters()->string_compare_native(), 1, a2, a3); in Generate()
1837 __ bind(&slow); in Generate()
1840 __ Push(lhs_, rhs_); in Generate()
1854 __ li(a0, Operand(Smi::FromInt(ncr))); in Generate()
1855 __ push(a0); in Generate()
1860 __ InvokeBuiltin(native, JUMP_FUNCTION); in Generate()
1885 __ And(at, tos_, kSmiTagMask); in Generate()
1887 __ Ret(eq, at, Operand(zero_reg)); in Generate()
1890 __ JumpIfSmi(tos_, &patch); in Generate()
1894 __ lw(map, FieldMemOperand(tos_, HeapObject::kMapOffset)); in Generate()
1897 __ lbu(at, FieldMemOperand(map, Map::kBitFieldOffset)); in Generate()
1898 __ And(at, at, Operand(1 << Map::kIsUndetectable)); in Generate()
1900 __ Movn(tos_, zero_reg, at); in Generate()
1901 __ Ret(ne, at, Operand(zero_reg)); in Generate()
1907 __ lbu(at, FieldMemOperand(map, Map::kInstanceTypeOffset)); in Generate()
1909 __ Ret(ge, at, Operand(FIRST_SPEC_OBJECT_TYPE)); in Generate()
1914 __ lbu(at, FieldMemOperand(map, Map::kInstanceTypeOffset)); in Generate()
1916 __ Branch(&skip, ge, at, Operand(FIRST_NONSTRING_TYPE)); in Generate()
1917 __ Ret(USE_DELAY_SLOT); // the string length is OK as the return value in Generate()
1918 __ lw(tos_, FieldMemOperand(tos_, String::kLengthOffset)); in Generate()
1919 __ bind(&skip); in Generate()
1925 __ LoadRoot(at, Heap::kHeapNumberMapRootIndex); in Generate()
1926 __ Branch(&not_heap_number, ne, map, Operand(at)); in Generate()
1928 __ ldc1(f2, FieldMemOperand(tos_, HeapNumber::kValueOffset)); in Generate()
1929 __ BranchF(&number, &zero_or_nan, ne, f2, kDoubleRegZero); in Generate()
1933 __ bind(&zero_or_nan); in Generate()
1934 __ mov(tos_, zero_reg); in Generate()
1935 __ bind(&number); in Generate()
1936 __ Ret(); in Generate()
1937 __ bind(&not_heap_number); in Generate()
1940 __ bind(&patch); in Generate()
1951 __ LoadRoot(at, value); in CheckOddball()
1952 __ Subu(at, at, tos_); // This is a check for equality for the movz below. in CheckOddball()
1956 __ Movz(tos_, zero_reg, at); in CheckOddball()
1958 __ Ret(eq, at, Operand(zero_reg)); in CheckOddball()
1964 __ Move(a3, tos_); in GenerateTypeTransition()
1965 __ li(a2, Operand(Smi::FromInt(tos_.code()))); in GenerateTypeTransition()
1966 __ li(a1, Operand(Smi::FromInt(types_.ToByte()))); in GenerateTypeTransition()
1967 __ Push(a3, a2, a1); in GenerateTypeTransition()
1970 __ TailCallExternalReference( in GenerateTypeTransition()
1981 __ MultiPush(kJSCallerSaved | ra.bit()); in Generate()
1984 __ MultiPushFPU(kCallerSavedFPU); in Generate()
1991 __ PrepareCallCFunction(argument_count, fp_argument_count, scratch); in Generate()
1992 __ li(a0, Operand(ExternalReference::isolate_address())); in Generate()
1993 __ CallCFunction( in Generate()
1998 __ MultiPopFPU(kCallerSavedFPU); in Generate()
2001 __ MultiPop(kJSCallerSaved | ra.bit()); in Generate()
2002 __ Ret(); in Generate()
2041 __ li(a2, Operand(Smi::FromInt(op_))); in GenerateTypeTransition()
2042 __ li(a1, Operand(Smi::FromInt(mode_))); in GenerateTypeTransition()
2043 __ li(a0, Operand(Smi::FromInt(operand_type_))); in GenerateTypeTransition()
2044 __ Push(v0, a2, a1, a0); in GenerateTypeTransition()
2046 __ TailCallExternalReference( in GenerateTypeTransition()
2069 __ bind(&non_smi); in GenerateSmiStubSub()
2070 __ bind(&slow); in GenerateSmiStubSub()
2078 __ bind(&non_smi); in GenerateSmiStubBitNot()
2086 __ JumpIfNotSmi(a0, non_smi); in GenerateSmiCodeSub()
2089 __ And(t0, a0, ~0x80000000); in GenerateSmiCodeSub()
2090 __ Branch(slow, eq, t0, Operand(zero_reg)); in GenerateSmiCodeSub()
2093 __ Ret(USE_DELAY_SLOT); in GenerateSmiCodeSub()
2094 __ subu(v0, zero_reg, a0); in GenerateSmiCodeSub()
2100 __ JumpIfNotSmi(a0, non_smi); in GenerateSmiCodeBitNot()
2103 __ Neg(v0, a0); in GenerateSmiCodeBitNot()
2104 __ And(v0, v0, ~kSmiTagMask); in GenerateSmiCodeBitNot()
2105 __ Ret(); in GenerateSmiCodeBitNot()
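GenerateSmiCodeBitNot (lines 2100-2105) computes ~n without untagging: for a smi s = n << 1, the bitwise complement is (~n << 1) | 1, so flipping all bits and then clearing the tag bit yields the smi encoding of ~n (this assumes the Neg on line 2103 is a bitwise complement, which the tag-bit clearing on line 2104 implies). A standalone check of that identity:

    #include <cassert>

    int main() {
      int n = 42;
      int smi = n << 1;               // kSmiTagSize == 1, tag bit == 0
      int result = ~smi & ~1;         // flip bits, clear the tag bit
      assert(result == ((~n) << 1));  // smi encoding of ~n
      return 0;
    }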
2127 __ bind(&non_smi); in GenerateHeapNumberStubSub()
2129 __ bind(&slow); in GenerateHeapNumberStubSub()
2131 __ bind(&call_builtin); in GenerateHeapNumberStubSub()
2139 __ bind(&non_smi); in GenerateHeapNumberStubBitNot()
2141 __ bind(&slow); in GenerateHeapNumberStubBitNot()
2151 __ lw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset)); in GenerateHeapNumberCodeSub()
2152 __ Xor(a2, a2, Operand(HeapNumber::kSignMask)); // Flip sign. in GenerateHeapNumberCodeSub()
2153 __ sw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset)); in GenerateHeapNumberCodeSub()
2156 __ AllocateHeapNumber(a1, a2, a3, t2, &slow_allocate_heapnumber); in GenerateHeapNumberCodeSub()
2157 __ jmp(&heapnumber_allocated); in GenerateHeapNumberCodeSub()
2159 __ bind(&slow_allocate_heapnumber); in GenerateHeapNumberCodeSub()
2162 __ push(a0); in GenerateHeapNumberCodeSub()
2163 __ CallRuntime(Runtime::kNumberAlloc, 0); in GenerateHeapNumberCodeSub()
2164 __ mov(a1, v0); in GenerateHeapNumberCodeSub()
2165 __ pop(a0); in GenerateHeapNumberCodeSub()
2168 __ bind(&heapnumber_allocated); in GenerateHeapNumberCodeSub()
2169 __ lw(a3, FieldMemOperand(a0, HeapNumber::kMantissaOffset)); in GenerateHeapNumberCodeSub()
2170 __ lw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset)); in GenerateHeapNumberCodeSub()
2171 __ sw(a3, FieldMemOperand(a1, HeapNumber::kMantissaOffset)); in GenerateHeapNumberCodeSub()
2172 __ Xor(a2, a2, Operand(HeapNumber::kSignMask)); // Flip sign. in GenerateHeapNumberCodeSub()
2173 __ sw(a2, FieldMemOperand(a1, HeapNumber::kExponentOffset)); in GenerateHeapNumberCodeSub()
2174 __ mov(v0, a1); in GenerateHeapNumberCodeSub()
2176 __ Ret(); in GenerateHeapNumberCodeSub()
2187 __ ConvertToInt32(a0, a1, a2, a3, f0, slow); in GenerateHeapNumberCodeBitNot()
2191 __ Neg(a1, a1); in GenerateHeapNumberCodeBitNot()
2192 __ Addu(a2, a1, Operand(0x40000000)); in GenerateHeapNumberCodeBitNot()
2193 __ Branch(&try_float, lt, a2, Operand(zero_reg)); in GenerateHeapNumberCodeBitNot()
2196 __ SmiTag(v0, a1); in GenerateHeapNumberCodeBitNot()
2197 __ Ret(); in GenerateHeapNumberCodeBitNot()
2200 __ bind(&try_float); in GenerateHeapNumberCodeBitNot()
2204 __ AllocateHeapNumber(a2, a3, t0, t2, &slow_allocate_heapnumber); in GenerateHeapNumberCodeBitNot()
2205 __ jmp(&heapnumber_allocated); in GenerateHeapNumberCodeBitNot()
2207 __ bind(&slow_allocate_heapnumber); in GenerateHeapNumberCodeBitNot()
2210 __ push(v0); // Push the heap number, not the untagged int32. in GenerateHeapNumberCodeBitNot()
2211 __ CallRuntime(Runtime::kNumberAlloc, 0); in GenerateHeapNumberCodeBitNot()
2212 __ mov(a2, v0); // Move the new heap number into a2. in GenerateHeapNumberCodeBitNot()
2214 __ pop(v0); in GenerateHeapNumberCodeBitNot()
2220 __ ConvertToInt32(v0, a1, a3, t0, f0, &impossible); in GenerateHeapNumberCodeBitNot()
2222 __ Xor(a1, a1, -1); in GenerateHeapNumberCodeBitNot()
2224 __ bind(&heapnumber_allocated); in GenerateHeapNumberCodeBitNot()
2225 __ mov(v0, a2); // Move newly allocated heap number to v0. in GenerateHeapNumberCodeBitNot()
2231 __ mtc1(a1, f0); in GenerateHeapNumberCodeBitNot()
2232 __ cvt_d_w(f0, f0); in GenerateHeapNumberCodeBitNot()
2233 __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset)); in GenerateHeapNumberCodeBitNot()
2234 __ Ret(); in GenerateHeapNumberCodeBitNot()
2239 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); in GenerateHeapNumberCodeBitNot()
2242 __ bind(&impossible); in GenerateHeapNumberCodeBitNot()
2244 __ stop("Incorrect assumption in bit-not stub"); in GenerateHeapNumberCodeBitNot()
2267 __ bind(&non_smi); in GenerateGenericStubSub()
2269 __ bind(&slow); in GenerateGenericStubSub()
2277 __ bind(&non_smi); in GenerateGenericStubBitNot()
2279 __ bind(&slow); in GenerateGenericStubBitNot()
2287 __ push(a0); in GenerateGenericCodeFallback()
2290 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION); in GenerateGenericCodeFallback()
2293 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION); in GenerateGenericCodeFallback()
2304 __ Push(a1, a0); in GenerateTypeTransition()
2306 __ li(a2, Operand(Smi::FromInt(MinorKey()))); in GenerateTypeTransition()
2307 __ li(a1, Operand(Smi::FromInt(op_))); in GenerateTypeTransition()
2308 __ li(a0, Operand(Smi::FromInt(operands_type_))); in GenerateTypeTransition()
2309 __ Push(a2, a1, a0); in GenerateTypeTransition()
2311 __ TailCallExternalReference( in GenerateTypeTransition()
2390 __ AdduAndCheckForOverflow(v0, left, right, scratch1); in GenerateSmiSmiOperation()
2391 __ RetOnNoOverflow(scratch1); in GenerateSmiSmiOperation()
2395 __ SubuAndCheckForOverflow(v0, left, right, scratch1); in GenerateSmiSmiOperation()
2396 __ RetOnNoOverflow(scratch1); in GenerateSmiSmiOperation()
2402 __ SmiUntag(scratch1, right); in GenerateSmiSmiOperation()
2406 __ Mult(left, scratch1); in GenerateSmiSmiOperation()
2409 __ mflo(scratch1); in GenerateSmiSmiOperation()
2410 __ mfhi(scratch2); in GenerateSmiSmiOperation()
2411 __ sra(scratch1, scratch1, 31); in GenerateSmiSmiOperation()
2412 __ Branch(&not_smi_result, ne, scratch1, Operand(scratch2)); in GenerateSmiSmiOperation()
2414 __ mflo(v0); in GenerateSmiSmiOperation()
2415 __ Ret(ne, v0, Operand(zero_reg)); in GenerateSmiSmiOperation()
2418 __ Addu(scratch2, right, left); in GenerateSmiSmiOperation()
2422 __ Branch(&skip, lt, scratch2, Operand(zero_reg)); in GenerateSmiSmiOperation()
2424 __ Ret(USE_DELAY_SLOT); in GenerateSmiSmiOperation()
2425 __ mov(v0, zero_reg); // Return smi 0 if the non-zero one was positive. in GenerateSmiSmiOperation()
2426 __ bind(&skip); in GenerateSmiSmiOperation()
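The multiply case checks for 32-bit overflow by comparing HI against LO sign-extended (lines 2409-2412): Mult's 64-bit product fits in 32 bits exactly when the high word equals the sign extension of the low word. The same test standalone (right-shifting a negative value is implementation-defined in C++, but arithmetic on the platforms in question, like the sra in the listing):

    #include <cassert>
    #include <cstdint>

    bool MulFitsIn32(int32_t a, int32_t b) {
      int64_t p = static_cast<int64_t>(a) * b;     // Mult's 64-bit result
      int32_t lo = static_cast<int32_t>(p);        // mflo
      int32_t hi = static_cast<int32_t>(p >> 32);  // mfhi
      return (lo >> 31) == hi;                     // the sra on line 2411
    }

    int main() {
      assert(MulFitsIn32(46340, 46340));           // 2147395600 fits
      assert(!MulFitsIn32(65536, 65536));          // 2^32 does not
      return 0;
    }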
2433 __ SmiUntag(scratch2, right); in GenerateSmiSmiOperation()
2434 __ SmiUntag(scratch1, left); in GenerateSmiSmiOperation()
2435 __ Div(scratch1, scratch2); in GenerateSmiSmiOperation()
2438 __ Branch(&not_smi_result, eq, scratch2, Operand(zero_reg)); in GenerateSmiSmiOperation()
2443 __ mfhi(scratch1); in GenerateSmiSmiOperation()
2444 __ Branch(&not_smi_result, ne, scratch1, Operand(zero_reg)); in GenerateSmiSmiOperation()
2445 __ mflo(scratch1); in GenerateSmiSmiOperation()
2446 __ Branch(&done, ne, scratch1, Operand(zero_reg)); in GenerateSmiSmiOperation()
2447 __ Branch(&not_smi_result, lt, scratch2, Operand(zero_reg)); in GenerateSmiSmiOperation()
2448 __ bind(&done); in GenerateSmiSmiOperation()
2450 __ Addu(scratch2, scratch1, Operand(0x40000000)); in GenerateSmiSmiOperation()
2451 __ Branch(&not_smi_result, lt, scratch2, Operand(zero_reg)); in GenerateSmiSmiOperation()
2452 __ SmiTag(v0, scratch1); in GenerateSmiSmiOperation()
2453 __ Ret(); in GenerateSmiSmiOperation()
2458 __ SmiUntag(scratch2, right); in GenerateSmiSmiOperation()
2459 __ SmiUntag(scratch1, left); in GenerateSmiSmiOperation()
2460 __ Div(scratch1, scratch2); in GenerateSmiSmiOperation()
2464 __ Branch(&not_smi_result, eq, scratch2, Operand(zero_reg)); in GenerateSmiSmiOperation()
2468 __ mfhi(scratch2); in GenerateSmiSmiOperation()
2469 __ Branch(&done, ne, scratch2, Operand(zero_reg)); in GenerateSmiSmiOperation()
2470 __ Branch(&not_smi_result, lt, scratch1, Operand(zero_reg)); in GenerateSmiSmiOperation()
2471 __ bind(&done); in GenerateSmiSmiOperation()
2473 __ Addu(scratch1, scratch2, Operand(0x40000000)); in GenerateSmiSmiOperation()
2474 __ Branch(&not_smi_result, lt, scratch1, Operand(zero_reg)); in GenerateSmiSmiOperation()
2475 __ SmiTag(v0, scratch2); in GenerateSmiSmiOperation()
2476 __ Ret(); in GenerateSmiSmiOperation()
2480 __ Ret(USE_DELAY_SLOT); in GenerateSmiSmiOperation()
2481 __ or_(v0, left, right); in GenerateSmiSmiOperation()
2484 __ Ret(USE_DELAY_SLOT); in GenerateSmiSmiOperation()
2485 __ and_(v0, left, right); in GenerateSmiSmiOperation()
2488 __ Ret(USE_DELAY_SLOT); in GenerateSmiSmiOperation()
2489 __ xor_(v0, left, right); in GenerateSmiSmiOperation()
2493 __ GetLeastBitsFromSmi(scratch1, right, 5); in GenerateSmiSmiOperation()
2494 __ srav(scratch1, left, scratch1); in GenerateSmiSmiOperation()
2496 __ And(v0, scratch1, ~kSmiTagMask); in GenerateSmiSmiOperation()
2497 __ Ret(); in GenerateSmiSmiOperation()
2502 __ SmiUntag(scratch1, left); in GenerateSmiSmiOperation()
2503 __ GetLeastBitsFromSmi(scratch2, right, 5); in GenerateSmiSmiOperation()
2504 __ srlv(v0, scratch1, scratch2); in GenerateSmiSmiOperation()
2507 __ And(scratch1, v0, Operand(0xc0000000)); in GenerateSmiSmiOperation()
2508 __ Branch(&not_smi_result, ne, scratch1, Operand(zero_reg)); in GenerateSmiSmiOperation()
2510 __ SmiTag(v0); in GenerateSmiSmiOperation()
2511 __ Ret(); in GenerateSmiSmiOperation()
2515 __ SmiUntag(scratch1, left); in GenerateSmiSmiOperation()
2516 __ GetLeastBitsFromSmi(scratch2, right, 5); in GenerateSmiSmiOperation()
2517 __ sllv(scratch1, scratch1, scratch2); in GenerateSmiSmiOperation()
2519 __ Addu(scratch2, scratch1, Operand(0x40000000)); in GenerateSmiSmiOperation()
2520 __ Branch(&not_smi_result, lt, scratch2, Operand(zero_reg)); in GenerateSmiSmiOperation()
2521 __ SmiTag(v0, scratch1); in GenerateSmiSmiOperation()
2522 __ Ret(); in GenerateSmiSmiOperation()
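The Addu-0x40000000 test used after DIV, MOD, and SHL (lines 2450, 2473, 2519; SHR at 2507 uses an unsigned variant masking the top two bits) is the standard smi-range check: a result fits in a smi iff it lies in [-2^30, 2^30), and adding 0x40000000 makes exactly the out-of-range values negative. A standalone version (unsigned addition avoids C++ signed-overflow undefined behavior; MIPS Addu wraps the same way):

    #include <cassert>
    #include <cstdint>

    bool FitsInSmi(int32_t v) {   // smi range on 32-bit: [-2^30, 2^30)
      return static_cast<int32_t>(static_cast<uint32_t>(v) + 0x40000000u) >= 0;
    }

    int main() {
      assert(FitsInSmi(0x3FFFFFFF) && FitsInSmi(-0x40000000));
      assert(!FitsInSmi(0x40000000) && !FitsInSmi(INT32_MIN));
      return 0;
    }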
2527 __ bind(&not_smi_result); in GenerateSmiSmiOperation()
2543 __ AbortIfNotSmi(left); in GenerateFPOperation()
2544 __ AbortIfNotSmi(right); in GenerateFPOperation()
2548 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); in GenerateFPOperation()
2589 __ add_d(f10, f12, f14); in GenerateFPOperation()
2592 __ sub_d(f10, f12, f14); in GenerateFPOperation()
2595 __ mul_d(f10, f12, f14); in GenerateFPOperation()
2598 __ div_d(f10, f12, f14); in GenerateFPOperation()
2607 __ sdc1(f10, FieldMemOperand(result, HeapNumber::kValueOffset)); in GenerateFPOperation()
2608 __ Ret(USE_DELAY_SLOT); in GenerateFPOperation()
2609 __ mov(v0, result); in GenerateFPOperation()
2617 __ stop("Unreachable code."); in GenerateFPOperation()
2629 __ SmiUntag(a3, left); in GenerateFPOperation()
2630 __ SmiUntag(a2, right); in GenerateFPOperation()
2655 __ Or(a2, a3, Operand(a2)); in GenerateFPOperation()
2658 __ Xor(a2, a3, Operand(a2)); in GenerateFPOperation()
2661 __ And(a2, a3, Operand(a2)); in GenerateFPOperation()
2665 __ GetLeastBitsFromInt32(a2, a2, 5); in GenerateFPOperation()
2666 __ srav(a2, a3, a2); in GenerateFPOperation()
2670 __ GetLeastBitsFromInt32(a2, a2, 5); in GenerateFPOperation()
2671 __ srlv(a2, a3, a2); in GenerateFPOperation()
2677 __ Branch(&result_not_a_smi, lt, a2, Operand(zero_reg)); in GenerateFPOperation()
2679 __ Branch(not_numbers, lt, a2, Operand(zero_reg)); in GenerateFPOperation()
2684 __ GetLeastBitsFromInt32(a2, a2, 5); in GenerateFPOperation()
2685 __ sllv(a2, a3, a2); in GenerateFPOperation()
2691 __ Addu(a3, a2, Operand(0x40000000)); in GenerateFPOperation()
2692 __ Branch(&result_not_a_smi, lt, a3, Operand(zero_reg)); in GenerateFPOperation()
2693 __ SmiTag(v0, a2); in GenerateFPOperation()
2694 __ Ret(); in GenerateFPOperation()
2697 __ bind(&result_not_a_smi); in GenerateFPOperation()
2700 __ AllocateHeapNumber( in GenerateFPOperation()
2712 __ mov(v0, t1); in GenerateFPOperation()
2718 __ mtc1(a2, f0); in GenerateFPOperation()
2720 __ Cvt_d_uw(f0, f0, f22); in GenerateFPOperation()
2722 __ cvt_d_w(f0, f0); in GenerateFPOperation()
2727 __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset)); in GenerateFPOperation()
2728 __ Ret(); in GenerateFPOperation()
2733 __ TailCallStub(&stub); in GenerateFPOperation()
2759 __ Or(scratch1, left, Operand(right)); in GenerateSmiCode()
2761 __ JumpIfNotSmi(scratch1, &not_smis); in GenerateSmiCode()
2771 __ bind(&not_smis); in GenerateSmiCode()
2795 __ bind(&call_runtime); in GenerateSmiStub()
2821 __ JumpIfSmi(left, &call_runtime); in GenerateBothStringStub()
2822 __ GetObjectType(left, a2, a2); in GenerateBothStringStub()
2823 __ Branch(&call_runtime, ge, a2, Operand(FIRST_NONSTRING_TYPE)); in GenerateBothStringStub()
2826 __ JumpIfSmi(right, &call_runtime); in GenerateBothStringStub()
2827 __ GetObjectType(right, a2, a2); in GenerateBothStringStub()
2828 __ Branch(&call_runtime, ge, a2, Operand(FIRST_NONSTRING_TYPE)); in GenerateBothStringStub()
2832 __ TailCallStub(&string_add_stub); in GenerateBothStringStub()
2834 __ bind(&call_runtime); in GenerateBothStringStub()
2851 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); in GenerateInt32Stub()
2861 __ Or(scratch1, left, right); in GenerateInt32Stub()
2862 __ JumpIfNotSmi(scratch1, &skip); in GenerateInt32Stub()
2865 __ bind(&skip); in GenerateInt32Stub()
2909 __ add_d(f10, f12, f14); in GenerateInt32Stub()
2912 __ sub_d(f10, f12, f14); in GenerateInt32Stub()
2915 __ mul_d(f10, f12, f14); in GenerateInt32Stub()
2918 __ div_d(f10, f12, f14); in GenerateInt32Stub()
2931 __ EmitFPUTruncate(kRoundToZero, in GenerateInt32Stub()
2939 __ Branch(&transition, ne, except_flag, Operand(zero_reg)); in GenerateInt32Stub()
2943 __ mfc1(scratch1, single_scratch); in GenerateInt32Stub()
2944 __ Addu(scratch2, scratch1, Operand(0x40000000)); in GenerateInt32Stub()
2946 __ Branch(&return_heap_number, lt, scratch2, Operand(zero_reg)); in GenerateInt32Stub()
2949 __ Branch(&not_zero, ne, scratch1, Operand(zero_reg)); in GenerateInt32Stub()
2950 __ mfc1(scratch2, f11); in GenerateInt32Stub()
2951 __ And(scratch2, scratch2, HeapNumber::kSignMask); in GenerateInt32Stub()
2952 __ Branch(&return_heap_number, ne, scratch2, Operand(zero_reg)); in GenerateInt32Stub()
2953 __ bind(&not_zero); in GenerateInt32Stub()
2956 __ SmiTag(v0, scratch1); in GenerateInt32Stub()
2957 __ Ret(); in GenerateInt32Stub()
2962 __ bind(&return_heap_number); in GenerateInt32Stub()
2975 __ mov(v0, heap_number_result); in GenerateInt32Stub()
2976 __ sdc1(f10, FieldMemOperand(v0, HeapNumber::kValueOffset)); in GenerateInt32Stub()
2977 __ Ret(); in GenerateInt32Stub()
2986 __ Push(t1, t0); in GenerateInt32Stub()
3000 __ Pop(a1, a0); in GenerateInt32Stub()
3006 __ stop("Unreachable code."); in GenerateInt32Stub()
3009 __ bind(&pop_and_call_runtime); in GenerateInt32Stub()
3010 __ Drop(2); in GenerateInt32Stub()
3011 __ Branch(&call_runtime); in GenerateInt32Stub()
3051 __ Or(a2, a3, Operand(a2)); in GenerateInt32Stub()
3054 __ Xor(a2, a3, Operand(a2)); in GenerateInt32Stub()
3057 __ And(a2, a3, Operand(a2)); in GenerateInt32Stub()
3060 __ And(a2, a2, Operand(0x1f)); in GenerateInt32Stub()
3061 __ srav(a2, a3, a2); in GenerateInt32Stub()
3064 __ And(a2, a2, Operand(0x1f)); in GenerateInt32Stub()
3065 __ srlv(a2, a3, a2); in GenerateInt32Stub()
3073 __ Branch((result_type_ <= BinaryOpIC::INT32) in GenerateInt32Stub()
3080 __ Branch((result_type_ <= BinaryOpIC::INT32) in GenerateInt32Stub()
3089 __ And(a2, a2, Operand(0x1f)); in GenerateInt32Stub()
3090 __ sllv(a2, a3, a2); in GenerateInt32Stub()
3097 __ Addu(scratch1, a2, Operand(0x40000000)); in GenerateInt32Stub()
3099 __ Branch(&return_heap_number, lt, scratch1, Operand(zero_reg)); in GenerateInt32Stub()
3101 __ SmiTag(v0, a2); in GenerateInt32Stub()
3102 __ Ret(); in GenerateInt32Stub()
3104 __ bind(&return_heap_number); in GenerateInt32Stub()
3118 __ mtc1(a2, double_scratch); in GenerateInt32Stub()
3119 __ cvt_d_w(double_scratch, double_scratch); in GenerateInt32Stub()
3122 __ mtc1(a2, double_scratch); in GenerateInt32Stub()
3123 __ Cvt_d_uw(double_scratch, double_scratch, single_scratch); in GenerateInt32Stub()
3127 __ mov(v0, heap_number_result); in GenerateInt32Stub()
3128 __ sdc1(double_scratch, FieldMemOperand(v0, HeapNumber::kValueOffset)); in GenerateInt32Stub()
3129 __ Ret(); in GenerateInt32Stub()
3133 __ mov(a0, t1); in GenerateInt32Stub()
3135 __ TailCallStub(&stub); in GenerateInt32Stub()
3150 __ bind(&transition); in GenerateInt32Stub()
3154 __ bind(&call_runtime); in GenerateInt32Stub()
3170 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex); in GenerateOddballStub()
3171 __ Branch(&check, ne, a1, Operand(t0)); in GenerateOddballStub()
3173 __ li(a1, Operand(Smi::FromInt(0))); in GenerateOddballStub()
3175 __ LoadRoot(a1, Heap::kNanValueRootIndex); in GenerateOddballStub()
3177 __ jmp(&done); in GenerateOddballStub()
3178 __ bind(&check); in GenerateOddballStub()
3179 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex); in GenerateOddballStub()
3180 __ Branch(&done, ne, a0, Operand(t0)); in GenerateOddballStub()
3182 __ li(a0, Operand(Smi::FromInt(0))); in GenerateOddballStub()
3184 __ LoadRoot(a0, Heap::kNanValueRootIndex); in GenerateOddballStub()
3186 __ bind(&done); in GenerateOddballStub()
3196 __ bind(&call_runtime); in GenerateHeapNumberStub()
3208 __ bind(&call_string_add_or_runtime); in GenerateGeneric()
3213 __ bind(&call_runtime); in GenerateGeneric()
3226 __ JumpIfSmi(left, &left_not_string); in GenerateAddStrings()
3227 __ GetObjectType(left, a2, a2); in GenerateAddStrings()
3228 __ Branch(&left_not_string, ge, a2, Operand(FIRST_NONSTRING_TYPE)); in GenerateAddStrings()
3232 __ TailCallStub(&string_add_left_stub); in GenerateAddStrings()
3235 __ bind(&left_not_string); in GenerateAddStrings()
3236 __ JumpIfSmi(right, &call_runtime); in GenerateAddStrings()
3237 __ GetObjectType(right, a2, a2); in GenerateAddStrings()
3238 __ Branch(&call_runtime, ge, a2, Operand(FIRST_NONSTRING_TYPE)); in GenerateAddStrings()
3242 __ TailCallStub(&string_add_right_stub); in GenerateAddStrings()
3245 __ bind(&call_runtime); in GenerateAddStrings()
3253 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION); in GenerateCallRuntime()
3256 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION); in GenerateCallRuntime()
3259 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION); in GenerateCallRuntime()
3262 __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION); in GenerateCallRuntime()
3265 __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION); in GenerateCallRuntime()
3268 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION); in GenerateCallRuntime()
3271 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION); in GenerateCallRuntime()
3274 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION); in GenerateCallRuntime()
3277 __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION); in GenerateCallRuntime()
3280 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION); in GenerateCallRuntime()
3283 __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION); in GenerateCallRuntime()
3308 __ JumpIfNotSmi(overwritable_operand, &skip_allocation); in GenerateHeapResultAllocation()
3310 __ AllocateHeapNumber( in GenerateHeapResultAllocation()
3312 __ Branch(&allocated); in GenerateHeapResultAllocation()
3313 __ bind(&skip_allocation); in GenerateHeapResultAllocation()
3315 __ mov(result, overwritable_operand); in GenerateHeapResultAllocation()
3316 __ bind(&allocated); in GenerateHeapResultAllocation()
3319 __ AllocateHeapNumber( in GenerateHeapResultAllocation()
3326 __ Push(a1, a0); in GenerateRegisterArgsPush()
3352 __ JumpIfNotSmi(a0, &input_not_smi); in Generate()
3356 __ sra(t0, a0, kSmiTagSize); in Generate()
3357 __ mtc1(t0, f4); in Generate()
3358 __ cvt_d_w(f4, f4); in Generate()
3359 __ Move(a2, a3, f4); in Generate()
3360 __ Branch(&loaded); in Generate()
3362 __ bind(&input_not_smi); in Generate()
3364 __ CheckMap(a0, in Generate()
3371 __ lw(a2, FieldMemOperand(a0, HeapNumber::kValueOffset)); in Generate()
3372 __ lw(a3, FieldMemOperand(a0, HeapNumber::kValueOffset + 4)); in Generate()
3375 __ Move(a2, a3, f4); in Generate()
3377 __ bind(&loaded); in Generate()
3382 __ Xor(a1, a2, a3); in Generate()
3383 __ sra(t0, a1, 16); in Generate()
3384 __ Xor(a1, a1, t0); in Generate()
3385 __ sra(t0, a1, 8); in Generate()
3386 __ Xor(a1, a1, t0); in Generate()
3388 __ And(a1, a1, Operand(TranscendentalCache::SubCache::kCacheSize - 1)); in Generate()
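The xor/shift sequence at 3382-3388 is the transcendental-cache hash: collapse the double's two 32-bit words into one, fold the high bits down twice, then mask to the table size. A sketch, assuming (as the final mask implies) that kCacheSize is a power of two:

    #include <cstdint>

    // Hash a double given as two 32-bit halves (lo, hi) to a cache index.
    // The two folds push entropy from the high bits into the low bits
    // that survive the final mask; arithmetic shifts match the sra above.
    uint32_t CacheIndex(uint32_t lo, uint32_t hi, uint32_t cache_size) {
      int32_t h = static_cast<int32_t>(lo ^ hi);
      h ^= h >> 16;
      h ^= h >> 8;
      return static_cast<uint32_t>(h) & (cache_size - 1);
    }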
3393 __ li(cache_entry, Operand( in Generate()
3397 __ lw(cache_entry, MemOperand(cache_entry, type_ * sizeof( in Generate()
3401 __ Branch(&invalid_cache, eq, cache_entry, Operand(zero_reg)); in Generate()
3419 __ sll(t0, a1, 1); in Generate()
3420 __ Addu(a1, a1, t0); in Generate()
3421 __ sll(t0, a1, 2); in Generate()
3422 __ Addu(cache_entry, cache_entry, t0); in Generate()
3425 __ lw(t0, MemOperand(cache_entry, 0)); in Generate()
3426 __ lw(t1, MemOperand(cache_entry, 4)); in Generate()
3427 __ lw(t2, MemOperand(cache_entry, 8)); in Generate()
3428 __ Branch(&calculate, ne, a2, Operand(t0)); in Generate()
3429 __ Branch(&calculate, ne, a3, Operand(t1)); in Generate()
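The three loads and two compares above spell out the cache element layout: two words holding the cached input double and one holding a pointer to the cached HeapNumber result, which is why 3419-3422 scale the index by 12 (times 3 via shift-and-add, then times 4). Roughly, with hypothetical names:

    #include <cstdint>

    struct Element {     // 12 bytes, hence the index * 12 addressing
      uint32_t in_lo;    // low word of the cached input double
      uint32_t in_hi;    // high word of the cached input double
      void*    result;   // cached HeapNumber holding the function value
    };

    // Hit iff both input words match; otherwise fall through to &calculate.
    void* Lookup(const Element* cache, uint32_t index,
                 uint32_t lo, uint32_t hi) {
      const Element& e = cache[index];
      return (e.in_lo == lo && e.in_hi == hi) ? e.result : nullptr;
    }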
3432 __ IncrementCounter( in Generate()
3436 __ Drop(1); in Generate()
3437 __ mov(v0, t2); in Generate()
3440 __ ldc1(f4, FieldMemOperand(t2, HeapNumber::kValueOffset)); in Generate()
3442 __ Ret(); in Generate()
3445 __ bind(&calculate); in Generate()
3447 __ IncrementCounter( in Generate()
3450 __ bind(&invalid_cache); in Generate()
3451 __ TailCallExternalReference(ExternalReference(RuntimeFunction(), in Generate()
3466 __ Push(cache_entry, a2, a3); in Generate()
3468 __ GetCFunctionDoubleResult(f4); in Generate()
3472 __ Pop(cache_entry, a2, a3); in Generate()
3473 __ LoadRoot(t1, Heap::kHeapNumberMapRootIndex); in Generate()
3474 __ AllocateHeapNumber(t2, scratch0, scratch1, t1, &no_update); in Generate()
3475 __ sdc1(f4, FieldMemOperand(t2, HeapNumber::kValueOffset)); in Generate()
3477 __ sw(a2, MemOperand(cache_entry, 0 * kPointerSize)); in Generate()
3478 __ sw(a3, MemOperand(cache_entry, 1 * kPointerSize)); in Generate()
3479 __ sw(t2, MemOperand(cache_entry, 2 * kPointerSize)); in Generate()
3481 __ Ret(USE_DELAY_SLOT); in Generate()
3482 __ mov(v0, cache_entry); in Generate()
3484 __ bind(&invalid_cache); in Generate()
3487 __ LoadRoot(t1, Heap::kHeapNumberMapRootIndex); in Generate()
3488 __ AllocateHeapNumber(a0, scratch0, scratch1, t1, &skip_cache); in Generate()
3489 __ sdc1(f4, FieldMemOperand(a0, HeapNumber::kValueOffset)); in Generate()
3492 __ push(a0); in Generate()
3493 __ CallRuntime(RuntimeFunction(), 1); in Generate()
3495 __ ldc1(f4, FieldMemOperand(v0, HeapNumber::kValueOffset)); in Generate()
3496 __ Ret(); in Generate()
3498 __ bind(&skip_cache); in Generate()
3502 __ GetCFunctionDoubleResult(f4); in Generate()
3503 __ bind(&no_update); in Generate()
3512 __ li(scratch0, Operand(4 * kPointerSize)); in Generate()
3513 __ push(scratch0); in Generate()
3514 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); in Generate()
3516 __ Ret(); in Generate()
3523 __ push(ra); in GenerateCallCFunction()
3524 __ PrepareCallCFunction(2, scratch); in GenerateCallCFunction()
3526 __ Move(a0, a1, f4); in GenerateCallCFunction()
3528 __ mov_d(f12, f4); in GenerateCallCFunction()
3534 __ CallCFunction( in GenerateCallCFunction()
3539 __ CallCFunction( in GenerateCallCFunction()
3544 __ CallCFunction(ExternalReference::math_tan_double_function(isolate), in GenerateCallCFunction()
3548 __ CallCFunction( in GenerateCallCFunction()
3556 __ pop(ra); in GenerateCallCFunction()
3575 __ TailCallRuntime(Runtime::kStackGuard, 0, 1); in Generate()
3580 __ TailCallRuntime(Runtime::kInterrupt, 0, 1); in Generate()
3604 __ lw(base, MemOperand(sp, 1 * kPointerSize)); in Generate()
3605 __ lw(exponent, MemOperand(sp, 0 * kPointerSize)); in Generate()
3607 __ LoadRoot(heapnumbermap, Heap::kHeapNumberMapRootIndex); in Generate()
3609 __ UntagAndJumpIfSmi(scratch, base, &base_is_smi); in Generate()
3610 __ lw(scratch, FieldMemOperand(base, JSObject::kMapOffset)); in Generate()
3611 __ Branch(&call_runtime, ne, scratch, Operand(heapnumbermap)); in Generate()
3613 __ ldc1(double_base, FieldMemOperand(base, HeapNumber::kValueOffset)); in Generate()
3614 __ jmp(&unpack_exponent); in Generate()
3616 __ bind(&base_is_smi); in Generate()
3617 __ mtc1(scratch, single_scratch); in Generate()
3618 __ cvt_d_w(double_base, single_scratch); in Generate()
3619 __ bind(&unpack_exponent); in Generate()
3621 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent); in Generate()
3623 __ lw(scratch, FieldMemOperand(exponent, JSObject::kMapOffset)); in Generate()
3624 __ Branch(&call_runtime, ne, scratch, Operand(heapnumbermap)); in Generate()
3625 __ ldc1(double_exponent, in Generate()
3629 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent); in Generate()
3631 __ ldc1(double_exponent, in Generate()
3638 __ EmitFPUTruncate(kRoundToMinusInf, in Generate()
3645 __ Branch(&int_exponent_convert, eq, scratch2, Operand(zero_reg)); in Generate()
3654 __ Move(double_scratch, 0.5); in Generate()
3655 __ BranchF(USE_DELAY_SLOT, in Generate()
3664 __ Move(double_scratch, -V8_INFINITY); in Generate()
3665 __ BranchF(USE_DELAY_SLOT, &done, NULL, eq, double_base, double_scratch); in Generate()
3666 __ neg_d(double_result, double_scratch); in Generate()
3669 __ add_d(double_scratch, double_base, kDoubleRegZero); in Generate()
3670 __ sqrt_d(double_result, double_scratch); in Generate()
3671 __ jmp(&done); in Generate()
3673 __ bind(&not_plus_half); in Generate()
3674 __ Move(double_scratch, -0.5); in Generate()
3675 __ BranchF(USE_DELAY_SLOT, in Generate()
3684 __ Move(double_scratch, -V8_INFINITY); in Generate()
3685 __ BranchF(USE_DELAY_SLOT, &done, NULL, eq, double_base, double_scratch); in Generate()
3686 __ Move(double_result, kDoubleRegZero); in Generate()
3689 __ add_d(double_scratch, double_base, kDoubleRegZero); in Generate()
3690 __ Move(double_result, 1); in Generate()
3691 __ sqrt_d(double_scratch, double_scratch); in Generate()
3692 __ div_d(double_result, double_result, double_scratch); in Generate()
3693 __ jmp(&done); in Generate()
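The two fast paths above encode the ES5 corner cases that make pow(x, ±0.5) more than a bare sqrt: a base of -Infinity must yield +Infinity (respectively +0), and adding +0 first (3669, 3689) normalizes -0 to +0 so that pow(-0, 0.5) comes out as +0 rather than sqrt's -0. In portable form:

    #include <cmath>

    double PowHalf(double base) {
      if (base == -INFINITY) return INFINITY;  // pow(-inf, 0.5) == +inf
      return std::sqrt(base + 0.0);            // "+ 0.0" turns -0 into +0
    }

    double PowMinusHalf(double base) {
      if (base == -INFINITY) return 0.0;       // pow(-inf, -0.5) == +0
      return 1.0 / std::sqrt(base + 0.0);
    }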
3696 __ push(ra); in Generate()
3699 __ PrepareCallCFunction(0, 2, scratch); in Generate()
3700 __ SetCallCDoubleArguments(double_base, double_exponent); in Generate()
3701 __ CallCFunction( in Generate()
3705 __ pop(ra); in Generate()
3706 __ GetCFunctionDoubleResult(double_result); in Generate()
3707 __ jmp(&done); in Generate()
3709 __ bind(&int_exponent_convert); in Generate()
3710 __ mfc1(scratch, single_scratch); in Generate()
3714 __ bind(&int_exponent); in Generate()
3718 __ mov(scratch, exponent); in Generate()
3721 __ mov(exponent, scratch); in Generate()
3724 __ mov_d(double_scratch, double_base); // Back up base. in Generate()
3725 __ Move(double_result, 1.0); in Generate()
3729 __ Branch(&positive_exponent, ge, scratch, Operand(zero_reg)); in Generate()
3730 __ Subu(scratch, zero_reg, scratch); in Generate()
3731 __ bind(&positive_exponent); in Generate()
3734 __ bind(&while_true); in Generate()
3736 __ And(scratch2, scratch, 1); in Generate()
3738 __ Branch(&no_carry, eq, scratch2, Operand(zero_reg)); in Generate()
3739 __ mul_d(double_result, double_result, double_scratch); in Generate()
3740 __ bind(&no_carry); in Generate()
3742 __ sra(scratch, scratch, 1); in Generate()
3744 __ Branch(&loop_end, eq, scratch, Operand(zero_reg)); in Generate()
3745 __ mul_d(double_scratch, double_scratch, double_scratch); in Generate()
3747 __ Branch(&while_true); in Generate()
3749 __ bind(&loop_end); in Generate()
3751 __ Branch(&done, ge, exponent, Operand(zero_reg)); in Generate()
3752 __ Move(double_scratch, 1.0); in Generate()
3753 __ div_d(double_result, double_scratch, double_result); in Generate()
3756 __ BranchF(&done, NULL, ne, double_result, kDoubleRegZero); in Generate()
3760 __ mtc1(exponent, single_scratch); in Generate()
3761 __ cvt_d_w(double_exponent, single_scratch); in Generate()
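The &while_true loop at 3734-3747 is square-and-multiply on the absolute exponent, with the reciprocal taken afterwards for negative exponents; the ne-against-zero check at 3756 catches an underflowed result and falls back to the double path above. A minimal sketch of the loop itself, omitting that fallback:

    #include <cstdlib>

    // O(log n) instead of n multiplications: each iteration squares the
    // base and folds it into the result when the matching exponent bit
    // is set, mirroring the scratch/double_scratch roles in the stub.
    double PowInt(double base, int exponent) {
      double result = 1.0;
      int n = std::abs(exponent);
      while (n != 0) {
        if (n & 1) result *= base;  // bit set: multiply current power in
        n >>= 1;
        if (n == 0) break;          // skip the last, unneeded squaring
        base *= base;
      }
      return exponent < 0 ? 1.0 / result : result;
    }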
3767 __ bind(&call_runtime); in Generate()
3768 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); in Generate()
3772 __ bind(&done); in Generate()
3773 __ AllocateHeapNumber( in Generate()
3775 __ sdc1(double_result, in Generate()
3778 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2); in Generate()
3779 __ DropAndRet(2); in Generate()
3781 __ push(ra); in Generate()
3784 __ PrepareCallCFunction(0, 2, scratch); in Generate()
3785 __ SetCallCDoubleArguments(double_base, double_exponent); in Generate()
3786 __ CallCFunction( in Generate()
3790 __ pop(ra); in Generate()
3791 __ GetCFunctionDoubleResult(double_result); in Generate()
3793 __ bind(&done); in Generate()
3794 __ IncrementCounter(counters->math_pow(), 1, scratch, scratch2); in Generate()
3795 __ Ret(); in Generate()
3851 __ mov(a0, v0); in GenerateCore()
3852 __ PrepareCallCFunction(1, 0, a1); in GenerateCore()
3853 __ CallCFunction(ExternalReference::perform_gc_function(isolate), 1, 0); in GenerateCore()
3859 __ li(a0, Operand(scope_depth)); in GenerateCore()
3860 __ lw(a1, MemOperand(a0)); in GenerateCore()
3861 __ Addu(a1, a1, Operand(1)); in GenerateCore()
3862 __ sw(a1, MemOperand(a0)); in GenerateCore()
3867 __ mov(a0, s0); in GenerateCore()
3873 __ AssertStackIsAligned(); in GenerateCore()
3875 __ li(a2, Operand(ExternalReference::isolate_address())); in GenerateCore()
3912 __ li(a2, Operand(scope_depth)); in GenerateCore()
3913 __ lw(a3, MemOperand(a2)); in GenerateCore()
3914 __ Subu(a3, a3, Operand(1)); in GenerateCore()
3915 __ sw(a3, MemOperand(a2)); in GenerateCore()
3921 __ addiu(a2, v0, 1); in GenerateCore()
3922 __ andi(t0, a2, kFailureTagMask); in GenerateCore()
3923 __ Branch(USE_DELAY_SLOT, &failure_returned, eq, t0, Operand(zero_reg)); in GenerateCore()
3925 __ addiu(sp, sp, kCArgsSlotsSize); in GenerateCore()
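The addiu/andi pair at 3921-3922 is the failure-tag test: failure objects carry all-ones in their low kFailureTagSize bits, so (value + 1) & mask is zero exactly for a failure, while every other tag pattern leaves at least one low bit standing. Assuming the usual tag width of two bits:

    #include <cstdint>

    constexpr uint32_t kFailureTagSize = 2;  // assumed value
    constexpr uint32_t kFailureTagMask = (1u << kFailureTagSize) - 1;

    // A failure has both low tag bits set, so the increment carries
    // them away; 0b00, 0b01 and 0b10 tags all survive the + 1.
    bool IsFailure(uint32_t value) {
      return ((value + 1) & kFailureTagMask) == 0;
    }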
3932 __ LeaveExitFrame(save_doubles_, s0, true); in GenerateCore()
3936 __ bind(&failure_returned); in GenerateCore()
3938 __ andi(t0, v0, ((1 << kFailureTypeTagSize) - 1) << kFailureTagSize); in GenerateCore()
3939 __ Branch(&retry, eq, t0, Operand(zero_reg)); in GenerateCore()
3943 __ Branch(USE_DELAY_SLOT, in GenerateCore()
3952 __ LoadRoot(a3, Heap::kTheHoleValueRootIndex); in GenerateCore()
3953 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in GenerateCore()
3955 __ lw(v0, MemOperand(t0)); in GenerateCore()
3956 __ sw(a3, MemOperand(t0)); in GenerateCore()
3960 __ LoadRoot(t0, Heap::kTerminationExceptionRootIndex); in GenerateCore()
3961 __ Branch(throw_termination_exception, eq, v0, Operand(t0)); in GenerateCore()
3964 __ jmp(throw_normal_exception); in GenerateCore()
3966 __ bind(&retry); in GenerateCore()
3991 __ Addu(s1, sp, s1); in Generate()
3995 __ EnterExitFrame(save_doubles_); in Generate()
4023 __ li(v0, Operand(reinterpret_cast<int32_t>(failure))); in Generate()
4031 __ bind(&throw_out_of_memory_exception); in Generate()
4036 __ li(a0, Operand(false, RelocInfo::NONE)); in Generate()
4037 __ li(a2, Operand(external_caught)); in Generate()
4038 __ sw(a0, MemOperand(a2)); in Generate()
4042 __ li(v0, Operand(reinterpret_cast<int32_t>(out_of_memory))); in Generate()
4043 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in Generate()
4045 __ sw(v0, MemOperand(a2)); in Generate()
4048 __ bind(&throw_termination_exception); in Generate()
4049 __ ThrowUncatchable(v0); in Generate()
4051 __ bind(&throw_normal_exception); in Generate()
4052 __ Throw(v0); in Generate()
4071 __ MultiPush(kCalleeSaved | ra.bit()); in GenerateBody()
4076 __ MultiPushFPU(kCalleeSavedFPU); in GenerateBody()
4078 __ Move(kDoubleRegZero, 0.0); in GenerateBody()
4088 __ InitializeRootRegister(); in GenerateBody()
4089 __ lw(s0, MemOperand(sp, offset_to_argv + kCArgsSlotsSize)); in GenerateBody()
4092 __ li(t3, Operand(-1)); // Push a bad frame pointer to fail if it is used. in GenerateBody()
4094 __ li(t2, Operand(Smi::FromInt(marker))); in GenerateBody()
4095 __ li(t1, Operand(Smi::FromInt(marker))); in GenerateBody()
4096 __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress, in GenerateBody()
4098 __ lw(t0, MemOperand(t0)); in GenerateBody()
4099 __ Push(t3, t2, t1, t0); in GenerateBody()
4101 __ addiu(fp, sp, -EntryFrameConstants::kCallerFPOffset); in GenerateBody()
4122 __ li(t1, Operand(ExternalReference(js_entry_sp))); in GenerateBody()
4123 __ lw(t2, MemOperand(t1)); in GenerateBody()
4124 __ Branch(&non_outermost_js, ne, t2, Operand(zero_reg)); in GenerateBody()
4125 __ sw(fp, MemOperand(t1)); in GenerateBody()
4126 __ li(t0, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); in GenerateBody()
4128 __ b(&cont); in GenerateBody()
4129 __ nop(); // Branch delay slot nop. in GenerateBody()
4130 __ bind(&non_outermost_js); in GenerateBody()
4131 __ li(t0, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME))); in GenerateBody()
4132 __ bind(&cont); in GenerateBody()
4133 __ push(t0); in GenerateBody()
4137 __ jmp(&invoke); in GenerateBody()
4138 __ bind(&handler_entry); in GenerateBody()
4144 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in GenerateBody()
4146 __ sw(v0, MemOperand(t0)); // We come back from 'invoke'. result is in v0. in GenerateBody()
4147 __ li(v0, Operand(reinterpret_cast<int32_t>(Failure::Exception()))); in GenerateBody()
4148 __ b(&exit); // b exposes branch delay slot. in GenerateBody()
4149 __ nop(); // Branch delay slot nop. in GenerateBody()
4153 __ bind(&invoke); in GenerateBody()
4154 __ PushTryHandler(StackHandler::JS_ENTRY, 0); in GenerateBody()
4161 __ LoadRoot(t1, Heap::kTheHoleValueRootIndex); in GenerateBody()
4162 __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in GenerateBody()
4164 __ sw(t1, MemOperand(t0)); in GenerateBody()
4187 __ li(t0, Operand(construct_entry)); in GenerateBody()
4190 __ li(t0, Operand(entry)); in GenerateBody()
4192 __ lw(t9, MemOperand(t0)); // Deref address. in GenerateBody()
4195 __ addiu(t9, t9, Code::kHeaderSize - kHeapObjectTag); in GenerateBody()
4196 __ Call(t9); in GenerateBody()
4199 __ PopTryHandler(); in GenerateBody()
4201 __ bind(&exit); // v0 holds result in GenerateBody()
4204 __ pop(t1); in GenerateBody()
4205 __ Branch(&non_outermost_js_2, in GenerateBody()
4209 __ li(t1, Operand(ExternalReference(js_entry_sp))); in GenerateBody()
4210 __ sw(zero_reg, MemOperand(t1)); in GenerateBody()
4211 __ bind(&non_outermost_js_2); in GenerateBody()
4214 __ pop(t1); in GenerateBody()
4215 __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress, in GenerateBody()
4217 __ sw(t1, MemOperand(t0)); in GenerateBody()
4220 __ addiu(sp, sp, -EntryFrameConstants::kCallerFPOffset); in GenerateBody()
4225 __ MultiPopFPU(kCalleeSavedFPU); in GenerateBody()
4229 __ MultiPop(kCalleeSaved | ra.bit()); in GenerateBody()
4231 __ Jump(ra); in GenerateBody()
4262 __ lw(object, MemOperand(sp, 1 * kPointerSize)); in Generate()
4263 __ lw(function, MemOperand(sp, 0)); in Generate()
4267 __ JumpIfSmi(object, &not_js_object); in Generate()
4268 __ IsObjectJSObjectType(object, map, scratch, &not_js_object); in Generate()
4274 __ LoadRoot(at, Heap::kInstanceofCacheFunctionRootIndex); in Generate()
4275 __ Branch(&miss, ne, function, Operand(at)); in Generate()
4276 __ LoadRoot(at, Heap::kInstanceofCacheMapRootIndex); in Generate()
4277 __ Branch(&miss, ne, map, Operand(at)); in Generate()
4278 __ LoadRoot(v0, Heap::kInstanceofCacheAnswerRootIndex); in Generate()
4279 __ DropAndRet(HasArgsInRegisters() ? 0 : 2); in Generate()
4281 __ bind(&miss); in Generate()
4285 __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true); in Generate()
4288 __ JumpIfSmi(prototype, &slow); in Generate()
4289 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow); in Generate()
4294 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex); in Generate()
4295 __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex); in Generate()
4302 __ LoadFromSafepointRegisterSlot(scratch, t0); in Generate()
4303 __ Subu(inline_site, ra, scratch); in Generate()
4305 __ GetRelocatedValue(inline_site, scratch, v1); // v1 used as scratch. in Generate()
4306 __ sw(map, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset)); in Generate()
4311 __ lw(scratch, FieldMemOperand(map, Map::kPrototypeOffset)); in Generate()
4318 __ LoadRoot(scratch2, Heap::kNullValueRootIndex); in Generate()
4319 __ bind(&loop); in Generate()
4320 __ Branch(&is_instance, eq, scratch, Operand(prototype)); in Generate()
4321 __ Branch(&is_not_instance, eq, scratch, Operand(scratch2)); in Generate()
4322 __ lw(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset)); in Generate()
4323 __ lw(scratch, FieldMemOperand(scratch, Map::kPrototypeOffset)); in Generate()
4324 __ Branch(&loop); in Generate()
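The loop at 4319-4324 is the whole of instanceof once the caches are settled: follow map->prototype links from the object until hitting either the function's prototype (true) or null (false). With hypothetical Object/Map structs standing in for the tagged heap layout:

    struct Map;
    struct Object { Map* map; };
    struct Map    { Object* prototype; };

    // scratch walks the chain; prototype and null are the two exits.
    bool IsInstance(Object* object, Object* prototype, Object* null_value) {
      for (Object* p = object->map->prototype; ; p = p->map->prototype) {
        if (p == prototype)  return true;   // &is_instance
        if (p == null_value) return false;  // &is_not_instance
      }
    }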
4326 __ bind(&is_instance); in Generate()
4329 __ mov(v0, zero_reg); in Generate()
4330 __ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex); in Generate()
4333 __ LoadRoot(v0, Heap::kTrueValueRootIndex); in Generate()
4334 __ Addu(inline_site, inline_site, Operand(kDeltaToLoadBoolResult)); in Generate()
4336 __ PatchRelocatedValue(inline_site, scratch, v0); in Generate()
4340 __ mov(v0, zero_reg); in Generate()
4343 __ DropAndRet(HasArgsInRegisters() ? 0 : 2); in Generate()
4345 __ bind(&is_not_instance); in Generate()
4347 __ li(v0, Operand(Smi::FromInt(1))); in Generate()
4348 __ StoreRoot(v0, Heap::kInstanceofCacheAnswerRootIndex); in Generate()
4351 __ LoadRoot(v0, Heap::kFalseValueRootIndex); in Generate()
4352 __ Addu(inline_site, inline_site, Operand(kDeltaToLoadBoolResult)); in Generate()
4354 __ PatchRelocatedValue(inline_site, scratch, v0); in Generate()
4357 __ li(v0, Operand(Smi::FromInt(1))); in Generate()
4361 __ DropAndRet(HasArgsInRegisters() ? 0 : 2); in Generate()
4364 __ bind(&not_js_object); in Generate()
4367 __ JumpIfSmi(function, &slow); in Generate()
4368 __ GetObjectType(function, scratch2, scratch); in Generate()
4369 __ Branch(&slow, ne, scratch, Operand(JS_FUNCTION_TYPE)); in Generate()
4372 __ Branch(&object_not_null, in Generate()
4376 __ li(v0, Operand(Smi::FromInt(1))); in Generate()
4377 __ DropAndRet(HasArgsInRegisters() ? 0 : 2); in Generate()
4379 __ bind(&object_not_null); in Generate()
4381 __ JumpIfNotSmi(object, &object_not_null_or_smi); in Generate()
4382 __ li(v0, Operand(Smi::FromInt(1))); in Generate()
4383 __ DropAndRet(HasArgsInRegisters() ? 0 : 2); in Generate()
4385 __ bind(&object_not_null_or_smi); in Generate()
4387 __ IsObjectJSStringType(object, scratch, &slow); in Generate()
4388 __ li(v0, Operand(Smi::FromInt(1))); in Generate()
4389 __ DropAndRet(HasArgsInRegisters() ? 0 : 2); in Generate()
4392 __ bind(&slow); in Generate()
4395 __ Push(a0, a1); in Generate()
4397 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); in Generate()
4401 __ Push(a0, a1); in Generate()
4402 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION); in Generate()
4404 __ mov(a0, v0); in Generate()
4405 __ LoadRoot(v0, Heap::kTrueValueRootIndex); in Generate()
4406 __ DropAndRet(HasArgsInRegisters() ? 0 : 2, eq, a0, Operand(zero_reg)); in Generate()
4407 __ LoadRoot(v0, Heap::kFalseValueRootIndex); in Generate()
4408 __ DropAndRet(HasArgsInRegisters() ? 0 : 2); in Generate()
4427 __ JumpIfNotSmi(a1, &slow); in GenerateReadElement()
4431 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in GenerateReadElement()
4432 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset)); in GenerateReadElement()
4433 __ Branch(&adaptor, in GenerateReadElement()
4441 __ Branch(&slow, hs, a1, Operand(a0)); in GenerateReadElement()
4444 __ subu(a3, a0, a1); in GenerateReadElement()
4445 __ sll(t3, a3, kPointerSizeLog2 - kSmiTagSize); in GenerateReadElement()
4446 __ Addu(a3, fp, Operand(t3)); in GenerateReadElement()
4447 __ lw(v0, MemOperand(a3, kDisplacement)); in GenerateReadElement()
4448 __ Ret(); in GenerateReadElement()
4453 __ bind(&adaptor); in GenerateReadElement()
4454 __ lw(a0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset)); in GenerateReadElement()
4455 __ Branch(&slow, Ugreater_equal, a1, Operand(a0)); in GenerateReadElement()
4458 __ subu(a3, a0, a1); in GenerateReadElement()
4459 __ sll(t3, a3, kPointerSizeLog2 - kSmiTagSize); in GenerateReadElement()
4460 __ Addu(a3, a2, Operand(t3)); in GenerateReadElement()
4461 __ lw(v0, MemOperand(a3, kDisplacement)); in GenerateReadElement()
4462 __ Ret(); in GenerateReadElement()
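Both bounds checks above (the hs at 4441 and Ugreater_equal at 4455) lean on a single unsigned compare: reinterpreted as unsigned, a negative Smi index becomes huge, so index-below-argc rejects out-of-range and negative indices in one branch:

    #include <cstdint>

    // The stub branches to &slow on the complement of this test.
    bool IndexInRange(int32_t index, int32_t argc) {
      return static_cast<uint32_t>(index) < static_cast<uint32_t>(argc);
    }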
4466 __ bind(&slow); in GenerateReadElement()
4467 __ push(a1); in GenerateReadElement()
4468 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1); in GenerateReadElement()
4478 __ lw(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in GenerateNewNonStrictSlow()
4479 __ lw(a2, MemOperand(a3, StandardFrameConstants::kContextOffset)); in GenerateNewNonStrictSlow()
4480 __ Branch(&runtime, in GenerateNewNonStrictSlow()
4486 __ lw(a2, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset)); in GenerateNewNonStrictSlow()
4487 __ sw(a2, MemOperand(sp, 0 * kPointerSize)); in GenerateNewNonStrictSlow()
4488 __ sll(t3, a2, 1); in GenerateNewNonStrictSlow()
4489 __ Addu(a3, a3, Operand(t3)); in GenerateNewNonStrictSlow()
4490 __ addiu(a3, a3, StandardFrameConstants::kCallerSPOffset); in GenerateNewNonStrictSlow()
4491 __ sw(a3, MemOperand(sp, 1 * kPointerSize)); in GenerateNewNonStrictSlow()
4493 __ bind(&runtime); in GenerateNewNonStrictSlow()
4494 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); in GenerateNewNonStrictSlow()
4507 __ lw(a1, MemOperand(sp, 0 * kPointerSize)); in GenerateNewNonStrictFast()
4513 __ lw(a3, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in GenerateNewNonStrictFast()
4514 __ lw(a2, MemOperand(a3, StandardFrameConstants::kContextOffset)); in GenerateNewNonStrictFast()
4515 __ Branch(&adaptor_frame, in GenerateNewNonStrictFast()
4521 __ mov(a2, a1); in GenerateNewNonStrictFast()
4522 __ b(&try_allocate); in GenerateNewNonStrictFast()
4523 __ nop(); // Branch delay slot nop. in GenerateNewNonStrictFast()
4526 __ bind(&adaptor_frame); in GenerateNewNonStrictFast()
4527 __ lw(a2, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset)); in GenerateNewNonStrictFast()
4528 __ sll(t6, a2, 1); in GenerateNewNonStrictFast()
4529 __ Addu(a3, a3, Operand(t6)); in GenerateNewNonStrictFast()
4530 __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset)); in GenerateNewNonStrictFast()
4531 __ sw(a3, MemOperand(sp, 1 * kPointerSize)); in GenerateNewNonStrictFast()
4537 __ Branch(&skip_min, lt, a1, Operand(a2)); in GenerateNewNonStrictFast()
4538 __ mov(a1, a2); in GenerateNewNonStrictFast()
4539 __ bind(&skip_min); in GenerateNewNonStrictFast()
4541 __ bind(&try_allocate); in GenerateNewNonStrictFast()
4550 __ Branch(USE_DELAY_SLOT, &param_map_size, eq, a1, Operand(zero_reg)); in GenerateNewNonStrictFast()
4551 __ mov(t5, zero_reg); // In delay slot: param map size = 0 when a1 == 0. in GenerateNewNonStrictFast()
4552 __ sll(t5, a1, 1); in GenerateNewNonStrictFast()
4553 __ addiu(t5, t5, kParameterMapHeaderSize); in GenerateNewNonStrictFast()
4554 __ bind(&param_map_size); in GenerateNewNonStrictFast()
4557 __ sll(t6, a2, 1); in GenerateNewNonStrictFast()
4558 __ Addu(t5, t5, Operand(t6)); in GenerateNewNonStrictFast()
4559 __ Addu(t5, t5, Operand(FixedArray::kHeaderSize)); in GenerateNewNonStrictFast()
4562 __ Addu(t5, t5, Operand(Heap::kArgumentsObjectSize)); in GenerateNewNonStrictFast()
4565 __ AllocateInNewSpace(t5, v0, a3, t0, &runtime, TAG_OBJECT); in GenerateNewNonStrictFast()
4575 __ lw(t0, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); in GenerateNewNonStrictFast()
4576 __ lw(t0, FieldMemOperand(t0, GlobalObject::kGlobalContextOffset)); in GenerateNewNonStrictFast()
4578 __ Branch(&skip2_ne, ne, a1, Operand(zero_reg)); in GenerateNewNonStrictFast()
4579 __ lw(t0, MemOperand(t0, kNormalOffset)); in GenerateNewNonStrictFast()
4580 __ bind(&skip2_ne); in GenerateNewNonStrictFast()
4582 __ Branch(&skip2_eq, eq, a1, Operand(zero_reg)); in GenerateNewNonStrictFast()
4583 __ lw(t0, MemOperand(t0, kAliasedOffset)); in GenerateNewNonStrictFast()
4584 __ bind(&skip2_eq); in GenerateNewNonStrictFast()
4592 __ lw(a3, FieldMemOperand(t0, i)); in GenerateNewNonStrictFast()
4593 __ sw(a3, FieldMemOperand(v0, i)); in GenerateNewNonStrictFast()
4598 __ lw(a3, MemOperand(sp, 2 * kPointerSize)); in GenerateNewNonStrictFast()
4601 __ sw(a3, FieldMemOperand(v0, kCalleeOffset)); in GenerateNewNonStrictFast()
4607 __ sw(a2, FieldMemOperand(v0, kLengthOffset)); in GenerateNewNonStrictFast()
4612 __ Addu(t0, v0, Operand(Heap::kArgumentsObjectSize)); in GenerateNewNonStrictFast()
4613 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); in GenerateNewNonStrictFast()
4622 __ Branch(&skip3, ne, a1, Operand(Smi::FromInt(0))); in GenerateNewNonStrictFast()
4625 __ mov(a3, t0); in GenerateNewNonStrictFast()
4626 __ bind(&skip3); in GenerateNewNonStrictFast()
4628 __ Branch(&skip_parameter_map, eq, a1, Operand(Smi::FromInt(0))); in GenerateNewNonStrictFast()
4630 __ LoadRoot(t2, Heap::kNonStrictArgumentsElementsMapRootIndex); in GenerateNewNonStrictFast()
4631 __ sw(t2, FieldMemOperand(t0, FixedArray::kMapOffset)); in GenerateNewNonStrictFast()
4632 __ Addu(t2, a1, Operand(Smi::FromInt(2))); in GenerateNewNonStrictFast()
4633 __ sw(t2, FieldMemOperand(t0, FixedArray::kLengthOffset)); in GenerateNewNonStrictFast()
4634 __ sw(cp, FieldMemOperand(t0, FixedArray::kHeaderSize + 0 * kPointerSize)); in GenerateNewNonStrictFast()
4635 __ sll(t6, a1, 1); in GenerateNewNonStrictFast()
4636 __ Addu(t2, t0, Operand(t6)); in GenerateNewNonStrictFast()
4637 __ Addu(t2, t2, Operand(kParameterMapHeaderSize)); in GenerateNewNonStrictFast()
4638 __ sw(t2, FieldMemOperand(t0, FixedArray::kHeaderSize + 1 * kPointerSize)); in GenerateNewNonStrictFast()
4649 __ mov(t2, a1); in GenerateNewNonStrictFast()
4650 __ lw(t5, MemOperand(sp, 0 * kPointerSize)); in GenerateNewNonStrictFast()
4651 __ Addu(t5, t5, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS))); in GenerateNewNonStrictFast()
4652 __ Subu(t5, t5, Operand(a1)); in GenerateNewNonStrictFast()
4653 __ LoadRoot(t3, Heap::kTheHoleValueRootIndex); in GenerateNewNonStrictFast()
4654 __ sll(t6, t2, 1); in GenerateNewNonStrictFast()
4655 __ Addu(a3, t0, Operand(t6)); in GenerateNewNonStrictFast()
4656 __ Addu(a3, a3, Operand(kParameterMapHeaderSize)); in GenerateNewNonStrictFast()
4664 __ jmp(&parameters_test); in GenerateNewNonStrictFast()
4666 __ bind(&parameters_loop); in GenerateNewNonStrictFast()
4667 __ Subu(t2, t2, Operand(Smi::FromInt(1))); in GenerateNewNonStrictFast()
4668 __ sll(t1, t2, 1); in GenerateNewNonStrictFast()
4669 __ Addu(t1, t1, Operand(kParameterMapHeaderSize - kHeapObjectTag)); in GenerateNewNonStrictFast()
4670 __ Addu(t6, t0, t1); in GenerateNewNonStrictFast()
4671 __ sw(t5, MemOperand(t6)); in GenerateNewNonStrictFast()
4672 __ Subu(t1, t1, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize)); in GenerateNewNonStrictFast()
4673 __ Addu(t6, a3, t1); in GenerateNewNonStrictFast()
4674 __ sw(t3, MemOperand(t6)); in GenerateNewNonStrictFast()
4675 __ Addu(t5, t5, Operand(Smi::FromInt(1))); in GenerateNewNonStrictFast()
4676 __ bind(&parameters_test); in GenerateNewNonStrictFast()
4677 __ Branch(&parameters_loop, ne, t2, Operand(Smi::FromInt(0))); in GenerateNewNonStrictFast()
4679 __ bind(&skip_parameter_map); in GenerateNewNonStrictFast()
4684 __ LoadRoot(t1, Heap::kFixedArrayMapRootIndex); in GenerateNewNonStrictFast()
4685 __ sw(t1, FieldMemOperand(a3, FixedArray::kMapOffset)); in GenerateNewNonStrictFast()
4686 __ sw(a2, FieldMemOperand(a3, FixedArray::kLengthOffset)); in GenerateNewNonStrictFast()
4689 __ mov(t5, a1); in GenerateNewNonStrictFast()
4690 __ lw(t0, MemOperand(sp, 1 * kPointerSize)); in GenerateNewNonStrictFast()
4691 __ sll(t6, t5, 1); in GenerateNewNonStrictFast()
4692 __ Subu(t0, t0, Operand(t6)); in GenerateNewNonStrictFast()
4693 __ jmp(&arguments_test); in GenerateNewNonStrictFast()
4695 __ bind(&arguments_loop); in GenerateNewNonStrictFast()
4696 __ Subu(t0, t0, Operand(kPointerSize)); in GenerateNewNonStrictFast()
4697 __ lw(t2, MemOperand(t0, 0)); in GenerateNewNonStrictFast()
4698 __ sll(t6, t5, 1); in GenerateNewNonStrictFast()
4699 __ Addu(t1, a3, Operand(t6)); in GenerateNewNonStrictFast()
4700 __ sw(t2, FieldMemOperand(t1, FixedArray::kHeaderSize)); in GenerateNewNonStrictFast()
4701 __ Addu(t5, t5, Operand(Smi::FromInt(1))); in GenerateNewNonStrictFast()
4703 __ bind(&arguments_test); in GenerateNewNonStrictFast()
4704 __ Branch(&arguments_loop, lt, t5, Operand(a2)); in GenerateNewNonStrictFast()
4707 __ DropAndRet(3); in GenerateNewNonStrictFast()
4711 __ bind(&runtime); in GenerateNewNonStrictFast()
4712 __ sw(a2, MemOperand(sp, 0 * kPointerSize)); // Patch argument count. in GenerateNewNonStrictFast()
4713 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); in GenerateNewNonStrictFast()
4723 __ lw(a2, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); in GenerateNewStrict()
4724 __ lw(a3, MemOperand(a2, StandardFrameConstants::kContextOffset)); in GenerateNewStrict()
4725 __ Branch(&adaptor_frame, in GenerateNewStrict()
4731 __ lw(a1, MemOperand(sp, 0)); in GenerateNewStrict()
4732 __ Branch(&try_allocate); in GenerateNewStrict()
4735 __ bind(&adaptor_frame); in GenerateNewStrict()
4736 __ lw(a1, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset)); in GenerateNewStrict()
4737 __ sw(a1, MemOperand(sp, 0)); in GenerateNewStrict()
4738 __ sll(at, a1, kPointerSizeLog2 - kSmiTagSize); in GenerateNewStrict()
4739 __ Addu(a3, a2, Operand(at)); in GenerateNewStrict()
4741 __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset)); in GenerateNewStrict()
4742 __ sw(a3, MemOperand(sp, 1 * kPointerSize)); in GenerateNewStrict()
4747 __ bind(&try_allocate); in GenerateNewStrict()
4748 __ Branch(&add_arguments_object, eq, a1, Operand(zero_reg)); in GenerateNewStrict()
4749 __ srl(a1, a1, kSmiTagSize); in GenerateNewStrict()
4751 __ Addu(a1, a1, Operand(FixedArray::kHeaderSize / kPointerSize)); in GenerateNewStrict()
4752 __ bind(&add_arguments_object); in GenerateNewStrict()
4753 __ Addu(a1, a1, Operand(Heap::kArgumentsObjectSizeStrict / kPointerSize)); in GenerateNewStrict()
4756 __ AllocateInNewSpace(a1, in GenerateNewStrict()
4765 __ lw(t0, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); in GenerateNewStrict()
4766 __ lw(t0, FieldMemOperand(t0, GlobalObject::kGlobalContextOffset)); in GenerateNewStrict()
4767 __ lw(t0, MemOperand(t0, Context::SlotOffset( in GenerateNewStrict()
4771 __ CopyFields(v0, t0, a3.bit(), JSObject::kHeaderSize / kPointerSize); in GenerateNewStrict()
4775 __ lw(a1, MemOperand(sp, 0 * kPointerSize)); in GenerateNewStrict()
4776 __ sw(a1, FieldMemOperand(v0, JSObject::kHeaderSize + in GenerateNewStrict()
4780 __ Branch(&done, eq, a1, Operand(zero_reg)); in GenerateNewStrict()
4783 __ lw(a2, MemOperand(sp, 1 * kPointerSize)); in GenerateNewStrict()
4787 __ Addu(t0, v0, Operand(Heap::kArgumentsObjectSizeStrict)); in GenerateNewStrict()
4788 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); in GenerateNewStrict()
4789 __ LoadRoot(a3, Heap::kFixedArrayMapRootIndex); in GenerateNewStrict()
4790 __ sw(a3, FieldMemOperand(t0, FixedArray::kMapOffset)); in GenerateNewStrict()
4791 __ sw(a1, FieldMemOperand(t0, FixedArray::kLengthOffset)); in GenerateNewStrict()
4793 __ srl(a1, a1, kSmiTagSize); in GenerateNewStrict()
4798 __ Addu(t0, t0, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in GenerateNewStrict()
4799 __ bind(&loop); in GenerateNewStrict()
4802 __ Addu(a2, a2, Operand(-kPointerSize)); in GenerateNewStrict()
4803 __ lw(a3, MemOperand(a2)); in GenerateNewStrict()
4805 __ sw(a3, MemOperand(t0)); in GenerateNewStrict()
4806 __ Addu(t0, t0, Operand(kPointerSize)); in GenerateNewStrict()
4807 __ Subu(a1, a1, Operand(1)); in GenerateNewStrict()
4808 __ Branch(&loop, ne, a1, Operand(zero_reg)); in GenerateNewStrict()
4811 __ bind(&done); in GenerateNewStrict()
4812 __ DropAndRet(3); in GenerateNewStrict()
4815 __ bind(&runtime); in GenerateNewStrict()
4816 __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1); in GenerateNewStrict()
4825 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); in Generate()
4859 __ li(a0, Operand(address_of_regexp_stack_memory_size)); in Generate()
4860 __ lw(a0, MemOperand(a0, 0)); in Generate()
4861 __ Branch(&runtime, eq, a0, Operand(zero_reg)); in Generate()
4864 __ lw(a0, MemOperand(sp, kJSRegExpOffset)); in Generate()
4866 __ JumpIfSmi(a0, &runtime); in Generate()
4867 __ GetObjectType(a0, a1, a1); in Generate()
4868 __ Branch(&runtime, ne, a1, Operand(JS_REGEXP_TYPE)); in Generate()
4871 __ lw(regexp_data, FieldMemOperand(a0, JSRegExp::kDataOffset)); in Generate()
4873 __ And(t0, regexp_data, Operand(kSmiTagMask)); in Generate()
4874 __ Check(nz, in Generate()
4878 __ GetObjectType(regexp_data, a0, a0); in Generate()
4879 __ Check(eq, in Generate()
4887 __ lw(a0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset)); in Generate()
4888 __ Branch(&runtime, ne, a0, Operand(Smi::FromInt(JSRegExp::IRREGEXP))); in Generate()
4892 __ lw(a2, in Generate()
4898 __ Addu(a2, a2, Operand(2)); // a2 was a smi. in Generate()
4900 __ Branch(&runtime, hi, a2, Operand(OffsetsVector::kStaticOffsetsVectorSize)); in Generate()
4905 __ lw(subject, MemOperand(sp, kSubjectOffset)); in Generate()
4906 __ JumpIfSmi(subject, &runtime); in Generate()
4907 __ GetObjectType(subject, a0, a0); in Generate()
4908 __ And(a0, a0, Operand(kIsNotStringMask)); in Generate()
4910 __ Branch(&runtime, ne, a0, Operand(zero_reg)); in Generate()
4913 __ lw(a3, FieldMemOperand(subject, String::kLengthOffset)); in Generate()
4921 __ lw(a0, MemOperand(sp, kPreviousIndexOffset)); in Generate()
4922 __ JumpIfNotSmi(a0, &runtime); in Generate()
4923 __ Branch(&runtime, ls, a3, Operand(a0)); in Generate()
4929 __ lw(a0, MemOperand(sp, kLastMatchInfoOffset)); in Generate()
4930 __ JumpIfSmi(a0, &runtime); in Generate()
4931 __ GetObjectType(a0, a1, a1); in Generate()
4932 __ Branch(&runtime, ne, a1, Operand(JS_ARRAY_TYPE)); in Generate()
4934 __ lw(last_match_info_elements, in Generate()
4936 __ lw(a0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset)); in Generate()
4937 __ Branch(&runtime, ne, a0, Operand( in Generate()
4941 __ lw(a0, in Generate()
4943 __ Addu(a2, a2, Operand(RegExpImpl::kLastMatchOverhead)); in Generate()
4944 __ sra(at, a0, kSmiTagSize); // Untag length for comparison. in Generate()
4945 __ Branch(&runtime, gt, a2, Operand(at)); in Generate()
4948 __ mov(t0, zero_reg); in Generate()
4953 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset)); in Generate()
4954 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset)); in Generate()
4957 __ And(a1, in Generate()
4963 __ Branch(&seq_string, eq, a1, Operand(zero_reg)); in Generate()
4980 __ Branch(&cons_string, lt, a1, Operand(kExternalStringTag)); in Generate()
4981 __ Branch(&external_string, eq, a1, Operand(kExternalStringTag)); in Generate()
4985 __ And(at, a1, Operand(kIsNotStringMask | kShortExternalStringMask)); in Generate()
4986 __ Branch(&runtime, ne, at, Operand(zero_reg)); in Generate()
4989 __ lw(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset)); in Generate()
4990 __ sra(t0, t0, kSmiTagSize); in Generate()
4991 __ lw(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); in Generate()
4993 __ jmp(&check_encoding); in Generate()
4995 __ bind(&cons_string); in Generate()
4996 __ lw(a0, FieldMemOperand(subject, ConsString::kSecondOffset)); in Generate()
4997 __ LoadRoot(a1, Heap::kEmptyStringRootIndex); in Generate()
4998 __ Branch(&runtime, ne, a0, Operand(a1)); in Generate()
4999 __ lw(subject, FieldMemOperand(subject, ConsString::kFirstOffset)); in Generate()
5001 __ bind(&check_encoding); in Generate()
5002 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset)); in Generate()
5003 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset)); in Generate()
5005 __ And(at, a0, Operand(kStringRepresentationMask)); in Generate()
5006 __ Branch(&external_string, ne, at, Operand(zero_reg)); in Generate()
5008 __ bind(&seq_string); in Generate()
5016 __ And(a0, a0, Operand(kStringEncodingMask)); // Non-zero for ASCII. in Generate()
5017 __ lw(t9, FieldMemOperand(regexp_data, JSRegExp::kDataAsciiCodeOffset)); in Generate()
5018 __ sra(a3, a0, 2); // a3 is 1 for ASCII, 0 for UC16 (used below). in Generate()
5019 __ lw(t1, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset)); in Generate()
5020 __ Movz(t9, t1, a0); // If UC16 (a0 is 0), replace t9 w/kDataUC16CodeOffset. in Generate()
5025 __ JumpIfSmi(t9, &runtime); in Generate()
5033 __ lw(a1, MemOperand(sp, kPreviousIndexOffset)); in Generate()
5034 __ sra(a1, a1, kSmiTagSize); // Untag the Smi. in Generate()
5042 __ IncrementCounter(isolate->counters()->regexp_entry_native(), in Generate()
5048 __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters); in Generate()
5065 __ li(a0, Operand(ExternalReference::isolate_address())); in Generate()
5066 __ sw(a0, MemOperand(sp, 4 * kPointerSize)); in Generate()
5069 __ li(a0, Operand(1)); in Generate()
5070 __ sw(a0, MemOperand(sp, 3 * kPointerSize)); in Generate()
5073 __ li(a0, Operand(address_of_regexp_stack_memory_address)); in Generate()
5074 __ lw(a0, MemOperand(a0, 0)); in Generate()
5075 __ li(a2, Operand(address_of_regexp_stack_memory_size)); in Generate()
5076 __ lw(a2, MemOperand(a2, 0)); in Generate()
5077 __ addu(a0, a0, a2); in Generate()
5078 __ sw(a0, MemOperand(sp, 2 * kPointerSize)); in Generate()
5081 __ li(a0, Operand( in Generate()
5083 __ sw(a0, MemOperand(sp, 1 * kPointerSize)); in Generate()
5087 __ Addu(t2, subject, Operand(SeqString::kHeaderSize - kHeapObjectTag)); in Generate()
5088 __ Xor(a3, a3, Operand(1)); // 1 for 2-byte str, 0 for 1-byte. in Generate()
5093 __ lw(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize)); in Generate()
5098 __ sllv(t1, t0, a3); in Generate()
5099 __ addu(t0, t2, t1); in Generate()
5100 __ sllv(t1, a1, a3); in Generate()
5101 __ addu(a2, t0, t1); in Generate()
5103 __ lw(t2, FieldMemOperand(subject, String::kLengthOffset)); in Generate()
5104 __ sra(t2, t2, kSmiTagSize); in Generate()
5105 __ sllv(t1, t2, a3); in Generate()
5106 __ addu(a3, t0, t1); in Generate()
5111 __ mov(a0, subject); in Generate()
5114 __ Addu(t9, t9, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
5118 __ LeaveExitFrame(false, no_reg); in Generate()
5128 __ Branch(&success, eq, v0, Operand(NativeRegExpMacroAssembler::SUCCESS)); in Generate()
5130 __ Branch(&failure, eq, v0, Operand(NativeRegExpMacroAssembler::FAILURE)); in Generate()
5132 __ Branch(&runtime, ne, v0, Operand(NativeRegExpMacroAssembler::EXCEPTION)); in Generate()
5137 __ li(a1, Operand(isolate->factory()->the_hole_value())); in Generate()
5138 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in Generate()
5140 __ lw(v0, MemOperand(a2, 0)); in Generate()
5141 __ Branch(&runtime, eq, v0, Operand(a1)); in Generate()
5143 __ sw(a1, MemOperand(a2, 0)); // Clear pending exception. in Generate()
5146 __ LoadRoot(a0, Heap::kTerminationExceptionRootIndex); in Generate()
5148 __ Branch(&termination_exception, eq, v0, Operand(a0)); in Generate()
5150 __ Throw(v0); in Generate()
5152 __ bind(&termination_exception); in Generate()
5153 __ ThrowUncatchable(v0); in Generate()
5155 __ bind(&failure); in Generate()
5157 __ li(v0, Operand(isolate->factory()->null_value())); in Generate()
5158 __ DropAndRet(4); in Generate()
5161 __ bind(&success); in Generate()
5162 __ lw(a1, in Generate()
5167 __ Addu(a1, a1, Operand(2)); // a1 was a smi. in Generate()
5172 __ sll(a2, a1, kSmiTagSize + kSmiShiftSize); // To smi. in Generate()
5173 __ sw(a2, FieldMemOperand(last_match_info_elements, in Generate()
5176 __ sw(subject, in Generate()
5179 __ mov(a2, subject); in Generate()
5180 __ RecordWriteField(last_match_info_elements, in Generate()
5186 __ sw(subject, in Generate()
5189 __ RecordWriteField(last_match_info_elements, in Generate()
5199 __ li(a2, Operand(address_of_static_offsets_vector)); in Generate()
5206 __ Addu(a0, in Generate()
5209 __ bind(&next_capture); in Generate()
5210 __ Subu(a1, a1, Operand(1)); in Generate()
5211 __ Branch(&done, lt, a1, Operand(zero_reg)); in Generate()
5213 __ lw(a3, MemOperand(a2, 0)); in Generate()
5214 __ addiu(a2, a2, kPointerSize); in Generate()
5216 __ sll(a3, a3, kSmiTagSize); // Convert to Smi. in Generate()
5217 __ sw(a3, MemOperand(a0, 0)); in Generate()
5218 __ Branch(&next_capture, USE_DELAY_SLOT); in Generate()
5219 __ addiu(a0, a0, kPointerSize); // In branch delay slot. in Generate()
5221 __ bind(&done); in Generate()
5224 __ lw(v0, MemOperand(sp, kLastMatchInfoOffset)); in Generate()
5225 __ DropAndRet(4); in Generate()
5229 __ bind(&external_string); in Generate()
5230 __ lw(a0, FieldMemOperand(subject, HeapObject::kMapOffset)); in Generate()
5231 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset)); in Generate()
5235 __ And(at, a0, Operand(kIsIndirectStringMask)); in Generate()
5236 __ Assert(eq, in Generate()
5241 __ lw(subject, in Generate()
5245 __ Subu(subject, in Generate()
5248 __ jmp(&seq_string); in Generate()
5251 __ bind(&runtime); in Generate()
5252 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); in Generate()
5261 __ lw(a1, MemOperand(sp, kPointerSize * 2)); in Generate()
5264 __ JumpIfNotSmi(a1, &slowcase); in Generate()
5265 __ Branch(&slowcase, hi, a1, Operand(Smi::FromInt(kMaxInlineLength))); in Generate()
5274 __ srl(t1, a1, kSmiTagSize + kSmiShiftSize); in Generate()
5275 __ Addu(a2, t1, Operand(objects_size)); in Generate()
5276 __ AllocateInNewSpace( in Generate()
5291 __ lw(a2, ContextOperand(cp, Context::GLOBAL_INDEX)); in Generate()
5292 __ Addu(a3, v0, Operand(JSRegExpResult::kSize)); in Generate()
5293 __ li(t0, Operand(masm->isolate()->factory()->empty_fixed_array())); in Generate()
5294 __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalContextOffset)); in Generate()
5295 __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset)); in Generate()
5296 __ lw(a2, ContextOperand(a2, Context::REGEXP_RESULT_MAP_INDEX)); in Generate()
5297 __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset)); in Generate()
5298 __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset)); in Generate()
5301 __ lw(a1, MemOperand(sp, kPointerSize * 0)); in Generate()
5302 __ lw(a2, MemOperand(sp, kPointerSize * 1)); in Generate()
5303 __ lw(t2, MemOperand(sp, kPointerSize * 2)); in Generate()
5304 __ sw(a1, FieldMemOperand(v0, JSRegExpResult::kInputOffset)); in Generate()
5305 __ sw(a2, FieldMemOperand(v0, JSRegExpResult::kIndexOffset)); in Generate()
5306 __ sw(t2, FieldMemOperand(v0, JSArray::kLengthOffset)); in Generate()
5314 __ li(a2, Operand(masm->isolate()->factory()->fixed_array_map())); in Generate()
5315 __ sw(a2, FieldMemOperand(a3, HeapObject::kMapOffset)); in Generate()
5317 __ sll(t2, t1, kSmiTagSize); in Generate()
5318 __ sw(t2, FieldMemOperand(a3, FixedArray::kLengthOffset)); in Generate()
5320 __ li(a2, Operand(masm->isolate()->factory()->the_hole_value())); in Generate()
5321 __ Addu(a3, a3, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in Generate()
5328 __ sll(t1, t1, kPointerSizeLog2); // Convert num elements to num bytes. in Generate()
5329 __ addu(t1, t1, a3); // Point past last element to store. in Generate()
5330 __ bind(&loop); in Generate()
5331 __ Branch(&done, ge, a3, Operand(t1)); // Break when a3 past end of elem. in Generate()
5332 __ sw(a2, MemOperand(a3)); in Generate()
5333 __ Branch(&loop, USE_DELAY_SLOT); in Generate()
5334 __ addiu(a3, a3, kPointerSize); // In branch delay slot. in Generate()
5336 __ bind(&done); in Generate()
5337 __ DropAndRet(3); in Generate()
5339 __ bind(&slowcase); in Generate()
5340 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1); in Generate()
5358 __ lw(a3, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset)); in GenerateRecordCallTarget()
5362 __ Branch(&done, eq, a3, Operand(a1)); in GenerateRecordCallTarget()
5363 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); in GenerateRecordCallTarget()
5364 __ Branch(&done, eq, a3, Operand(at)); in GenerateRecordCallTarget()
5368 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); in GenerateRecordCallTarget()
5370 __ Branch(USE_DELAY_SLOT, &done, eq, a3, Operand(at)); in GenerateRecordCallTarget()
5374 __ sw(a1, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset)); in GenerateRecordCallTarget()
5379 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); in GenerateRecordCallTarget()
5380 __ sw(at, FieldMemOperand(a2, JSGlobalPropertyCell::kValueOffset)); in GenerateRecordCallTarget()
5382 __ bind(&done); in GenerateRecordCallTarget()
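The cell update at 5358-5380 reads as a three-state feedback machine: the hole means uninitialized, a concrete function means monomorphic, and undefined is the megamorphic sentinel. Sketching the transitions with hypothetical sentinel parameters:

    // One step of call-target feedback. Matching function or an already
    // megamorphic cell: no change. Uninitialized: record the function.
    // A different function: give up and go megamorphic.
    void RecordCallTarget(void** cell, void* function,
                          void* the_hole, void* undefined_sentinel) {
      void* v = *cell;
      if (v == function || v == undefined_sentinel) return;
      *cell = (v == the_hole) ? function : undefined_sentinel;
    }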
5398 __ lw(t0, MemOperand(sp, argc_ * kPointerSize)); in Generate()
5400 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); in Generate()
5401 __ Branch(&call, ne, t0, Operand(at)); in Generate()
5403 __ lw(a2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX))); in Generate()
5404 __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset)); in Generate()
5405 __ sw(a2, MemOperand(sp, argc_ * kPointerSize)); in Generate()
5406 __ bind(&call); in Generate()
5411 __ JumpIfSmi(a1, &non_function); in Generate()
5413 __ GetObjectType(a1, a2, a2); in Generate()
5414 __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE)); in Generate()
5422 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); in Generate()
5423 __ Branch(&call_as_function, eq, t0, Operand(at)); in Generate()
5424 __ InvokeFunction(a1, in Generate()
5429 __ bind(&call_as_function); in Generate()
5431 __ InvokeFunction(a1, in Generate()
5438 __ bind(&slow); in Generate()
5440 __ Branch(&non_function, ne, a2, Operand(JS_FUNCTION_PROXY_TYPE)); in Generate()
5441 __ push(a1); // Put proxy as additional argument. in Generate()
5442 __ li(a0, Operand(argc_ + 1, RelocInfo::NONE)); in Generate()
5443 __ li(a2, Operand(0, RelocInfo::NONE)); in Generate()
5444 __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY); in Generate()
5445 __ SetCallKind(t1, CALL_AS_METHOD); in Generate()
5449 __ Jump(adaptor, RelocInfo::CODE_TARGET); in Generate()
5454 __ bind(&non_function); in Generate()
5455 __ sw(a1, MemOperand(sp, argc_ * kPointerSize)); in Generate()
5456 __ li(a0, Operand(argc_)); // Set up the number of arguments. in Generate()
5457 __ mov(a2, zero_reg); in Generate()
5458 __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION); in Generate()
5459 __ SetCallKind(t1, CALL_AS_METHOD); in Generate()
5460 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), in Generate()
5472 __ JumpIfSmi(a1, &non_function_call); in Generate()
5474 __ GetObjectType(a1, a3, a3); in Generate()
5475 __ Branch(&slow, ne, a3, Operand(JS_FUNCTION_TYPE)); in Generate()
5482 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate()
5483 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kConstructStubOffset)); in Generate()
5484 __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
5485 __ Jump(at); in Generate()
5491 __ bind(&slow); in Generate()
5492 __ Branch(&non_function_call, ne, a3, Operand(JS_FUNCTION_PROXY_TYPE)); in Generate()
5493 __ GetBuiltinEntry(a3, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR); in Generate()
5494 __ jmp(&do_call); in Generate()
5496 __ bind(&non_function_call); in Generate()
5497 __ GetBuiltinEntry(a3, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); in Generate()
5498 __ bind(&do_call); in Generate()
5500 __ li(a2, Operand(0, RelocInfo::NONE)); in Generate()
5501 __ SetCallKind(t1, CALL_AS_METHOD); in Generate()
5502 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), in Generate()
5558 __ JumpIfSmi(object_, receiver_not_string_); in GenerateFast()
5561 __ lw(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); in GenerateFast()
5562 __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); in GenerateFast()
5564 __ And(t0, result_, Operand(kIsNotStringMask)); in GenerateFast()
5565 __ Branch(receiver_not_string_, ne, t0, Operand(zero_reg)); in GenerateFast()
5568 __ JumpIfNotSmi(index_, &index_not_smi_); in GenerateFast()
5570 __ bind(&got_smi_index_); in GenerateFast()
5573 __ lw(t0, FieldMemOperand(object_, String::kLengthOffset)); in GenerateFast()
5574 __ Branch(index_out_of_range_, ls, t0, Operand(index_)); in GenerateFast()
5576 __ sra(index_, index_, kSmiTagSize); in GenerateFast()
5584 __ sll(result_, result_, kSmiTagSize); in GenerateFast()
5585 __ bind(&exit_); in GenerateFast()
5592 __ Abort("Unexpected fallthrough to CharCodeAt slow case"); in GenerateSlow()
5595 __ bind(&index_not_smi_); in GenerateSlow()
5597 __ CheckMap(index_, in GenerateSlow()
5604 __ Push(object_, index_); in GenerateSlow()
5606 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); in GenerateSlow()
5610 __ CallRuntime(Runtime::kNumberToSmi, 1); in GenerateSlow()
5616 __ Move(index_, v0); in GenerateSlow()
5617 __ pop(object_); in GenerateSlow()
5619 __ lw(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); in GenerateSlow()
5620 __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); in GenerateSlow()
5623 __ JumpIfNotSmi(index_, index_out_of_range_); in GenerateSlow()
5625 __ Branch(&got_smi_index_); in GenerateSlow()
5630 __ bind(&call_runtime_); in GenerateSlow()
5632 __ sll(index_, index_, kSmiTagSize); in GenerateSlow()
5633 __ Push(object_, index_); in GenerateSlow()
5634 __ CallRuntime(Runtime::kStringCharCodeAt, 2); in GenerateSlow()
5636 __ Move(result_, v0); in GenerateSlow()
5639 __ jmp(&exit_); in GenerateSlow()
5641 __ Abort("Unexpected fallthrough from CharCodeAt slow case"); in GenerateSlow()
5657 __ And(t0, in GenerateFast()
5661 __ Branch(&slow_case_, ne, t0, Operand(zero_reg)); in GenerateFast()
5663 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); in GenerateFast()
5666 __ sll(t0, code_, kPointerSizeLog2 - kSmiTagSize); in GenerateFast()
5667 __ Addu(result_, result_, t0); in GenerateFast()
5668 __ lw(result_, FieldMemOperand(result_, FixedArray::kHeaderSize)); in GenerateFast()
5669 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex); in GenerateFast()
5670 __ Branch(&slow_case_, eq, result_, Operand(t0)); in GenerateFast()
5671 __ bind(&exit_); in GenerateFast()
5678 __ Abort("Unexpected fallthrough to CharFromCode slow case"); in GenerateSlow()
5680 __ bind(&slow_case_); in GenerateSlow()
5682 __ push(code_); in GenerateSlow()
5683 __ CallRuntime(Runtime::kCharFromCode, 1); in GenerateSlow()
5684 __ Move(result_, v0); in GenerateSlow()
5687 __ Branch(&exit_); in GenerateSlow()
5689 __ Abort("Unexpected fallthrough from CharFromCode slow case"); in GenerateSlow()
5721 __ addu(count, count, count); in GenerateCopyCharacters()
5723 __ Branch(&done, eq, count, Operand(zero_reg)); in GenerateCopyCharacters()
5724 __ addu(count, dest, count); // Count now points to the last dest byte. in GenerateCopyCharacters()
5726 __ bind(&loop); in GenerateCopyCharacters()
5727 __ lbu(scratch, MemOperand(src)); in GenerateCopyCharacters()
5728 __ addiu(src, src, 1); in GenerateCopyCharacters()
5729 __ sb(scratch, MemOperand(dest)); in GenerateCopyCharacters()
5730 __ addiu(dest, dest, 1); in GenerateCopyCharacters()
5731 __ Branch(&loop, lt, dest, Operand(count)); in GenerateCopyCharacters()
5733 __ bind(&done); in GenerateCopyCharacters()
5759 __ And(scratch4, dest, Operand(kPointerAlignmentMask)); in GenerateCopyCharactersLong()
5760 __ Check(eq, in GenerateCopyCharactersLong()
5777 __ addu(count, count, count); in GenerateCopyCharactersLong()
5779 __ Branch(&done, eq, count, Operand(zero_reg)); in GenerateCopyCharactersLong()
5783 __ Subu(scratch1, count, Operand(8)); in GenerateCopyCharactersLong()
5784 __ Addu(count, dest, Operand(count)); in GenerateCopyCharactersLong()
5786 __ Branch(&byte_loop, lt, scratch1, Operand(zero_reg)); in GenerateCopyCharactersLong()
5790 __ And(scratch4, dest, Operand(kReadAlignmentMask)); in GenerateCopyCharactersLong()
5792 __ Branch(&dest_aligned, eq, scratch4, Operand(zero_reg)); in GenerateCopyCharactersLong()
5794 __ bind(&aligned_loop); in GenerateCopyCharactersLong()
5795 __ lbu(scratch1, MemOperand(src)); in GenerateCopyCharactersLong()
5796 __ addiu(src, src, 1); in GenerateCopyCharactersLong()
5797 __ sb(scratch1, MemOperand(dest)); in GenerateCopyCharactersLong()
5798 __ addiu(dest, dest, 1); in GenerateCopyCharactersLong()
5799 __ addiu(scratch4, scratch4, 1); in GenerateCopyCharactersLong()
5800 __ Branch(&aligned_loop, le, scratch4, Operand(kReadAlignmentMask)); in GenerateCopyCharactersLong()
5801 __ bind(&dest_aligned); in GenerateCopyCharactersLong()
5806 __ And(scratch4, src, Operand(kReadAlignmentMask)); in GenerateCopyCharactersLong()
5807 __ Branch(&simple_loop, eq, scratch4, Operand(zero_reg)); in GenerateCopyCharactersLong()
5814 __ bind(&loop); in GenerateCopyCharactersLong()
5815 __ lwr(scratch1, MemOperand(src)); in GenerateCopyCharactersLong()
5816 __ Addu(src, src, Operand(kReadAlignment)); in GenerateCopyCharactersLong()
5817 __ lwl(scratch1, MemOperand(src, -1)); in GenerateCopyCharactersLong()
5818 __ sw(scratch1, MemOperand(dest)); in GenerateCopyCharactersLong()
5819 __ Addu(dest, dest, Operand(kReadAlignment)); in GenerateCopyCharactersLong()
5820 __ Subu(scratch2, limit, dest); in GenerateCopyCharactersLong()
5821 __ Branch(&loop, ge, scratch2, Operand(kReadAlignment)); in GenerateCopyCharactersLong()
5824 __ Branch(&byte_loop); in GenerateCopyCharactersLong()
5829 __ bind(&simple_loop); in GenerateCopyCharactersLong()
5832 __ bind(&loop); in GenerateCopyCharactersLong()
5833 __ lw(scratch1, MemOperand(src)); in GenerateCopyCharactersLong()
5834 __ Addu(src, src, Operand(kReadAlignment)); in GenerateCopyCharactersLong()
5835 __ sw(scratch1, MemOperand(dest)); in GenerateCopyCharactersLong()
5836 __ Addu(dest, dest, Operand(kReadAlignment)); in GenerateCopyCharactersLong()
5837 __ Subu(scratch2, limit, dest); in GenerateCopyCharactersLong()
5838 __ Branch(&loop, ge, scratch2, Operand(kReadAlignment)); in GenerateCopyCharactersLong()
5842 __ bind(&byte_loop); in GenerateCopyCharactersLong()
5844 __ Branch(&done, ge, dest, Operand(limit)); in GenerateCopyCharactersLong()
5845 __ lbu(scratch1, MemOperand(src)); in GenerateCopyCharactersLong()
5846 __ addiu(src, src, 1); in GenerateCopyCharactersLong()
5847 __ sb(scratch1, MemOperand(dest)); in GenerateCopyCharactersLong()
5848 __ addiu(dest, dest, 1); in GenerateCopyCharactersLong()
5849 __ Branch(&byte_loop); in GenerateCopyCharactersLong()
5851 __ bind(&done); in GenerateCopyCharactersLong()
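
The long variant adds three phases: byte-copy until dest is word-aligned, then a word loop (when src is still misaligned it pairs lwr/lwl, the MIPS partial-word loads that assemble an unaligned word in two instructions), and finally a byte tail. A portable sketch of the same phasing, with memcpy standing in for the lwr/lwl pair:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Three-phase copy mirroring GenerateCopyCharactersLong: align dest,
    // copy whole words, finish with a byte tail.
    void CopyLong(uint8_t* dest, const uint8_t* src, size_t byte_count) {
      uint8_t* limit = dest + byte_count;
      while (dest < limit && (reinterpret_cast<uintptr_t>(dest) & 3u) != 0)
        *dest++ = *src++;                     // align the destination
      while (limit - dest >= 4) {
        uint32_t w;
        std::memcpy(&w, src, 4);              // unaligned read (lwr + lwl)
        std::memcpy(dest, &w, 4);             // aligned word store (sw)
        src += 4; dest += 4;
      }
      while (dest < limit) *dest++ = *src++;  // byte tail
    }
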
5870 __ Subu(scratch, c1, Operand(static_cast<int>('0'))); in GenerateTwoCharacterSymbolTableProbe()
5871 __ Branch(&not_array_index, in GenerateTwoCharacterSymbolTableProbe()
5875 __ Subu(scratch, c2, Operand(static_cast<int>('0'))); in GenerateTwoCharacterSymbolTableProbe()
5881 __ sll(scratch1, c2, kBitsPerByte); in GenerateTwoCharacterSymbolTableProbe()
5882 __ Branch(&tmp, Ugreater, scratch, Operand(static_cast<int>('9' - '0'))); in GenerateTwoCharacterSymbolTableProbe()
5883 __ Or(c1, c1, scratch1); in GenerateTwoCharacterSymbolTableProbe()
5884 __ bind(&tmp); in GenerateTwoCharacterSymbolTableProbe()
5885 __ Branch( in GenerateTwoCharacterSymbolTableProbe()
5888 __ bind(&not_array_index); in GenerateTwoCharacterSymbolTableProbe()
5897 __ sll(scratch, c2, kBitsPerByte); in GenerateTwoCharacterSymbolTableProbe()
5898 __ Or(chars, chars, scratch); in GenerateTwoCharacterSymbolTableProbe()
5906 __ LoadRoot(symbol_table, Heap::kSymbolTableRootIndex); in GenerateTwoCharacterSymbolTableProbe()
5909 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex); in GenerateTwoCharacterSymbolTableProbe()
5913 __ lw(mask, FieldMemOperand(symbol_table, SymbolTable::kCapacityOffset)); in GenerateTwoCharacterSymbolTableProbe()
5914 __ sra(mask, mask, 1); in GenerateTwoCharacterSymbolTableProbe()
5915 __ Addu(mask, mask, -1); in GenerateTwoCharacterSymbolTableProbe()
5919 __ Addu(first_symbol_table_element, symbol_table, in GenerateTwoCharacterSymbolTableProbe()
5939 __ Addu(candidate, hash, Operand(SymbolTable::GetProbeOffset(i))); in GenerateTwoCharacterSymbolTableProbe()
5941 __ mov(candidate, hash); in GenerateTwoCharacterSymbolTableProbe()
5944 __ And(candidate, candidate, Operand(mask)); in GenerateTwoCharacterSymbolTableProbe()
5948 __ sll(scratch, candidate, kPointerSizeLog2); in GenerateTwoCharacterSymbolTableProbe()
5949 __ Addu(scratch, scratch, first_symbol_table_element); in GenerateTwoCharacterSymbolTableProbe()
5950 __ lw(candidate, MemOperand(scratch)); in GenerateTwoCharacterSymbolTableProbe()
5954 __ GetObjectType(candidate, scratch, scratch); in GenerateTwoCharacterSymbolTableProbe()
5955 __ Branch(&is_string, ne, scratch, Operand(ODDBALL_TYPE)); in GenerateTwoCharacterSymbolTableProbe()
5957 __ Branch(not_found, eq, undefined, Operand(candidate)); in GenerateTwoCharacterSymbolTableProbe()
5960 __ LoadRoot(scratch, Heap::kTheHoleValueRootIndex); in GenerateTwoCharacterSymbolTableProbe()
5961 __ Assert(eq, "oddball in symbol table is not undefined or the hole", in GenerateTwoCharacterSymbolTableProbe()
5964 __ jmp(&next_probe[i]); in GenerateTwoCharacterSymbolTableProbe()
5966 __ bind(&is_string); in GenerateTwoCharacterSymbolTableProbe()
5971 __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &next_probe[i]); in GenerateTwoCharacterSymbolTableProbe()
5974 __ lw(scratch, FieldMemOperand(candidate, String::kLengthOffset)); in GenerateTwoCharacterSymbolTableProbe()
5975 __ Branch(&next_probe[i], ne, scratch, Operand(Smi::FromInt(2))); in GenerateTwoCharacterSymbolTableProbe()
5979 __ lhu(scratch, FieldMemOperand(candidate, SeqAsciiString::kHeaderSize)); in GenerateTwoCharacterSymbolTableProbe()
5980 __ Branch(&found_in_symbol_table, eq, chars, Operand(scratch)); in GenerateTwoCharacterSymbolTableProbe()
5981 __ bind(&next_probe[i]); in GenerateTwoCharacterSymbolTableProbe()
5985 __ jmp(not_found); in GenerateTwoCharacterSymbolTableProbe()
5989 __ bind(&found_in_symbol_table); in GenerateTwoCharacterSymbolTableProbe()
5990 __ mov(v0, result); in GenerateTwoCharacterSymbolTableProbe()
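
The probe packs the two characters into one halfword (c1 | c2 << 8), hashes, and walks the symbol table with a small fixed number of probe offsets; each candidate must be a sequential ASCII string of length two whose first halfword equals the packed pair. A hash-table sketch under those assumptions (the probe count and offset schedule are illustrative, not SymbolTable's exact values):

    #include <cstdint>
    #include <optional>
    #include <string>
    #include <vector>

    // Illustrative probe over an open-addressed table of interned strings;
    // table size must be a power of two.
    std::optional<std::string> ProbeTwoCharTable(
        const std::vector<std::optional<std::string>>& table,
        uint32_t hash, char c1, char c2) {
      const uint32_t mask = static_cast<uint32_t>(table.size()) - 1;
      const uint16_t chars =
          static_cast<uint8_t>(c1) |
          (static_cast<uint16_t>(static_cast<uint8_t>(c2)) << 8);
      for (uint32_t i = 0; i < 4; ++i) {                 // probe budget (assumed)
        uint32_t slot = (hash + i * (i + 1) / 2) & mask; // offset schedule (assumed)
        const auto& cand = table[slot];
        if (!cand) return std::nullopt;                  // undefined => not found
        if (cand->size() == 2 &&
            (static_cast<uint8_t>((*cand)[0]) |
             (static_cast<uint16_t>(static_cast<uint8_t>((*cand)[1])) << 8)) == chars)
          return cand;                                   // found_in_symbol_table
      }
      return std::nullopt;
    }
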
5998 __ LoadRoot(hash, Heap::kHashSeedRootIndex); in GenerateHashInit()
6000 __ SmiUntag(hash); in GenerateHashInit()
6001 __ addu(hash, hash, character); in GenerateHashInit()
6002 __ sll(at, hash, 10); in GenerateHashInit()
6003 __ addu(hash, hash, at); in GenerateHashInit()
6005 __ srl(at, hash, 6); in GenerateHashInit()
6006 __ xor_(hash, hash, at); in GenerateHashInit()
6014 __ addu(hash, hash, character); in GenerateHashAddCharacter()
6016 __ sll(at, hash, 10); in GenerateHashAddCharacter()
6017 __ addu(hash, hash, at); in GenerateHashAddCharacter()
6019 __ srl(at, hash, 6); in GenerateHashAddCharacter()
6020 __ xor_(hash, hash, at); in GenerateHashAddCharacter()
6027 __ sll(at, hash, 3); in GenerateHashGetHash()
6028 __ addu(hash, hash, at); in GenerateHashGetHash()
6030 __ srl(at, hash, 11); in GenerateHashGetHash()
6031 __ xor_(hash, hash, at); in GenerateHashGetHash()
6033 __ sll(at, hash, 15); in GenerateHashGetHash()
6034 __ addu(hash, hash, at); in GenerateHashGetHash()
6036 __ li(at, Operand(String::kHashBitMask)); in GenerateHashGetHash()
6037 __ and_(hash, hash, at); in GenerateHashGetHash()
6040 __ ori(at, zero_reg, StringHasher::kZeroHash); in GenerateHashGetHash()
6041 __ Movz(hash, at, hash); in GenerateHashGetHash()
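
The three hash helpers are V8's running string hash: a seeded one-at-a-time hash finalized with a mask and a zero-hash substitution (the Movz above). The same computation in C++; the mask width and the kZeroHash value mirror V8's constants but are assumptions here:

    #include <cstdint>

    // One-at-a-time string hash matching GenerateHashInit / AddCharacter /
    // GetHash above. Mask width and kZeroHash are assumed values.
    constexpr uint32_t kHashBitMask = (1u << 30) - 1;
    constexpr uint32_t kZeroHash = 27;

    uint32_t HashInit(uint32_t seed, uint32_t c) {
      uint32_t hash = seed + c;
      hash += hash << 10;
      hash ^= hash >> 6;
      return hash;
    }
    uint32_t HashAdd(uint32_t hash, uint32_t c) {
      hash += c;
      hash += hash << 10;
      hash ^= hash >> 6;
      return hash;
    }
    uint32_t HashGet(uint32_t hash) {
      hash += hash << 3;
      hash ^= hash >> 11;
      hash += hash << 15;
      hash &= kHashBitMask;
      return hash == 0 ? kZeroHash : hash;  // Movz: replace 0 with kZeroHash
    }
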
6064 __ lw(a2, MemOperand(sp, kToOffset)); in Generate()
6065 __ lw(a3, MemOperand(sp, kFromOffset)); in Generate()
6072 __ UntagAndJumpIfNotSmi(a2, a2, &runtime); in Generate()
6073 __ UntagAndJumpIfNotSmi(a3, a3, &runtime); in Generate()
6076 __ Branch(&runtime, lt, a3, Operand(zero_reg)); // From < 0. in Generate()
6078 __ Branch(&runtime, gt, a3, Operand(a2)); // Fail if from > to. in Generate()
6079 __ Subu(a2, a2, a3); in Generate()
6082 __ lw(v0, MemOperand(sp, kStringOffset)); in Generate()
6083 __ JumpIfSmi(v0, &runtime); in Generate()
6084 __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); in Generate()
6085 __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset)); in Generate()
6086 __ And(t0, a1, Operand(kIsNotStringMask)); in Generate()
6088 __ Branch(&runtime, ne, t0, Operand(zero_reg)); in Generate()
6094 __ lw(t0, FieldMemOperand(v0, String::kLengthOffset)); in Generate()
6095 __ sra(t0, t0, 1); in Generate()
6096 __ Branch(&return_v0, eq, a2, Operand(t0)); in Generate()
6102 __ li(t0, 2); in Generate()
6103 __ Branch(&result_longer_than_two, gt, a2, Operand(t0)); in Generate()
6104 __ Branch(&runtime, lt, a2, Operand(t0)); in Generate()
6106 __ JumpIfInstanceTypeIsNotSequentialAscii(a1, a1, &runtime); in Generate()
6109 __ Addu(v0, v0, Operand(a3)); in Generate()
6110 __ lbu(a3, FieldMemOperand(v0, SeqAsciiString::kHeaderSize)); in Generate()
6111 __ lbu(t0, FieldMemOperand(v0, SeqAsciiString::kHeaderSize + 1)); in Generate()
6117 __ jmp(&return_v0); in Generate()
6121 __ bind(&make_two_character_string); in Generate()
6122 __ AllocateAsciiString(v0, a2, t0, t1, t4, &runtime); in Generate()
6123 __ sh(a3, FieldMemOperand(v0, SeqAsciiString::kHeaderSize)); in Generate()
6124 __ jmp(&return_v0); in Generate()
6126 __ bind(&result_longer_than_two); in Generate()
6138 __ And(t0, a1, Operand(kIsIndirectStringMask)); in Generate()
6139 __ Branch(USE_DELAY_SLOT, &seq_or_external_string, eq, t0, Operand(zero_reg)); in Generate()
6141 __ And(t0, a1, Operand(kSlicedNotConsMask)); in Generate()
6142 __ Branch(&sliced_string, ne, t0, Operand(zero_reg)); in Generate()
6144 __ lw(t1, FieldMemOperand(v0, ConsString::kSecondOffset)); in Generate()
6145 __ LoadRoot(t0, Heap::kEmptyStringRootIndex); in Generate()
6146 __ Branch(&runtime, ne, t1, Operand(t0)); in Generate()
6147 __ lw(t1, FieldMemOperand(v0, ConsString::kFirstOffset)); in Generate()
6149 __ lw(a1, FieldMemOperand(t1, HeapObject::kMapOffset)); in Generate()
6150 __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset)); in Generate()
6151 __ jmp(&underlying_unpacked); in Generate()
6153 __ bind(&sliced_string); in Generate()
6155 __ lw(t1, FieldMemOperand(v0, SlicedString::kParentOffset)); in Generate()
6156 __ lw(t0, FieldMemOperand(v0, SlicedString::kOffsetOffset)); in Generate()
6157 __ sra(t0, t0, 1); // Untag the slice offset. in Generate()
6158 __ Addu(a3, a3, t0); // Add offset to index. in Generate()
6160 __ lw(a1, FieldMemOperand(t1, HeapObject::kMapOffset)); in Generate()
6161 __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset)); in Generate()
6162 __ jmp(&underlying_unpacked); in Generate()
6164 __ bind(&seq_or_external_string); in Generate()
6166 __ mov(t1, v0); in Generate()
6168 __ bind(&underlying_unpacked); in Generate()
6177 __ Branch(&copy_routine, lt, a2, Operand(SlicedString::kMinLength)); in Generate()
6186 __ And(t0, a1, Operand(kStringEncodingMask)); in Generate()
6187 __ Branch(&two_byte_slice, eq, t0, Operand(zero_reg)); in Generate()
6188 __ AllocateAsciiSlicedString(v0, a2, t2, t3, &runtime); in Generate()
6189 __ jmp(&set_slice_header); in Generate()
6190 __ bind(&two_byte_slice); in Generate()
6191 __ AllocateTwoByteSlicedString(v0, a2, t2, t3, &runtime); in Generate()
6192 __ bind(&set_slice_header); in Generate()
6193 __ sll(a3, a3, 1); in Generate()
6194 __ sw(t1, FieldMemOperand(v0, SlicedString::kParentOffset)); in Generate()
6195 __ sw(a3, FieldMemOperand(v0, SlicedString::kOffsetOffset)); in Generate()
6196 __ jmp(&return_v0); in Generate()
6198 __ bind(&copy_routine); in Generate()
6208 __ And(t0, a1, Operand(kExternalStringTag)); in Generate()
6209 __ Branch(&sequential_string, eq, t0, Operand(zero_reg)); in Generate()
6214 __ And(t0, a1, Operand(kShortExternalStringTag)); in Generate()
6215 __ Branch(&runtime, ne, t0, Operand(zero_reg)); in Generate()
6216 __ lw(t1, FieldMemOperand(t1, ExternalString::kResourceDataOffset)); in Generate()
6218 __ jmp(&allocate_result); in Generate()
6220 __ bind(&sequential_string); in Generate()
6223 __ Addu(t1, t1, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); in Generate()
6225 __ bind(&allocate_result); in Generate()
6228 __ And(t0, a1, Operand(kStringEncodingMask)); in Generate()
6229 __ Branch(&two_byte_sequential, eq, t0, Operand(zero_reg)); in Generate()
6232 __ AllocateAsciiString(v0, a2, t0, t2, t3, &runtime); in Generate()
6235 __ Addu(t1, t1, a3); in Generate()
6238 __ Addu(a1, v0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); in Generate()
6247 __ jmp(&return_v0); in Generate()
6250 __ bind(&two_byte_sequential); in Generate()
6251 __ AllocateTwoByteString(v0, a2, t0, t2, t3, &runtime); in Generate()
6255 __ sll(t0, a3, 1); in Generate()
6256 __ Addu(t1, t1, t0); in Generate()
6258 __ Addu(a1, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); in Generate()
6268 __ bind(&return_v0); in Generate()
6270 __ IncrementCounter(counters->sub_string_native(), 1, a3, t0); in Generate()
6271 __ DropAndRet(3); in Generate()
6274 __ bind(&runtime); in Generate()
6275 __ TailCallRuntime(Runtime::kSubString, 3, 1); in Generate()
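
Overall shape of the sub-string stub: validate smi indices, return the receiver unchanged when the range covers the whole string, route length-2 results through the symbol table, unwrap cons and sliced strings to the underlying sequential or external string, then either build a SlicedString (once the result is long enough) or flat-copy the characters. A sketch of the slice-or-copy decision; the threshold mirrors SlicedString::kMinLength but its value is assumed:

    #include <cstddef>
    #include <string>

    constexpr size_t kMinSliceLength = 13;  // assumed SlicedString::kMinLength

    struct Slice {                  // stand-in for a SlicedString header
      const std::string* parent;
      size_t offset, length;
    };

    // Returns true and fills `out` when a slice should be made; otherwise
    // the caller flat-copies, as the &copy_routine path above does.
    bool TrySlice(const std::string& parent, size_t from, size_t len, Slice* out) {
      if (len < kMinSliceLength) return false;  // too short: copy instead
      *out = Slice{&parent, from, len};         // set_slice_header
      return true;
    }
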
6289 __ lw(length, FieldMemOperand(left, String::kLengthOffset)); in GenerateFlatAsciiStringEquals()
6290 __ lw(scratch2, FieldMemOperand(right, String::kLengthOffset)); in GenerateFlatAsciiStringEquals()
6291 __ Branch(&check_zero_length, eq, length, Operand(scratch2)); in GenerateFlatAsciiStringEquals()
6292 __ bind(&strings_not_equal); in GenerateFlatAsciiStringEquals()
6293 __ li(v0, Operand(Smi::FromInt(NOT_EQUAL))); in GenerateFlatAsciiStringEquals()
6294 __ Ret(); in GenerateFlatAsciiStringEquals()
6298 __ bind(&check_zero_length); in GenerateFlatAsciiStringEquals()
6300 __ Branch(&compare_chars, ne, length, Operand(zero_reg)); in GenerateFlatAsciiStringEquals()
6301 __ li(v0, Operand(Smi::FromInt(EQUAL))); in GenerateFlatAsciiStringEquals()
6302 __ Ret(); in GenerateFlatAsciiStringEquals()
6305 __ bind(&compare_chars); in GenerateFlatAsciiStringEquals()
6312 __ li(v0, Operand(Smi::FromInt(EQUAL))); in GenerateFlatAsciiStringEquals()
6313 __ Ret(); in GenerateFlatAsciiStringEquals()
6326 __ lw(scratch1, FieldMemOperand(left, String::kLengthOffset)); in GenerateCompareFlatAsciiStrings()
6327 __ lw(scratch2, FieldMemOperand(right, String::kLengthOffset)); in GenerateCompareFlatAsciiStrings()
6328 __ Subu(scratch3, scratch1, Operand(scratch2)); in GenerateCompareFlatAsciiStrings()
6330 __ slt(scratch4, scratch2, scratch1); in GenerateCompareFlatAsciiStrings()
6331 __ Movn(scratch1, scratch2, scratch4); in GenerateCompareFlatAsciiStrings()
6334 __ Branch(&compare_lengths, eq, min_length, Operand(zero_reg)); in GenerateCompareFlatAsciiStrings()
6342 __ bind(&compare_lengths); in GenerateCompareFlatAsciiStrings()
6345 __ mov(scratch2, length_delta); in GenerateCompareFlatAsciiStrings()
6346 __ mov(scratch4, zero_reg); in GenerateCompareFlatAsciiStrings()
6347 __ mov(v0, zero_reg); in GenerateCompareFlatAsciiStrings()
6349 __ bind(&result_not_equal); in GenerateCompareFlatAsciiStrings()
6353 __ Branch(&ret, eq, scratch2, Operand(scratch4)); in GenerateCompareFlatAsciiStrings()
6354 __ li(v0, Operand(Smi::FromInt(GREATER))); in GenerateCompareFlatAsciiStrings()
6355 __ Branch(&ret, gt, scratch2, Operand(scratch4)); in GenerateCompareFlatAsciiStrings()
6356 __ li(v0, Operand(Smi::FromInt(LESS))); in GenerateCompareFlatAsciiStrings()
6357 __ bind(&ret); in GenerateCompareFlatAsciiStrings()
6358 __ Ret(); in GenerateCompareFlatAsciiStrings()
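
GenerateCompareFlatAsciiStrings compares the first min(len_left, len_right) bytes and, only if those all match, decides by the sign of the length difference. A scalar equivalent:

    #include <cstddef>
    #include <cstring>
    #include <string>

    // Returns -1 (LESS), 0 (EQUAL), or +1 (GREATER), as the stub does.
    int CompareFlat(const std::string& l, const std::string& r) {
      size_t min_len = l.size() < r.size() ? l.size() : r.size();
      int c = std::memcmp(l.data(), r.data(), min_len);
      if (c != 0) return c < 0 ? -1 : 1;            // chars differ
      if (l.size() == r.size()) return 0;           // length delta == 0
      return l.size() < r.size() ? -1 : 1;          // sign of length delta
    }
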
6374 __ SmiUntag(length); in GenerateAsciiCharsCompareLoop()
6375 __ Addu(scratch1, length, in GenerateAsciiCharsCompareLoop()
6377 __ Addu(left, left, Operand(scratch1)); in GenerateAsciiCharsCompareLoop()
6378 __ Addu(right, right, Operand(scratch1)); in GenerateAsciiCharsCompareLoop()
6379 __ Subu(length, zero_reg, length); in GenerateAsciiCharsCompareLoop()
6385 __ bind(&loop); in GenerateAsciiCharsCompareLoop()
6386 __ Addu(scratch3, left, index); in GenerateAsciiCharsCompareLoop()
6387 __ lbu(scratch1, MemOperand(scratch3)); in GenerateAsciiCharsCompareLoop()
6388 __ Addu(scratch3, right, index); in GenerateAsciiCharsCompareLoop()
6389 __ lbu(scratch2, MemOperand(scratch3)); in GenerateAsciiCharsCompareLoop()
6390 __ Branch(chars_not_equal, ne, scratch1, Operand(scratch2)); in GenerateAsciiCharsCompareLoop()
6391 __ Addu(index, index, 1); in GenerateAsciiCharsCompareLoop()
6392 __ Branch(&loop, ne, index, Operand(zero_reg)); in GenerateAsciiCharsCompareLoop()
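
The compare loop uses a classic negative-index idiom: advance both base pointers one past the compared region, negate the length, and count the index up toward zero, so each iteration costs only one add and one branch besides the loads. In C++:

    #include <cstdint>

    // Negative-index compare loop, as in GenerateAsciiCharsCompareLoop.
    // Returns true when the first `length` bytes match.
    bool ChunksEqual(const uint8_t* left, const uint8_t* right, long length) {
      left += length;               // point one past the last byte
      right += length;
      long index = -length;         // counts up toward zero
      while (index != 0) {
        if (left[index] != right[index]) return false;  // chars_not_equal
        ++index;
      }
      return true;
    }
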
6404 __ lw(a1, MemOperand(sp, 1 * kPointerSize)); // Left. in Generate()
6405 __ lw(a0, MemOperand(sp, 0 * kPointerSize)); // Right. in Generate()
6408 __ Branch(&not_same, ne, a0, Operand(a1)); in Generate()
6411 __ li(v0, Operand(Smi::FromInt(EQUAL))); in Generate()
6412 __ IncrementCounter(counters->string_compare_native(), 1, a1, a2); in Generate()
6413 __ DropAndRet(2); in Generate()
6415 __ bind(&not_same); in Generate()
6418 __ JumpIfNotBothSequentialAsciiStrings(a1, a0, a2, a3, &runtime); in Generate()
6421 __ IncrementCounter(counters->string_compare_native(), 1, a2, a3); in Generate()
6422 __ Addu(sp, sp, Operand(2 * kPointerSize)); in Generate()
6425 __ bind(&runtime); in Generate()
6426 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); in Generate()
6441 __ lw(a0, MemOperand(sp, 1 * kPointerSize)); // First argument. in Generate()
6442 __ lw(a1, MemOperand(sp, 0 * kPointerSize)); // Second argument. in Generate()
6446 __ JumpIfEitherSmi(a0, a1, &call_runtime); in Generate()
6448 __ lw(t0, FieldMemOperand(a0, HeapObject::kMapOffset)); in Generate()
6449 __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset)); in Generate()
6450 __ lbu(t0, FieldMemOperand(t0, Map::kInstanceTypeOffset)); in Generate()
6451 __ lbu(t1, FieldMemOperand(t1, Map::kInstanceTypeOffset)); in Generate()
6454 __ Or(t4, t0, Operand(t1)); in Generate()
6455 __ And(t4, t4, Operand(kIsNotStringMask)); in Generate()
6456 __ Branch(&call_runtime, ne, t4, Operand(zero_reg)); in Generate()
6485 __ lw(a2, FieldMemOperand(a0, String::kLengthOffset)); in Generate()
6486 __ lw(a3, FieldMemOperand(a1, String::kLengthOffset)); in Generate()
6487 __ mov(v0, a0); // Assume we'll return first string (from a0). in Generate()
6488 __ Movz(v0, a1, a2); // If first is empty, return second (from a1). in Generate()
6489 __ slt(t4, zero_reg, a2); // if (a2 > 0) t4 = 1. in Generate()
6490 __ slt(t5, zero_reg, a3); // if (a3 > 0) t5 = 1. in Generate()
6491 __ and_(t4, t4, t5); // Branch if both strings were non-empty. in Generate()
6492 __ Branch(&strings_not_empty, ne, t4, Operand(zero_reg)); in Generate()
6494 __ IncrementCounter(counters->string_add_native(), 1, a2, a3); in Generate()
6495 __ DropAndRet(2); in Generate()
6497 __ bind(&strings_not_empty); in Generate()
6501 __ sra(a2, a2, kSmiTagSize); in Generate()
6502 __ sra(a3, a3, kSmiTagSize); in Generate()
6515 __ Addu(t2, a2, Operand(a3)); in Generate()
6518 __ Branch(&longer_than_two, ne, t2, Operand(2)); in Generate()
6522 __ lw(t0, FieldMemOperand(a0, HeapObject::kMapOffset)); in Generate()
6523 __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset)); in Generate()
6524 __ lbu(t0, FieldMemOperand(t0, Map::kInstanceTypeOffset)); in Generate()
6525 __ lbu(t1, FieldMemOperand(t1, Map::kInstanceTypeOffset)); in Generate()
6527 __ JumpIfBothInstanceTypesAreNotSequentialAscii(t0, t1, t2, t3, in Generate()
6531 __ lbu(a2, FieldMemOperand(a0, SeqAsciiString::kHeaderSize)); in Generate()
6532 __ lbu(a3, FieldMemOperand(a1, SeqAsciiString::kHeaderSize)); in Generate()
6539 __ IncrementCounter(counters->string_add_native(), 1, a2, a3); in Generate()
6540 __ DropAndRet(2); in Generate()
6542 __ bind(&make_two_character_string); in Generate()
6548 __ li(t2, Operand(2)); in Generate()
6549 __ AllocateAsciiString(v0, t2, t0, t1, t5, &call_runtime); in Generate()
6550 __ sh(a2, FieldMemOperand(v0, SeqAsciiString::kHeaderSize)); in Generate()
6551 __ IncrementCounter(counters->string_add_native(), 1, a2, a3); in Generate()
6552 __ DropAndRet(2); in Generate()
6554 __ bind(&longer_than_two); in Generate()
6556 __ Branch(&string_add_flat_result, lt, t2, Operand(ConsString::kMinLength)); in Generate()
6561 __ Branch(&call_runtime, hs, t2, Operand(String::kMaxLength + 1)); in Generate()
6566 __ lw(t0, FieldMemOperand(a0, HeapObject::kMapOffset)); in Generate()
6567 __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset)); in Generate()
6568 __ lbu(t0, FieldMemOperand(t0, Map::kInstanceTypeOffset)); in Generate()
6569 __ lbu(t1, FieldMemOperand(t1, Map::kInstanceTypeOffset)); in Generate()
6574 __ And(t4, t0, Operand(t1)); in Generate()
6575 __ And(t4, t4, Operand(kStringEncodingMask)); in Generate()
6576 __ Branch(&non_ascii, eq, t4, Operand(zero_reg)); in Generate()
6579 __ bind(&ascii_data); in Generate()
6580 __ AllocateAsciiConsString(v0, t2, t0, t1, &call_runtime); in Generate()
6581 __ bind(&allocated); in Generate()
6583 __ sw(a0, FieldMemOperand(v0, ConsString::kFirstOffset)); in Generate()
6584 __ sw(a1, FieldMemOperand(v0, ConsString::kSecondOffset)); in Generate()
6585 __ IncrementCounter(counters->string_add_native(), 1, a2, a3); in Generate()
6586 __ DropAndRet(2); in Generate()
6588 __ bind(&non_ascii); in Generate()
6594 __ And(at, t0, Operand(kAsciiDataHintMask)); in Generate()
6595 __ and_(at, at, t1); in Generate()
6596 __ Branch(&ascii_data, ne, at, Operand(zero_reg)); in Generate()
6598 __ xor_(t0, t0, t1); in Generate()
6600 __ And(t0, t0, Operand(kAsciiStringTag | kAsciiDataHintTag)); in Generate()
6601 __ Branch(&ascii_data, eq, t0, Operand(kAsciiStringTag | kAsciiDataHintTag)); in Generate()
6604 __ AllocateTwoByteConsString(v0, t2, t0, t1, &call_runtime); in Generate()
6605 __ Branch(&allocated); in Generate()
6619 __ bind(&string_add_flat_result); in Generate()
6621 __ lw(t0, FieldMemOperand(a0, HeapObject::kMapOffset)); in Generate()
6622 __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset)); in Generate()
6623 __ lbu(t0, FieldMemOperand(t0, Map::kInstanceTypeOffset)); in Generate()
6624 __ lbu(t1, FieldMemOperand(t1, Map::kInstanceTypeOffset)); in Generate()
6627 __ Xor(t3, t0, Operand(t1)); in Generate()
6628 __ And(t3, t3, Operand(kStringEncodingMask)); in Generate()
6629 __ Branch(&call_runtime, ne, t3, Operand(zero_reg)); in Generate()
6632 __ And(t4, t0, Operand(kStringRepresentationMask)); in Generate()
6636 __ Branch(&skip_first_add, ne, t4, Operand(zero_reg)); in Generate()
6637 __ Branch(USE_DELAY_SLOT, &first_prepared); in Generate()
6638 __ addiu(t3, a0, SeqAsciiString::kHeaderSize - kHeapObjectTag); in Generate()
6639 __ bind(&skip_first_add); in Generate()
6642 __ And(t4, t0, Operand(kShortExternalStringMask)); in Generate()
6643 __ Branch(&call_runtime, ne, t4, Operand(zero_reg)); in Generate()
6644 __ lw(t3, FieldMemOperand(a0, ExternalString::kResourceDataOffset)); in Generate()
6645 __ bind(&first_prepared); in Generate()
6648 __ And(t4, t1, Operand(kStringRepresentationMask)); in Generate()
6651 __ Branch(&skip_second_add, ne, t4, Operand(zero_reg)); in Generate()
6652 __ Branch(USE_DELAY_SLOT, &second_prepared); in Generate()
6653 __ addiu(a1, a1, SeqAsciiString::kHeaderSize - kHeapObjectTag); in Generate()
6654 __ bind(&skip_second_add); in Generate()
6657 __ And(t4, t1, Operand(kShortExternalStringMask)); in Generate()
6658 __ Branch(&call_runtime, ne, t4, Operand(zero_reg)); in Generate()
6659 __ lw(a1, FieldMemOperand(a1, ExternalString::kResourceDataOffset)); in Generate()
6660 __ bind(&second_prepared); in Generate()
6670 __ And(t4, t1, Operand(kStringEncodingMask)); in Generate()
6671 __ Branch(&non_ascii_string_add_flat_result, eq, t4, Operand(zero_reg)); in Generate()
6673 __ AllocateAsciiString(v0, t2, t0, t1, t5, &call_runtime); in Generate()
6674 __ Addu(t2, v0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); in Generate()
6685 __ IncrementCounter(counters->string_add_native(), 1, a2, a3); in Generate()
6686 __ DropAndRet(2); in Generate()
6688 __ bind(&non_ascii_string_add_flat_result); in Generate()
6689 __ AllocateTwoByteString(v0, t2, t0, t1, t5, &call_runtime); in Generate()
6690 __ Addu(t2, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); in Generate()
6701 __ IncrementCounter(counters->string_add_native(), 1, a2, a3); in Generate()
6702 __ DropAndRet(2); in Generate()
6705 __ bind(&call_runtime); in Generate()
6706 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); in Generate()
6709 __ bind(&call_builtin); in Generate()
6710 __ InvokeBuiltin(builtin_id, JUMP_FUNCTION); in Generate()
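
StringAddStub's length policy: empty and two-character fast cases first; then a combined length at or above ConsString::kMinLength (and not exceeding String::kMaxLength) gets a cons cell pointing at both halves, anything shorter is flattened into a fresh sequential string, and over-long results go to the runtime. A sketch of that policy; both constants mirror V8's names but the values are assumptions:

    #include <cstddef>

    constexpr size_t kConsMinLength = 13;          // assumed ConsString::kMinLength
    constexpr size_t kMaxLength = (1u << 28) - 16; // assumed String::kMaxLength

    enum class AddStrategy { kConsCell, kFlatCopy, kRuntime };

    AddStrategy ChooseAddStrategy(size_t left_len, size_t right_len) {
      size_t sum = left_len + right_len;
      if (sum > kMaxLength) return AddStrategy::kRuntime;    // hs check above
      if (sum >= kConsMinLength) return AddStrategy::kConsCell;
      return AddStrategy::kFlatCopy;     // string_add_flat_result
    }
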
6725 __ JumpIfSmi(arg, &not_string); in GenerateConvertArgument()
6726 __ GetObjectType(arg, scratch1, scratch1); in GenerateConvertArgument()
6727 __ Branch(&done, lt, scratch1, Operand(FIRST_NONSTRING_TYPE)); in GenerateConvertArgument()
6731 __ bind(&not_string); in GenerateConvertArgument()
6741 __ mov(arg, scratch1); in GenerateConvertArgument()
6742 __ sw(arg, MemOperand(sp, stack_offset)); in GenerateConvertArgument()
6743 __ jmp(&done); in GenerateConvertArgument()
6746 __ bind(&not_cached); in GenerateConvertArgument()
6747 __ JumpIfSmi(arg, slow); in GenerateConvertArgument()
6748 __ GetObjectType(arg, scratch1, scratch2); // map -> scratch1. in GenerateConvertArgument()
6749 __ Branch(slow, ne, scratch2, Operand(JS_VALUE_TYPE)); in GenerateConvertArgument()
6750 __ lbu(scratch2, FieldMemOperand(scratch1, Map::kBitField2Offset)); in GenerateConvertArgument()
6751 __ li(scratch4, 1 << Map::kStringWrapperSafeForDefaultValueOf); in GenerateConvertArgument()
6752 __ And(scratch2, scratch2, scratch4); in GenerateConvertArgument()
6753 __ Branch(slow, ne, scratch2, Operand(scratch4)); in GenerateConvertArgument()
6754 __ lw(arg, FieldMemOperand(arg, JSValue::kValueOffset)); in GenerateConvertArgument()
6755 __ sw(arg, MemOperand(sp, stack_offset)); in GenerateConvertArgument()
6757 __ bind(&done); in GenerateConvertArgument()
6764 __ Or(a2, a1, a0); in GenerateSmis()
6765 __ JumpIfNotSmi(a2, &miss); in GenerateSmis()
6769 __ Subu(v0, a0, a1); in GenerateSmis()
6772 __ SmiUntag(a1); in GenerateSmis()
6773 __ SmiUntag(a0); in GenerateSmis()
6774 __ Subu(v0, a1, a0); in GenerateSmis()
6776 __ Ret(); in GenerateSmis()
6778 __ bind(&miss); in GenerateSmis()
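
The smi path needs two variants because tagged subtraction can overflow: for equality the raw difference suffices (zero iff equal, overflow harmless), while for ordering both operands are untagged first so the 31-bit subtraction cannot overflow. A sketch:

    #include <cstdint>

    // Smi compare as in ICCompareStub::GenerateSmis. A smi is value << 1.
    int32_t SmiCompareForEquality(int32_t a, int32_t b) {
      return a - b;                  // zero iff equal; overflow is harmless
    }
    int32_t SmiCompareForOrder(int32_t a, int32_t b) {
      return (a >> 1) - (b >> 1);    // untag first: 31-bit values can't overflow
    }
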
6789 __ And(a2, a1, Operand(a0)); in GenerateHeapNumbers()
6790 __ JumpIfSmi(a2, &generic_stub); in GenerateHeapNumbers()
6792 __ GetObjectType(a0, a2, a2); in GenerateHeapNumbers()
6793 __ Branch(&maybe_undefined1, ne, a2, Operand(HEAP_NUMBER_TYPE)); in GenerateHeapNumbers()
6794 __ GetObjectType(a1, a2, a2); in GenerateHeapNumbers()
6795 __ Branch(&maybe_undefined2, ne, a2, Operand(HEAP_NUMBER_TYPE)); in GenerateHeapNumbers()
6803 __ Subu(a2, a1, Operand(kHeapObjectTag)); in GenerateHeapNumbers()
6804 __ ldc1(f0, MemOperand(a2, HeapNumber::kValueOffset)); in GenerateHeapNumbers()
6805 __ Subu(a2, a0, Operand(kHeapObjectTag)); in GenerateHeapNumbers()
6806 __ ldc1(f2, MemOperand(a2, HeapNumber::kValueOffset)); in GenerateHeapNumbers()
6811 __ BranchF(&fpu_eq, &unordered, eq, f0, f2); in GenerateHeapNumbers()
6814 __ BranchF(&fpu_lt, NULL, lt, f0, f2); in GenerateHeapNumbers()
6817 __ li(v0, Operand(GREATER)); in GenerateHeapNumbers()
6818 __ Ret(); in GenerateHeapNumbers()
6820 __ bind(&fpu_eq); in GenerateHeapNumbers()
6821 __ li(v0, Operand(EQUAL)); in GenerateHeapNumbers()
6822 __ Ret(); in GenerateHeapNumbers()
6824 __ bind(&fpu_lt); in GenerateHeapNumbers()
6825 __ li(v0, Operand(LESS)); in GenerateHeapNumbers()
6826 __ Ret(); in GenerateHeapNumbers()
6829 __ bind(&unordered); in GenerateHeapNumbers()
6832 __ bind(&generic_stub); in GenerateHeapNumbers()
6833 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); in GenerateHeapNumbers()
6835 __ bind(&maybe_undefined1); in GenerateHeapNumbers()
6837 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); in GenerateHeapNumbers()
6838 __ Branch(&miss, ne, a0, Operand(at)); in GenerateHeapNumbers()
6839 __ GetObjectType(a1, a2, a2); in GenerateHeapNumbers()
6840 __ Branch(&maybe_undefined2, ne, a2, Operand(HEAP_NUMBER_TYPE)); in GenerateHeapNumbers()
6841 __ jmp(&unordered); in GenerateHeapNumbers()
6844 __ bind(&maybe_undefined2); in GenerateHeapNumbers()
6846 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); in GenerateHeapNumbers()
6847 __ Branch(&unordered, eq, a1, Operand(at)); in GenerateHeapNumbers()
6850 __ bind(&miss); in GenerateHeapNumbers()
6866 __ JumpIfEitherSmi(left, right, &miss); in GenerateSymbols()
6869 __ lw(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateSymbols()
6870 __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateSymbols()
6871 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateSymbols()
6872 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateSymbols()
6874 __ And(tmp1, tmp1, Operand(tmp2)); in GenerateSymbols()
6875 __ And(tmp1, tmp1, kIsSymbolMask); in GenerateSymbols()
6876 __ Branch(&miss, eq, tmp1, Operand(zero_reg)); in GenerateSymbols()
6882 __ mov(v0, right); in GenerateSymbols()
6884 __ Ret(ne, left, Operand(right)); in GenerateSymbols()
6885 __ li(v0, Operand(Smi::FromInt(EQUAL))); in GenerateSymbols()
6886 __ Ret(); in GenerateSymbols()
6888 __ bind(&miss); in GenerateSymbols()
6909 __ JumpIfEitherSmi(left, right, &miss); in GenerateStrings()
6913 __ lw(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateStrings()
6914 __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateStrings()
6915 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateStrings()
6916 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateStrings()
6918 __ Or(tmp3, tmp1, tmp2); in GenerateStrings()
6919 __ And(tmp5, tmp3, Operand(kIsNotStringMask)); in GenerateStrings()
6920 __ Branch(&miss, ne, tmp5, Operand(zero_reg)); in GenerateStrings()
6926 __ Branch(&left_ne_right, ne, left, Operand(right)); in GenerateStrings()
6927 __ Ret(USE_DELAY_SLOT); in GenerateStrings()
6928 __ mov(v0, zero_reg); // In the delay slot. in GenerateStrings()
6929 __ bind(&left_ne_right); in GenerateStrings()
6938 __ And(tmp3, tmp1, Operand(tmp2)); in GenerateStrings()
6939 __ And(tmp5, tmp3, Operand(kIsSymbolMask)); in GenerateStrings()
6941 __ Branch(&is_symbol, eq, tmp5, Operand(zero_reg)); in GenerateStrings()
6945 __ Ret(USE_DELAY_SLOT); in GenerateStrings()
6946 __ mov(v0, a0); // In the delay slot. in GenerateStrings()
6947 __ bind(&is_symbol); in GenerateStrings()
6952 __ JumpIfBothInstanceTypesAreNotSequentialAscii( in GenerateStrings()
6965 __ bind(&runtime); in GenerateStrings()
6966 __ Push(left, right); in GenerateStrings()
6968 __ TailCallRuntime(Runtime::kStringEquals, 2, 1); in GenerateStrings()
6970 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); in GenerateStrings()
6973 __ bind(&miss); in GenerateStrings()
6981 __ And(a2, a1, Operand(a0)); in GenerateObjects()
6982 __ JumpIfSmi(a2, &miss); in GenerateObjects()
6984 __ GetObjectType(a0, a2, a2); in GenerateObjects()
6985 __ Branch(&miss, ne, a2, Operand(JS_OBJECT_TYPE)); in GenerateObjects()
6986 __ GetObjectType(a1, a2, a2); in GenerateObjects()
6987 __ Branch(&miss, ne, a2, Operand(JS_OBJECT_TYPE)); in GenerateObjects()
6990 __ Ret(USE_DELAY_SLOT); in GenerateObjects()
6991 __ subu(v0, a0, a1); in GenerateObjects()
6993 __ bind(&miss); in GenerateObjects()
7000 __ And(a2, a1, a0); in GenerateKnownObjects()
7001 __ JumpIfSmi(a2, &miss); in GenerateKnownObjects()
7002 __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset)); in GenerateKnownObjects()
7003 __ lw(a3, FieldMemOperand(a1, HeapObject::kMapOffset)); in GenerateKnownObjects()
7004 __ Branch(&miss, ne, a2, Operand(known_map_)); in GenerateKnownObjects()
7005 __ Branch(&miss, ne, a3, Operand(known_map_)); in GenerateKnownObjects()
7007 __ Ret(USE_DELAY_SLOT); in GenerateKnownObjects()
7008 __ subu(v0, a0, a1); in GenerateKnownObjects()
7010 __ bind(&miss); in GenerateKnownObjects()
7020 __ Push(a1, a0); in GenerateMiss()
7021 __ push(ra); in GenerateMiss()
7022 __ Push(a1, a0); in GenerateMiss()
7023 __ li(t0, Operand(Smi::FromInt(op_))); in GenerateMiss()
7024 __ addiu(sp, sp, -kPointerSize); in GenerateMiss()
7025 __ CallExternalReference(miss, 3, USE_DELAY_SLOT); in GenerateMiss()
7026 __ sw(t0, MemOperand(sp)); // In the delay slot. in GenerateMiss()
7028 __ Addu(a2, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); in GenerateMiss()
7030 __ Pop(a1, a0, ra); in GenerateMiss()
7032 __ Jump(a2); in GenerateMiss()
7040 __ lw(t9, MemOperand(sp, kCArgsSlotsSize)); in Generate()
7046 __ lw(t0, MemOperand(t9)); in Generate()
7047 __ Assert(ne, "Received invalid return address.", t0, in Generate()
7050 __ Jump(t9); in Generate()
7056 __ li(t9, Operand(function)); in GenerateCall()
7063 __ Move(t9, target); in GenerateCall()
7064 __ AssertStackIsAligned(); in GenerateCall()
7066 __ Subu(sp, sp, kCArgsSlotsSize); in GenerateCall()
7111 __ lw(index, FieldMemOperand(properties, kCapacityOffset)); in GenerateNegativeLookup()
7112 __ Subu(index, index, Operand(1)); in GenerateNegativeLookup()
7113 __ And(index, index, Operand( in GenerateNegativeLookup()
7118 __ sll(at, index, 1); in GenerateNegativeLookup()
7119 __ Addu(index, index, at); in GenerateNegativeLookup()
7125 __ sll(scratch0, index, 1); in GenerateNegativeLookup()
7126 __ Addu(tmp, properties, scratch0); in GenerateNegativeLookup()
7127 __ lw(entity_name, FieldMemOperand(tmp, kElementsStartOffset)); in GenerateNegativeLookup()
7130 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex); in GenerateNegativeLookup()
7131 __ Branch(done, eq, entity_name, Operand(tmp)); in GenerateNegativeLookup()
7135 __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex); in GenerateNegativeLookup()
7138 __ Branch(miss, eq, entity_name, Operand(Handle<String>(name))); in GenerateNegativeLookup()
7141 __ Branch(&the_hole, eq, entity_name, Operand(tmp)); in GenerateNegativeLookup()
7144 __ lw(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset)); in GenerateNegativeLookup()
7145 __ lbu(entity_name, in GenerateNegativeLookup()
7147 __ And(scratch0, entity_name, Operand(kIsSymbolMask)); in GenerateNegativeLookup()
7148 __ Branch(miss, eq, scratch0, Operand(zero_reg)); in GenerateNegativeLookup()
7150 __ bind(&the_hole); in GenerateNegativeLookup()
7153 __ lw(properties, in GenerateNegativeLookup()
7162 __ MultiPush(spill_mask); in GenerateNegativeLookup()
7163 __ lw(a0, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); in GenerateNegativeLookup()
7164 __ li(a1, Operand(Handle<String>(name))); in GenerateNegativeLookup()
7166 __ CallStub(&stub); in GenerateNegativeLookup()
7167 __ mov(at, v0); in GenerateNegativeLookup()
7168 __ MultiPop(spill_mask); in GenerateNegativeLookup()
7170 __ Branch(done, eq, at, Operand(zero_reg)); in GenerateNegativeLookup()
7171 __ Branch(miss, ne, at, Operand(zero_reg)); in GenerateNegativeLookup()
7192 if (FLAG_debug_code) __ AbortIfNotString(name); in GeneratePositiveLookup()
7195 __ lw(scratch1, FieldMemOperand(elements, kCapacityOffset)); in GeneratePositiveLookup()
7196 __ sra(scratch1, scratch1, kSmiTagSize); // convert smi to int in GeneratePositiveLookup()
7197 __ Subu(scratch1, scratch1, Operand(1)); in GeneratePositiveLookup()
7204 __ lw(scratch2, FieldMemOperand(name, String::kHashFieldOffset)); in GeneratePositiveLookup()
7211 __ Addu(scratch2, scratch2, Operand( in GeneratePositiveLookup()
7214 __ srl(scratch2, scratch2, String::kHashShift); in GeneratePositiveLookup()
7215 __ And(scratch2, scratch1, scratch2); in GeneratePositiveLookup()
7221 __ sll(at, scratch2, 1); in GeneratePositiveLookup()
7222 __ Addu(scratch2, scratch2, at); in GeneratePositiveLookup()
7225 __ sll(at, scratch2, 2); in GeneratePositiveLookup()
7226 __ Addu(scratch2, elements, at); in GeneratePositiveLookup()
7227 __ lw(at, FieldMemOperand(scratch2, kElementsStartOffset)); in GeneratePositiveLookup()
7228 __ Branch(done, eq, name, Operand(at)); in GeneratePositiveLookup()
7236 __ MultiPush(spill_mask); in GeneratePositiveLookup()
7239 __ Move(a1, name); in GeneratePositiveLookup()
7240 __ Move(a0, elements); in GeneratePositiveLookup()
7242 __ Move(a0, elements); in GeneratePositiveLookup()
7243 __ Move(a1, name); in GeneratePositiveLookup()
7246 __ CallStub(&stub); in GeneratePositiveLookup()
7247 __ mov(scratch2, a2); in GeneratePositiveLookup()
7248 __ mov(at, v0); in GeneratePositiveLookup()
7249 __ MultiPop(spill_mask); in GeneratePositiveLookup()
7251 __ Branch(done, ne, at, Operand(zero_reg)); in GeneratePositiveLookup()
7252 __ Branch(miss, eq, at, Operand(zero_reg)); in GeneratePositiveLookup()
7279 __ lw(mask, FieldMemOperand(dictionary, kCapacityOffset)); in Generate()
7280 __ sra(mask, mask, kSmiTagSize); in Generate()
7281 __ Subu(mask, mask, Operand(1)); in Generate()
7283 __ lw(hash, FieldMemOperand(key, String::kHashFieldOffset)); in Generate()
7285 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex); in Generate()
7296 __ Addu(index, hash, Operand( in Generate()
7299 __ mov(index, hash); in Generate()
7301 __ srl(index, index, String::kHashShift); in Generate()
7302 __ And(index, mask, index); in Generate()
7307 __ mov(at, index); in Generate()
7308 __ sll(index, index, 1); in Generate()
7309 __ Addu(index, index, at); in Generate()
7313 __ sll(index, index, 2); in Generate()
7314 __ Addu(index, index, dictionary); in Generate()
7315 __ lw(entry_key, FieldMemOperand(index, kElementsStartOffset)); in Generate()
7318 __ Branch(&not_in_dictionary, eq, entry_key, Operand(undefined)); in Generate()
7321 __ Branch(&in_dictionary, eq, entry_key, Operand(key)); in Generate()
7325 __ lw(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset)); in Generate()
7326 __ lbu(entry_key, in Generate()
7328 __ And(result, entry_key, Operand(kIsSymbolMask)); in Generate()
7329 __ Branch(&maybe_in_dictionary, eq, result, Operand(zero_reg)); in Generate()
7333 __ bind(&maybe_in_dictionary); in Generate()
7338 __ Ret(USE_DELAY_SLOT); in Generate()
7339 __ mov(result, zero_reg); in Generate()
7342 __ bind(&in_dictionary); in Generate()
7343 __ Ret(USE_DELAY_SLOT); in Generate()
7344 __ li(result, 1); in Generate()
7346 __ bind(&not_in_dictionary); in Generate()
7347 __ Ret(USE_DELAY_SLOT); in Generate()
7348 __ mov(result, zero_reg); in Generate()
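
The dictionary probe: the mask is capacity - 1, probing starts at the key's hash plus precomputed offsets, the index is scaled by three because each entry is a (key, value, details) triple, and a slot is classified as terminating-undefined, matching key, or neither; after the inlined probe budget the stub reports maybe_in_dictionary and bails out. A sketch with illustrative types (probe count and offsets assumed):

    #include <cstdint>
    #include <string>
    #include <vector>

    // Open-addressed probe over a flattened (key, value, details) array,
    // mirroring StringDictionaryLookupStub::Generate.
    enum class Lookup { kFound, kAbsent, kGiveUp };

    Lookup DictionaryProbe(const std::vector<const std::string*>& elements,
                           uint32_t capacity,  // power of two; 3 slots per entry
                           uint32_t hash, const std::string* key) {
      const uint32_t mask = capacity - 1;
      for (uint32_t i = 0; i < 16; ++i) {            // probe budget (assumed)
        uint32_t index = (hash + i * (i + 1) / 2) & mask;
        const std::string* entry = elements[index * 3];  // entry size is 3 words
        if (entry == nullptr) return Lookup::kAbsent;    // undefined sentinel
        if (entry == key || *entry == *key) return Lookup::kFound;
      }
      return Lookup::kGiveUp;   // maybe_in_dictionary: defer to the miss path
    }
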
7450 __ beq(zero_reg, zero_reg, &skip_to_incremental_noncompacting); in Generate()
7451 __ nop(); in Generate()
7452 __ beq(zero_reg, zero_reg, &skip_to_incremental_compacting); in Generate()
7453 __ nop(); in Generate()
7456 __ RememberedSetHelper(object_, in Generate()
7462 __ Ret(); in Generate()
7464 __ bind(&skip_to_incremental_noncompacting); in Generate()
7467 __ bind(&skip_to_incremental_compacting); in Generate()
7484 __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0)); in GenerateIncremental()
7485 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value. in GenerateIncremental()
7489 __ CheckPageFlag(regs_.object(), in GenerateIncremental()
7501 __ RememberedSetHelper(object_, in GenerateIncremental()
7507 __ bind(&dont_need_remembered_set); in GenerateIncremental()
7514 __ Ret(); in GenerateIncremental()
7521 __ PrepareCallCFunction(argument_count, regs_.scratch0()); in InformIncrementalMarker()
7526 __ Move(address, regs_.address()); in InformIncrementalMarker()
7527 __ Move(a0, regs_.object()); in InformIncrementalMarker()
7529 __ Move(a1, address); in InformIncrementalMarker()
7532 __ lw(a1, MemOperand(address, 0)); in InformIncrementalMarker()
7534 __ li(a2, Operand(ExternalReference::isolate_address())); in InformIncrementalMarker()
7538 __ CallCFunction( in InformIncrementalMarker()
7544 __ CallCFunction( in InformIncrementalMarker()
7563 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black); in CheckNeedsToInformIncrementalMarker()
7567 __ RememberedSetHelper(object_, in CheckNeedsToInformIncrementalMarker()
7573 __ Ret(); in CheckNeedsToInformIncrementalMarker()
7576 __ bind(&on_black); in CheckNeedsToInformIncrementalMarker()
7579 __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0)); in CheckNeedsToInformIncrementalMarker()
7584 __ CheckPageFlag(regs_.scratch0(), // Contains value. in CheckNeedsToInformIncrementalMarker()
7590 __ CheckPageFlag(regs_.object(), in CheckNeedsToInformIncrementalMarker()
7596 __ bind(&ensure_not_white); in CheckNeedsToInformIncrementalMarker()
7601 __ Push(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
7602 __ EnsureNotWhite(regs_.scratch0(), // The value. in CheckNeedsToInformIncrementalMarker()
7607 __ Pop(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
7611 __ RememberedSetHelper(object_, in CheckNeedsToInformIncrementalMarker()
7617 __ Ret(); in CheckNeedsToInformIncrementalMarker()
7620 __ bind(&need_incremental_pop_scratch); in CheckNeedsToInformIncrementalMarker()
7621 __ Pop(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
7623 __ bind(&need_incremental); in CheckNeedsToInformIncrementalMarker()
7644 __ CheckFastElements(a2, t1, &double_elements); in Generate()
7646 __ JumpIfSmi(a0, &smi_element); in Generate()
7647 __ CheckFastSmiOnlyElements(a2, t1, &fast_elements); in Generate()
7651 __ bind(&slow_elements); in Generate()
7653 __ Push(a1, a3, a0); in Generate()
7654 __ lw(t1, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset)); in Generate()
7655 __ lw(t1, FieldMemOperand(t1, JSFunction::kLiteralsOffset)); in Generate()
7656 __ Push(t1, t0); in Generate()
7657 __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1); in Generate()
7660 __ bind(&fast_elements); in Generate()
7661 __ lw(t1, FieldMemOperand(a1, JSObject::kElementsOffset)); in Generate()
7662 __ sll(t2, a3, kPointerSizeLog2 - kSmiTagSize); in Generate()
7663 __ Addu(t2, t1, t2); in Generate()
7664 __ Addu(t2, t2, Operand(FixedArray::kHeaderSize - kHeapObjectTag)); in Generate()
7665 __ sw(a0, MemOperand(t2, 0)); in Generate()
7667 __ RecordWrite(t1, t2, a0, kRAHasNotBeenSaved, kDontSaveFPRegs, in Generate()
7669 __ Ret(USE_DELAY_SLOT); in Generate()
7670 __ mov(v0, a0); in Generate()
7674 __ bind(&smi_element); in Generate()
7675 __ lw(t1, FieldMemOperand(a1, JSObject::kElementsOffset)); in Generate()
7676 __ sll(t2, a3, kPointerSizeLog2 - kSmiTagSize); in Generate()
7677 __ Addu(t2, t1, t2); in Generate()
7678 __ sw(a0, FieldMemOperand(t2, FixedArray::kHeaderSize)); in Generate()
7679 __ Ret(USE_DELAY_SLOT); in Generate()
7680 __ mov(v0, a0); in Generate()
7683 __ bind(&double_elements); in Generate()
7684 __ lw(t1, FieldMemOperand(a1, JSObject::kElementsOffset)); in Generate()
7685 __ StoreNumberToDoubleElements(a0, a3, a1, t1, t2, t3, t5, a2, in Generate()
7687 __ Ret(USE_DELAY_SLOT); in Generate()
7688 __ mov(v0, a0); in Generate()
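
StoreArrayLiteralElementStub dispatches on the receiver's elements kind: smi values can always be stored without a write barrier, non-smi values into fast (tagged) elements take a store plus RecordWrite, double elements go through number-to-double conversion, and everything else (including non-smi values into smi-only elements) falls back to the runtime. The dispatch as a sketch:

    // Dispatch skeleton for StoreArrayLiteralElementStub; the kinds mirror
    // V8's fast/smi-only/double split, the store paths are placeholders.
    enum class ElementsKind { kFastSmiOnly, kFast, kFastDouble, kOther };
    enum class StorePath { kNoBarrier, kWithBarrier, kAsDouble, kRuntime };

    StorePath ChooseStorePath(ElementsKind kind, bool value_is_smi) {
      if (kind == ElementsKind::kFastDouble)
        return StorePath::kAsDouble;            // StoreNumberToDoubleElements
      if (kind == ElementsKind::kFastSmiOnly || kind == ElementsKind::kFast) {
        if (value_is_smi) return StorePath::kNoBarrier;     // smi_element
        if (kind == ElementsKind::kFast)
          return StorePath::kWithBarrier;       // sw + RecordWrite
      }
      return StorePath::kRuntime;               // slow_elements
    }
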
7692 #undef __