Lines Matching full:__

23 #define __ ACCESS_MASM(masm)  macro
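
Every `__`-prefixed match below goes through this macro: `ACCESS_MASM(masm)` conventionally expands to `masm->`, so each listed line is a call on the current MacroAssembler. A minimal sketch of the convention only (V8's real ACCESS_MASM can also carry code-comment bookkeeping in some builds):

```cpp
// Sketch of the convention, not V8's exact definition.
#define ACCESS_MASM(masm) masm->
#define __ ACCESS_MASM(masm)

// With a MacroAssembler* named masm in scope,
//   __ Push(a1);   expands to   masm->Push(a1);
//   __ Ret();      expands to   masm->Ret();
```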
26 __ dsll(t9, a0, kPointerSizeLog2); in Generate()
27 __ Daddu(t9, sp, t9); in Generate()
28 __ sd(a1, MemOperand(t9, 0)); in Generate()
29 __ Push(a1); in Generate()
30 __ Push(a2); in Generate()
31 __ Daddu(a0, a0, 3); in Generate()
32 __ TailCallRuntime(Runtime::kNewArray); in Generate()
61 __ Dsubu(sp, sp, Operand(param_count * kPointerSize)); in GenerateLightweightMiss()
64 __ sd(descriptor.GetRegisterParameter(i), in GenerateLightweightMiss()
67 __ CallExternalReference(miss, param_count); in GenerateLightweightMiss()
70 __ Ret(); in GenerateLightweightMiss()
91 __ Push(scratch, scratch2, scratch3); in Generate()
94 __ ldc1(double_scratch, MemOperand(input_reg, double_offset)); in Generate()
97 __ cfc1(scratch2, FCSR); in Generate()
98 __ ctc1(zero_reg, FCSR); in Generate()
101 __ Trunc_w_d(double_scratch, double_scratch); in Generate()
103 __ mfc1(scratch3, double_scratch); in Generate()
106 __ cfc1(scratch, FCSR); in Generate()
107 __ ctc1(scratch2, FCSR); in Generate()
110 __ And( in Generate()
116 __ Branch(&error, ne, scratch, Operand(zero_reg)); in Generate()
117 __ Move(result_reg, scratch3); in Generate()
118 __ Branch(&done); in Generate()
119 __ bind(&error); in Generate()
126 __ lw(input_low, in Generate()
128 __ lw(input_high, in Generate()
133 __ Ext(result_reg, in Generate()
139 __ Subu(scratch, result_reg, HeapNumber::kExponentMask); in Generate()
140 __ Movz(result_reg, zero_reg, scratch); in Generate()
141 __ Branch(&done, eq, scratch, Operand(zero_reg)); in Generate()
144 __ Subu(result_reg, in Generate()
150 __ Branch(&normal_exponent, le, result_reg, Operand(zero_reg)); in Generate()
151 __ mov(result_reg, zero_reg); in Generate()
152 __ Branch(&done); in Generate()
154 __ bind(&normal_exponent); in Generate()
157 __ Addu(scratch, result_reg, Operand(kShiftBase + HeapNumber::kMantissaBits)); in Generate()
162 __ And(sign, input_high, Operand(HeapNumber::kSignMask)); in Generate()
167 __ Branch(&high_shift_needed, lt, scratch, Operand(32)); in Generate()
168 __ mov(input_high, zero_reg); in Generate()
169 __ Branch(&high_shift_done); in Generate()
170 __ bind(&high_shift_needed); in Generate()
173 __ Or(input_high, in Generate()
179 __ sllv(input_high, input_high, scratch); in Generate()
181 __ bind(&high_shift_done); in Generate()
185 __ li(at, 32); in Generate()
186 __ subu(scratch, at, scratch); in Generate()
187 __ Branch(&pos_shift, ge, scratch, Operand(zero_reg)); in Generate()
190 __ Subu(scratch, zero_reg, scratch); in Generate()
191 __ sllv(input_low, input_low, scratch); in Generate()
192 __ Branch(&shift_done); in Generate()
194 __ bind(&pos_shift); in Generate()
195 __ srlv(input_low, input_low, scratch); in Generate()
197 __ bind(&shift_done); in Generate()
198 __ Or(input_high, input_high, Operand(input_low)); in Generate()
200 __ mov(scratch, sign); in Generate()
203 __ Subu(result_reg, zero_reg, input_high); in Generate()
204 __ Movz(result_reg, input_high, scratch); in Generate()
206 __ bind(&done); in Generate()
208 __ Pop(scratch, scratch2, scratch3); in Generate()
209 __ Ret(); in Generate()
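
The sequence above (lines 91-209) truncates a double to an int32: it first tries the FPU's Trunc_w_d and checks FCSR for an invalid-operation flag, then falls back to extracting the exponent and mantissa words and shifting by hand. The same fallback restated as self-contained C++, wrapping modulo 2^32 as ECMAScript's ToInt32 requires; this is an illustrative sketch of the algorithm, not V8's helper:

```cpp
#include <cstdint>
#include <cstring>

int32_t DoubleToInt32(double value) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof bits);

  int biased = static_cast<int>((bits >> 52) & 0x7FF);
  if (biased == 0x7FF) return 0;   // NaN and infinities truncate to 0
  if (biased == 0) return 0;       // zeros and subnormals: |value| < 1

  // 53-bit mantissa with the implicit leading bit restored; the value is
  // mantissa * 2^shift, so shift says how far to move it.
  uint64_t mantissa = (bits & ((uint64_t{1} << 52) - 1)) | (uint64_t{1} << 52);
  int shift = biased - 1075;       // 1075 = exponent bias 1023 + 52

  uint32_t magnitude;
  if (shift >= 32) {
    magnitude = 0;                 // every mantissa bit lands above bit 31
  } else if (shift >= 0) {
    magnitude = static_cast<uint32_t>(mantissa << shift);
  } else if (shift > -53) {
    magnitude = static_cast<uint32_t>(mantissa >> -shift);  // truncate
  } else {
    magnitude = 0;                 // |value| < 1
  }
  uint32_t result = (bits >> 63) ? 0u - magnitude : magnitude;  // apply sign
  return static_cast<int32_t>(result);  // two's-complement reinterpretation
}
```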
222 __ Branch(&not_identical, ne, a0, Operand(a1)); in EmitIdenticalObjectComparison()
224 __ li(exp_mask_reg, Operand(HeapNumber::kExponentMask)); in EmitIdenticalObjectComparison()
230 __ GetObjectType(a0, t0, t0); in EmitIdenticalObjectComparison()
233 __ Branch(slow, greater, t0, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitIdenticalObjectComparison()
235 __ Branch(slow, eq, t0, Operand(SYMBOL_TYPE)); in EmitIdenticalObjectComparison()
237 __ Branch(&heap_number, eq, t0, Operand(HEAP_NUMBER_TYPE)); in EmitIdenticalObjectComparison()
240 __ Branch(slow, greater, t0, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitIdenticalObjectComparison()
242 __ Branch(slow, eq, t0, Operand(SYMBOL_TYPE)); in EmitIdenticalObjectComparison()
247 __ Branch(&return_equal, ne, t0, Operand(ODDBALL_TYPE)); in EmitIdenticalObjectComparison()
248 __ LoadRoot(a6, Heap::kUndefinedValueRootIndex); in EmitIdenticalObjectComparison()
249 __ Branch(&return_equal, ne, a0, Operand(a6)); in EmitIdenticalObjectComparison()
251 __ Ret(USE_DELAY_SLOT); in EmitIdenticalObjectComparison()
254 __ li(v0, Operand(GREATER)); in EmitIdenticalObjectComparison()
257 __ li(v0, Operand(LESS)); in EmitIdenticalObjectComparison()
263 __ bind(&return_equal); in EmitIdenticalObjectComparison()
265 __ Ret(USE_DELAY_SLOT); in EmitIdenticalObjectComparison()
267 __ li(v0, Operand(GREATER)); // Things aren't less than themselves. in EmitIdenticalObjectComparison()
269 __ li(v0, Operand(LESS)); // Things aren't greater than themselves. in EmitIdenticalObjectComparison()
271 __ mov(v0, zero_reg); // Things are <=, >=, ==, === themselves. in EmitIdenticalObjectComparison()
277 __ bind(&heap_number); in EmitIdenticalObjectComparison()
284 __ lwu(a6, FieldMemOperand(a0, HeapNumber::kExponentOffset)); in EmitIdenticalObjectComparison()
286 __ And(a7, a6, Operand(exp_mask_reg)); in EmitIdenticalObjectComparison()
288 __ Branch(&return_equal, ne, a7, Operand(exp_mask_reg)); in EmitIdenticalObjectComparison()
291 __ sll(a6, a6, HeapNumber::kNonMantissaBitsInTopWord); in EmitIdenticalObjectComparison()
293 __ lwu(a7, FieldMemOperand(a0, HeapNumber::kMantissaOffset)); in EmitIdenticalObjectComparison()
294 __ Or(v0, a7, Operand(a6)); in EmitIdenticalObjectComparison()
301 __ Ret(eq, v0, Operand(zero_reg)); in EmitIdenticalObjectComparison()
303 __ Ret(USE_DELAY_SLOT); in EmitIdenticalObjectComparison()
305 __ li(v0, Operand(GREATER)); // NaN <= NaN should fail. in EmitIdenticalObjectComparison()
307 __ li(v0, Operand(LESS)); // NaN >= NaN should fail. in EmitIdenticalObjectComparison()
313 __ bind(&not_identical); in EmitIdenticalObjectComparison()
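
In the heap-number case above, equality of a value with itself still has to fail for NaN, so the stub checks the raw bits: an exponent field of all ones plus a nonzero mantissa means NaN, while all ones with a zero mantissa is an infinity, which does equal itself. The stub splits the test across the 32-bit exponent and mantissa words; the 64-bit equivalent in portable C++:

```cpp
#include <cstdint>
#include <cstring>

bool IsNaNBits(double value) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof bits);
  const uint64_t kExponentMask = 0x7FF0000000000000ull;
  const uint64_t kMantissaMask = 0x000FFFFFFFFFFFFFull;
  // NaN: exponent all ones and at least one mantissa bit set.
  return (bits & kExponentMask) == kExponentMask &&
         (bits & kMantissaMask) != 0;
}
```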
327 __ JumpIfSmi(lhs, &lhs_is_smi); in EmitSmiNonsmiComparison()
330 __ GetObjectType(lhs, t0, t0); in EmitSmiNonsmiComparison()
334 __ Ret(USE_DELAY_SLOT, ne, t0, Operand(HEAP_NUMBER_TYPE)); in EmitSmiNonsmiComparison()
335 __ mov(v0, lhs); in EmitSmiNonsmiComparison()
339 __ Branch(slow, ne, t0, Operand(HEAP_NUMBER_TYPE)); in EmitSmiNonsmiComparison()
343 __ SmiUntag(at, rhs); in EmitSmiNonsmiComparison()
344 __ mtc1(at, f14); in EmitSmiNonsmiComparison()
345 __ cvt_d_w(f14, f14); in EmitSmiNonsmiComparison()
346 __ ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset)); in EmitSmiNonsmiComparison()
349 __ jmp(both_loaded_as_doubles); in EmitSmiNonsmiComparison()
351 __ bind(&lhs_is_smi); in EmitSmiNonsmiComparison()
353 __ GetObjectType(rhs, t0, t0); in EmitSmiNonsmiComparison()
357 __ Ret(USE_DELAY_SLOT, ne, t0, Operand(HEAP_NUMBER_TYPE)); in EmitSmiNonsmiComparison()
358 __ li(v0, Operand(1)); in EmitSmiNonsmiComparison()
362 __ Branch(slow, ne, t0, Operand(HEAP_NUMBER_TYPE)); in EmitSmiNonsmiComparison()
367 __ SmiUntag(at, lhs); in EmitSmiNonsmiComparison()
368 __ mtc1(at, f12); in EmitSmiNonsmiComparison()
369 __ cvt_d_w(f12, f12); in EmitSmiNonsmiComparison()
370 __ ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in EmitSmiNonsmiComparison()
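
Both branches above end the same way: untag the smi side (`SmiUntag`), move it to an FPU register (`mtc1`), and widen to double (`cvt_d_w`) so the comparison can run on f12/f14. Assuming V8's 64-bit smi encoding, where the 32-bit payload sits in the upper half of the tagged word, the conversion is just:

```cpp
#include <cstdint>

// Illustrative helpers assuming the 64-bit smi layout described above.
int32_t SmiUntag(int64_t tagged) {
  return static_cast<int32_t>(tagged >> 32);  // arithmetic shift right by 32
}

double SmiToDouble(int64_t tagged) {
  // What mtc1 + cvt_d_w compute: int32 -> double, always exact.
  return static_cast<double>(SmiUntag(tagged));
}
```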
385 __ GetObjectType(lhs, a2, a2); in EmitStrictTwoHeapObjectCompare()
386 __ Branch(&first_non_object, less, a2, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitStrictTwoHeapObjectCompare()
390 __ bind(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
391 __ Ret(USE_DELAY_SLOT); in EmitStrictTwoHeapObjectCompare()
392 __ li(v0, Operand(1)); in EmitStrictTwoHeapObjectCompare()
394 __ bind(&first_non_object); in EmitStrictTwoHeapObjectCompare()
396 __ Branch(&return_not_equal, eq, a2, Operand(ODDBALL_TYPE)); in EmitStrictTwoHeapObjectCompare()
398 __ GetObjectType(rhs, a3, a3); in EmitStrictTwoHeapObjectCompare()
399 __ Branch(&return_not_equal, greater, a3, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitStrictTwoHeapObjectCompare()
402 __ Branch(&return_not_equal, eq, a3, Operand(ODDBALL_TYPE)); in EmitStrictTwoHeapObjectCompare()
407 __ Or(a2, a2, Operand(a3)); in EmitStrictTwoHeapObjectCompare()
408 __ And(at, a2, Operand(kIsNotStringMask | kIsNotInternalizedMask)); in EmitStrictTwoHeapObjectCompare()
409 __ Branch(&return_not_equal, eq, at, Operand(zero_reg)); in EmitStrictTwoHeapObjectCompare()
419 __ GetObjectType(lhs, a3, a2); in EmitCheckForTwoHeapNumbers()
420 __ Branch(not_heap_numbers, ne, a2, Operand(HEAP_NUMBER_TYPE)); in EmitCheckForTwoHeapNumbers()
421 __ ld(a2, FieldMemOperand(rhs, HeapObject::kMapOffset)); in EmitCheckForTwoHeapNumbers()
423 __ Branch(slow, ne, a3, Operand(a2)); in EmitCheckForTwoHeapNumbers()
427 __ ldc1(f12, FieldMemOperand(lhs, HeapNumber::kValueOffset)); in EmitCheckForTwoHeapNumbers()
428 __ ldc1(f14, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in EmitCheckForTwoHeapNumbers()
430 __ jmp(both_loaded_as_doubles); in EmitCheckForTwoHeapNumbers()
445 __ And(at, a2, Operand(kIsNotStringMask)); in EmitCheckForInternalizedStringsOrObjects()
446 __ Branch(&object_test, ne, at, Operand(zero_reg)); in EmitCheckForInternalizedStringsOrObjects()
447 __ And(at, a2, Operand(kIsNotInternalizedMask)); in EmitCheckForInternalizedStringsOrObjects()
448 __ Branch(possible_strings, ne, at, Operand(zero_reg)); in EmitCheckForInternalizedStringsOrObjects()
449 __ GetObjectType(rhs, a3, a3); in EmitCheckForInternalizedStringsOrObjects()
450 __ Branch(runtime_call, ge, a3, Operand(FIRST_NONSTRING_TYPE)); in EmitCheckForInternalizedStringsOrObjects()
451 __ And(at, a3, Operand(kIsNotInternalizedMask)); in EmitCheckForInternalizedStringsOrObjects()
452 __ Branch(possible_strings, ne, at, Operand(zero_reg)); in EmitCheckForInternalizedStringsOrObjects()
457 __ Ret(USE_DELAY_SLOT); in EmitCheckForInternalizedStringsOrObjects()
458 __ mov(v0, a0); // In delay slot. in EmitCheckForInternalizedStringsOrObjects()
460 __ bind(&object_test); in EmitCheckForInternalizedStringsOrObjects()
461 __ ld(a2, FieldMemOperand(lhs, HeapObject::kMapOffset)); in EmitCheckForInternalizedStringsOrObjects()
462 __ ld(a3, FieldMemOperand(rhs, HeapObject::kMapOffset)); in EmitCheckForInternalizedStringsOrObjects()
463 __ lbu(t0, FieldMemOperand(a2, Map::kBitFieldOffset)); in EmitCheckForInternalizedStringsOrObjects()
464 __ lbu(t1, FieldMemOperand(a3, Map::kBitFieldOffset)); in EmitCheckForInternalizedStringsOrObjects()
465 __ And(at, t0, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
466 __ Branch(&undetectable, ne, at, Operand(zero_reg)); in EmitCheckForInternalizedStringsOrObjects()
467 __ And(at, t1, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
468 __ Branch(&return_unequal, ne, at, Operand(zero_reg)); in EmitCheckForInternalizedStringsOrObjects()
470 __ GetInstanceType(a2, a2); in EmitCheckForInternalizedStringsOrObjects()
471 __ Branch(runtime_call, lt, a2, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitCheckForInternalizedStringsOrObjects()
472 __ GetInstanceType(a3, a3); in EmitCheckForInternalizedStringsOrObjects()
473 __ Branch(runtime_call, lt, a3, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitCheckForInternalizedStringsOrObjects()
475 __ bind(&return_unequal); in EmitCheckForInternalizedStringsOrObjects()
477 __ Ret(USE_DELAY_SLOT); in EmitCheckForInternalizedStringsOrObjects()
478 __ mov(v0, a0); // In delay slot. in EmitCheckForInternalizedStringsOrObjects()
480 __ bind(&undetectable); in EmitCheckForInternalizedStringsOrObjects()
481 __ And(at, t1, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
482 __ Branch(&return_unequal, eq, at, Operand(zero_reg)); in EmitCheckForInternalizedStringsOrObjects()
487 __ GetInstanceType(a2, a2); in EmitCheckForInternalizedStringsOrObjects()
488 __ Branch(&return_equal, eq, a2, Operand(ODDBALL_TYPE)); in EmitCheckForInternalizedStringsOrObjects()
489 __ GetInstanceType(a3, a3); in EmitCheckForInternalizedStringsOrObjects()
490 __ Branch(&return_unequal, ne, a3, Operand(ODDBALL_TYPE)); in EmitCheckForInternalizedStringsOrObjects()
492 __ bind(&return_equal); in EmitCheckForInternalizedStringsOrObjects()
493 __ Ret(USE_DELAY_SLOT); in EmitCheckForInternalizedStringsOrObjects()
494 __ li(v0, Operand(EQUAL)); // In delay slot. in EmitCheckForInternalizedStringsOrObjects()
504 __ JumpIfNotSmi(input, fail); in CompareICStub_CheckInputType()
506 __ JumpIfSmi(input, &ok); in CompareICStub_CheckInputType()
507 __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail, in CompareICStub_CheckInputType()
512 __ bind(&ok); in CompareICStub_CheckInputType()
532 __ Or(a2, a1, a0); in GenerateGeneric()
533 __ JumpIfNotSmi(a2, &not_two_smis); in GenerateGeneric()
534 __ SmiUntag(a1); in GenerateGeneric()
535 __ SmiUntag(a0); in GenerateGeneric()
537 __ Ret(USE_DELAY_SLOT); in GenerateGeneric()
538 __ dsubu(v0, a1, a0); in GenerateGeneric()
539 __ bind(&not_two_smis); in GenerateGeneric()
552 __ And(a6, lhs, Operand(rhs)); in GenerateGeneric()
553 __ JumpIfNotSmi(a6, &not_smis, a4); in GenerateGeneric()
565 __ bind(&both_loaded_as_doubles); in GenerateGeneric()
571 __ li(a4, Operand(LESS)); in GenerateGeneric()
572 __ li(a5, Operand(GREATER)); in GenerateGeneric()
573 __ li(a6, Operand(EQUAL)); in GenerateGeneric()
576 __ BranchF(NULL, &nan, eq, f12, f14); in GenerateGeneric()
581 __ c(OLT, D, f12, f14); in GenerateGeneric()
582 __ Movt(v0, a4); in GenerateGeneric()
586 __ Movf(v0, a5); in GenerateGeneric()
589 __ c(EQ, D, f12, f14); in GenerateGeneric()
590 __ Movt(v0, a6); in GenerateGeneric()
593 __ BranchF(USE_DELAY_SLOT, &skip, NULL, lt, f12, f14); in GenerateGeneric()
594 __ mov(v0, a4); // Return LESS as result. in GenerateGeneric()
596 __ BranchF(USE_DELAY_SLOT, &skip, NULL, eq, f12, f14); in GenerateGeneric()
597 __ mov(v0, a6); // Return EQUAL as result. in GenerateGeneric()
599 __ mov(v0, a5); // Return GREATER as result. in GenerateGeneric()
600 __ bind(&skip); in GenerateGeneric()
602 __ Ret(); in GenerateGeneric()
604 __ bind(&nan); in GenerateGeneric()
608 __ Ret(USE_DELAY_SLOT); in GenerateGeneric()
610 __ li(v0, Operand(GREATER)); in GenerateGeneric()
612 __ li(v0, Operand(LESS)); in GenerateGeneric()
616 __ bind(&not_smis); in GenerateGeneric()
639 __ bind(&check_for_internalized_strings); in GenerateGeneric()
651 __ bind(&flat_string_check); in GenerateGeneric()
653 __ JumpIfNonSmisNotBothSequentialOneByteStrings(lhs, rhs, a2, a3, &slow); in GenerateGeneric()
655 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, a2, in GenerateGeneric()
665 __ bind(&slow); in GenerateGeneric()
669 __ Push(cp); in GenerateGeneric()
670 __ Call(strict() ? isolate()->builtins()->StrictEqual() in GenerateGeneric()
673 __ Pop(cp); in GenerateGeneric()
677 __ LoadRoot(a0, Heap::kTrueValueRootIndex); in GenerateGeneric()
678 __ Ret(USE_DELAY_SLOT); in GenerateGeneric()
679 __ subu(v0, v0, a0); // In delay slot. in GenerateGeneric()
683 __ Push(lhs, rhs); in GenerateGeneric()
691 __ li(a0, Operand(Smi::FromInt(ncr))); in GenerateGeneric()
692 __ push(a0); in GenerateGeneric()
696 __ TailCallRuntime(Runtime::kCompare); in GenerateGeneric()
699 __ bind(&miss); in GenerateGeneric()
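
The fast path at the top of GenerateGeneric (lines 532-538) handles two smis without touching the FPU: OR the operands and test the tag bit to confirm both are smis, then return the untagged difference, whose sign encodes LESS/EQUAL/GREATER. A sketch under the same 64-bit smi assumptions as above:

```cpp
#include <cstdint>

// kSmiTag is 0 and heap pointers carry tag bit 1, so OR-ing the operands
// and testing bit 0 checks both tags at once (the Or + JumpIfNotSmi pair).
bool BothSmi(int64_t a, int64_t b) { return ((a | b) & 1) == 0; }

// With both untagged, the comparison result is simply the difference:
// negative = LESS, zero = EQUAL, positive = GREATER.
int64_t CompareSmis(int64_t a, int64_t b) { return (a >> 32) - (b >> 32); }
```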
705 __ mov(t9, ra); in Generate()
706 __ pop(ra); in Generate()
707 __ PushSafepointRegisters(); in Generate()
708 __ Jump(t9); in Generate()
713 __ mov(t9, ra); in Generate()
714 __ pop(ra); in Generate()
715 __ PopSafepointRegisters(); in Generate()
716 __ Jump(t9); in Generate()
724 __ MultiPush(kJSCallerSaved | ra.bit()); in Generate()
726 __ MultiPushFPU(kCallerSavedFPU); in Generate()
733 __ PrepareCallCFunction(argument_count, fp_argument_count, scratch); in Generate()
734 __ li(a0, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
735 __ CallCFunction( in Generate()
739 __ MultiPopFPU(kCallerSavedFPU); in Generate()
742 __ MultiPop(kJSCallerSaved | ra.bit()); in Generate()
743 __ Ret(); in Generate()
761 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent); in Generate()
763 __ ldc1(double_exponent, in Generate()
770 __ EmitFPUTruncate(kRoundToMinusInf, in Generate()
778 __ Branch(&int_exponent_convert, eq, scratch2, Operand(zero_reg)); in Generate()
780 __ push(ra); in Generate()
783 __ PrepareCallCFunction(0, 2, scratch2); in Generate()
784 __ MovToFloatParameters(double_base, double_exponent); in Generate()
785 __ CallCFunction( in Generate()
789 __ pop(ra); in Generate()
790 __ MovFromFloatResult(double_result); in Generate()
791 __ jmp(&done); in Generate()
793 __ bind(&int_exponent_convert); in Generate()
797 __ bind(&int_exponent); in Generate()
801 __ mov(scratch, exponent); in Generate()
804 __ mov(exponent, scratch); in Generate()
807 __ mov_d(double_scratch, double_base); // Back up base. in Generate()
808 __ Move(double_result, 1.0); in Generate()
812 __ Branch(&positive_exponent, ge, scratch, Operand(zero_reg)); in Generate()
813 __ Dsubu(scratch, zero_reg, scratch); in Generate()
816 __ Branch(&bail_out, gt, zero_reg, Operand(scratch)); in Generate()
817 __ bind(&positive_exponent); in Generate()
818 __ Assert(ge, kUnexpectedNegativeValue, scratch, Operand(zero_reg)); in Generate()
821 __ bind(&while_true); in Generate()
823 __ And(scratch2, scratch, 1); in Generate()
825 __ Branch(&no_carry, eq, scratch2, Operand(zero_reg)); in Generate()
826 __ mul_d(double_result, double_result, double_scratch); in Generate()
827 __ bind(&no_carry); in Generate()
829 __ dsra(scratch, scratch, 1); in Generate()
831 __ Branch(&loop_end, eq, scratch, Operand(zero_reg)); in Generate()
832 __ mul_d(double_scratch, double_scratch, double_scratch); in Generate()
834 __ Branch(&while_true); in Generate()
836 __ bind(&loop_end); in Generate()
838 __ Branch(&done, ge, exponent, Operand(zero_reg)); in Generate()
839 __ Move(double_scratch, 1.0); in Generate()
840 __ div_d(double_result, double_scratch, double_result); in Generate()
843 __ BranchF(&done, NULL, ne, double_result, kDoubleRegZero); in Generate()
847 __ bind(&bail_out); in Generate()
848 __ mtc1(exponent, single_scratch); in Generate()
849 __ cvt_d_w(double_exponent, single_scratch); in Generate()
852 __ push(ra); in Generate()
855 __ PrepareCallCFunction(0, 2, scratch); in Generate()
856 __ MovToFloatParameters(double_base, double_exponent); in Generate()
857 __ CallCFunction(ExternalReference::power_double_double_function(isolate()), in Generate()
860 __ pop(ra); in Generate()
861 __ MovFromFloatResult(double_result); in Generate()
863 __ bind(&done); in Generate()
864 __ Ret(); in Generate()
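
The `&while_true`/`&loop_end` loop above is binary exponentiation: for each exponent bit, square the running base and multiply it into the result when the bit is set; a negative exponent is handled by a final reciprocal, and a zero result bails out to the C library (the stub calls power_double_double_function, line 857). A standalone sketch of the same flow, with std::pow standing in for that fallback:

```cpp
#include <cmath>

double PowInt(double base, int exponent) {
  bool negative = exponent < 0;
  unsigned n = negative ? 0u - static_cast<unsigned>(exponent)
                        : static_cast<unsigned>(exponent);
  double scratch = base;  // holds base^(2^i) for the current bit i
  double result = 1.0;
  while (n != 0) {
    if (n & 1) result *= scratch;  // fold in this bit's power
    n >>= 1;
    if (n != 0) scratch *= scratch;
  }
  if (negative) {
    result = 1.0 / result;
    // Mirrors the stub's final zero check: if the reciprocal underflowed,
    // defer to the C library for a correctly rounded answer.
    if (result == 0.0) return std::pow(base, exponent);
  }
  return result;
}
```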
928 __ mov(s1, a2); in Generate()
931 __ Dlsa(s1, sp, a0, kPointerSizeLog2); in Generate()
932 __ Dsubu(s1, s1, kPointerSize); in Generate()
937 __ EnterExitFrame(save_doubles(), 0, is_builtin_exit() in Generate()
947 __ mov(s0, a0); in Generate()
948 __ mov(s2, a1); in Generate()
953 __ AssertStackIsAligned(); in Generate()
960 __ li(a2, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
961 __ mov(a1, s1); in Generate()
969 __ Dsubu(sp, sp, Operand(result_stack_size)); in Generate()
972 __ li(a3, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
973 __ mov(a2, s1); in Generate()
974 __ mov(a1, a0); in Generate()
975 __ mov(a0, sp); in Generate()
989 __ addiupc(ra, kNumInstructionsToJump + 1); in Generate()
993 __ bal(&find_ra); // bal exposes branch delay slot. in Generate()
994 __ Daddu(ra, ra, kNumInstructionsToJump * Instruction::kInstrSize); in Generate()
996 __ bind(&find_ra); in Generate()
999 __ sd(ra, MemOperand(sp, result_stack_size)); in Generate()
1004 __ mov(t9, s2); // Function pointer to t9 to conform to ABI for PIC. in Generate()
1005 __ jalr(t9); in Generate()
1007 __ daddiu(sp, sp, -kCArgsSlotsSize); in Generate()
1015 __ ld(a0, MemOperand(v0, 2 * kPointerSize)); in Generate()
1016 __ ld(v1, MemOperand(v0, 1 * kPointerSize)); in Generate()
1017 __ ld(v0, MemOperand(v0, 0 * kPointerSize)); in Generate()
1023 __ LoadRoot(a4, Heap::kExceptionRootIndex); in Generate()
1024 __ Branch(&exception_returned, eq, a4, Operand(v0)); in Generate()
1032 __ li(a2, Operand(pending_exception_address)); in Generate()
1033 __ ld(a2, MemOperand(a2)); in Generate()
1034 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex); in Generate()
1036 __ Branch(&okay, eq, a4, Operand(a2)); in Generate()
1037 __ stop("Unexpected pending exception"); in Generate()
1038 __ bind(&okay); in Generate()
1053 __ LeaveExitFrame(save_doubles(), argc, true, EMIT_RETURN); in Generate()
1056 __ bind(&exception_returned); in Generate()
1075 __ PrepareCallCFunction(3, 0, a0); in Generate()
1076 __ mov(a0, zero_reg); in Generate()
1077 __ mov(a1, zero_reg); in Generate()
1078 __ li(a2, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
1079 __ CallCFunction(find_handler, 3); in Generate()
1083 __ li(cp, Operand(pending_handler_context_address)); in Generate()
1084 __ ld(cp, MemOperand(cp)); in Generate()
1085 __ li(sp, Operand(pending_handler_sp_address)); in Generate()
1086 __ ld(sp, MemOperand(sp)); in Generate()
1087 __ li(fp, Operand(pending_handler_fp_address)); in Generate()
1088 __ ld(fp, MemOperand(fp)); in Generate()
1093 __ Branch(&zero, eq, cp, Operand(zero_reg)); in Generate()
1094 __ sd(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); in Generate()
1095 __ bind(&zero); in Generate()
1098 __ li(a1, Operand(pending_handler_code_address)); in Generate()
1099 __ ld(a1, MemOperand(a1)); in Generate()
1100 __ li(a2, Operand(pending_handler_offset_address)); in Generate()
1101 __ ld(a2, MemOperand(a2)); in Generate()
1102 __ Daddu(a1, a1, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1103 __ Daddu(t9, a1, a2); in Generate()
1104 __ Jump(t9); in Generate()
1127 __ MultiPush(kCalleeSaved | ra.bit()); in Generate()
1130 __ MultiPushFPU(kCalleeSavedFPU); in Generate()
1132 __ Move(kDoubleRegZero, 0.0); in Generate()
1135 __ mov(s0, a4); // 5th parameter in mips64 a4 (a4) register. in Generate()
1137 __ InitializeRootRegister(); in Generate()
1140 __ li(a7, Operand(-1)); // Push a bad frame pointer to fail if it is used. in Generate()
1142 __ li(a6, Operand(StackFrame::TypeToMarker(marker))); in Generate()
1143 __ li(a5, Operand(StackFrame::TypeToMarker(marker))); in Generate()
1145 __ li(a4, Operand(c_entry_fp)); in Generate()
1146 __ ld(a4, MemOperand(a4)); in Generate()
1147 __ Push(a7, a6, a5, a4); in Generate()
1149 __ daddiu(fp, sp, -EntryFrameConstants::kCallerFPOffset); in Generate()
1170 __ li(a5, Operand(ExternalReference(js_entry_sp))); in Generate()
1171 __ ld(a6, MemOperand(a5)); in Generate()
1172 __ Branch(&non_outermost_js, ne, a6, Operand(zero_reg)); in Generate()
1173 __ sd(fp, MemOperand(a5)); in Generate()
1174 __ li(a4, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME)); in Generate()
1176 __ b(&cont); in Generate()
1177 __ nop(); // Branch delay slot nop. in Generate()
1178 __ bind(&non_outermost_js); in Generate()
1179 __ li(a4, Operand(StackFrame::INNER_JSENTRY_FRAME)); in Generate()
1180 __ bind(&cont); in Generate()
1181 __ push(a4); in Generate()
1185 __ jmp(&invoke); in Generate()
1186 __ bind(&handler_entry); in Generate()
1192 __ li(a4, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in Generate()
1194 __ sd(v0, MemOperand(a4)); // We come back from 'invoke'. result is in v0. in Generate()
1195 __ LoadRoot(v0, Heap::kExceptionRootIndex); in Generate()
1196 __ b(&exit); // b exposes branch delay slot. in Generate()
1197 __ nop(); // Branch delay slot nop. in Generate()
1200 __ bind(&invoke); in Generate()
1201 __ PushStackHandler(); in Generate()
1228 __ li(a4, Operand(construct_entry)); in Generate()
1231 __ li(a4, Operand(entry)); in Generate()
1233 __ ld(t9, MemOperand(a4)); // Deref address. in Generate()
1235 __ daddiu(t9, t9, Code::kHeaderSize - kHeapObjectTag); in Generate()
1236 __ Call(t9); in Generate()
1239 __ PopStackHandler(); in Generate()
1241 __ bind(&exit); // v0 holds result in Generate()
1244 __ pop(a5); in Generate()
1245 __ Branch(&non_outermost_js_2, ne, a5, in Generate()
1247 __ li(a5, Operand(ExternalReference(js_entry_sp))); in Generate()
1248 __ sd(zero_reg, MemOperand(a5)); in Generate()
1249 __ bind(&non_outermost_js_2); in Generate()
1252 __ pop(a5); in Generate()
1253 __ li(a4, Operand(ExternalReference(Isolate::kCEntryFPAddress, in Generate()
1255 __ sd(a5, MemOperand(a4)); in Generate()
1258 __ daddiu(sp, sp, -EntryFrameConstants::kCallerFPOffset); in Generate()
1261 __ MultiPopFPU(kCalleeSavedFPU); in Generate()
1264 __ MultiPop(kCalleeSaved | ra.bit()); in Generate()
1266 __ Jump(ra); in Generate()
1274 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
1305 __ li(a0, Operand(address_of_regexp_stack_memory_size)); in Generate()
1306 __ ld(a0, MemOperand(a0, 0)); in Generate()
1307 __ Branch(&runtime, eq, a0, Operand(zero_reg)); in Generate()
1310 __ ld(a0, MemOperand(sp, kJSRegExpOffset)); in Generate()
1312 __ JumpIfSmi(a0, &runtime); in Generate()
1313 __ GetObjectType(a0, a1, a1); in Generate()
1314 __ Branch(&runtime, ne, a1, Operand(JS_REGEXP_TYPE)); in Generate()
1317 __ ld(regexp_data, FieldMemOperand(a0, JSRegExp::kDataOffset)); in Generate()
1319 __ SmiTst(regexp_data, a4); in Generate()
1320 __ Check(nz, in Generate()
1324 __ GetObjectType(regexp_data, a0, a0); in Generate()
1325 __ Check(eq, in Generate()
1333 __ ld(a0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset)); in Generate()
1334 __ Branch(&runtime, ne, a0, Operand(Smi::FromInt(JSRegExp::IRREGEXP))); in Generate()
1338 __ ld(a2, in Generate()
1346 __ Branch(&runtime, hi, a2, Operand(Smi::FromInt(temp))); in Generate()
1349 __ mov(t0, zero_reg); in Generate()
1350 __ ld(subject, MemOperand(sp, kSubjectOffset)); in Generate()
1351 __ JumpIfSmi(subject, &runtime); in Generate()
1352 __ mov(a3, subject); // Make a copy of the original subject string. in Generate()
1379 __ bind(&check_underlying); in Generate()
1380 __ ld(a2, FieldMemOperand(subject, HeapObject::kMapOffset)); in Generate()
1381 __ lbu(a0, FieldMemOperand(a2, Map::kInstanceTypeOffset)); in Generate()
1384 __ And(a1, in Generate()
1390 __ Branch(&seq_string, eq, a1, Operand(zero_reg)); // Go to (4). in Generate()
1399 __ Branch(&not_seq_nor_cons, ge, a1, Operand(kExternalStringTag)); in Generate()
1403 __ ld(a0, FieldMemOperand(subject, ConsString::kSecondOffset)); in Generate()
1404 __ LoadRoot(a1, Heap::kempty_stringRootIndex); in Generate()
1405 __ Branch(&runtime, ne, a0, Operand(a1)); in Generate()
1406 __ ld(subject, FieldMemOperand(subject, ConsString::kFirstOffset)); in Generate()
1407 __ jmp(&check_underlying); in Generate()
1410 __ bind(&seq_string); in Generate()
1416 __ ld(a1, MemOperand(sp, kPreviousIndexOffset)); in Generate()
1417 __ JumpIfNotSmi(a1, &runtime); in Generate()
1418 __ ld(a3, FieldMemOperand(a3, String::kLengthOffset)); in Generate()
1419 __ Branch(&runtime, ls, a3, Operand(a1)); in Generate()
1420 __ SmiUntag(a1); in Generate()
1425 __ And(a0, a0, Operand(kStringEncodingMask)); // Non-zero for one_byte. in Generate()
1426 __ ld(t9, FieldMemOperand(regexp_data, JSRegExp::kDataOneByteCodeOffset)); in Generate()
1427 __ dsra(a3, a0, 3); // a3 is 1 for one_byte, 0 for UC16 (used below). in Generate()
1428 __ ld(a5, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset)); in Generate()
1429 __ Movz(t9, a5, a0); // If UC16 (a0 is 0), replace t9 w/kDataUC16CodeOffset. in Generate()
1436 __ JumpIfSmi(t9, &runtime); in Generate()
1444 __ IncrementCounter(isolate()->counters()->regexp_entry_native(), in Generate()
1450 __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters); in Generate()
1471 __ li(a0, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
1472 __ sd(a0, MemOperand(sp, 1 * kPointerSize)); in Generate()
1475 __ li(a7, Operand(1)); in Generate()
1478 __ li(a0, Operand(address_of_regexp_stack_memory_address)); in Generate()
1479 __ ld(a0, MemOperand(a0, 0)); in Generate()
1480 __ li(a2, Operand(address_of_regexp_stack_memory_size)); in Generate()
1481 __ ld(a2, MemOperand(a2, 0)); in Generate()
1482 __ daddu(a6, a0, a2); in Generate()
1486 __ mov(a5, zero_reg); in Generate()
1489 __ li( in Generate()
1495 __ Daddu(t2, subject, Operand(SeqString::kHeaderSize - kHeapObjectTag)); in Generate()
1496 __ Xor(a3, a3, Operand(1)); // 1 for 2-byte str, 0 for 1-byte. in Generate()
1501 __ ld(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize)); in Generate()
1506 __ dsllv(t1, t0, a3); in Generate()
1507 __ daddu(t0, t2, t1); in Generate()
1508 __ dsllv(t1, a1, a3); in Generate()
1509 __ daddu(a2, t0, t1); in Generate()
1511 __ ld(t2, FieldMemOperand(subject, String::kLengthOffset)); in Generate()
1513 __ SmiUntag(t2); in Generate()
1514 __ dsllv(t1, t2, a3); in Generate()
1515 __ daddu(a3, t0, t1); in Generate()
1520 __ mov(a0, subject); in Generate()
1523 __ Daddu(t9, t9, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1527 __ LeaveExitFrame(false, no_reg, true); in Generate()
1535 __ Branch(&success, eq, v0, Operand(1)); in Generate()
1539 __ Branch(&failure, eq, v0, Operand(NativeRegExpMacroAssembler::FAILURE)); in Generate()
1541 __ Branch(&runtime, ne, v0, Operand(NativeRegExpMacroAssembler::EXCEPTION)); in Generate()
1546 __ li(a1, Operand(isolate()->factory()->the_hole_value())); in Generate()
1547 __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in Generate()
1549 __ ld(v0, MemOperand(a2, 0)); in Generate()
1550 __ Branch(&runtime, eq, v0, Operand(a1)); in Generate()
1553 __ TailCallRuntime(Runtime::kRegExpExecReThrow); in Generate()
1555 __ bind(&failure); in Generate()
1557 __ li(v0, Operand(isolate()->factory()->null_value())); in Generate()
1558 __ DropAndRet(4); in Generate()
1561 __ bind(&success); in Generate()
1563 __ lw(a1, UntagSmiFieldMemOperand( in Generate()
1566 __ Daddu(a1, a1, Operand(1)); in Generate()
1567 __ dsll(a1, a1, 1); // Multiply by 2. in Generate()
1570 __ ld(last_match_info_elements, MemOperand(sp, kLastMatchInfoOffset)); in Generate()
1571 __ JumpIfSmi(last_match_info_elements, &runtime); in Generate()
1573 __ ld(a0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset)); in Generate()
1574 __ LoadRoot(at, Heap::kFixedArrayMapRootIndex); in Generate()
1575 __ Branch(&runtime, ne, a0, Operand(at)); in Generate()
1578 __ ld(a0, in Generate()
1580 __ Daddu(a2, a1, Operand(RegExpMatchInfo::kLastMatchOverhead)); in Generate()
1582 __ SmiUntag(at, a0); in Generate()
1583 __ Branch(&runtime, gt, a2, Operand(at)); in Generate()
1588 __ SmiTag(a2, a1); // To smi. in Generate()
1589 __ sd(a2, FieldMemOperand(last_match_info_elements, in Generate()
1592 __ sd(subject, FieldMemOperand(last_match_info_elements, in Generate()
1594 __ mov(a2, subject); in Generate()
1595 __ RecordWriteField(last_match_info_elements, in Generate()
1598 __ mov(subject, a2); in Generate()
1599 __ sd(subject, FieldMemOperand(last_match_info_elements, in Generate()
1601 __ RecordWriteField(last_match_info_elements, in Generate()
1608 __ li(a2, Operand(address_of_static_offsets_vector)); in Generate()
1615 __ Daddu(a0, last_match_info_elements, in Generate()
1617 __ bind(&next_capture); in Generate()
1618 __ Dsubu(a1, a1, Operand(1)); in Generate()
1619 __ Branch(&done, lt, a1, Operand(zero_reg)); in Generate()
1621 __ lw(a3, MemOperand(a2, 0)); in Generate()
1622 __ daddiu(a2, a2, kIntSize); in Generate()
1624 __ SmiTag(a3); in Generate()
1625 __ sd(a3, MemOperand(a0, 0)); in Generate()
1626 __ Branch(&next_capture, USE_DELAY_SLOT); in Generate()
1627 __ daddiu(a0, a0, kPointerSize); // In branch delay slot. in Generate()
1629 __ bind(&done); in Generate()
1632 __ mov(v0, last_match_info_elements); in Generate()
1633 __ DropAndRet(4); in Generate()
1636 __ bind(&runtime); in Generate()
1637 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
1641 __ bind(&not_seq_nor_cons); in Generate()
1643 __ Branch(&not_long_external, gt, a1, Operand(kExternalStringTag)); in Generate()
1646 __ bind(&external_string); in Generate()
1647 __ ld(a0, FieldMemOperand(subject, HeapObject::kMapOffset)); in Generate()
1648 __ lbu(a0, FieldMemOperand(a0, Map::kInstanceTypeOffset)); in Generate()
1652 __ And(at, a0, Operand(kIsIndirectStringMask)); in Generate()
1653 __ Assert(eq, in Generate()
1658 __ ld(subject, in Generate()
1662 __ Dsubu(subject, in Generate()
1665 __ jmp(&seq_string); // Go to (4). in Generate()
1668 __ bind(&not_long_external); in Generate()
1670 __ And(at, a1, Operand(kIsNotStringMask | kShortExternalStringMask)); in Generate()
1671 __ Branch(&runtime, ne, at, Operand(zero_reg)); in Generate()
1675 __ Branch(&thin_string, eq, a1, Operand(kThinStringTag)); in Generate()
1677 __ ld(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset)); in Generate()
1678 __ SmiUntag(t0); in Generate()
1679 __ ld(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); in Generate()
1680 __ jmp(&check_underlying); // Go to (1). in Generate()
1682 __ bind(&thin_string); in Generate()
1683 __ ld(subject, FieldMemOperand(subject, ThinString::kActualOffset)); in Generate()
1684 __ jmp(&check_underlying); // Go to (1). in Generate()
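
The `&next_capture` loop above (lines 1617-1627) moves match results from the C++ side back into JavaScript: it reads untagged int32 offsets out of the static offsets vector and stores each one into the last-match-info array as a smi. Restated under the same 64-bit smi assumptions as earlier, minus the register juggling:

```cpp
#include <cstdint>

// Illustrative: tag an int32 as a 64-bit smi (payload in the upper half).
int64_t SmiTag(int32_t value) { return static_cast<int64_t>(value) << 32; }

// Each int-sized capture offset becomes one pointer-sized smi slot.
void CopyCaptures(const int32_t* offsets, int64_t* match_info, int count) {
  for (int i = 0; i < count; i++) {
    match_info[i] = SmiTag(offsets[i]);
  }
}
```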
1702 __ SmiTag(a0); in CallStubInRecordCallTarget()
1703 __ MultiPush(kSavedRegs); in CallStubInRecordCallTarget()
1705 __ CallStub(stub); in CallStubInRecordCallTarget()
1707 __ MultiPop(kSavedRegs); in CallStubInRecordCallTarget()
1708 __ SmiUntag(a0); in CallStubInRecordCallTarget()
1728 __ dsrl(a5, a3, 32 - kPointerSizeLog2); in GenerateRecordCallTarget()
1729 __ Daddu(a5, a2, Operand(a5)); in GenerateRecordCallTarget()
1730 __ ld(a5, FieldMemOperand(a5, FixedArray::kHeaderSize)); in GenerateRecordCallTarget()
1739 __ ld(weak_value, FieldMemOperand(a5, WeakCell::kValueOffset)); in GenerateRecordCallTarget()
1740 __ Branch(&done, eq, a1, Operand(weak_value)); in GenerateRecordCallTarget()
1741 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1742 __ Branch(&done, eq, a5, Operand(at)); in GenerateRecordCallTarget()
1743 __ ld(feedback_map, FieldMemOperand(a5, HeapObject::kMapOffset)); in GenerateRecordCallTarget()
1744 __ LoadRoot(at, Heap::kWeakCellMapRootIndex); in GenerateRecordCallTarget()
1745 __ Branch(&check_allocation_site, ne, feedback_map, Operand(at)); in GenerateRecordCallTarget()
1748 __ JumpIfSmi(weak_value, &initialize); in GenerateRecordCallTarget()
1749 __ jmp(&megamorphic); in GenerateRecordCallTarget()
1751 __ bind(&check_allocation_site); in GenerateRecordCallTarget()
1756 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); in GenerateRecordCallTarget()
1757 __ Branch(&miss, ne, feedback_map, Operand(at)); in GenerateRecordCallTarget()
1760 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, a5); in GenerateRecordCallTarget()
1761 __ Branch(&megamorphic, ne, a1, Operand(a5)); in GenerateRecordCallTarget()
1762 __ jmp(&done); in GenerateRecordCallTarget()
1764 __ bind(&miss); in GenerateRecordCallTarget()
1768 __ LoadRoot(at, Heap::kuninitialized_symbolRootIndex); in GenerateRecordCallTarget()
1769 __ Branch(&initialize, eq, a5, Operand(at)); in GenerateRecordCallTarget()
1772 __ bind(&megamorphic); in GenerateRecordCallTarget()
1773 __ dsrl(a5, a3, 32 - kPointerSizeLog2); in GenerateRecordCallTarget()
1774 __ Daddu(a5, a2, Operand(a5)); in GenerateRecordCallTarget()
1775 __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1776 __ sd(at, FieldMemOperand(a5, FixedArray::kHeaderSize)); in GenerateRecordCallTarget()
1777 __ jmp(&done); in GenerateRecordCallTarget()
1780 __ bind(&initialize); in GenerateRecordCallTarget()
1782 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, a5); in GenerateRecordCallTarget()
1783 __ Branch(&not_array_function, ne, a1, Operand(a5)); in GenerateRecordCallTarget()
1790 __ Branch(&done); in GenerateRecordCallTarget()
1792 __ bind(&not_array_function); in GenerateRecordCallTarget()
1797 __ bind(&done); in GenerateRecordCallTarget()
1800 __ SmiScale(a4, a3, kPointerSizeLog2); in GenerateRecordCallTarget()
1801 __ Daddu(a5, a2, Operand(a4)); in GenerateRecordCallTarget()
1802 __ ld(a4, FieldMemOperand(a5, FixedArray::kHeaderSize + kPointerSize)); in GenerateRecordCallTarget()
1803 __ Daddu(a4, a4, Operand(Smi::FromInt(1))); in GenerateRecordCallTarget()
1804 __ sd(a4, FieldMemOperand(a5, FixedArray::kHeaderSize + kPointerSize)); in GenerateRecordCallTarget()
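
The last four lines above (1800-1804) bump the call count stored one slot past the feedback entry, and they do it without untagging: because a smi's tag bits are zero, adding the tagged constant Smi::FromInt(1) to a tagged count yields the correctly tagged sum. Under the same 64-bit layout:

```cpp
#include <cstdint>

int64_t SmiFromInt(int32_t value) { return static_cast<int64_t>(value) << 32; }

// Tagged + tagged stays tagged, so the count bump is a single add:
// (a << 32) + (b << 32) == (a + b) << 32.
int64_t IncrementCallCount(int64_t tagged_count) {
  return tagged_count + SmiFromInt(1);
}
```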
1816 __ JumpIfSmi(a1, &non_function); in Generate()
1818 __ GetObjectType(a1, a5, a5); in Generate()
1819 __ Branch(&non_function, ne, a5, Operand(JS_FUNCTION_TYPE)); in Generate()
1823 __ dsrl(at, a3, 32 - kPointerSizeLog2); in Generate()
1824 __ Daddu(a5, a2, at); in Generate()
1827 __ ld(a2, FieldMemOperand(a5, FixedArray::kHeaderSize)); in Generate()
1828 __ ld(a5, FieldMemOperand(a2, AllocationSite::kMapOffset)); in Generate()
1829 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); in Generate()
1830 __ Branch(&feedback_register_initialized, eq, a5, Operand(at)); in Generate()
1831 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); in Generate()
1832 __ bind(&feedback_register_initialized); in Generate()
1834 __ AssertUndefinedOrAllocationSite(a2, a5); in Generate()
1837 __ mov(a3, a1); in Generate()
1841 __ ld(a4, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); in Generate()
1842 __ ld(a4, FieldMemOperand(a4, SharedFunctionInfo::kConstructStubOffset)); in Generate()
1843 __ Daddu(at, a4, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1844 __ Jump(at); in Generate()
1846 __ bind(&non_function); in Generate()
1847 __ mov(a3, a1); in Generate()
1848 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate()
1860 __ JumpIfSmi(object_, receiver_not_string_); in GenerateFast()
1863 __ ld(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); in GenerateFast()
1864 __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); in GenerateFast()
1866 __ And(a4, result_, Operand(kIsNotStringMask)); in GenerateFast()
1867 __ Branch(receiver_not_string_, ne, a4, Operand(zero_reg)); in GenerateFast()
1871 __ JumpIfNotSmi(index_, &index_not_smi_); in GenerateFast()
1873 __ bind(&got_smi_index_); in GenerateFast()
1876 __ ld(a4, FieldMemOperand(object_, String::kLengthOffset)); in GenerateFast()
1877 __ Branch(index_out_of_range_, ls, a4, Operand(index_)); in GenerateFast()
1879 __ SmiUntag(index_); in GenerateFast()
1887 __ SmiTag(result_); in GenerateFast()
1888 __ bind(&exit_); in GenerateFast()
1894 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); in GenerateSlow()
1897 __ bind(&index_not_smi_); in GenerateSlow()
1899 __ CheckMap(index_, in GenerateSlow()
1907 __ Push(LoadWithVectorDescriptor::VectorRegister(), in GenerateSlow()
1910 __ Push(object_, index_); in GenerateSlow()
1912 __ CallRuntime(Runtime::kNumberToSmi); in GenerateSlow()
1917 __ Move(index_, v0); in GenerateSlow()
1919 __ Pop(LoadWithVectorDescriptor::VectorRegister(), in GenerateSlow()
1922 __ pop(object_); in GenerateSlow()
1925 __ ld(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); in GenerateSlow()
1926 __ lbu(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); in GenerateSlow()
1929 __ JumpIfNotSmi(index_, index_out_of_range_); in GenerateSlow()
1931 __ Branch(&got_smi_index_); in GenerateSlow()
1936 __ bind(&call_runtime_); in GenerateSlow()
1938 __ SmiTag(index_); in GenerateSlow()
1939 __ Push(object_, index_); in GenerateSlow()
1940 __ CallRuntime(Runtime::kStringCharCodeAtRT); in GenerateSlow()
1942 __ Move(result_, v0); in GenerateSlow()
1945 __ jmp(&exit_); in GenerateSlow()
1947 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); in GenerateSlow()
1957 __ ld(length, FieldMemOperand(left, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
1958 __ ld(scratch2, FieldMemOperand(right, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
1959 __ Branch(&check_zero_length, eq, length, Operand(scratch2)); in GenerateFlatOneByteStringEquals()
1960 __ bind(&strings_not_equal); in GenerateFlatOneByteStringEquals()
1962 __ li(v0, Operand(Smi::FromInt(NOT_EQUAL))); in GenerateFlatOneByteStringEquals()
1963 __ Ret(); in GenerateFlatOneByteStringEquals()
1967 __ bind(&check_zero_length); in GenerateFlatOneByteStringEquals()
1969 __ Branch(&compare_chars, ne, length, Operand(zero_reg)); in GenerateFlatOneByteStringEquals()
1971 __ Ret(USE_DELAY_SLOT); in GenerateFlatOneByteStringEquals()
1972 __ li(v0, Operand(Smi::FromInt(EQUAL))); in GenerateFlatOneByteStringEquals()
1975 __ bind(&compare_chars); in GenerateFlatOneByteStringEquals()
1981 __ Ret(USE_DELAY_SLOT); in GenerateFlatOneByteStringEquals()
1982 __ li(v0, Operand(Smi::FromInt(EQUAL))); in GenerateFlatOneByteStringEquals()
1991 __ ld(scratch1, FieldMemOperand(left, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
1992 __ ld(scratch2, FieldMemOperand(right, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
1993 __ Dsubu(scratch3, scratch1, Operand(scratch2)); in GenerateCompareFlatOneByteStrings()
1995 __ slt(scratch4, scratch2, scratch1); in GenerateCompareFlatOneByteStrings()
1996 __ Movn(scratch1, scratch2, scratch4); in GenerateCompareFlatOneByteStrings()
1999 __ Branch(&compare_lengths, eq, min_length, Operand(zero_reg)); in GenerateCompareFlatOneByteStrings()
2006 __ bind(&compare_lengths); in GenerateCompareFlatOneByteStrings()
2009 __ mov(scratch2, length_delta); in GenerateCompareFlatOneByteStrings()
2010 __ mov(scratch4, zero_reg); in GenerateCompareFlatOneByteStrings()
2011 __ mov(v0, zero_reg); in GenerateCompareFlatOneByteStrings()
2013 __ bind(&result_not_equal); in GenerateCompareFlatOneByteStrings()
2017 __ Branch(&ret, eq, scratch2, Operand(scratch4)); in GenerateCompareFlatOneByteStrings()
2018 __ li(v0, Operand(Smi::FromInt(GREATER))); in GenerateCompareFlatOneByteStrings()
2019 __ Branch(&ret, gt, scratch2, Operand(scratch4)); in GenerateCompareFlatOneByteStrings()
2020 __ li(v0, Operand(Smi::FromInt(LESS))); in GenerateCompareFlatOneByteStrings()
2021 __ bind(&ret); in GenerateCompareFlatOneByteStrings()
2022 __ Ret(); in GenerateCompareFlatOneByteStrings()
2033 __ SmiUntag(length); in GenerateOneByteCharsCompareLoop()
2034 __ Daddu(scratch1, length, in GenerateOneByteCharsCompareLoop()
2036 __ Daddu(left, left, Operand(scratch1)); in GenerateOneByteCharsCompareLoop()
2037 __ Daddu(right, right, Operand(scratch1)); in GenerateOneByteCharsCompareLoop()
2038 __ Dsubu(length, zero_reg, length); in GenerateOneByteCharsCompareLoop()
2044 __ bind(&loop); in GenerateOneByteCharsCompareLoop()
2045 __ Daddu(scratch3, left, index); in GenerateOneByteCharsCompareLoop()
2046 __ lbu(scratch1, MemOperand(scratch3)); in GenerateOneByteCharsCompareLoop()
2047 __ Daddu(scratch3, right, index); in GenerateOneByteCharsCompareLoop()
2048 __ lbu(scratch2, MemOperand(scratch3)); in GenerateOneByteCharsCompareLoop()
2049 __ Branch(chars_not_equal, ne, scratch1, Operand(scratch2)); in GenerateOneByteCharsCompareLoop()
2050 __ Daddu(index, index, 1); in GenerateOneByteCharsCompareLoop()
2051 __ Branch(&loop, ne, index, Operand(zero_reg)); in GenerateOneByteCharsCompareLoop()
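
GenerateCompareFlatOneByteStrings above compares the shared prefix first and falls back to the length difference, and its character loop uses a negative-index trick: both pointers are advanced past the end and the index counts up toward zero. The same contract in plain C++ with an ordinary forward loop, returning -1/0/+1 in place of the LESS/EQUAL/GREATER smis:

```cpp
#include <cstddef>
#include <cstdint>

int CompareFlatOneByte(const uint8_t* left, size_t left_len,
                       const uint8_t* right, size_t right_len) {
  size_t min_length = left_len < right_len ? left_len : right_len;
  for (size_t i = 0; i < min_length; i++) {
    if (left[i] != right[i]) return left[i] < right[i] ? -1 : 1;
  }
  // Shared prefix equal: the length delta decides, as in &compare_lengths.
  if (left_len == right_len) return 0;
  return left_len < right_len ? -1 : 1;
}
```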
2065 __ li(a2, isolate()->factory()->undefined_value()); in Generate()
2069 __ And(at, a2, Operand(kSmiTagMask)); in Generate()
2070 __ Assert(ne, kExpectedAllocationSite, at, Operand(zero_reg)); in Generate()
2071 __ ld(a4, FieldMemOperand(a2, HeapObject::kMapOffset)); in Generate()
2072 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); in Generate()
2073 __ Assert(eq, kExpectedAllocationSite, a4, Operand(at)); in Generate()
2079 __ TailCallStub(&stub); in Generate()
2087 __ CheckMap(a1, a2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); in GenerateBooleans()
2088 __ CheckMap(a0, a3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); in GenerateBooleans()
2090 __ ld(a1, FieldMemOperand(a1, Oddball::kToNumberOffset)); in GenerateBooleans()
2091 __ AssertSmi(a1); in GenerateBooleans()
2092 __ ld(a0, FieldMemOperand(a0, Oddball::kToNumberOffset)); in GenerateBooleans()
2093 __ AssertSmi(a0); in GenerateBooleans()
2095 __ Ret(USE_DELAY_SLOT); in GenerateBooleans()
2096 __ Dsubu(v0, a1, a0); in GenerateBooleans()
2098 __ bind(&miss); in GenerateBooleans()
2106 __ Or(a2, a1, a0); in GenerateSmis()
2107 __ JumpIfNotSmi(a2, &miss); in GenerateSmis()
2111 __ Ret(USE_DELAY_SLOT); in GenerateSmis()
2112 __ Dsubu(v0, a0, a1); in GenerateSmis()
2115 __ SmiUntag(a1); in GenerateSmis()
2116 __ SmiUntag(a0); in GenerateSmis()
2117 __ Ret(USE_DELAY_SLOT); in GenerateSmis()
2118 __ Dsubu(v0, a1, a0); in GenerateSmis()
2121 __ bind(&miss); in GenerateSmis()
2134 __ JumpIfNotSmi(a1, &miss); in GenerateNumbers()
2137 __ JumpIfNotSmi(a0, &miss); in GenerateNumbers()
2144 __ JumpIfSmi(a0, &right_smi); in GenerateNumbers()
2145 __ CheckMap(a0, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined1, in GenerateNumbers()
2147 __ Dsubu(a2, a0, Operand(kHeapObjectTag)); in GenerateNumbers()
2148 __ ldc1(f2, MemOperand(a2, HeapNumber::kValueOffset)); in GenerateNumbers()
2149 __ Branch(&left); in GenerateNumbers()
2150 __ bind(&right_smi); in GenerateNumbers()
2151 __ SmiUntag(a2, a0); // Can't clobber a0 yet. in GenerateNumbers()
2153 __ mtc1(a2, single_scratch); in GenerateNumbers()
2154 __ cvt_d_w(f2, single_scratch); in GenerateNumbers()
2156 __ bind(&left); in GenerateNumbers()
2157 __ JumpIfSmi(a1, &left_smi); in GenerateNumbers()
2158 __ CheckMap(a1, a2, Heap::kHeapNumberMapRootIndex, &maybe_undefined2, in GenerateNumbers()
2160 __ Dsubu(a2, a1, Operand(kHeapObjectTag)); in GenerateNumbers()
2161 __ ldc1(f0, MemOperand(a2, HeapNumber::kValueOffset)); in GenerateNumbers()
2162 __ Branch(&done); in GenerateNumbers()
2163 __ bind(&left_smi); in GenerateNumbers()
2164 __ SmiUntag(a2, a1); // Can't clobber a1 yet. in GenerateNumbers()
2166 __ mtc1(a2, single_scratch); in GenerateNumbers()
2167 __ cvt_d_w(f0, single_scratch); in GenerateNumbers()
2169 __ bind(&done); in GenerateNumbers()
2174 __ BranchF(&fpu_eq, &unordered, eq, f0, f2); in GenerateNumbers()
2177 __ BranchF(&fpu_lt, NULL, lt, f0, f2); in GenerateNumbers()
2181 __ Ret(USE_DELAY_SLOT); in GenerateNumbers()
2182 __ li(v0, Operand(GREATER)); in GenerateNumbers()
2184 __ bind(&fpu_eq); in GenerateNumbers()
2185 __ Ret(USE_DELAY_SLOT); in GenerateNumbers()
2186 __ li(v0, Operand(EQUAL)); in GenerateNumbers()
2188 __ bind(&fpu_lt); in GenerateNumbers()
2189 __ Ret(USE_DELAY_SLOT); in GenerateNumbers()
2190 __ li(v0, Operand(LESS)); in GenerateNumbers()
2192 __ bind(&unordered); in GenerateNumbers()
2193 __ bind(&generic_stub); in GenerateNumbers()
2196 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); in GenerateNumbers()
2198 __ bind(&maybe_undefined1); in GenerateNumbers()
2200 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); in GenerateNumbers()
2201 __ Branch(&miss, ne, a0, Operand(at)); in GenerateNumbers()
2202 __ JumpIfSmi(a1, &unordered); in GenerateNumbers()
2203 __ GetObjectType(a1, a2, a2); in GenerateNumbers()
2204 __ Branch(&maybe_undefined2, ne, a2, Operand(HEAP_NUMBER_TYPE)); in GenerateNumbers()
2205 __ jmp(&unordered); in GenerateNumbers()
2208 __ bind(&maybe_undefined2); in GenerateNumbers()
2210 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); in GenerateNumbers()
2211 __ Branch(&unordered, eq, a1, Operand(at)); in GenerateNumbers()
2214 __ bind(&miss); in GenerateNumbers()
2230 __ JumpIfEitherSmi(left, right, &miss); in GenerateInternalizedStrings()
2233 __ ld(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2234 __ ld(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2235 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2236 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2238 __ Or(tmp1, tmp1, Operand(tmp2)); in GenerateInternalizedStrings()
2239 __ And(at, tmp1, Operand(kIsNotStringMask | kIsNotInternalizedMask)); in GenerateInternalizedStrings()
2240 __ Branch(&miss, ne, at, Operand(zero_reg)); in GenerateInternalizedStrings()
2247 __ mov(v0, right); in GenerateInternalizedStrings()
2249 __ Ret(ne, left, Operand(right)); in GenerateInternalizedStrings()
2251 __ Ret(USE_DELAY_SLOT); in GenerateInternalizedStrings()
2252 __ li(v0, Operand(Smi::FromInt(EQUAL))); in GenerateInternalizedStrings()
2254 __ bind(&miss); in GenerateInternalizedStrings()
2271 __ JumpIfEitherSmi(left, right, &miss); in GenerateUniqueNames()
2275 __ ld(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateUniqueNames()
2276 __ ld(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateUniqueNames()
2277 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2278 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2280 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss); in GenerateUniqueNames()
2281 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss); in GenerateUniqueNames()
2284 __ mov(v0, a0); in GenerateUniqueNames()
2288 __ Branch(&done, ne, left, Operand(right)); in GenerateUniqueNames()
2294 __ li(v0, Operand(Smi::FromInt(EQUAL))); in GenerateUniqueNames()
2295 __ bind(&done); in GenerateUniqueNames()
2296 __ Ret(); in GenerateUniqueNames()
2298 __ bind(&miss); in GenerateUniqueNames()
2319 __ JumpIfEitherSmi(left, right, &miss); in GenerateStrings()
2323 __ ld(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateStrings()
2324 __ ld(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateStrings()
2325 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateStrings()
2326 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateStrings()
2328 __ Or(tmp3, tmp1, tmp2); in GenerateStrings()
2329 __ And(tmp5, tmp3, Operand(kIsNotStringMask)); in GenerateStrings()
2330 __ Branch(&miss, ne, tmp5, Operand(zero_reg)); in GenerateStrings()
2336 __ Branch(&left_ne_right, ne, left, Operand(right)); in GenerateStrings()
2337 __ Ret(USE_DELAY_SLOT); in GenerateStrings()
2338 __ mov(v0, zero_reg); // In the delay slot. in GenerateStrings()
2339 __ bind(&left_ne_right); in GenerateStrings()
2349 __ Or(tmp3, tmp1, Operand(tmp2)); in GenerateStrings()
2350 __ And(tmp5, tmp3, Operand(kIsNotInternalizedMask)); in GenerateStrings()
2352 __ Branch(&is_symbol, ne, tmp5, Operand(zero_reg)); in GenerateStrings()
2356 __ Ret(USE_DELAY_SLOT); in GenerateStrings()
2357 __ mov(v0, a0); // In the delay slot. in GenerateStrings()
2358 __ bind(&is_symbol); in GenerateStrings()
2363 __ JumpIfBothInstanceTypesAreNotSequentialOneByte(tmp1, tmp2, tmp3, tmp4, in GenerateStrings()
2376 __ bind(&runtime); in GenerateStrings()
2380 __ Push(left, right); in GenerateStrings()
2381 __ CallRuntime(Runtime::kStringEqual); in GenerateStrings()
2383 __ LoadRoot(a0, Heap::kTrueValueRootIndex); in GenerateStrings()
2384 __ Ret(USE_DELAY_SLOT); in GenerateStrings()
2385 __ Subu(v0, v0, a0); // In delay slot. in GenerateStrings()
2387 __ Push(left, right); in GenerateStrings()
2388 __ TailCallRuntime(Runtime::kStringCompare); in GenerateStrings()
2391 __ bind(&miss); in GenerateStrings()
2399 __ And(a2, a1, Operand(a0)); in GenerateReceivers()
2400 __ JumpIfSmi(a2, &miss); in GenerateReceivers()
2403 __ GetObjectType(a0, a2, a2); in GenerateReceivers()
2404 __ Branch(&miss, lt, a2, Operand(FIRST_JS_RECEIVER_TYPE)); in GenerateReceivers()
2405 __ GetObjectType(a1, a2, a2); in GenerateReceivers()
2406 __ Branch(&miss, lt, a2, Operand(FIRST_JS_RECEIVER_TYPE)); in GenerateReceivers()
2409 __ Ret(USE_DELAY_SLOT); in GenerateReceivers()
2410 __ dsubu(v0, a0, a1); in GenerateReceivers()
2412 __ bind(&miss); in GenerateReceivers()
2420 __ And(a2, a1, a0); in GenerateKnownReceivers()
2421 __ JumpIfSmi(a2, &miss); in GenerateKnownReceivers()
2422 __ GetWeakValue(a4, cell); in GenerateKnownReceivers()
2423 __ ld(a2, FieldMemOperand(a0, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2424 __ ld(a3, FieldMemOperand(a1, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2425 __ Branch(&miss, ne, a2, Operand(a4)); in GenerateKnownReceivers()
2426 __ Branch(&miss, ne, a3, Operand(a4)); in GenerateKnownReceivers()
2429 __ Ret(USE_DELAY_SLOT); in GenerateKnownReceivers()
2430 __ dsubu(v0, a0, a1); in GenerateKnownReceivers()
2433 __ li(a2, Operand(Smi::FromInt(GREATER))); in GenerateKnownReceivers()
2435 __ li(a2, Operand(Smi::FromInt(LESS))); in GenerateKnownReceivers()
2437 __ Push(a1, a0, a2); in GenerateKnownReceivers()
2438 __ TailCallRuntime(Runtime::kCompare); in GenerateKnownReceivers()
2441 __ bind(&miss); in GenerateKnownReceivers()
2450 __ Push(a1, a0); in GenerateMiss()
2451 __ Push(ra, a1, a0); in GenerateMiss()
2452 __ li(a4, Operand(Smi::FromInt(op()))); in GenerateMiss()
2453 __ daddiu(sp, sp, -kPointerSize); in GenerateMiss()
2454 __ CallRuntime(Runtime::kCompareIC_Miss, 3, kDontSaveFPRegs, in GenerateMiss()
2456 __ sd(a4, MemOperand(sp)); // In the delay slot. in GenerateMiss()
2458 __ Daddu(a2, v0, Operand(Code::kHeaderSize - kHeapObjectTag)); in GenerateMiss()
2460 __ Pop(a1, a0, ra); in GenerateMiss()
2462 __ Jump(a2); in GenerateMiss()
2472 __ daddiu(sp, sp, -kCArgsSlotsSize); in Generate()
2475 __ sd(ra, MemOperand(sp, kCArgsSlotsSize)); in Generate()
2476 __ Call(t9); // Call the C++ function. in Generate()
2477 __ ld(t9, MemOperand(sp, kCArgsSlotsSize)); in Generate()
2483 __ Uld(a4, MemOperand(t9)); in Generate()
2484 __ Assert(ne, kReceivedInvalidReturnAddress, a4, in Generate()
2487 __ Jump(t9); in Generate()
2495 __ Move(t9, target); in GenerateCall()
2496 __ li(at, Operand(loc, RelocInfo::CODE_TARGET), CONSTANT_SIZE); in GenerateCall()
2497 __ Call(at); in GenerateCall()
2519 __ SmiLoadUntag(index, FieldMemOperand(properties, kCapacityOffset)); in GenerateNegativeLookup()
2520 __ Dsubu(index, index, Operand(1)); in GenerateNegativeLookup()
2521 __ And(index, index, in GenerateNegativeLookup()
2526 __ Dlsa(index, index, index, 1); // index *= 3. in GenerateNegativeLookup()
2533 __ Dlsa(tmp, properties, index, kPointerSizeLog2); in GenerateNegativeLookup()
2534 __ ld(entity_name, FieldMemOperand(tmp, kElementsStartOffset)); in GenerateNegativeLookup()
2537 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex); in GenerateNegativeLookup()
2538 __ Branch(done, eq, entity_name, Operand(tmp)); in GenerateNegativeLookup()
2541 __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex); in GenerateNegativeLookup()
2544 __ Branch(miss, eq, entity_name, Operand(Handle<Name>(name))); in GenerateNegativeLookup()
2547 __ Branch(&good, eq, entity_name, Operand(tmp)); in GenerateNegativeLookup()
2550 __ ld(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset)); in GenerateNegativeLookup()
2551 __ lbu(entity_name, in GenerateNegativeLookup()
2553 __ JumpIfNotUniqueNameInstanceType(entity_name, miss); in GenerateNegativeLookup()
2554 __ bind(&good); in GenerateNegativeLookup()
2557 __ ld(properties, in GenerateNegativeLookup()
2565 __ MultiPush(spill_mask); in GenerateNegativeLookup()
2566 __ ld(a0, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); in GenerateNegativeLookup()
2567 __ li(a1, Operand(Handle<Name>(name))); in GenerateNegativeLookup()
2569 __ CallStub(&stub); in GenerateNegativeLookup()
2570 __ mov(at, v0); in GenerateNegativeLookup()
2571 __ MultiPop(spill_mask); in GenerateNegativeLookup()
2573 __ Branch(done, eq, at, Operand(zero_reg)); in GenerateNegativeLookup()
2574 __ Branch(miss, ne, at, Operand(zero_reg)); in GenerateNegativeLookup()
2600 __ ld(mask, FieldMemOperand(dictionary, kCapacityOffset)); in Generate()
2601 __ SmiUntag(mask); in Generate()
2602 __ Dsubu(mask, mask, Operand(1)); in Generate()
2604 __ lwu(hash, FieldMemOperand(key, Name::kHashFieldOffset)); in Generate()
2606 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex); in Generate()
2617 __ Daddu(index, hash, Operand( in Generate()
2620 __ mov(index, hash); in Generate()
2622 __ dsrl(index, index, Name::kHashShift); in Generate()
2623 __ And(index, mask, index); in Generate()
2628 __ Dlsa(index, index, index, 1); in Generate()
2631 __ Dlsa(index, dictionary, index, kPointerSizeLog2); in Generate()
2632 __ ld(entry_key, FieldMemOperand(index, kElementsStartOffset)); in Generate()
2635 __ Branch(&not_in_dictionary, eq, entry_key, Operand(undefined)); in Generate()
2638 __ Branch(&in_dictionary, eq, entry_key, Operand(key)); in Generate()
2642 __ ld(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset)); in Generate()
2643 __ lbu(entry_key, in Generate()
2645 __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary); in Generate()
2649 __ bind(&maybe_in_dictionary); in Generate()
2654 __ Ret(USE_DELAY_SLOT); in Generate()
2655 __ mov(result, zero_reg); in Generate()
2658 __ bind(&in_dictionary); in Generate()
2659 __ Ret(USE_DELAY_SLOT); in Generate()
2660 __ li(result, 1); in Generate()
2662 __ bind(&not_in_dictionary); in Generate()
2663 __ Ret(USE_DELAY_SLOT); in Generate()
2664 __ mov(result, zero_reg); in Generate()
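
The lookup above is open addressing over a power-of-two table: mask the hash down to a start slot, probe with precomputed offsets, multiply the slot index by 3 because each dictionary entry spans three words (key, value, details), and stop at the undefined sentinel. A generic sketch of the scheme; the entry layout and the triangular probe sequence are illustrative stand-ins for the stub's constants, and pointer equality models V8's internalized-name identity:

```cpp
#include <cstdint>
#include <vector>

struct Entry {
  const char* key = nullptr;  // nullptr plays the undefined sentinel
  uint32_t hash = 0;
};

// Assumes table.size() is a power of two and the table is never
// completely full, so an absent key always reaches an empty slot.
int Lookup(const std::vector<Entry>& table, const char* key, uint32_t hash) {
  const uint32_t mask = static_cast<uint32_t>(table.size()) - 1;
  uint32_t index = hash & mask;
  for (uint32_t i = 1;; index = (index + i++) & mask) {  // triangular probing
    const Entry& e = table[index];
    if (e.key == nullptr) return -1;  // hit the sentinel: not present
    if (e.hash == hash && e.key == key) return static_cast<int>(index);
  }
}
```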
2692 __ beq(zero_reg, zero_reg, &skip_to_incremental_noncompacting); in Generate()
2693 __ nop(); in Generate()
2694 __ beq(zero_reg, zero_reg, &skip_to_incremental_compacting); in Generate()
2695 __ nop(); in Generate()
2698 __ RememberedSetHelper(object(), in Generate()
2704 __ Ret(); in Generate()
2706 __ bind(&skip_to_incremental_noncompacting); in Generate()
2709 __ bind(&skip_to_incremental_compacting); in Generate()
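RecordWriteStub::Generate opens with two `beq zero_reg, zero_reg` branches, each trailed by a `nop` for the delay slot. These are patch sites: while the stub is in store-buffer-only mode each is rewritten to a never-taken `bne zero_reg, zero_reg` (effectively a nop), so control falls through to the remembered-set path; when incremental marking starts, the matching branch is rewritten back to the always-taken `beq`. The dispatch therefore costs no load or compare at runtime. The same logic as ordinary control flow, with the mode as a variable instead of patched instructions:

    // Placeholder bodies; the real work is in the stub's helpers.
    void GenerateIncremental(bool compacting) {}
    void RememberedSetHelperAndReturn() {}

    enum Mode { STORE_BUFFER_ONLY, INCREMENTAL, INCREMENTAL_COMPACTION };

    void RecordWriteDispatch(Mode mode) {
      switch (mode) {
        case INCREMENTAL:             // first beq live
          GenerateIncremental(false);
          break;
        case INCREMENTAL_COMPACTION:  // second beq live
          GenerateIncremental(true);
          break;
        case STORE_BUFFER_ONLY:       // both patched to branch-never: fall through,
          RememberedSetHelperAndReturn();  // record the slot, then Ret()
          break;
      }
    }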
2726 __ ld(regs_.scratch0(), MemOperand(regs_.address(), 0)); in GenerateIncremental()
2727 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value. in GenerateIncremental()
2731 __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(), in GenerateIncremental()
2740 __ RememberedSetHelper(object(), in GenerateIncremental()
2746 __ bind(&dont_need_remembered_set); in GenerateIncremental()
2753 __ Ret(); in GenerateIncremental()
2760 __ PrepareCallCFunction(argument_count, regs_.scratch0()); in InformIncrementalMarker()
2765 __ Move(address, regs_.address()); in InformIncrementalMarker()
2766 __ Move(a0, regs_.object()); in InformIncrementalMarker()
2767 __ Move(a1, address); in InformIncrementalMarker()
2768 __ li(a2, Operand(ExternalReference::isolate_address(isolate()))); in InformIncrementalMarker()
2771 __ CallCFunction( in InformIncrementalMarker()
2788 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black); in CheckNeedsToInformIncrementalMarker()
2792 __ RememberedSetHelper(object(), in CheckNeedsToInformIncrementalMarker()
2798 __ Ret(); in CheckNeedsToInformIncrementalMarker()
2801 __ bind(&on_black); in CheckNeedsToInformIncrementalMarker()
2804 __ ld(regs_.scratch0(), MemOperand(regs_.address(), 0)); in CheckNeedsToInformIncrementalMarker()
2809 __ CheckPageFlag(regs_.scratch0(), // Contains value. in CheckNeedsToInformIncrementalMarker()
2815 __ CheckPageFlag(regs_.object(), in CheckNeedsToInformIncrementalMarker()
2821 __ bind(&ensure_not_white); in CheckNeedsToInformIncrementalMarker()
2826 __ Push(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
2827 __ JumpIfWhite(regs_.scratch0(), // The value. in CheckNeedsToInformIncrementalMarker()
2832 __ Pop(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
2836 __ RememberedSetHelper(object(), in CheckNeedsToInformIncrementalMarker()
2842 __ Ret(); in CheckNeedsToInformIncrementalMarker()
2845 __ bind(&need_incremental_pop_scratch); in CheckNeedsToInformIncrementalMarker()
2846 __ Pop(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
2848 __ bind(&need_incremental); in CheckNeedsToInformIncrementalMarker()
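The checks above enforce the tri-colour invariant behind incremental marking: a black (fully scanned) object must never point at a white (unscanned) value, or the collector could free something reachable. The stub first skips non-black objects via `JumpIfBlack`, then the `CheckPageFlag` tests filter out stores that need no recording, and finally the `&ensure_not_white` path uses `JumpIfWhite` to decide whether the marker must be told about the value. A minimal self-contained sketch of that core barrier, with colours as a field rather than V8's mark bitmaps:

    #include <cstdint>

    enum class Color : uint8_t { White, Grey, Black };

    struct HeapObject {
      Color color = Color::White;
    };

    // Stand-in for pushing the value onto the marking worklist.
    void MarkGrey(HeapObject* o) { o->color = Color::Grey; }

    // Core of the marking write barrier, run after object->field = value.
    void MarkingBarrier(HeapObject* object, HeapObject* value) {
      if (object->color != Color::Black) return;  // grey/white objects get rescanned anyway
      if (value->color == Color::White) {
        MarkGrey(value);  // the marker will now visit `value` before sweeping
      }
    }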
2856 __ Call(ces.GetCode(), RelocInfo::CODE_TARGET); in Generate()
2859 __ ld(a1, MemOperand(fp, parameter_count_offset)); in Generate()
2861 __ Daddu(a1, a1, Operand(1)); in Generate()
2864 __ dsll(a1, a1, kPointerSizeLog2); in Generate()
2865 __ Ret(USE_DELAY_SLOT); in Generate()
2866 __ Daddu(sp, sp, a1); in Generate()
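The trampoline epilogue above pops a variable-sized argument area: it loads the argument count saved at `parameter_count_offset`, adds one for the receiver, scales by the pointer size (the `dsll` by kPointerSizeLog2), and performs the final `sp` adjustment in the return's delay slot. The arithmetic, spelled out:

    #include <cstdint>

    constexpr uintptr_t kPointerSize = 8;  // mips64

    // New sp after popping `parameter_count` arguments plus the receiver,
    // mirroring the Daddu/dsll/Daddu sequence above.
    uintptr_t PopArgumentsAndReceiver(uintptr_t sp, uintptr_t parameter_count) {
      return sp + (parameter_count + 1) * kPointerSize;
    }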
2872 __ push(ra); in MaybeCallEntryHook()
2873 __ CallStub(&stub); in MaybeCallEntryHook()
2874 __ pop(ra); in MaybeCallEntryHook()
2894 __ MultiPush(kSavedRegs | ra.bit()); in Generate()
2897 __ Dsubu(a0, ra, Operand(kReturnAddressDistanceFromFunctionStart)); in Generate()
2901 __ Daddu(a1, sp, Operand(kNumSavedRegs * kPointerSize)); in Generate()
2906 __ mov(s5, sp); in Generate()
2908 __ And(sp, sp, Operand(-frame_alignment)); in Generate()
2911 __ Dsubu(sp, sp, kCArgsSlotsSize); in Generate()
2915 __ li(t9, Operand(entry_hook)); in Generate()
2920 __ li(a2, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
2923 __ li(t9, Operand(ExternalReference(&dispatcher, in Generate()
2928 __ Call(t9); in Generate()
2932 __ mov(sp, s5); in Generate()
2934 __ Daddu(sp, sp, kCArgsSlotsSize); in Generate()
2938 __ MultiPop(kSavedRegs | ra.bit()); in Generate()
2939 __ Ret(); in Generate()
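ProfileEntryHookStub::Generate shows the standard pattern for calling C from generated code: save the registers the hook may clobber, stash the old `sp` in a callee-saved register (`s5` here), round `sp` down to the ABI alignment, reserve the MIPS argument slots, make the call, and restore `sp` from `s5`. The rounding itself is just a mask; a small sketch (alignment value as a parameter, since it is target-specific):

    #include <cassert>
    #include <cstdint>

    // sp & -frame_alignment, as in the stub. frame_alignment must be a power
    // of two, and the pre-alignment sp must be saved elsewhere (s5 in the
    // stub) because rounding down is not reversible.
    uintptr_t AlignStackDown(uintptr_t sp, uintptr_t frame_alignment) {
      assert((frame_alignment & (frame_alignment - 1)) == 0);
      return sp & ~(frame_alignment - 1);  // identical to sp & -frame_alignment
    }

For example, AlignStackDown(sp, 16) satisfies the 16-byte stack alignment the n64 ABI requires at call sites.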
2948 __ TailCallStub(&stub); in CreateArrayDispatch()
2955 __ TailCallStub(&stub, eq, a3, Operand(kind)); in CreateArrayDispatch()
2959 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatch()
2983 __ And(at, a3, Operand(1)); in CreateArrayDispatchOneArgument()
2984 __ Branch(&normal_sequence, ne, at, Operand(zero_reg)); in CreateArrayDispatchOneArgument()
2987 __ ld(a5, MemOperand(sp, 0)); in CreateArrayDispatchOneArgument()
2988 __ Branch(&normal_sequence, eq, a5, Operand(zero_reg)); in CreateArrayDispatchOneArgument()
2997 __ TailCallStub(&stub_holey); in CreateArrayDispatchOneArgument()
2999 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
3003 __ TailCallStub(&stub); in CreateArrayDispatchOneArgument()
3007 __ Daddu(a3, a3, Operand(1)); in CreateArrayDispatchOneArgument()
3010 __ ld(a5, FieldMemOperand(a2, 0)); in CreateArrayDispatchOneArgument()
3011 __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); in CreateArrayDispatchOneArgument()
3012 __ Assert(eq, kExpectedAllocationSite, a5, Operand(at)); in CreateArrayDispatchOneArgument()
3019 __ ld(a4, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset)); in CreateArrayDispatchOneArgument()
3020 __ Daddu(a4, a4, Operand(Smi::FromInt(kFastElementsKindPackedToHoley))); in CreateArrayDispatchOneArgument()
3021 __ sd(a4, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset)); in CreateArrayDispatchOneArgument()
3024 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
3030 __ TailCallStub(&stub, eq, a3, Operand(kind)); in CreateArrayDispatchOneArgument()
3034 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatchOneArgument()
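CreateArrayDispatchOneArgument first tests the low bit of the elements kind (`And(at, a3, Operand(1))`): holey kinds are odd, so an already-holey kind goes straight to `&normal_sequence`. Otherwise, if the single argument is non-zero, the AllocationSite's transition info is advanced by `kFastElementsKindPackedToHoley`, moving the recorded kind from the packed variant to its holey neighbour with one smi addition. That only works because of how the kinds are numbered; a sketch under that assumed numbering:

    // Packed/holey pairs are adjacent, holey = packed + 1 (assumed numbering,
    // consistent with the stub's "low bit means holey" test).
    enum ElementsKind {
      FAST_SMI_ELEMENTS = 0,
      FAST_HOLEY_SMI_ELEMENTS = 1,
      FAST_ELEMENTS = 2,
      FAST_HOLEY_ELEMENTS = 3,
    };

    constexpr int kFastElementsKindPackedToHoley =
        FAST_HOLEY_SMI_ELEMENTS - FAST_SMI_ELEMENTS;  // == 1

    inline bool IsHoley(ElementsKind kind) { return kind & 1; }

    // Only valid when `kind` is packed, which the stub's low-bit test guarantees.
    inline ElementsKind ToHoley(ElementsKind kind) {
      return static_cast<ElementsKind>(kind + kFastElementsKindPackedToHoley);
    }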
3078 __ And(at, a0, a0); in GenerateDispatchToArrayStub()
3079 __ Branch(&not_zero_case, ne, at, Operand(zero_reg)); in GenerateDispatchToArrayStub()
3082 __ bind(&not_zero_case); in GenerateDispatchToArrayStub()
3083 __ Branch(&not_one_case, gt, a0, Operand(1)); in GenerateDispatchToArrayStub()
3086 __ bind(&not_one_case); in GenerateDispatchToArrayStub()
3088 __ TailCallStub(&stub); in GenerateDispatchToArrayStub()
3106 __ ld(a4, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3108 __ SmiTst(a4, at); in Generate()
3109 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, in Generate()
3111 __ GetObjectType(a4, a4, a5); in Generate()
3112 __ Assert(eq, kUnexpectedInitialMapForArrayFunction, in Generate()
3116 __ AssertUndefinedOrAllocationSite(a2, a4); in Generate()
3120 __ ld(cp, FieldMemOperand(a1, JSFunction::kContextOffset)); in Generate()
3123 __ Branch(&subclassing, ne, a1, Operand(a3)); in Generate()
3127 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); in Generate()
3128 __ Branch(&no_info, eq, a2, Operand(at)); in Generate()
3130 __ ld(a3, FieldMemOperand(a2, AllocationSite::kTransitionInfoOffset)); in Generate()
3131 __ SmiUntag(a3); in Generate()
3133 __ And(a3, a3, Operand(AllocationSite::ElementsKindBits::kMask)); in Generate()
3136 __ bind(&no_info); in Generate()
3140 __ bind(&subclassing); in Generate()
3141 __ Dlsa(at, sp, a0, kPointerSizeLog2); in Generate()
3142 __ sd(a1, MemOperand(at)); in Generate()
3143 __ li(at, Operand(3)); in Generate()
3144 __ Daddu(a0, a0, at); in Generate()
3145 __ Push(a3, a2); in Generate()
3146 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate())); in Generate()
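ArrayConstructorStub::Generate dispatches in three tiers: a subclassing call (new.target in a3 differs from the function in a1) re-pushes its arguments and punts to Runtime::kNewArray; a missing AllocationSite (undefined in a2) dispatches generically with allocation sites disabled; otherwise the elements kind is decoded from the site's transition info (the SmiUntag plus ElementsKindBits mask) and dispatched on. As plain control flow, with placeholder bodies standing in for the stub's tail calls:

    struct JSFunction {};
    struct AllocationSite {};
    enum AllocationSiteOverrideMode { DONT_OVERRIDE, DISABLE_ALLOCATION_SITES };

    // Placeholder tail targets (TailCallStub / JumpToExternalReference in the stub).
    void TailCallRuntimeNewArray() {}
    void DispatchToArrayStub(int elements_kind, AllocationSiteOverrideMode mode) {}
    int DecodeTransitionElementsKind(const AllocationSite* site) { return 0; }  // untag + mask
    int GetInitialFastElementsKind() { return 0; }

    void ArrayConstructor(JSFunction* function, JSFunction* new_target,
                          AllocationSite* site /* nullptr stands for undefined */) {
      if (new_target != function) {  // &subclassing
        TailCallRuntimeNewArray();
        return;
      }
      if (site == nullptr) {         // &no_info
        DispatchToArrayStub(GetInitialFastElementsKind(), DISABLE_ALLOCATION_SITES);
        return;
      }
      DispatchToArrayStub(DecodeTransitionElementsKind(site), DONT_OVERRIDE);
    }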
3154 __ TailCallStub(&stub0, lo, a0, Operand(1)); in GenerateCase()
3157 __ TailCallStub(&stubN, hi, a0, Operand(1)); in GenerateCase()
3162 __ ld(at, MemOperand(sp, 0)); in GenerateCase()
3166 __ TailCallStub(&stub1_holey, ne, at, Operand(zero_reg)); in GenerateCase()
3170 __ TailCallStub(&stub1); in GenerateCase()
3187 __ ld(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3189 __ SmiTst(a3, at); in Generate()
3190 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, in Generate()
3192 __ GetObjectType(a3, a3, a4); in Generate()
3193 __ Assert(eq, kUnexpectedInitialMapForArrayFunction, in Generate()
3198 __ ld(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3202 __ lbu(a3, FieldMemOperand(a3, Map::kBitField2Offset)); in Generate()
3204 __ DecodeField<Map::ElementsKindBits>(a3); in Generate()
3208 __ Branch(&done, eq, a3, Operand(FAST_ELEMENTS)); in Generate()
3209 __ Assert( in Generate()
3212 __ bind(&done); in Generate()
3216 __ Branch(&fast_elements_case, eq, a3, Operand(FAST_ELEMENTS)); in Generate()
3219 __ bind(&fast_elements_case); in Generate()
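InternalArrayConstructorStub reads the elements kind straight from the constructor's initial map: load `Map::kBitField2Offset` with `lbu`, then extract the ElementsKindBits span with `DecodeField`. Decoding a bit field of this shape is a shift and a mask; a sketch with assumed field position and width, not V8's actual constants:

    #include <cstdint>

    // DecodeField<Map::ElementsKindBits> as shift+mask; shift and size here
    // are illustrative assumptions.
    constexpr uint32_t kElementsKindShift = 3;
    constexpr uint32_t kElementsKindSize = 5;

    inline uint32_t DecodeElementsKindField(uint32_t bit_field2) {
      return (bit_field2 >> kElementsKindShift) & ((1u << kElementsKindSize) - 1);
    }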
3251 __ li(t9, Operand(ExternalReference::is_profiling_address(isolate))); in CallApiFunctionAndReturn()
3252 __ lb(t9, MemOperand(t9, 0)); in CallApiFunctionAndReturn()
3253 __ Branch(&profiler_disabled, eq, t9, Operand(zero_reg)); in CallApiFunctionAndReturn()
3256 __ li(t9, Operand(thunk_ref)); in CallApiFunctionAndReturn()
3257 __ jmp(&end_profiler_check); in CallApiFunctionAndReturn()
3259 __ bind(&profiler_disabled); in CallApiFunctionAndReturn()
3260 __ mov(t9, function_address); in CallApiFunctionAndReturn()
3261 __ bind(&end_profiler_check); in CallApiFunctionAndReturn()
3264 __ li(s3, Operand(next_address)); in CallApiFunctionAndReturn()
3265 __ ld(s0, MemOperand(s3, kNextOffset)); in CallApiFunctionAndReturn()
3266 __ ld(s1, MemOperand(s3, kLimitOffset)); in CallApiFunctionAndReturn()
3267 __ lw(s2, MemOperand(s3, kLevelOffset)); in CallApiFunctionAndReturn()
3268 __ Addu(s2, s2, Operand(1)); in CallApiFunctionAndReturn()
3269 __ sw(s2, MemOperand(s3, kLevelOffset)); in CallApiFunctionAndReturn()
3273 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
3274 __ PrepareCallCFunction(1, a0); in CallApiFunctionAndReturn()
3275 __ li(a0, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
3276 __ CallCFunction(ExternalReference::log_enter_external_function(isolate), in CallApiFunctionAndReturn()
3278 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
3289 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
3290 __ PrepareCallCFunction(1, a0); in CallApiFunctionAndReturn()
3291 __ li(a0, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
3292 __ CallCFunction(ExternalReference::log_leave_external_function(isolate), in CallApiFunctionAndReturn()
3294 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
3303 __ ld(v0, return_value_operand); in CallApiFunctionAndReturn()
3304 __ bind(&return_value_loaded); in CallApiFunctionAndReturn()
3308 __ sd(s0, MemOperand(s3, kNextOffset)); in CallApiFunctionAndReturn()
3309 if (__ emit_debug_code()) { in CallApiFunctionAndReturn()
3310 __ lw(a1, MemOperand(s3, kLevelOffset)); in CallApiFunctionAndReturn()
3311 __ Check(eq, kUnexpectedLevelAfterReturnFromApiCall, a1, Operand(s2)); in CallApiFunctionAndReturn()
3313 __ Subu(s2, s2, Operand(1)); in CallApiFunctionAndReturn()
3314 __ sw(s2, MemOperand(s3, kLevelOffset)); in CallApiFunctionAndReturn()
3315 __ ld(at, MemOperand(s3, kLimitOffset)); in CallApiFunctionAndReturn()
3316 __ Branch(&delete_allocated_handles, ne, s1, Operand(at)); in CallApiFunctionAndReturn()
3319 __ bind(&leave_exit_frame); in CallApiFunctionAndReturn()
3323 __ ld(cp, *context_restore_operand); in CallApiFunctionAndReturn()
3327 __ ld(s0, MemOperand(sp, stack_space_offset)); in CallApiFunctionAndReturn()
3329 __ li(s0, Operand(stack_space)); in CallApiFunctionAndReturn()
3331 __ LeaveExitFrame(false, s0, !restore_context, NO_EMIT_RETURN, in CallApiFunctionAndReturn()
3335 __ LoadRoot(a4, Heap::kTheHoleValueRootIndex); in CallApiFunctionAndReturn()
3336 __ li(at, Operand(ExternalReference::scheduled_exception_address(isolate))); in CallApiFunctionAndReturn()
3337 __ ld(a5, MemOperand(at)); in CallApiFunctionAndReturn()
3338 __ Branch(&promote_scheduled_exception, ne, a4, Operand(a5)); in CallApiFunctionAndReturn()
3340 __ Ret(); in CallApiFunctionAndReturn()
3343 __ bind(&promote_scheduled_exception); in CallApiFunctionAndReturn()
3344 __ TailCallRuntime(Runtime::kPromoteScheduledException); in CallApiFunctionAndReturn()
3347 __ bind(&delete_allocated_handles); in CallApiFunctionAndReturn()
3348 __ sd(s1, MemOperand(s3, kLimitOffset)); in CallApiFunctionAndReturn()
3349 __ mov(s0, v0); in CallApiFunctionAndReturn()
3350 __ mov(a0, v0); in CallApiFunctionAndReturn()
3351 __ PrepareCallCFunction(1, s1); in CallApiFunctionAndReturn()
3352 __ li(a0, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
3353 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate), in CallApiFunctionAndReturn()
3355 __ mov(v0, s0); in CallApiFunctionAndReturn()
3356 __ jmp(&leave_exit_frame); in CallApiFunctionAndReturn()
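CallApiFunctionAndReturn brackets the C++ API call with hand-rolled HandleScope bookkeeping: it saves the `next` and `limit` fields and increments `level` before the call; afterwards it restores `next`, decrements `level`, and, if `limit` moved (`&delete_allocated_handles`), restores it and frees the extension blocks via delete_handle_scope_extensions. It also swaps in a profiler thunk for the target when is_profiling is set, and branches to `&promote_scheduled_exception` when the scheduled-exception slot no longer holds the hole. The bookkeeping as a C++ sketch with stand-in types:

    // Stand-in for the isolate's handle-scope data (the kNextOffset /
    // kLimitOffset / kLevelOffset slots loaded through s3 above).
    struct HandleScopeData {
      void** next;   // next free handle slot
      void** limit;  // end of the current handle block
      int level;     // nesting depth (Check'ed in debug code)
    };

    void DeleteHandleScopeExtensions(HandleScopeData* /*data*/) {
      // placeholder: frees blocks allocated beyond the saved limit
    }

    void* CallApiFunctionAndReturn(HandleScopeData* data, void* (*api_call)()) {
      void** saved_next = data->next;    // ld s0, kNextOffset
      void** saved_limit = data->limit;  // ld s1, kLimitOffset
      data->level++;                     // lw/Addu/sw on kLevelOffset

      void* result = api_call();

      data->next = saved_next;           // drop handles created by the callee
      data->level--;
      if (data->limit != saved_limit) {  // &delete_allocated_handles
        data->limit = saved_limit;
        DeleteHandleScopeExtensions(data);
      }
      return result;
    }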
3392 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate()
3395 __ Push(context, callee, call_data); in Generate()
3398 __ ld(context, FieldMemOperand(callee, JSFunction::kContextOffset)); in Generate()
3403 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); in Generate()
3406 __ Push(scratch, scratch); in Generate()
3407 __ li(scratch, Operand(ExternalReference::isolate_address(masm->isolate()))); in Generate()
3409 __ Push(scratch, holder); in Generate()
3412 __ mov(scratch, sp); in Generate()
3419 __ EnterExitFrame(false, kApiStackSpace); in Generate()
3424 __ Daddu(a0, sp, Operand(1 * kPointerSize)); in Generate()
3426 __ sd(scratch, MemOperand(a0, 0 * kPointerSize)); in Generate()
3428 __ Daddu(at, scratch, in Generate()
3430 __ sd(at, MemOperand(a0, 1 * kPointerSize)); in Generate()
3434 __ li(at, Operand(argc())); in Generate()
3435 __ sw(at, MemOperand(a0, 2 * kPointerSize)); in Generate()
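The three stores above fill in a v8::FunctionCallbackInfo-style descriptor in the exit frame: the base of the implicit-argument block just pushed (captured in `scratch` before EnterExitFrame), a pointer to the last explicit argument (arguments grow downwards, so it sits a fixed number of implicit slots plus argc above the base; the exact constant is in the truncated Daddu), and the 32-bit argument count, stored with `sw`. Roughly, with the slot count as an assumed parameter:

    // Illustrative mirror of the fields written at sp+1, sp+2, sp+3 words;
    // the real layout lives in V8's api headers.
    struct FunctionCallbackInfoSketch {
      void** implicit_args;  // base of the pushed block (old sp, saved in scratch)
      void** values;         // address of the last explicit argument
      int length;            // argc (a 32-bit field, hence sw not sd)
    };

    FunctionCallbackInfoSketch MakeFci(void** pushed_block, int implicit_count,
                                       int argc) {
      FunctionCallbackInfoSketch fci;
      fci.implicit_args = pushed_block;
      fci.values = pushed_block + (implicit_count - 1 + argc);  // the truncated Daddu
      fci.length = argc;
      return fci;
    }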
3484 __ Dsubu(sp, sp, (PCA::kArgsLength + 1) * kPointerSize); in Generate()
3485 __ sd(receiver, MemOperand(sp, (PCA::kThisIndex + 1) * kPointerSize)); in Generate()
3486 __ ld(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset)); in Generate()
3487 __ sd(scratch, MemOperand(sp, (PCA::kDataIndex + 1) * kPointerSize)); in Generate()
3488 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); in Generate()
3489 __ sd(scratch, MemOperand(sp, (PCA::kReturnValueOffset + 1) * kPointerSize)); in Generate()
3490 __ sd(scratch, MemOperand(sp, (PCA::kReturnValueDefaultValueIndex + 1) * in Generate()
3492 __ li(scratch, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
3493 __ sd(scratch, MemOperand(sp, (PCA::kIsolateIndex + 1) * kPointerSize)); in Generate()
3494 __ sd(holder, MemOperand(sp, (PCA::kHolderIndex + 1) * kPointerSize)); in Generate()
3497 __ sd(zero_reg, in Generate()
3499 __ ld(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset)); in Generate()
3500 __ sd(scratch, MemOperand(sp, 0 * kPointerSize)); in Generate()
3506 __ mov(a0, sp); // a0 = Handle<Name> in Generate()
3507 __ Daddu(a1, a0, Operand(1 * kPointerSize)); // a1 = v8::PCI::args_ in Generate()
3511 __ EnterExitFrame(false, kApiStackSpace); in Generate()
3515 __ sd(a1, MemOperand(sp, 1 * kPointerSize)); in Generate()
3516 __ Daddu(a1, sp, Operand(1 * kPointerSize)); in Generate()
3522 __ ld(scratch, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset)); in Generate()
3523 __ ld(api_function_address, in Generate()
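CallApiGetterStub lays PropertyCallbackArguments out on the stack by hand: the `Dsubu(sp, ...)` reserves kArgsLength + 1 words, the `PCA::k*Index + 1` stores fill the fields, `zero_reg` goes into the should-throw slot, and the property name lands at sp[0] so that a0 can serve as the Handle<Name> while a1 points one word up at the args_ block (the `mov(a0, sp)` / `Daddu(a1, a0, ...)` pair). As a struct in the order of the stores above; the indices are version-specific, so treat this as illustrative only:

    // Stack picture the getter stub builds, lowest address first.
    struct PropertyCallbackStackArgs {
      void* name;                   // sp[0]: Handle<Name> location, passed as a0
      // ---- PropertyCallbackInfo::args_ (a1) starts here ----
      void* should_throw_on_error;  // the zero_reg store
      void* holder;
      void* isolate;                // ExternalReference::isolate_address
      void* return_value_default;   // undefined
      void* return_value;           // undefined
      void* data;                   // AccessorInfo::kDataOffset
      void* receiver;               // PCA::kThisIndex
    };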
3534 #undef __