Lines Matching full:__ in V8's s390 code stubs (src/s390/code-stubs-s390.cc); each hit shows its source line number, the matched line, and the enclosing function.

24 #define __ ACCESS_MASM(masm)  macro
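
Note: the "__" token being matched is V8's MacroAssembler shorthand. ACCESS_MASM(masm) expands to "masm->", so a hit such as

    __ push(r3);

is read by the compiler as masm->push(r3); every matched line below emits one s390 instruction from inside the named stub-generator function.
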
27 __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2)); in Generate()
28 __ StoreP(r3, MemOperand(sp, r1)); in Generate()
29 __ push(r3); in Generate()
30 __ push(r4); in Generate()
31 __ AddP(r2, r2, Operand(3)); in Generate()
32 __ TailCallRuntime(Runtime::kNewArray); in Generate()
57 __ push(descriptor.GetRegisterParameter(i)); in GenerateLightweightMiss()
59 __ CallExternalReference(miss, param_count); in GenerateLightweightMiss()
62 __ Ret(); in GenerateLightweightMiss()
81 __ push(scratch); in Generate()
87 __ LoadDouble(double_scratch, MemOperand(input_reg, double_offset)); in Generate()
90 __ ConvertDoubleToInt64(double_scratch, in Generate()
98 __ TestIfInt32(result_reg, r0); in Generate()
100 __ TestIfInt32(scratch, result_reg, r0); in Generate()
102 __ beq(&fastpath_done, Label::kNear); in Generate()
105 __ Push(scratch_high, scratch_low); in Generate()
109 __ LoadlW(scratch_high, in Generate()
111 __ LoadlW(scratch_low, in Generate()
114 __ ExtractBitMask(scratch, scratch_high, HeapNumber::kExponentMask); in Generate()
118 __ SubP(scratch, Operand(HeapNumber::kExponentBias + 1)); in Generate()
123 __ CmpP(scratch, Operand(83)); in Generate()
124 __ bge(&out_of_range, Label::kNear); in Generate()
131 __ Load(r0, Operand(51)); in Generate()
132 __ SubP(scratch, r0, scratch); in Generate()
133 __ CmpP(scratch, Operand::Zero()); in Generate()
134 __ ble(&only_low, Label::kNear); in Generate()
137 __ ShiftRight(scratch_low, scratch_low, scratch); in Generate()
141 __ Load(r0, Operand(32)); in Generate()
142 __ SubP(scratch, r0, scratch); in Generate()
143 __ ExtractBitMask(result_reg, scratch_high, HeapNumber::kMantissaMask); in Generate()
146 __ Load(r0, Operand(1 << ((HeapNumber::kMantissaBitsInTopWord)-16))); in Generate()
147 __ ShiftLeftP(r0, r0, Operand(16)); in Generate()
148 __ OrP(result_reg, result_reg, r0); in Generate()
149 __ ShiftLeft(r0, result_reg, scratch); in Generate()
150 __ OrP(result_reg, scratch_low, r0); in Generate()
151 __ b(&negate, Label::kNear); in Generate()
153 __ bind(&out_of_range); in Generate()
154 __ mov(result_reg, Operand::Zero()); in Generate()
155 __ b(&done, Label::kNear); in Generate()
157 __ bind(&only_low); in Generate()
160 __ LoadComplementRR(scratch, scratch); in Generate()
161 __ ShiftLeft(result_reg, scratch_low, scratch); in Generate()
163 __ bind(&negate); in Generate()
170 __ ShiftRightArith(r0, scratch_high, Operand(31)); in Generate()
172 __ lgfr(r0, r0); in Generate()
173 __ ShiftRightP(r0, r0, Operand(32)); in Generate()
175 __ XorP(result_reg, r0); in Generate()
176 __ ShiftRight(r0, scratch_high, Operand(31)); in Generate()
177 __ AddP(result_reg, r0); in Generate()
179 __ bind(&done); in Generate()
180 __ Pop(scratch_high, scratch_low); in Generate()
182 __ bind(&fastpath_done); in Generate()
183 __ pop(scratch); in Generate()
185 __ Ret(); in Generate()
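
The DoubleToIStub lines above (81 through 185) implement ECMAScript ToInt32: when the hardware convert-and-test fast path fails, the stub splits the IEEE-754 double into exponent and mantissa words, shifts the integer bits into the low 32 bits, and negates by the sign. A minimal standalone sketch of the same computation (not V8 code; the function name is made up):

    #include <cstdint>
    #include <cstring>

    // Sketch of the stub's slow path: ECMAScript ToInt32, i.e. truncation
    // modulo 2^32, done with integer bit manipulation.
    int32_t DoubleToInt32Slow(double d) {
      uint64_t bits;
      std::memcpy(&bits, &d, sizeof bits);
      int exponent = static_cast<int>((bits >> 52) & 0x7FF) - 1023;
      // |d| < 1 truncates to 0; exponent >= 84 leaves the low 32 integer
      // bits all zero (this is the stub's out_of_range test against 83).
      if (exponent < 0 || exponent > 83) return 0;
      uint64_t mantissa = (bits & 0xFFFFFFFFFFFFFull) | (1ull << 52);  // implicit 1
      uint32_t low32 = exponent <= 52
                           ? static_cast<uint32_t>(mantissa >> (52 - exponent))
                           : static_cast<uint32_t>(mantissa << (exponent - 52));
      // Apply the sign as the stub does: (x ^ s) - s negates when s is all ones.
      uint32_t sign = (bits >> 63) ? 0xFFFFFFFFu : 0u;
      return static_cast<int32_t>((low32 ^ sign) - sign);
    }
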
195 __ CmpP(r2, r3); in EmitIdenticalObjectComparison()
196 __ bne(&not_identical); in EmitIdenticalObjectComparison()
204 __ CompareObjectType(r2, r6, r6, FIRST_JS_RECEIVER_TYPE); in EmitIdenticalObjectComparison()
205 __ bge(slow); in EmitIdenticalObjectComparison()
207 __ CmpP(r6, Operand(SYMBOL_TYPE)); in EmitIdenticalObjectComparison()
208 __ beq(slow); in EmitIdenticalObjectComparison()
210 __ CompareObjectType(r2, r6, r6, HEAP_NUMBER_TYPE); in EmitIdenticalObjectComparison()
211 __ beq(&heap_number); in EmitIdenticalObjectComparison()
214 __ CmpP(r6, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitIdenticalObjectComparison()
215 __ bge(slow); in EmitIdenticalObjectComparison()
217 __ CmpP(r6, Operand(SYMBOL_TYPE)); in EmitIdenticalObjectComparison()
218 __ beq(slow); in EmitIdenticalObjectComparison()
223 __ CmpP(r6, Operand(ODDBALL_TYPE)); in EmitIdenticalObjectComparison()
224 __ bne(&return_equal); in EmitIdenticalObjectComparison()
225 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); in EmitIdenticalObjectComparison()
226 __ bne(&return_equal); in EmitIdenticalObjectComparison()
229 __ LoadImmP(r2, Operand(GREATER)); in EmitIdenticalObjectComparison()
232 __ LoadImmP(r2, Operand(LESS)); in EmitIdenticalObjectComparison()
234 __ Ret(); in EmitIdenticalObjectComparison()
239 __ bind(&return_equal); in EmitIdenticalObjectComparison()
241 __ LoadImmP(r2, Operand(GREATER)); // Things aren't less than themselves. in EmitIdenticalObjectComparison()
243 __ LoadImmP(r2, Operand(LESS)); // Things aren't greater than themselves. in EmitIdenticalObjectComparison()
245 __ LoadImmP(r2, Operand(EQUAL)); // Things are <=, >=, ==, === themselves in EmitIdenticalObjectComparison()
247 __ Ret(); in EmitIdenticalObjectComparison()
253 __ bind(&heap_number); in EmitIdenticalObjectComparison()
260 __ LoadlW(r4, FieldMemOperand(r2, HeapNumber::kExponentOffset)); in EmitIdenticalObjectComparison()
263 __ ExtractBitMask(r5, r4, HeapNumber::kExponentMask); in EmitIdenticalObjectComparison()
264 __ CmpLogicalP(r5, Operand(0x7ff)); in EmitIdenticalObjectComparison()
265 __ bne(&return_equal); in EmitIdenticalObjectComparison()
268 __ sll(r4, Operand(HeapNumber::kNonMantissaBitsInTopWord)); in EmitIdenticalObjectComparison()
270 __ LoadlW(r5, FieldMemOperand(r2, HeapNumber::kMantissaOffset)); in EmitIdenticalObjectComparison()
271 __ OrP(r2, r5, r4); in EmitIdenticalObjectComparison()
272 __ CmpP(r2, Operand::Zero()); in EmitIdenticalObjectComparison()
279 __ bne(&not_equal, Label::kNear); in EmitIdenticalObjectComparison()
281 __ Ret(); in EmitIdenticalObjectComparison()
282 __ bind(&not_equal); in EmitIdenticalObjectComparison()
284 __ LoadImmP(r2, Operand(GREATER)); // NaN <= NaN should fail. in EmitIdenticalObjectComparison()
286 __ LoadImmP(r2, Operand(LESS)); // NaN >= NaN should fail. in EmitIdenticalObjectComparison()
289 __ Ret(); in EmitIdenticalObjectComparison()
293 __ bind(&not_identical); in EmitIdenticalObjectComparison()
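
EmitIdenticalObjectComparison above relies on the fact that a reference compared against itself can only be unequal when it is a heap number holding NaN, which it detects straight from the number's bits (lines 260 through 271): all-ones exponent plus a nonzero mantissa. A sketch of that test:

    #include <cstdint>

    // Identical operands compare equal unless the heap number holds NaN,
    // in which case every ordered comparison must fail.
    bool IsNaNBits(uint64_t bits) {
      return ((bits >> 52) & 0x7FF) == 0x7FF &&   // exponent all ones
             (bits & 0xFFFFFFFFFFFFFull) != 0;    // nonzero mantissa
    }
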
303 __ JumpIfSmi(rhs, &rhs_is_smi); in EmitSmiNonsmiComparison()
306 __ CompareObjectType(rhs, r5, r6, HEAP_NUMBER_TYPE); in EmitSmiNonsmiComparison()
312 __ beq(&skip, Label::kNear); in EmitSmiNonsmiComparison()
314 __ mov(r2, Operand(NOT_EQUAL)); in EmitSmiNonsmiComparison()
316 __ Ret(); in EmitSmiNonsmiComparison()
317 __ bind(&skip); in EmitSmiNonsmiComparison()
321 __ bne(slow); in EmitSmiNonsmiComparison()
326 __ SmiToDouble(d7, lhs); in EmitSmiNonsmiComparison()
328 __ LoadDouble(d6, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in EmitSmiNonsmiComparison()
332 __ b(lhs_not_nan); in EmitSmiNonsmiComparison()
334 __ bind(&rhs_is_smi); in EmitSmiNonsmiComparison()
336 __ CompareObjectType(lhs, r6, r6, HEAP_NUMBER_TYPE); in EmitSmiNonsmiComparison()
342 __ beq(&skip, Label::kNear); in EmitSmiNonsmiComparison()
344 __ mov(r2, Operand(NOT_EQUAL)); in EmitSmiNonsmiComparison()
346 __ Ret(); in EmitSmiNonsmiComparison()
347 __ bind(&skip); in EmitSmiNonsmiComparison()
351 __ bne(slow); in EmitSmiNonsmiComparison()
356 __ LoadDouble(d7, FieldMemOperand(lhs, HeapNumber::kValueOffset)); in EmitSmiNonsmiComparison()
358 __ SmiToDouble(d6, rhs); in EmitSmiNonsmiComparison()
374 __ CompareObjectType(rhs, r4, r4, FIRST_JS_RECEIVER_TYPE); in EmitStrictTwoHeapObjectCompare()
375 __ blt(&first_non_object, Label::kNear); in EmitStrictTwoHeapObjectCompare()
379 __ bind(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
380 __ Ret(); in EmitStrictTwoHeapObjectCompare()
382 __ bind(&first_non_object); in EmitStrictTwoHeapObjectCompare()
384 __ CmpP(r4, Operand(ODDBALL_TYPE)); in EmitStrictTwoHeapObjectCompare()
385 __ beq(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
387 __ CompareObjectType(lhs, r5, r5, FIRST_JS_RECEIVER_TYPE); in EmitStrictTwoHeapObjectCompare()
388 __ bge(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
391 __ CmpP(r5, Operand(ODDBALL_TYPE)); in EmitStrictTwoHeapObjectCompare()
392 __ beq(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
397 __ OrP(r4, r4, r5); in EmitStrictTwoHeapObjectCompare()
398 __ AndP(r0, r4, Operand(kIsNotStringMask | kIsNotInternalizedMask)); in EmitStrictTwoHeapObjectCompare()
399 __ beq(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
409 __ CompareObjectType(rhs, r5, r4, HEAP_NUMBER_TYPE); in EmitCheckForTwoHeapNumbers()
410 __ bne(not_heap_numbers); in EmitCheckForTwoHeapNumbers()
411 __ LoadP(r4, FieldMemOperand(lhs, HeapObject::kMapOffset)); in EmitCheckForTwoHeapNumbers()
412 __ CmpP(r4, r5); in EmitCheckForTwoHeapNumbers()
413 __ bne(slow); // First was a heap number, second wasn't. Go slow case. in EmitCheckForTwoHeapNumbers()
417 __ LoadDouble(d6, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in EmitCheckForTwoHeapNumbers()
418 __ LoadDouble(d7, FieldMemOperand(lhs, HeapNumber::kValueOffset)); in EmitCheckForTwoHeapNumbers()
420 __ b(both_loaded_as_doubles); in EmitCheckForTwoHeapNumbers()
435 __ mov(r0, Operand(kIsNotStringMask)); in EmitCheckForInternalizedStringsOrObjects()
436 __ AndP(r0, r4); in EmitCheckForInternalizedStringsOrObjects()
437 __ bne(&object_test, Label::kNear); in EmitCheckForInternalizedStringsOrObjects()
438 __ mov(r0, Operand(kIsNotInternalizedMask)); in EmitCheckForInternalizedStringsOrObjects()
439 __ AndP(r0, r4); in EmitCheckForInternalizedStringsOrObjects()
440 __ bne(possible_strings); in EmitCheckForInternalizedStringsOrObjects()
441 __ CompareObjectType(lhs, r5, r5, FIRST_NONSTRING_TYPE); in EmitCheckForInternalizedStringsOrObjects()
442 __ bge(runtime_call); in EmitCheckForInternalizedStringsOrObjects()
443 __ mov(r0, Operand(kIsNotInternalizedMask)); in EmitCheckForInternalizedStringsOrObjects()
444 __ AndP(r0, r5); in EmitCheckForInternalizedStringsOrObjects()
445 __ bne(possible_strings); in EmitCheckForInternalizedStringsOrObjects()
450 __ Ret(); in EmitCheckForInternalizedStringsOrObjects()
452 __ bind(&object_test); in EmitCheckForInternalizedStringsOrObjects()
453 __ LoadP(r4, FieldMemOperand(lhs, HeapObject::kMapOffset)); in EmitCheckForInternalizedStringsOrObjects()
454 __ LoadP(r5, FieldMemOperand(rhs, HeapObject::kMapOffset)); in EmitCheckForInternalizedStringsOrObjects()
455 __ LoadlB(r6, FieldMemOperand(r4, Map::kBitFieldOffset)); in EmitCheckForInternalizedStringsOrObjects()
456 __ LoadlB(r7, FieldMemOperand(r5, Map::kBitFieldOffset)); in EmitCheckForInternalizedStringsOrObjects()
457 __ AndP(r0, r6, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
458 __ bne(&undetectable); in EmitCheckForInternalizedStringsOrObjects()
459 __ AndP(r0, r7, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
460 __ bne(&return_unequal); in EmitCheckForInternalizedStringsOrObjects()
462 __ CompareInstanceType(r4, r4, FIRST_JS_RECEIVER_TYPE); in EmitCheckForInternalizedStringsOrObjects()
463 __ blt(runtime_call); in EmitCheckForInternalizedStringsOrObjects()
464 __ CompareInstanceType(r5, r5, FIRST_JS_RECEIVER_TYPE); in EmitCheckForInternalizedStringsOrObjects()
465 __ blt(runtime_call); in EmitCheckForInternalizedStringsOrObjects()
467 __ bind(&return_unequal); in EmitCheckForInternalizedStringsOrObjects()
469 __ Ret(); in EmitCheckForInternalizedStringsOrObjects()
471 __ bind(&undetectable); in EmitCheckForInternalizedStringsOrObjects()
472 __ AndP(r0, r7, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
473 __ beq(&return_unequal); in EmitCheckForInternalizedStringsOrObjects()
478 __ CompareInstanceType(r4, r4, ODDBALL_TYPE); in EmitCheckForInternalizedStringsOrObjects()
479 __ beq(&return_equal); in EmitCheckForInternalizedStringsOrObjects()
480 __ CompareInstanceType(r5, r5, ODDBALL_TYPE); in EmitCheckForInternalizedStringsOrObjects()
481 __ bne(&return_unequal); in EmitCheckForInternalizedStringsOrObjects()
483 __ bind(&return_equal); in EmitCheckForInternalizedStringsOrObjects()
484 __ LoadImmP(r2, Operand(EQUAL)); in EmitCheckForInternalizedStringsOrObjects()
485 __ Ret(); in EmitCheckForInternalizedStringsOrObjects()
494 __ JumpIfNotSmi(input, fail); in CompareICStub_CheckInputType()
496 __ JumpIfSmi(input, &ok); in CompareICStub_CheckInputType()
497 __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail, in CompareICStub_CheckInputType()
502 __ bind(&ok); in CompareICStub_CheckInputType()
521 __ OrP(r4, r3, r2); in GenerateGeneric()
522 __ JumpIfNotSmi(r4, &not_two_smis); in GenerateGeneric()
523 __ SmiUntag(r3); in GenerateGeneric()
524 __ SmiUntag(r2); in GenerateGeneric()
525 __ SubP(r2, r3, r2); in GenerateGeneric()
526 __ Ret(); in GenerateGeneric()
527 __ bind(&not_two_smis); in GenerateGeneric()
540 __ AndP(r4, lhs, rhs); in GenerateGeneric()
541 __ JumpIfNotSmi(r4, &not_smis); in GenerateGeneric()
552 __ bind(&both_loaded_as_doubles); in GenerateGeneric()
554 __ bind(&lhs_not_nan); in GenerateGeneric()
556 __ cdbr(d7, d6); in GenerateGeneric()
559 __ bunordered(&nan); in GenerateGeneric()
560 __ beq(&equal, Label::kNear); in GenerateGeneric()
561 __ blt(&less_than, Label::kNear); in GenerateGeneric()
562 __ LoadImmP(r2, Operand(GREATER)); in GenerateGeneric()
563 __ Ret(); in GenerateGeneric()
564 __ bind(&equal); in GenerateGeneric()
565 __ LoadImmP(r2, Operand(EQUAL)); in GenerateGeneric()
566 __ Ret(); in GenerateGeneric()
567 __ bind(&less_than); in GenerateGeneric()
568 __ LoadImmP(r2, Operand(LESS)); in GenerateGeneric()
569 __ Ret(); in GenerateGeneric()
571 __ bind(&nan); in GenerateGeneric()
576 __ LoadImmP(r2, Operand(GREATER)); in GenerateGeneric()
578 __ LoadImmP(r2, Operand(LESS)); in GenerateGeneric()
580 __ Ret(); in GenerateGeneric()
582 __ bind(&not_smis); in GenerateGeneric()
602 __ bind(&check_for_internalized_strings); in GenerateGeneric()
615 __ bind(&flat_string_check); in GenerateGeneric()
617 __ JumpIfNonSmisNotBothSequentialOneByteStrings(lhs, rhs, r4, r5, &slow); in GenerateGeneric()
619 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, r4, in GenerateGeneric()
628 __ bind(&slow); in GenerateGeneric()
633 __ Push(cp); in GenerateGeneric()
634 __ Call(strict() ? isolate()->builtins()->StrictEqual() in GenerateGeneric()
637 __ Pop(cp); in GenerateGeneric()
641 __ LoadRoot(r3, Heap::kTrueValueRootIndex); in GenerateGeneric()
642 __ SubP(r2, r2, r3); in GenerateGeneric()
643 __ Ret(); in GenerateGeneric()
645 __ Push(lhs, rhs); in GenerateGeneric()
653 __ LoadSmiLiteral(r2, Smi::FromInt(ncr)); in GenerateGeneric()
654 __ push(r2); in GenerateGeneric()
658 __ TailCallRuntime(Runtime::kCompare); in GenerateGeneric()
661 __ bind(&miss); in GenerateGeneric()
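
The head of GenerateGeneric (lines 521 through 526 above) is the two-Smi fast path: OR the operand words so one test checks both Smi tag bits, untag, and return the difference, whose sign already encodes LESS / EQUAL / GREATER. With plain integers standing in for untagged Smis:

    #include <cstdint>

    // Two-Smi fast path: the sign of (lhs - rhs) is the comparison result.
    // A wider type is used here so the subtraction cannot overflow; the
    // stub subtracts untagged machine words.
    int CompareSmis(int64_t lhs, int64_t rhs) {
      int64_t diff = lhs - rhs;
      return diff < 0 ? -1 : (diff > 0 ? 1 : 0);  // LESS / GREATER / EQUAL
    }
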
669 __ MultiPush(kJSCallerSaved | r14.bit()); in Generate()
671 __ MultiPushDoubles(kCallerSavedDoubles); in Generate()
678 __ PrepareCallCFunction(argument_count, fp_argument_count, scratch); in Generate()
679 __ mov(r2, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
680 __ CallCFunction(ExternalReference::store_buffer_overflow_function(isolate()), in Generate()
683 __ MultiPopDoubles(kCallerSavedDoubles); in Generate()
685 __ MultiPop(kJSCallerSaved | r14.bit()); in Generate()
686 __ Ret(); in Generate()
690 __ PushSafepointRegisters(); in Generate()
691 __ b(r14); in Generate()
695 __ PopSafepointRegisters(); in Generate()
696 __ b(r14); in Generate()
712 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent); in Generate()
714 __ LoadDouble(double_exponent, in Generate()
720 __ TryDoubleToInt32Exact(scratch, double_exponent, scratch2, in Generate()
722 __ beq(&int_exponent, Label::kNear); in Generate()
724 __ push(r14); in Generate()
727 __ PrepareCallCFunction(0, 2, scratch); in Generate()
728 __ MovToFloatParameters(double_base, double_exponent); in Generate()
729 __ CallCFunction( in Generate()
732 __ pop(r14); in Generate()
733 __ MovFromFloatResult(double_result); in Generate()
734 __ b(&done); in Generate()
738 __ bind(&int_exponent); in Generate()
742 __ LoadRR(scratch, exponent); in Generate()
745 __ LoadRR(exponent, scratch); in Generate()
747 __ ldr(double_scratch, double_base); // Back up base. in Generate()
748 __ LoadImmP(scratch2, Operand(1)); in Generate()
749 __ ConvertIntToDouble(scratch2, double_result); in Generate()
753 __ CmpP(scratch, Operand::Zero()); in Generate()
754 __ bge(&positive_exponent, Label::kNear); in Generate()
755 __ LoadComplementRR(scratch, scratch); in Generate()
756 __ bind(&positive_exponent); in Generate()
759 __ bind(&while_true); in Generate()
760 __ mov(scratch2, Operand(1)); in Generate()
761 __ AndP(scratch2, scratch); in Generate()
762 __ beq(&no_carry, Label::kNear); in Generate()
763 __ mdbr(double_result, double_scratch); in Generate()
764 __ bind(&no_carry); in Generate()
765 __ ShiftRightP(scratch, scratch, Operand(1)); in Generate()
766 __ LoadAndTestP(scratch, scratch); in Generate()
767 __ beq(&loop_end, Label::kNear); in Generate()
768 __ mdbr(double_scratch, double_scratch); in Generate()
769 __ b(&while_true); in Generate()
770 __ bind(&loop_end); in Generate()
772 __ CmpP(exponent, Operand::Zero()); in Generate()
773 __ bge(&done); in Generate()
776 __ ldr(double_scratch, double_result); in Generate()
777 __ LoadImmP(scratch2, Operand(1)); in Generate()
778 __ ConvertIntToDouble(scratch2, double_result); in Generate()
779 __ ddbr(double_result, double_scratch); in Generate()
783 __ lzdr(kDoubleRegZero); in Generate()
784 __ cdbr(double_result, kDoubleRegZero); in Generate()
785 __ bne(&done, Label::kNear); in Generate()
788 __ ConvertIntToDouble(exponent, double_exponent); in Generate()
791 __ push(r14); in Generate()
794 __ PrepareCallCFunction(0, 2, scratch); in Generate()
795 __ MovToFloatParameters(double_base, double_exponent); in Generate()
796 __ CallCFunction( in Generate()
799 __ pop(r14); in Generate()
800 __ MovFromFloatResult(double_result); in Generate()
802 __ bind(&done); in Generate()
803 __ Ret(); in Generate()
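
The int_exponent section of MathPowStub above (lines 738 through 779) is binary exponentiation: square the base once per exponent bit, multiply the square into the result whenever the bit is set, and take a reciprocal at the end for a negative exponent. A sketch under those assumptions:

    // Sketch of the integer-exponent loop: exponentiation by squaring.
    double PowIntExponent(double base, int exponent) {
      // Magnitude of the exponent, avoiding signed overflow for INT_MIN.
      unsigned n = exponent < 0 ? 0u - static_cast<unsigned>(exponent)
                                : static_cast<unsigned>(exponent);
      double scratch = base, result = 1.0;
      while (true) {
        if (n & 1) result *= scratch;  // fold in the square for this bit
        n >>= 1;
        if (n == 0) break;
        scratch *= scratch;            // square once per remaining bit
      }
      if (exponent < 0) result = 1.0 / result;  // x^-n == 1 / x^n
      // The stub re-checks: if the result is exactly 0 it falls back to
      // the C library pow() for the fully precise answer.
      return result;
    }
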
855 __ LoadRR(r7, r3); in Generate()
859 __ LoadRR(r3, r4); in Generate()
862 __ ShiftLeftP(r3, r2, Operand(kPointerSizeLog2)); in Generate()
863 __ lay(r3, MemOperand(r3, sp, -kPointerSize)); in Generate()
885 __ EnterExitFrame(save_doubles(), arg_stack_space, is_builtin_exit() in Generate()
890 __ LoadRR(r6, r2); in Generate()
891 __ LoadRR(r8, r3); in Generate()
904 __ LoadRR(r4, r3); in Generate()
905 __ LoadRR(r3, r2); in Generate()
906 __ la(r2, MemOperand(sp, (kStackFrameExtraParamSlot + 1) * kPointerSize)); in Generate()
910 __ mov(isolate_reg, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
920 __ larl(r14, &return_label); // Generate the return addr of call later. in Generate()
921 __ StoreP(r14, MemOperand(sp, kStackFrameRASlot * kPointerSize)); in Generate()
925 // __ lay(sp, MemOperand(sp, -kCalleeRegisterSaveAreaSize)); in Generate()
926 __ b(target); in Generate()
927 __ bind(&return_label); in Generate()
928 // __ la(sp, MemOperand(sp, +kCalleeRegisterSaveAreaSize)); in Generate()
933 if (result_size() > 2) __ LoadP(r4, MemOperand(r2, 2 * kPointerSize)); in Generate()
934 __ LoadP(r3, MemOperand(r2, kPointerSize)); in Generate()
935 __ LoadP(r2, MemOperand(r2)); in Generate()
940 __ CompareRoot(r2, Heap::kExceptionRootIndex); in Generate()
941 __ beq(&exception_returned, Label::kNear); in Generate()
949 __ mov(r1, Operand(pending_exception_address)); in Generate()
950 __ LoadP(r1, MemOperand(r1)); in Generate()
951 __ CompareRoot(r1, Heap::kTheHoleValueRootIndex); in Generate()
953 __ beq(&okay, Label::kNear); in Generate()
954 __ stop("Unexpected pending exception"); in Generate()
955 __ bind(&okay); in Generate()
970 __ LeaveExitFrame(save_doubles(), argc, true); in Generate()
971 __ b(r14); in Generate()
974 __ bind(&exception_returned); in Generate()
993 __ PrepareCallCFunction(3, 0, r2); in Generate()
994 __ LoadImmP(r2, Operand::Zero()); in Generate()
995 __ LoadImmP(r3, Operand::Zero()); in Generate()
996 __ mov(r4, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
997 __ CallCFunction(find_handler, 3); in Generate()
1001 __ mov(cp, Operand(pending_handler_context_address)); in Generate()
1002 __ LoadP(cp, MemOperand(cp)); in Generate()
1003 __ mov(sp, Operand(pending_handler_sp_address)); in Generate()
1004 __ LoadP(sp, MemOperand(sp)); in Generate()
1005 __ mov(fp, Operand(pending_handler_fp_address)); in Generate()
1006 __ LoadP(fp, MemOperand(fp)); in Generate()
1011 __ CmpP(cp, Operand::Zero()); in Generate()
1012 __ beq(&skip, Label::kNear); in Generate()
1013 __ StoreP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); in Generate()
1014 __ bind(&skip); in Generate()
1017 __ mov(r3, Operand(pending_handler_code_address)); in Generate()
1018 __ LoadP(r3, MemOperand(r3)); in Generate()
1019 __ mov(r4, Operand(pending_handler_offset_address)); in Generate()
1020 __ LoadP(r4, MemOperand(r4)); in Generate()
1021 __ AddP(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start in Generate()
1022 __ AddP(ip, r3, r4); in Generate()
1023 __ Jump(ip); in Generate()
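
The Code::kHeaderSize - kHeapObjectTag addend used here (and again at lines 1160, 1474, 1803 and 2380) converts a tagged Code pointer into the address of its first instruction: drop the heap-object tag, then skip the Code header. Roughly:

    #include <cstddef>
    #include <cstdint>

    // Sketch: compute a Code object's entry point. header_size stands in
    // for Code::kHeaderSize.
    uintptr_t CodeEntry(uintptr_t tagged_code, size_t header_size) {
      const uintptr_t kHeapObjectTag = 1;  // tagged heap pointers end in 1
      return tagged_code - kHeapObjectTag + header_size;
    }
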
1040 __ lay(sp, MemOperand(sp, -8 * kDoubleSize)); in Generate()
1041 __ std(d8, MemOperand(sp)); in Generate()
1042 __ std(d9, MemOperand(sp, 1 * kDoubleSize)); in Generate()
1043 __ std(d10, MemOperand(sp, 2 * kDoubleSize)); in Generate()
1044 __ std(d11, MemOperand(sp, 3 * kDoubleSize)); in Generate()
1045 __ std(d12, MemOperand(sp, 4 * kDoubleSize)); in Generate()
1046 __ std(d13, MemOperand(sp, 5 * kDoubleSize)); in Generate()
1047 __ std(d14, MemOperand(sp, 6 * kDoubleSize)); in Generate()
1048 __ std(d15, MemOperand(sp, 7 * kDoubleSize)); in Generate()
1052 __ lay(sp, MemOperand(sp, -2 * kDoubleSize)); in Generate()
1053 __ std(d4, MemOperand(sp)); in Generate()
1054 __ std(d6, MemOperand(sp, kDoubleSize)); in Generate()
1067 __ lay(sp, MemOperand(sp, -10 * kPointerSize)); in Generate()
1068 __ StoreMultipleP(r6, sp, MemOperand(sp, 0)); in Generate()
1071 // __ LoadDoubleLiteral(kDoubleRegZero, 0.0, r0); in Generate()
1079 __ lay(sp, MemOperand(sp, -5 * kPointerSize)); in Generate()
1081 __ LoadImmP(r10, Operand(-1)); in Generate()
1084 __ Load(r9, Operand(StackFrame::TypeToMarker(marker))); in Generate()
1085 __ Load(r8, Operand(StackFrame::TypeToMarker(marker))); in Generate()
1087 __ mov(r7, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); in Generate()
1088 __ LoadP(r7, MemOperand(r7)); in Generate()
1089 __ StoreMultipleP(r7, r10, MemOperand(sp, kPointerSize)); in Generate()
1093 __ lay(fp, in Generate()
1099 __ mov(r7, Operand(ExternalReference(js_entry_sp))); in Generate()
1100 __ LoadAndTestP(r8, MemOperand(r7)); in Generate()
1101 __ bne(&non_outermost_js, Label::kNear); in Generate()
1102 __ StoreP(fp, MemOperand(r7)); in Generate()
1103 __ Load(ip, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME)); in Generate()
1105 __ b(&cont, Label::kNear); in Generate()
1106 __ bind(&non_outermost_js); in Generate()
1107 __ Load(ip, Operand(StackFrame::INNER_JSENTRY_FRAME)); in Generate()
1109 __ bind(&cont); in Generate()
1110 __ StoreP(ip, MemOperand(sp)); // frame-type in Generate()
1114 __ b(&invoke, Label::kNear); in Generate()
1116 __ bind(&handler_entry); in Generate()
1122 __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in Generate()
1125 __ StoreP(r2, MemOperand(ip)); in Generate()
1126 __ LoadRoot(r2, Heap::kExceptionRootIndex); in Generate()
1127 __ b(&exit, Label::kNear); in Generate()
1130 __ bind(&invoke); in Generate()
1132 __ PushStackHandler(); in Generate()
1151 __ mov(ip, Operand(construct_entry)); in Generate()
1154 __ mov(ip, Operand(entry)); in Generate()
1156 __ LoadP(ip, MemOperand(ip)); // deref address in Generate()
1160 __ AddP(ip, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1162 // __ basr(r14, ip); in Generate()
1163 __ larl(r14, &return_addr); in Generate()
1164 __ b(ip); in Generate()
1165 __ bind(&return_addr); in Generate()
1168 __ PopStackHandler(); in Generate()
1170 __ bind(&exit); // r2 holds result in Generate()
1173 __ pop(r7); in Generate()
1174 __ CmpP(r7, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME)); in Generate()
1175 __ bne(&non_outermost_js_2, Label::kNear); in Generate()
1176 __ mov(r8, Operand::Zero()); in Generate()
1177 __ mov(r7, Operand(ExternalReference(js_entry_sp))); in Generate()
1178 __ StoreP(r8, MemOperand(r7)); in Generate()
1179 __ bind(&non_outermost_js_2); in Generate()
1182 __ pop(r5); in Generate()
1183 __ mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); in Generate()
1184 __ StoreP(r5, MemOperand(ip)); in Generate()
1187 __ lay(sp, MemOperand(sp, -EntryFrameConstants::kCallerFPOffset)); in Generate()
1190 __ LoadMultipleP(r6, sp, MemOperand(sp, 0)); in Generate()
1191 __ la(sp, MemOperand(sp, 10 * kPointerSize)); in Generate()
1196 __ ld(d8, MemOperand(sp)); in Generate()
1197 __ ld(d9, MemOperand(sp, 1 * kDoubleSize)); in Generate()
1198 __ ld(d10, MemOperand(sp, 2 * kDoubleSize)); in Generate()
1199 __ ld(d11, MemOperand(sp, 3 * kDoubleSize)); in Generate()
1200 __ ld(d12, MemOperand(sp, 4 * kDoubleSize)); in Generate()
1201 __ ld(d13, MemOperand(sp, 5 * kDoubleSize)); in Generate()
1202 __ ld(d14, MemOperand(sp, 6 * kDoubleSize)); in Generate()
1203 __ ld(d15, MemOperand(sp, 7 * kDoubleSize)); in Generate()
1204 __ la(sp, MemOperand(sp, 8 * kDoubleSize)); in Generate()
1208 __ ld(d4, MemOperand(sp)); in Generate()
1209 __ ld(d6, MemOperand(sp, kDoubleSize)); in Generate()
1210 __ la(sp, MemOperand(sp, 2 * kDoubleSize)); in Generate()
1213 __ b(r14); in Generate()
1221 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
1247 __ CleanseP(r14); in Generate()
1260 __ mov(r2, Operand(address_of_regexp_stack_memory_size)); in Generate()
1261 __ LoadAndTestP(r2, MemOperand(r2)); in Generate()
1262 __ beq(&runtime); in Generate()
1265 __ LoadP(r2, MemOperand(sp, kJSRegExpOffset)); in Generate()
1266 __ JumpIfSmi(r2, &runtime); in Generate()
1267 __ CompareObjectType(r2, r3, r3, JS_REGEXP_TYPE); in Generate()
1268 __ bne(&runtime); in Generate()
1271 __ LoadP(regexp_data, FieldMemOperand(r2, JSRegExp::kDataOffset)); in Generate()
1273 __ TestIfSmi(regexp_data); in Generate()
1274 __ Check(ne, kUnexpectedTypeForRegExpDataFixedArrayExpected, cr0); in Generate()
1275 __ CompareObjectType(regexp_data, r2, r2, FIXED_ARRAY_TYPE); in Generate()
1276 __ Check(eq, kUnexpectedTypeForRegExpDataFixedArrayExpected); in Generate()
1281 __ LoadP(r2, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset)); in Generate()
1283 __ CmpSmiLiteral(r2, Smi::FromInt(JSRegExp::IRREGEXP), r0); in Generate()
1284 __ bne(&runtime); in Generate()
1288 __ LoadP(r4, in Generate()
1294 __ SmiToShortArrayOffset(r4, r4); in Generate()
1296 __ CmpLogicalP(r4, Operand(Isolate::kJSRegexpStaticOffsetsVectorSize - 2)); in Generate()
1297 __ bgt(&runtime); in Generate()
1300 __ LoadImmP(ip, Operand::Zero()); in Generate()
1301 __ LoadP(subject, MemOperand(sp, kSubjectOffset)); in Generate()
1302 __ JumpIfSmi(subject, &runtime); in Generate()
1303 __ LoadRR(r5, subject); // Make a copy of the original subject string. in Generate()
1326 __ bind(&check_underlying); in Generate()
1327 __ LoadP(r2, FieldMemOperand(subject, HeapObject::kMapOffset)); in Generate()
1328 __ LoadlB(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset)); in Generate()
1334 __ mov(r3, Operand(kIsNotStringMask | kStringRepresentationMask | in Generate()
1336 __ AndP(r3, r2); in Generate()
1338 __ beq(&seq_string, Label::kNear); // Go to (4). in Generate()
1347 __ CmpP(r3, Operand(kExternalStringTag)); in Generate()
1348 __ bge(&not_seq_nor_cons); // Go to (5). in Generate()
1352 __ LoadP(r2, FieldMemOperand(subject, ConsString::kSecondOffset)); in Generate()
1353 __ CompareRoot(r2, Heap::kempty_stringRootIndex); in Generate()
1354 __ bne(&runtime); in Generate()
1355 __ LoadP(subject, FieldMemOperand(subject, ConsString::kFirstOffset)); in Generate()
1356 __ b(&check_underlying); in Generate()
1359 __ bind(&seq_string); in Generate()
1365 __ LoadP(r3, MemOperand(sp, kPreviousIndexOffset)); in Generate()
1366 __ JumpIfNotSmi(r3, &runtime); in Generate()
1367 __ LoadP(r5, FieldMemOperand(r5, String::kLengthOffset)); in Generate()
1368 __ CmpLogicalP(r5, r3); in Generate()
1369 __ ble(&runtime); in Generate()
1370 __ SmiUntag(r3); in Generate()
1375 __ ExtractBitMask(r5, r2, kStringEncodingMask, SetRC); in Generate()
1376 __ beq(&encoding_type_UC16, Label::kNear); in Generate()
1377 __ LoadP(code, in Generate()
1379 __ b(&br_over, Label::kNear); in Generate()
1380 __ bind(&encoding_type_UC16); in Generate()
1381 __ LoadP(code, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset)); in Generate()
1382 __ bind(&br_over); in Generate()
1389 __ JumpIfSmi(code, &runtime); in Generate()
1397 __ IncrementCounter(isolate()->counters()->regexp_entry_native(), 1, r2, r4); in Generate()
1402 __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters); in Generate()
1408 __ mov(r2, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
1409 __ StoreP(r2, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize + in Generate()
1414 __ mov(r2, Operand::Zero()); in Generate()
1415 __ StoreP(r2, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize + in Generate()
1419 __ mov(r2, Operand(1)); in Generate()
1420 __ StoreP(r2, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize + in Generate()
1424 __ mov(r2, Operand(address_of_regexp_stack_memory_address)); in Generate()
1425 __ LoadP(r2, MemOperand(r2, 0)); in Generate()
1426 __ mov(r1, Operand(address_of_regexp_stack_memory_size)); in Generate()
1427 __ LoadP(r1, MemOperand(r1, 0)); in Generate()
1428 __ AddP(r2, r1); in Generate()
1429 __ StoreP(r2, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize + in Generate()
1435 __ mov(r2, Operand::Zero()); in Generate()
1436 __ StoreP(r2, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize + in Generate()
1445 __ LoadP(r2, MemOperand(fp, kSubjectOffset + 2 * kPointerSize)); in Generate()
1449 __ AddP(r1, subject, Operand(SeqString::kHeaderSize - kHeapObjectTag)); in Generate()
1452 __ mov( in Generate()
1458 __ XorP(r5, Operand(1)); in Generate()
1462 __ ShiftLeftP(ip, ip, r5); in Generate()
1463 __ AddP(ip, r1, ip); in Generate()
1464 __ ShiftLeftP(r4, r3, r5); in Generate()
1465 __ AddP(r4, ip, r4); in Generate()
1468 __ LoadP(r1, FieldMemOperand(r2, String::kLengthOffset)); in Generate()
1469 __ SmiUntag(r1); in Generate()
1470 __ ShiftLeftP(r0, r1, r5); in Generate()
1471 __ AddP(r5, ip, r0); in Generate()
1474 __ AddP(code, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1479 __ LeaveExitFrame(false, no_reg, true); in Generate()
1483 __ LoadP(subject, MemOperand(sp, kSubjectOffset)); in Generate()
1489 __ Cmp32(r2, Operand(1)); in Generate()
1492 __ beq(&success); in Generate()
1494 __ Cmp32(r2, Operand(NativeRegExpMacroAssembler::FAILURE)); in Generate()
1495 __ beq(&failure); in Generate()
1496 __ Cmp32(r2, Operand(NativeRegExpMacroAssembler::EXCEPTION)); in Generate()
1498 __ bne(&runtime); in Generate()
1503 __ mov(r3, Operand(isolate()->factory()->the_hole_value())); in Generate()
1504 __ mov(r4, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in Generate()
1506 __ LoadP(r2, MemOperand(r4, 0)); in Generate()
1507 __ CmpP(r2, r3); in Generate()
1508 __ beq(&runtime); in Generate()
1511 __ TailCallRuntime(Runtime::kRegExpExecReThrow); in Generate()
1513 __ bind(&failure); in Generate()
1515 __ mov(r2, Operand(isolate()->factory()->null_value())); in Generate()
1516 __ la(sp, MemOperand(sp, (4 * kPointerSize))); in Generate()
1517 __ Ret(); in Generate()
1520 __ bind(&success); in Generate()
1521 __ LoadP(r3, in Generate()
1526 __ SmiToShortArrayOffset(r3, r3); in Generate()
1527 __ AddP(r3, Operand(2)); in Generate()
1530 __ LoadP(last_match_info_elements, MemOperand(sp, kLastMatchInfoOffset)); in Generate()
1531 __ JumpIfSmi(last_match_info_elements, &runtime); in Generate()
1533 __ LoadP(r2, in Generate()
1535 __ CompareRoot(r2, Heap::kFixedArrayMapRootIndex); in Generate()
1536 __ bne(&runtime); in Generate()
1539 __ LoadP( in Generate()
1541 __ AddP(r4, r3, Operand(RegExpMatchInfo::kLastMatchOverhead)); in Generate()
1542 __ SmiUntag(r0, r2); in Generate()
1543 __ CmpP(r4, r0); in Generate()
1544 __ bgt(&runtime); in Generate()
1549 __ SmiTag(r4, r3); in Generate()
1550 __ StoreP(r4, FieldMemOperand(last_match_info_elements, in Generate()
1553 __ StoreP(subject, FieldMemOperand(last_match_info_elements, in Generate()
1555 __ LoadRR(r4, subject); in Generate()
1556 __ RecordWriteField(last_match_info_elements, in Generate()
1559 __ LoadRR(subject, r4); in Generate()
1560 __ StoreP(subject, FieldMemOperand(last_match_info_elements, in Generate()
1562 __ RecordWriteField(last_match_info_elements, in Generate()
1569 __ mov(r4, Operand(address_of_static_offsets_vector)); in Generate()
1576 __ AddP(r2, last_match_info_elements, in Generate()
1579 __ AddP(r4, Operand(-kIntSize)); // bias down for lwzu in Generate()
1580 __ bind(&next_capture); in Generate()
1582 __ ly(r5, MemOperand(r4, kIntSize)); in Generate()
1583 __ lay(r4, MemOperand(r4, kIntSize)); in Generate()
1585 __ SmiTag(r5); in Generate()
1586 __ StoreP(r5, MemOperand(r2, kPointerSize)); in Generate()
1587 __ lay(r2, MemOperand(r2, kPointerSize)); in Generate()
1588 __ BranchOnCount(r3, &next_capture); in Generate()
1591 __ LoadRR(r2, last_match_info_elements); in Generate()
1592 __ la(sp, MemOperand(sp, (4 * kPointerSize))); in Generate()
1593 __ Ret(); in Generate()
1596 __ bind(&runtime); in Generate()
1597 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
1601 __ bind(&not_seq_nor_cons); in Generate()
1603 __ bgt(&not_long_external, Label::kNear); // Go to (7). in Generate()
1606 __ bind(&external_string); in Generate()
1607 __ LoadP(r2, FieldMemOperand(subject, HeapObject::kMapOffset)); in Generate()
1608 __ LoadlB(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset)); in Generate()
1613 __ tmll(r2, Operand(kIsIndirectStringMask)); in Generate()
1614 __ Assert(eq, kExternalStringExpectedButNotFound, cr0); in Generate()
1616 __ LoadP(subject, in Generate()
1620 __ SubP(subject, subject, in Generate()
1622 __ b(&seq_string); // Go to (4). in Generate()
1625 __ bind(&not_long_external); in Generate()
1627 __ mov(r0, Operand(kIsNotStringMask | kShortExternalStringMask)); in Generate()
1628 __ AndP(r0, r3); in Generate()
1629 __ bne(&runtime); in Generate()
1633 __ CmpP(r3, Operand(kThinStringTag)); in Generate()
1634 __ beq(&thin_string); in Generate()
1636 __ LoadP(ip, FieldMemOperand(subject, SlicedString::kOffsetOffset)); in Generate()
1637 __ SmiUntag(ip); in Generate()
1638 __ LoadP(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); in Generate()
1639 __ b(&check_underlying); // Go to (4). in Generate()
1641 __ bind(&thin_string); in Generate()
1642 __ LoadP(subject, FieldMemOperand(subject, ThinString::kActualOffset)); in Generate()
1643 __ b(&check_underlying); // Go to (4). in Generate()
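
The tail of RegExpExecStub::Generate above keeps looping back to check_underlying until the subject string is flat: a cons string whose second half is empty is replaced by its first half, a sliced string by its parent (the slice offset is kept in ip), a thin string by the string it forwards to, and an external string by its out-of-line content. A sketch with illustrative types (not V8's):

    // Peel indirect string wrappers until the subject is flat
    // (sequential or external), which the matcher can scan directly.
    struct Str {
      enum Kind { kSeq, kExternal, kCons, kSliced, kThin } kind;
      Str* first = nullptr;   // cons: first half (second must be empty here)
      Str* parent = nullptr;  // sliced: the underlying string
      Str* actual = nullptr;  // thin: the string this one forwards to
    };

    Str* Flatten(Str* s) {
      for (;;) {
        switch (s->kind) {
          case Str::kSeq:
          case Str::kExternal: return s;             // flat: done
          case Str::kCons:     s = s->first; break;  // drop the empty tail
          case Str::kSliced:   s = s->parent; break; // offset kept separately
          case Str::kThin:     s = s->actual; break; // follow the forwarding
        }
      }
    }
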
1655 __ SmiTag(r2); in CallStubInRecordCallTarget()
1656 __ Push(r5, r4, r3, r2); in CallStubInRecordCallTarget()
1657 __ Push(cp); in CallStubInRecordCallTarget()
1659 __ CallStub(stub); in CallStubInRecordCallTarget()
1661 __ Pop(cp); in CallStubInRecordCallTarget()
1662 __ Pop(r5, r4, r3, r2); in CallStubInRecordCallTarget()
1663 __ SmiUntag(r2); in CallStubInRecordCallTarget()
1684 __ SmiToPtrArrayOffset(r7, r5); in GenerateRecordCallTarget()
1685 __ AddP(r7, r4, r7); in GenerateRecordCallTarget()
1686 __ LoadP(r7, FieldMemOperand(r7, FixedArray::kHeaderSize)); in GenerateRecordCallTarget()
1695 __ LoadP(weak_value, FieldMemOperand(r7, WeakCell::kValueOffset)); in GenerateRecordCallTarget()
1696 __ CmpP(r3, weak_value); in GenerateRecordCallTarget()
1697 __ beq(&done, Label::kNear); in GenerateRecordCallTarget()
1698 __ CompareRoot(r7, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1699 __ beq(&done, Label::kNear); in GenerateRecordCallTarget()
1700 __ LoadP(feedback_map, FieldMemOperand(r7, HeapObject::kMapOffset)); in GenerateRecordCallTarget()
1701 __ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex); in GenerateRecordCallTarget()
1702 __ bne(&check_allocation_site); in GenerateRecordCallTarget()
1705 __ JumpIfSmi(weak_value, &initialize); in GenerateRecordCallTarget()
1706 __ b(&megamorphic); in GenerateRecordCallTarget()
1708 __ bind(&check_allocation_site); in GenerateRecordCallTarget()
1713 __ CompareRoot(feedback_map, Heap::kAllocationSiteMapRootIndex); in GenerateRecordCallTarget()
1714 __ bne(&miss); in GenerateRecordCallTarget()
1717 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r7); in GenerateRecordCallTarget()
1718 __ CmpP(r3, r7); in GenerateRecordCallTarget()
1719 __ bne(&megamorphic); in GenerateRecordCallTarget()
1720 __ b(&done, Label::kNear); in GenerateRecordCallTarget()
1722 __ bind(&miss); in GenerateRecordCallTarget()
1726 __ CompareRoot(r7, Heap::kuninitialized_symbolRootIndex); in GenerateRecordCallTarget()
1727 __ beq(&initialize); in GenerateRecordCallTarget()
1730 __ bind(&megamorphic); in GenerateRecordCallTarget()
1731 __ SmiToPtrArrayOffset(r7, r5); in GenerateRecordCallTarget()
1732 __ AddP(r7, r4, r7); in GenerateRecordCallTarget()
1733 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1734 __ StoreP(ip, FieldMemOperand(r7, FixedArray::kHeaderSize), r0); in GenerateRecordCallTarget()
1735 __ jmp(&done); in GenerateRecordCallTarget()
1738 __ bind(&initialize); in GenerateRecordCallTarget()
1741 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r7); in GenerateRecordCallTarget()
1742 __ CmpP(r3, r7); in GenerateRecordCallTarget()
1743 __ bne(&not_array_function); in GenerateRecordCallTarget()
1750 __ b(&done, Label::kNear); in GenerateRecordCallTarget()
1752 __ bind(&not_array_function); in GenerateRecordCallTarget()
1757 __ bind(&done); in GenerateRecordCallTarget()
1760 __ SmiToPtrArrayOffset(r7, r5); in GenerateRecordCallTarget()
1761 __ AddP(r7, r4, r7); in GenerateRecordCallTarget()
1763 __ LoadP(r6, FieldMemOperand(r7, count_offset)); in GenerateRecordCallTarget()
1764 __ AddSmiLiteral(r6, r6, Smi::FromInt(1), r0); in GenerateRecordCallTarget()
1765 __ StoreP(r6, FieldMemOperand(r7, count_offset), r0); in GenerateRecordCallTarget()
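
GenerateRecordCallTarget above is the feedback-slot state machine for call sites: an uninitialized slot caches the first target (a weak cell, or an AllocationSite when the target is the Array function), a monomorphic slot that sees a different target transitions to megamorphic, megamorphic is terminal, and the final lines bump the slot's call count by a Smi-tagged 1. Schematically:

    // Sketch of the slot transitions; the representation is illustrative.
    enum class SlotState { kUninitialized, kMonomorphic, kMegamorphic };

    SlotState RecordCall(SlotState state, bool same_target_as_cached) {
      switch (state) {
        case SlotState::kUninitialized:
          return SlotState::kMonomorphic;  // cache this first target
        case SlotState::kMonomorphic:
          return same_target_as_cached ? SlotState::kMonomorphic
                                       : SlotState::kMegamorphic;
        case SlotState::kMegamorphic:
          return SlotState::kMegamorphic;  // terminal state
      }
      return state;  // unreachable
    }
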
1776 __ JumpIfSmi(r3, &non_function); in Generate()
1778 __ CompareObjectType(r3, r7, r7, JS_FUNCTION_TYPE); in Generate()
1779 __ bne(&non_function); in Generate()
1783 __ SmiToPtrArrayOffset(r7, r5); in Generate()
1784 __ AddP(r7, r4, r7); in Generate()
1786 __ LoadP(r4, FieldMemOperand(r7, FixedArray::kHeaderSize)); in Generate()
1787 __ LoadP(r7, FieldMemOperand(r4, AllocationSite::kMapOffset)); in Generate()
1788 __ CompareRoot(r7, Heap::kAllocationSiteMapRootIndex); in Generate()
1790 __ beq(&feedback_register_initialized); in Generate()
1791 __ LoadRoot(r4, Heap::kUndefinedValueRootIndex); in Generate()
1792 __ bind(&feedback_register_initialized); in Generate()
1794 __ AssertUndefinedOrAllocationSite(r4, r7); in Generate()
1797 __ LoadRR(r5, r3); in Generate()
1801 __ LoadP(r6, FieldMemOperand(r3, JSFunction::kSharedFunctionInfoOffset)); in Generate()
1802 __ LoadP(r6, FieldMemOperand(r6, SharedFunctionInfo::kConstructStubOffset)); in Generate()
1803 __ AddP(ip, r6, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1804 __ JumpToJSEntry(ip); in Generate()
1806 __ bind(&non_function); in Generate()
1807 __ LoadRR(r5, r3); in Generate()
1808 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate()
1815 __ JumpIfSmi(object_, receiver_not_string_); in GenerateFast()
1818 __ LoadP(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); in GenerateFast()
1819 __ LoadlB(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); in GenerateFast()
1821 __ mov(r0, Operand(kIsNotStringMask)); in GenerateFast()
1822 __ AndP(r0, result_); in GenerateFast()
1823 __ bne(receiver_not_string_); in GenerateFast()
1827 __ JumpIfNotSmi(index_, &index_not_smi_); in GenerateFast()
1828 __ bind(&got_smi_index_); in GenerateFast()
1831 __ LoadP(ip, FieldMemOperand(object_, String::kLengthOffset)); in GenerateFast()
1832 __ CmpLogicalP(ip, index_); in GenerateFast()
1833 __ ble(index_out_of_range_); in GenerateFast()
1835 __ SmiUntag(index_); in GenerateFast()
1840 __ SmiTag(result_); in GenerateFast()
1841 __ bind(&exit_); in GenerateFast()
1847 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); in GenerateSlow()
1850 __ bind(&index_not_smi_); in GenerateSlow()
1852 __ CheckMap(index_, result_, Heap::kHeapNumberMapRootIndex, index_not_number_, in GenerateSlow()
1856 __ Push(LoadWithVectorDescriptor::VectorRegister(), in GenerateSlow()
1860 __ Push(object_, index_); in GenerateSlow()
1862 __ CallRuntime(Runtime::kNumberToSmi); in GenerateSlow()
1865 __ Move(index_, r2); in GenerateSlow()
1867 __ Pop(LoadWithVectorDescriptor::VectorRegister(), in GenerateSlow()
1870 __ pop(object_); in GenerateSlow()
1873 __ LoadP(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); in GenerateSlow()
1874 __ LoadlB(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); in GenerateSlow()
1877 __ JumpIfNotSmi(index_, index_out_of_range_); in GenerateSlow()
1879 __ b(&got_smi_index_); in GenerateSlow()
1884 __ bind(&call_runtime_); in GenerateSlow()
1886 __ SmiTag(index_); in GenerateSlow()
1887 __ Push(object_, index_); in GenerateSlow()
1888 __ CallRuntime(Runtime::kStringCharCodeAtRT); in GenerateSlow()
1889 __ Move(result_, r2); in GenerateSlow()
1891 __ b(&exit_); in GenerateSlow()
1893 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); in GenerateSlow()
1905 __ LoadP(length, FieldMemOperand(left, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
1906 __ LoadP(scratch2, FieldMemOperand(right, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
1907 __ CmpP(length, scratch2); in GenerateFlatOneByteStringEquals()
1908 __ beq(&check_zero_length); in GenerateFlatOneByteStringEquals()
1909 __ bind(&strings_not_equal); in GenerateFlatOneByteStringEquals()
1910 __ LoadSmiLiteral(r2, Smi::FromInt(NOT_EQUAL)); in GenerateFlatOneByteStringEquals()
1911 __ Ret(); in GenerateFlatOneByteStringEquals()
1915 __ bind(&check_zero_length); in GenerateFlatOneByteStringEquals()
1917 __ CmpP(length, Operand::Zero()); in GenerateFlatOneByteStringEquals()
1918 __ bne(&compare_chars); in GenerateFlatOneByteStringEquals()
1919 __ LoadSmiLiteral(r2, Smi::FromInt(EQUAL)); in GenerateFlatOneByteStringEquals()
1920 __ Ret(); in GenerateFlatOneByteStringEquals()
1923 __ bind(&compare_chars); in GenerateFlatOneByteStringEquals()
1928 __ LoadSmiLiteral(r2, Smi::FromInt(EQUAL)); in GenerateFlatOneByteStringEquals()
1929 __ Ret(); in GenerateFlatOneByteStringEquals()
1937 __ LoadP(scratch1, FieldMemOperand(left, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
1938 __ LoadP(scratch2, FieldMemOperand(right, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
1939 __ SubP(scratch3, scratch1, scratch2 /*, LeaveOE, SetRC*/); in GenerateCompareFlatOneByteStrings()
1942 __ ble(&skip, Label::kNear); in GenerateCompareFlatOneByteStrings()
1943 __ LoadRR(scratch1, scratch2); in GenerateCompareFlatOneByteStrings()
1944 __ bind(&skip); in GenerateCompareFlatOneByteStrings()
1947 __ CmpP(min_length, Operand::Zero()); in GenerateCompareFlatOneByteStrings()
1948 __ beq(&compare_lengths); in GenerateCompareFlatOneByteStrings()
1955 __ bind(&compare_lengths); in GenerateCompareFlatOneByteStrings()
1958 __ LoadRR(r2, length_delta); in GenerateCompareFlatOneByteStrings()
1959 __ CmpP(length_delta, Operand::Zero()); in GenerateCompareFlatOneByteStrings()
1960 __ bind(&result_not_equal); in GenerateCompareFlatOneByteStrings()
1964 __ ble(&less_equal); in GenerateCompareFlatOneByteStrings()
1965 __ LoadSmiLiteral(r2, Smi::FromInt(GREATER)); in GenerateCompareFlatOneByteStrings()
1966 __ Ret(); in GenerateCompareFlatOneByteStrings()
1967 __ bind(&less_equal); in GenerateCompareFlatOneByteStrings()
1968 __ beq(&equal); in GenerateCompareFlatOneByteStrings()
1969 __ LoadSmiLiteral(r2, Smi::FromInt(LESS)); in GenerateCompareFlatOneByteStrings()
1970 __ bind(&equal); in GenerateCompareFlatOneByteStrings()
1971 __ Ret(); in GenerateCompareFlatOneByteStrings()
1980 __ SmiUntag(length); in GenerateOneByteCharsCompareLoop()
1981 __ AddP(scratch1, length, in GenerateOneByteCharsCompareLoop()
1983 __ AddP(left, scratch1); in GenerateOneByteCharsCompareLoop()
1984 __ AddP(right, scratch1); in GenerateOneByteCharsCompareLoop()
1985 __ LoadComplementRR(length, length); in GenerateOneByteCharsCompareLoop()
1990 __ bind(&loop); in GenerateOneByteCharsCompareLoop()
1991 __ LoadlB(scratch1, MemOperand(left, index)); in GenerateOneByteCharsCompareLoop()
1992 __ LoadlB(r0, MemOperand(right, index)); in GenerateOneByteCharsCompareLoop()
1993 __ CmpP(scratch1, r0); in GenerateOneByteCharsCompareLoop()
1994 __ bne(chars_not_equal); in GenerateOneByteCharsCompareLoop()
1995 __ AddP(index, Operand(1)); in GenerateOneByteCharsCompareLoop()
1996 __ CmpP(index, Operand::Zero()); in GenerateOneByteCharsCompareLoop()
1997 __ bne(&loop); in GenerateOneByteCharsCompareLoop()
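
GenerateCompareFlatOneByteStrings and its character loop above order two flat one-byte strings: compare the shared prefix byte by byte (the loop runs a negative index up to zero), and if no byte differs, fall back to the length difference. The equivalent in plain C++:

    #include <cstdint>
    #include <cstring>

    // Flat one-byte compare: shared prefix first, then lengths.
    int CompareFlatOneByte(const uint8_t* left, size_t left_len,
                           const uint8_t* right, size_t right_len) {
      size_t min_len = left_len < right_len ? left_len : right_len;
      int r = std::memcmp(left, right, min_len);
      if (r != 0) return r < 0 ? -1 : 1;    // LESS / GREATER at first difference
      if (left_len == right_len) return 0;  // EQUAL
      return left_len < right_len ? -1 : 1;
    }
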
2010 __ Move(r4, isolate()->factory()->undefined_value()); in Generate()
2014 __ TestIfSmi(r4); in Generate()
2015 __ Assert(ne, kExpectedAllocationSite, cr0); in Generate()
2016 __ push(r4); in Generate()
2017 __ LoadP(r4, FieldMemOperand(r4, HeapObject::kMapOffset)); in Generate()
2018 __ CompareRoot(r4, Heap::kAllocationSiteMapRootIndex); in Generate()
2019 __ pop(r4); in Generate()
2020 __ Assert(eq, kExpectedAllocationSite); in Generate()
2026 __ TailCallStub(&stub); in Generate()
2033 __ CheckMap(r3, r4, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); in GenerateBooleans()
2034 __ CheckMap(r2, r5, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); in GenerateBooleans()
2036 __ LoadP(r3, FieldMemOperand(r3, Oddball::kToNumberOffset)); in GenerateBooleans()
2037 __ AssertSmi(r3); in GenerateBooleans()
2038 __ LoadP(r2, FieldMemOperand(r2, Oddball::kToNumberOffset)); in GenerateBooleans()
2039 __ AssertSmi(r2); in GenerateBooleans()
2041 __ SubP(r2, r3, r2); in GenerateBooleans()
2042 __ Ret(); in GenerateBooleans()
2044 __ bind(&miss); in GenerateBooleans()
2051 __ OrP(r4, r3, r2); in GenerateSmis()
2052 __ JumpIfNotSmi(r4, &miss); in GenerateSmis()
2056 // __ sub(r2, r2, r3, SetCC); in GenerateSmis()
2057 __ SubP(r2, r2, r3); in GenerateSmis()
2060 __ SmiUntag(r3); in GenerateSmis()
2061 __ SmiUntag(r2); in GenerateSmis()
2062 __ SubP(r2, r3, r2); in GenerateSmis()
2064 __ Ret(); in GenerateSmis()
2066 __ bind(&miss); in GenerateSmis()
2079 __ JumpIfNotSmi(r3, &miss); in GenerateNumbers()
2082 __ JumpIfNotSmi(r2, &miss); in GenerateNumbers()
2089 __ JumpIfSmi(r2, &right_smi); in GenerateNumbers()
2090 __ CheckMap(r2, r4, Heap::kHeapNumberMapRootIndex, &maybe_undefined1, in GenerateNumbers()
2092 __ LoadDouble(d1, FieldMemOperand(r2, HeapNumber::kValueOffset)); in GenerateNumbers()
2093 __ b(&left); in GenerateNumbers()
2094 __ bind(&right_smi); in GenerateNumbers()
2095 __ SmiToDouble(d1, r2); in GenerateNumbers()
2097 __ bind(&left); in GenerateNumbers()
2098 __ JumpIfSmi(r3, &left_smi); in GenerateNumbers()
2099 __ CheckMap(r3, r4, Heap::kHeapNumberMapRootIndex, &maybe_undefined2, in GenerateNumbers()
2101 __ LoadDouble(d0, FieldMemOperand(r3, HeapNumber::kValueOffset)); in GenerateNumbers()
2102 __ b(&done); in GenerateNumbers()
2103 __ bind(&left_smi); in GenerateNumbers()
2104 __ SmiToDouble(d0, r3); in GenerateNumbers()
2106 __ bind(&done); in GenerateNumbers()
2109 __ cdbr(d0, d1); in GenerateNumbers()
2112 __ bunordered(&unordered); in GenerateNumbers()
2115 __ beq(&equal); in GenerateNumbers()
2116 __ blt(&less_than); in GenerateNumbers()
2118 __ LoadImmP(r2, Operand(GREATER)); in GenerateNumbers()
2119 __ Ret(); in GenerateNumbers()
2120 __ bind(&equal); in GenerateNumbers()
2121 __ LoadImmP(r2, Operand(EQUAL)); in GenerateNumbers()
2122 __ Ret(); in GenerateNumbers()
2123 __ bind(&less_than); in GenerateNumbers()
2124 __ LoadImmP(r2, Operand(LESS)); in GenerateNumbers()
2125 __ Ret(); in GenerateNumbers()
2127 __ bind(&unordered); in GenerateNumbers()
2128 __ bind(&generic_stub); in GenerateNumbers()
2131 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); in GenerateNumbers()
2133 __ bind(&maybe_undefined1); in GenerateNumbers()
2135 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); in GenerateNumbers()
2136 __ bne(&miss); in GenerateNumbers()
2137 __ JumpIfSmi(r3, &unordered); in GenerateNumbers()
2138 __ CompareObjectType(r3, r4, r4, HEAP_NUMBER_TYPE); in GenerateNumbers()
2139 __ bne(&maybe_undefined2); in GenerateNumbers()
2140 __ b(&unordered); in GenerateNumbers()
2143 __ bind(&maybe_undefined2); in GenerateNumbers()
2145 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); in GenerateNumbers()
2146 __ beq(&unordered); in GenerateNumbers()
2149 __ bind(&miss); in GenerateNumbers()
2164 __ JumpIfEitherSmi(left, right, &miss); in GenerateInternalizedStrings()
2167 __ LoadP(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2168 __ LoadP(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2169 __ LoadlB(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2170 __ LoadlB(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2172 __ OrP(tmp1, tmp1, tmp2); in GenerateInternalizedStrings()
2173 __ AndP(r0, tmp1, Operand(kIsNotStringMask | kIsNotInternalizedMask)); in GenerateInternalizedStrings()
2174 __ bne(&miss); in GenerateInternalizedStrings()
2177 __ CmpP(left, right); in GenerateInternalizedStrings()
2178 __ bne(&not_equal); in GenerateInternalizedStrings()
2184 __ LoadSmiLiteral(r2, Smi::FromInt(EQUAL)); in GenerateInternalizedStrings()
2185 __ bind(&not_equal); in GenerateInternalizedStrings()
2186 __ Ret(); in GenerateInternalizedStrings()
2188 __ bind(&miss); in GenerateInternalizedStrings()
2204 __ JumpIfEitherSmi(left, right, &miss); in GenerateUniqueNames()
2208 __ LoadP(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateUniqueNames()
2209 __ LoadP(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateUniqueNames()
2210 __ LoadlB(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2211 __ LoadlB(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2213 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss); in GenerateUniqueNames()
2214 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss); in GenerateUniqueNames()
2217 __ CmpP(left, right); in GenerateUniqueNames()
2218 __ bne(&miss); in GenerateUniqueNames()
2224 __ LoadSmiLiteral(r2, Smi::FromInt(EQUAL)); in GenerateUniqueNames()
2225 __ Ret(); in GenerateUniqueNames()
2227 __ bind(&miss); in GenerateUniqueNames()
2246 __ JumpIfEitherSmi(left, right, &miss); in GenerateStrings()
2250 __ LoadP(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateStrings()
2251 __ LoadP(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateStrings()
2252 __ LoadlB(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateStrings()
2253 __ LoadlB(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateStrings()
2255 __ OrP(tmp3, tmp1, tmp2); in GenerateStrings()
2256 __ AndP(r0, tmp3, Operand(kIsNotStringMask)); in GenerateStrings()
2257 __ bne(&miss); in GenerateStrings()
2260 __ CmpP(left, right); in GenerateStrings()
2263 __ bne(&not_identical); in GenerateStrings()
2264 __ LoadSmiLiteral(r2, Smi::FromInt(EQUAL)); in GenerateStrings()
2265 __ Ret(); in GenerateStrings()
2266 __ bind(&not_identical); in GenerateStrings()
2276 __ OrP(tmp3, tmp1, tmp2); in GenerateStrings()
2277 __ AndP(r0, tmp3, Operand(kIsNotInternalizedMask)); in GenerateStrings()
2278 __ bne(&is_symbol); in GenerateStrings()
2282 __ Ret(); in GenerateStrings()
2283 __ bind(&is_symbol); in GenerateStrings()
2288 __ JumpIfBothInstanceTypesAreNotSequentialOneByte(tmp1, tmp2, tmp3, tmp4, in GenerateStrings()
2301 __ bind(&runtime); in GenerateStrings()
2305 __ Push(left, right); in GenerateStrings()
2306 __ CallRuntime(Runtime::kStringEqual); in GenerateStrings()
2308 __ LoadRoot(r3, Heap::kTrueValueRootIndex); in GenerateStrings()
2309 __ SubP(r2, r2, r3); in GenerateStrings()
2310 __ Ret(); in GenerateStrings()
2312 __ Push(left, right); in GenerateStrings()
2313 __ TailCallRuntime(Runtime::kStringCompare); in GenerateStrings()
2316 __ bind(&miss); in GenerateStrings()
2323 __ AndP(r4, r3, r2); in GenerateReceivers()
2324 __ JumpIfSmi(r4, &miss); in GenerateReceivers()
2327 __ CompareObjectType(r2, r4, r4, FIRST_JS_RECEIVER_TYPE); in GenerateReceivers()
2328 __ blt(&miss); in GenerateReceivers()
2329 __ CompareObjectType(r3, r4, r4, FIRST_JS_RECEIVER_TYPE); in GenerateReceivers()
2330 __ blt(&miss); in GenerateReceivers()
2333 __ SubP(r2, r2, r3); in GenerateReceivers()
2334 __ Ret(); in GenerateReceivers()
2336 __ bind(&miss); in GenerateReceivers()
2343 __ AndP(r4, r3, r2); in GenerateKnownReceivers()
2344 __ JumpIfSmi(r4, &miss); in GenerateKnownReceivers()
2345 __ GetWeakValue(r6, cell); in GenerateKnownReceivers()
2346 __ LoadP(r4, FieldMemOperand(r2, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2347 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2348 __ CmpP(r4, r6); in GenerateKnownReceivers()
2349 __ bne(&miss); in GenerateKnownReceivers()
2350 __ CmpP(r5, r6); in GenerateKnownReceivers()
2351 __ bne(&miss); in GenerateKnownReceivers()
2354 __ SubP(r2, r2, r3); in GenerateKnownReceivers()
2355 __ Ret(); in GenerateKnownReceivers()
2358 __ LoadSmiLiteral(r4, Smi::FromInt(GREATER)); in GenerateKnownReceivers()
2360 __ LoadSmiLiteral(r4, Smi::FromInt(LESS)); in GenerateKnownReceivers()
2362 __ Push(r3, r2, r4); in GenerateKnownReceivers()
2363 __ TailCallRuntime(Runtime::kCompare); in GenerateKnownReceivers()
2366 __ bind(&miss); in GenerateKnownReceivers()
2374 __ Push(r3, r2); in GenerateMiss()
2375 __ Push(r3, r2); in GenerateMiss()
2376 __ LoadSmiLiteral(r0, Smi::FromInt(op())); in GenerateMiss()
2377 __ push(r0); in GenerateMiss()
2378 __ CallRuntime(Runtime::kCompareIC_Miss); in GenerateMiss()
2380 __ AddP(r4, r2, Operand(Code::kHeaderSize - kHeapObjectTag)); in GenerateMiss()
2382 __ Pop(r3, r2); in GenerateMiss()
2385 __ JumpToJSEntry(r4); in GenerateMiss()
2390 __ CleanseP(r14); in Generate()
2392 __ b(ip); // Callee will return to R14 directly in Generate()
2398 __ LoadP(ToRegister(ABI_TOC_REGISTER), MemOperand(target, kPointerSize)); in GenerateCall()
2399 __ LoadP(target, MemOperand(target, 0)); // Instruction address in GenerateCall()
2403 __ Move(ip, target); in GenerateCall()
2406 __ call(GetCode(), RelocInfo::CODE_TARGET); // Call the stub. in GenerateCall()
2423 __ LoadP(index, FieldMemOperand(properties, kCapacityOffset)); in GenerateNegativeLookup()
2424 __ SubP(index, Operand(1)); in GenerateNegativeLookup()
2425 __ LoadSmiLiteral( in GenerateNegativeLookup()
2427 __ AndP(index, ip); in GenerateNegativeLookup()
2431 __ ShiftLeftP(ip, index, Operand(1)); in GenerateNegativeLookup()
2432 __ AddP(index, ip); // index *= 3. in GenerateNegativeLookup()
2437 __ SmiToPtrArrayOffset(ip, index); in GenerateNegativeLookup()
2438 __ AddP(tmp, properties, ip); in GenerateNegativeLookup()
2439 __ LoadP(entity_name, FieldMemOperand(tmp, kElementsStartOffset)); in GenerateNegativeLookup()
2442 __ CompareRoot(entity_name, Heap::kUndefinedValueRootIndex); in GenerateNegativeLookup()
2443 __ beq(done); in GenerateNegativeLookup()
2446 __ CmpP(entity_name, Operand(Handle<Name>(name))); in GenerateNegativeLookup()
2447 __ beq(miss); in GenerateNegativeLookup()
2450 __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex); in GenerateNegativeLookup()
2451 __ beq(&good); in GenerateNegativeLookup()
2454 __ LoadP(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset)); in GenerateNegativeLookup()
2455 __ LoadlB(entity_name, in GenerateNegativeLookup()
2457 __ JumpIfNotUniqueNameInstanceType(entity_name, miss); in GenerateNegativeLookup()
2458 __ bind(&good); in GenerateNegativeLookup()
2461 __ LoadP(properties, in GenerateNegativeLookup()
2468 __ LoadRR(r0, r14); in GenerateNegativeLookup()
2469 __ MultiPush(spill_mask); in GenerateNegativeLookup()
2471 __ LoadP(r2, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); in GenerateNegativeLookup()
2472 __ mov(r3, Operand(Handle<Name>(name))); in GenerateNegativeLookup()
2474 __ CallStub(&stub); in GenerateNegativeLookup()
2475 __ CmpP(r2, Operand::Zero()); in GenerateNegativeLookup()
2477 __ MultiPop(spill_mask); // MultiPop does not touch condition flags in GenerateNegativeLookup()
2478 __ LoadRR(r14, r0); in GenerateNegativeLookup()
2480 __ beq(done); in GenerateNegativeLookup()
2481 __ bne(miss); in GenerateNegativeLookup()
2508 __ LoadP(mask, FieldMemOperand(dictionary, kCapacityOffset)); in Generate()
2509 __ SmiUntag(mask); in Generate()
2510 __ SubP(mask, Operand(1)); in Generate()
2512 __ LoadlW(hash, FieldMemOperand(key, String::kHashFieldOffset)); in Generate()
2514 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex); in Generate()
2525 __ AddP(index, hash, in Generate()
2528 __ LoadRR(index, hash); in Generate()
2530 __ ShiftRight(r0, index, Operand(String::kHashShift)); in Generate()
2531 __ AndP(index, r0, mask); in Generate()
2535 __ ShiftLeftP(scratch, index, Operand(1)); in Generate()
2536 __ AddP(index, scratch); // index *= 3. in Generate()
2538 __ ShiftLeftP(scratch, index, Operand(kPointerSizeLog2)); in Generate()
2539 __ AddP(index, dictionary, scratch); in Generate()
2540 __ LoadP(entry_key, FieldMemOperand(index, kElementsStartOffset)); in Generate()
2543 __ CmpP(entry_key, undefined); in Generate()
2544 __ beq(&not_in_dictionary); in Generate()
2547 __ CmpP(entry_key, key); in Generate()
2548 __ beq(&in_dictionary); in Generate()
2552 __ LoadP(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset)); in Generate()
2553 __ LoadlB(entry_key, in Generate()
2555 __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary); in Generate()
2559 __ bind(&maybe_in_dictionary); in Generate()
2564 __ LoadImmP(result, Operand::Zero()); in Generate()
2565 __ Ret(); in Generate()
2568 __ bind(&in_dictionary); in Generate()
2569 __ LoadImmP(result, Operand(1)); in Generate()
2570 __ Ret(); in Generate()
2572 __ bind(&not_in_dictionary); in Generate()
2573 __ LoadImmP(result, Operand::Zero()); in Generate()
2574 __ Ret(); in Generate()
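The probe address computed above derives from the name's hash; a sketch of one probe step, with the i-th probe offset pre-shifted the way the loop folds it in (zero on the first probe; exact constants assumed):
  #include <cstdint>
  uint32_t ProbeSlot(uint32_t hash, uint32_t probe_offset_shifted,
                     uint32_t hash_shift, uint32_t mask) {
    return ((hash + probe_offset_shifted) >> hash_shift) & mask;
  }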
2601 __ b(CC_NOP, &skip_to_incremental_noncompacting); in Generate()
2602 __ b(CC_NOP, &skip_to_incremental_compacting); in Generate()
2605 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in Generate()
2608 __ Ret(); in Generate()
2610 __ bind(&skip_to_incremental_noncompacting); in Generate()
2613 __ bind(&skip_to_incremental_compacting); in Generate()
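The two CC_NOP branches above are assembled as never-taken placeholders; activating incremental marking later patches the condition field of exactly one of them, turning it into a live branch to the matching path below. A model of the three modes (enum values follow the stub's own terminology; sketch only):
  enum Mode { STORE_BUFFER_ONLY, INCREMENTAL, INCREMENTAL_COMPACTION };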
2627 __ LoadP(regs_.scratch0(), MemOperand(regs_.address(), 0)); in GenerateIncremental()
2628 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value. in GenerateIncremental()
2631 __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(), in GenerateIncremental()
2640 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in GenerateIncremental()
2643 __ bind(&dont_need_remembered_set); in GenerateIncremental()
2650 __ Ret(); in GenerateIncremental()
2656 __ PrepareCallCFunction(argument_count, regs_.scratch0()); in InformIncrementalMarker()
2661 __ LoadRR(address, regs_.address()); in InformIncrementalMarker()
2662 __ LoadRR(r2, regs_.object()); in InformIncrementalMarker()
2663 __ LoadRR(r3, address); in InformIncrementalMarker()
2664 __ mov(r4, Operand(ExternalReference::isolate_address(isolate()))); in InformIncrementalMarker()
2667 __ CallCFunction( in InformIncrementalMarker()
2682 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black); in CheckNeedsToInformIncrementalMarker()
2686 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in CheckNeedsToInformIncrementalMarker()
2689 __ Ret(); in CheckNeedsToInformIncrementalMarker()
2692 __ bind(&on_black); in CheckNeedsToInformIncrementalMarker()
2695 __ LoadP(regs_.scratch0(), MemOperand(regs_.address(), 0)); in CheckNeedsToInformIncrementalMarker()
2700 __ CheckPageFlag(regs_.scratch0(), // Contains value. in CheckNeedsToInformIncrementalMarker()
2705 __ CheckPageFlag(regs_.object(), in CheckNeedsToInformIncrementalMarker()
2710 __ bind(&ensure_not_white); in CheckNeedsToInformIncrementalMarker()
2715 __ Push(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
2716 __ JumpIfWhite(regs_.scratch0(), // The value. in CheckNeedsToInformIncrementalMarker()
2721 __ Pop(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
2725 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in CheckNeedsToInformIncrementalMarker()
2728 __ Ret(); in CheckNeedsToInformIncrementalMarker()
2731 __ bind(&need_incremental_pop_scratch); in CheckNeedsToInformIncrementalMarker()
2732 __ Pop(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
2734 __ bind(&need_incremental); in CheckNeedsToInformIncrementalMarker()
2741 __ Call(ces.GetCode(), RelocInfo::CODE_TARGET); in Generate()
2744 __ LoadP(r3, MemOperand(fp, parameter_count_offset)); in Generate()
2746 __ AddP(r3, Operand(1)); in Generate()
2749 __ ShiftLeftP(r3, r3, Operand(kPointerSizeLog2)); in Generate()
2750 __ la(sp, MemOperand(r3, sp)); in Generate()
2751 __ Ret(); in Generate()
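A sketch of the argument pop above: the saved parameter count plus one for the receiver, scaled to bytes, is added straight into sp by la (assumes kPointerSizeLog2 == 3 on s390x):
  #include <cstdint>
  uintptr_t PopArgs(uintptr_t sp, uintptr_t param_count) {
    return sp + ((param_count + 1) << 3);  // AddP; ShiftLeftP; la(sp, ...)
  }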
2765 __ CleanseP(r14); in MaybeCallEntryHook()
2766 __ Push(r14, ip); in MaybeCallEntryHook()
2767 __ CallStub(&stub); // BRASL (branch relative and save long) in MaybeCallEntryHook()
2768 __ Pop(r14, ip); in MaybeCallEntryHook()
2793 __ CleanseP(r14); in Generate()
2794 __ LoadRR(ip, r14); in Generate()
2795 __ MultiPush(kSavedRegs | ip.bit()); in Generate()
2799 __ SubP(r2, ip, Operand(kReturnAddressDistanceFromFunctionStart)); in Generate()
2803 __ lay(r3, MemOperand(sp, kNumSavedRegs * kPointerSize)); in Generate()
2808 __ LoadRR(r7, sp); in Generate()
2810 __ ClearRightImm(sp, sp, Operand(WhichPowerOf2(frame_alignment))); in Generate()
2816 __ mov(ip, Operand(entry_hook)); in Generate()
2820 __ LoadP(ToRegister(ABI_TOC_REGISTER), MemOperand(ip, kPointerSize)); in Generate()
2821 __ LoadP(ip, MemOperand(ip, 0)); in Generate()
2828 __ LoadImmP(r0, Operand::Zero()); in Generate()
2829 __ lay(sp, MemOperand(sp, -kCalleeRegisterSaveAreaSize - in Generate()
2831 __ StoreP(r0, MemOperand(sp)); in Generate()
2836 __ mov(r4, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
2839 __ mov(ip, Operand(ExternalReference( in Generate()
2842 __ Call(ip); in Generate()
2846 __ la(sp, MemOperand(sp, kCalleeRegisterSaveAreaSize + in Generate()
2851 __ LoadRR(sp, r7); in Generate()
2855 __ MultiPop(kSavedRegs | ip.bit()); in Generate()
2856 __ LoadRR(r14, ip); in Generate()
2857 __ Ret(); in Generate()
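ClearRightImm above rounds sp down to the ABI frame alignment (the original sp is kept in r7 and restored afterwards); the equivalent bit trick, assuming a power-of-two alignment:
  #include <cstdint>
  uintptr_t AlignDown(uintptr_t sp, uintptr_t alignment) {
    return sp & ~(alignment - 1);  // clears WhichPowerOf2(alignment) low bits
  }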
2865 __ TailCallStub(&stub); in CreateArrayDispatch()
2871 __ CmpP(r5, Operand(kind)); in CreateArrayDispatch()
2873 __ TailCallStub(&stub, eq); in CreateArrayDispatch()
2877 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatch()
2900 __ AndP(r0, r5, Operand(1)); in CreateArrayDispatchOneArgument()
2901 __ bne(&normal_sequence); in CreateArrayDispatchOneArgument()
2905 __ LoadP(r7, MemOperand(sp, 0)); in CreateArrayDispatchOneArgument()
2906 __ CmpP(r7, Operand::Zero()); in CreateArrayDispatchOneArgument()
2907 __ beq(&normal_sequence); in CreateArrayDispatchOneArgument()
2915 __ TailCallStub(&stub_holey); in CreateArrayDispatchOneArgument()
2917 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
2920 __ TailCallStub(&stub); in CreateArrayDispatchOneArgument()
2924 __ AddP(r5, r5, Operand(1)); in CreateArrayDispatchOneArgument()
2926 __ LoadP(r7, FieldMemOperand(r4, 0)); in CreateArrayDispatchOneArgument()
2927 __ CompareRoot(r7, Heap::kAllocationSiteMapRootIndex); in CreateArrayDispatchOneArgument()
2928 __ Assert(eq, kExpectedAllocationSite); in CreateArrayDispatchOneArgument()
2935 __ LoadP(r6, FieldMemOperand(r4, AllocationSite::kTransitionInfoOffset)); in CreateArrayDispatchOneArgument()
2936 __ AddSmiLiteral(r6, r6, Smi::FromInt(kFastElementsKindPackedToHoley), r0); in CreateArrayDispatchOneArgument()
2937 __ StoreP(r6, FieldMemOperand(r4, AllocationSite::kTransitionInfoOffset)); in CreateArrayDispatchOneArgument()
2939 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
2944 __ CmpP(r5, Operand(kind)); in CreateArrayDispatchOneArgument()
2946 __ TailCallStub(&stub, eq); in CreateArrayDispatchOneArgument()
2950 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatchOneArgument()
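The transitions above lean on the layout of fast elements kinds: packed and holey variants alternate, so bit 0 of the kind answers "already holey?" and adding one converts packed to holey. A sketch (predicate names illustrative):
  bool IsHoleyKind(int kind) { return (kind & 1) != 0; }        // AndP(r0, r5, 1)
  int ToHoleyKind(int packed_kind) { return packed_kind + 1; }  // AddP(r5, 1)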
2989 __ CmpP(r2, Operand::Zero()); in GenerateDispatchToArrayStub()
2990 __ bne(&not_zero_case); in GenerateDispatchToArrayStub()
2993 __ bind(&not_zero_case); in GenerateDispatchToArrayStub()
2994 __ CmpP(r2, Operand(1)); in GenerateDispatchToArrayStub()
2995 __ bgt(&not_one_case); in GenerateDispatchToArrayStub()
2998 __ bind(&not_one_case); in GenerateDispatchToArrayStub()
3000 __ TailCallStub(&stub); in GenerateDispatchToArrayStub()
3018 __ LoadP(r6, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3020 __ TestIfSmi(r6); in Generate()
3021 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0); in Generate()
3022 __ CompareObjectType(r6, r6, r7, MAP_TYPE); in Generate()
3023 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); in Generate()
3026 __ AssertUndefinedOrAllocationSite(r4, r6); in Generate()
3030 __ LoadP(cp, FieldMemOperand(r3, JSFunction::kContextOffset)); in Generate()
3033 __ CmpP(r5, r3); in Generate()
3034 __ bne(&subclassing, Label::kNear); in Generate()
3038 __ CompareRoot(r4, Heap::kUndefinedValueRootIndex); in Generate()
3039 __ beq(&no_info); in Generate()
3041 __ LoadP(r5, FieldMemOperand(r4, AllocationSite::kTransitionInfoOffset)); in Generate()
3042 __ SmiUntag(r5); in Generate()
3044 __ AndP(r5, Operand(AllocationSite::ElementsKindBits::kMask)); in Generate()
3047 __ bind(&no_info); in Generate()
3050 __ bind(&subclassing); in Generate()
3051 __ ShiftLeftP(r1, r2, Operand(kPointerSizeLog2)); in Generate()
3052 __ StoreP(r3, MemOperand(sp, r1)); in Generate()
3053 __ AddP(r2, r2, Operand(3)); in Generate()
3054 __ Push(r5, r4); in Generate()
3055 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate())); in Generate()
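In the subclassing path above, the constructor is written into the receiver slot before new.target and the allocation site are pushed, so Runtime::kNewArray sees argc + 3 values. The store target in pointer units (a sketch, assuming 64-bit slots):
  #include <cstddef>
  #include <cstdint>
  void StoreIntoReceiverSlot(uintptr_t* sp, size_t argc, uintptr_t value) {
    sp[argc] = value;  // StoreP(r3, MemOperand(sp, argc << kPointerSizeLog2))
  }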
3060 __ CmpLogicalP(r2, Operand(1)); in GenerateCase()
3063 __ TailCallStub(&stub0, lt); in GenerateCase()
3066 __ TailCallStub(&stubN, gt); in GenerateCase()
3071 __ LoadP(r5, MemOperand(sp, 0)); in GenerateCase()
3072 __ CmpP(r5, Operand::Zero()); in GenerateCase()
3076 __ TailCallStub(&stub1_holey, ne); in GenerateCase()
3080 __ TailCallStub(&stub1); in GenerateCase()
3096 __ LoadP(r5, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3098 __ TestIfSmi(r5); in Generate()
3099 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0); in Generate()
3100 __ CompareObjectType(r5, r5, r6, MAP_TYPE); in Generate()
3101 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); in Generate()
3105 __ LoadP(r5, FieldMemOperand(r3, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3107 __ LoadlB(r5, FieldMemOperand(r5, Map::kBitField2Offset)); in Generate()
3109 __ DecodeField<Map::ElementsKindBits>(r5); in Generate()
3113 __ CmpP(r5, Operand(FAST_ELEMENTS)); in Generate()
3114 __ beq(&done); in Generate()
3115 __ CmpP(r5, Operand(FAST_HOLEY_ELEMENTS)); in Generate()
3116 __ Assert(eq, kInvalidElementsKindForInternalArrayOrInternalPackedArray); in Generate()
3117 __ bind(&done); in Generate()
3121 __ CmpP(r5, Operand(FAST_ELEMENTS)); in Generate()
3122 __ beq(&fast_elements_case); in Generate()
3125 __ bind(&fast_elements_case); in Generate()
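DecodeField<Map::ElementsKindBits> above extracts the elements kind from the map's bit_field2 byte; the generic shape of that extraction, with the bitfield's shift and mask left as assumed parameters:
  #include <cstdint>
  int DecodeElementsKind(uint8_t bit_field2, int shift, int mask) {
    return (bit_field2 >> shift) & mask;
  }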
3157 __ mov(scratch, Operand(ExternalReference::is_profiling_address(isolate))); in CallApiFunctionAndReturn()
3158 __ LoadlB(scratch, MemOperand(scratch, 0)); in CallApiFunctionAndReturn()
3159 __ CmpP(scratch, Operand::Zero()); in CallApiFunctionAndReturn()
3163 __ beq(&profiler_disabled, Label::kNear); in CallApiFunctionAndReturn()
3164 __ mov(scratch, Operand(thunk_ref)); in CallApiFunctionAndReturn()
3165 __ b(&end_profiler_check, Label::kNear); in CallApiFunctionAndReturn()
3166 __ bind(&profiler_disabled); in CallApiFunctionAndReturn()
3167 __ LoadRR(scratch, function_address); in CallApiFunctionAndReturn()
3168 __ bind(&end_profiler_check); in CallApiFunctionAndReturn()
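The check above selects the call target once, up front: with the profiler active the call is routed through a thunk that brackets the callback with log events, otherwise it goes straight to the callback. As a one-line sketch:
  void* CallTarget(bool is_profiling, void* thunk, void* callback) {
    return is_profiling ? thunk : callback;
  }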
3175 __ mov(r9, Operand(next_address)); in CallApiFunctionAndReturn()
3176 __ LoadP(r6, MemOperand(r9, kNextOffset)); in CallApiFunctionAndReturn()
3177 __ LoadP(r7, MemOperand(r9, kLimitOffset)); in CallApiFunctionAndReturn()
3178 __ LoadlW(r8, MemOperand(r9, kLevelOffset)); in CallApiFunctionAndReturn()
3179 __ AddP(r8, Operand(1)); in CallApiFunctionAndReturn()
3180 __ StoreW(r8, MemOperand(r9, kLevelOffset)); in CallApiFunctionAndReturn()
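The loads above snapshot the isolate's HandleScope state so it can be verified and restored after the API call; a sketch of the fields involved (names illustrative, roles per the kNextOffset/kLimitOffset/kLevelOffset accesses):
  struct HandleScopeData {
    void** next;   // r6: allocation cursor, restored verbatim on exit
    void** limit;  // r7: if this moved, extensions must be deleted
    int level;     // r8: incremented here, checked and decremented on exit
  };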
3184 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
3185 __ PrepareCallCFunction(1, r2); in CallApiFunctionAndReturn()
3186 __ mov(r2, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
3187 __ CallCFunction(ExternalReference::log_enter_external_function(isolate), in CallApiFunctionAndReturn()
3189 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
3200 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
3201 __ PrepareCallCFunction(1, r2); in CallApiFunctionAndReturn()
3202 __ mov(r2, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
3203 __ CallCFunction(ExternalReference::log_leave_external_function(isolate), in CallApiFunctionAndReturn()
3205 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
3214 __ LoadP(r2, return_value_operand); in CallApiFunctionAndReturn()
3215 __ bind(&return_value_loaded); in CallApiFunctionAndReturn()
3218 __ StoreP(r6, MemOperand(r9, kNextOffset)); in CallApiFunctionAndReturn()
3219 if (__ emit_debug_code()) { in CallApiFunctionAndReturn()
3220 __ LoadlW(r3, MemOperand(r9, kLevelOffset)); in CallApiFunctionAndReturn()
3221 __ CmpP(r3, r8); in CallApiFunctionAndReturn()
3222 __ Check(eq, kUnexpectedLevelAfterReturnFromApiCall); in CallApiFunctionAndReturn()
3224 __ SubP(r8, Operand(1)); in CallApiFunctionAndReturn()
3225 __ StoreW(r8, MemOperand(r9, kLevelOffset)); in CallApiFunctionAndReturn()
3226 __ CmpP(r7, MemOperand(r9, kLimitOffset)); in CallApiFunctionAndReturn()
3227 __ bne(&delete_allocated_handles, Label::kNear); in CallApiFunctionAndReturn()
3230 __ bind(&leave_exit_frame); in CallApiFunctionAndReturn()
3233 __ LoadP(cp, *context_restore_operand); in CallApiFunctionAndReturn()
3237 __ l(r6, *stack_space_operand); in CallApiFunctionAndReturn()
3239 __ mov(r6, Operand(stack_space)); in CallApiFunctionAndReturn()
3241 __ LeaveExitFrame(false, r6, !restore_context, stack_space_operand != NULL); in CallApiFunctionAndReturn()
3244 __ mov(r7, Operand(ExternalReference::scheduled_exception_address(isolate))); in CallApiFunctionAndReturn()
3245 __ LoadP(r7, MemOperand(r7)); in CallApiFunctionAndReturn()
3246 __ CompareRoot(r7, Heap::kTheHoleValueRootIndex); in CallApiFunctionAndReturn()
3247 __ bne(&promote_scheduled_exception, Label::kNear); in CallApiFunctionAndReturn()
3249 __ b(r14); in CallApiFunctionAndReturn()
3252 __ bind(&promote_scheduled_exception); in CallApiFunctionAndReturn()
3253 __ TailCallRuntime(Runtime::kPromoteScheduledException); in CallApiFunctionAndReturn()
3256 __ bind(&delete_allocated_handles); in CallApiFunctionAndReturn()
3257 __ StoreP(r7, MemOperand(r9, kLimitOffset)); in CallApiFunctionAndReturn()
3258 __ LoadRR(r6, r2); in CallApiFunctionAndReturn()
3259 __ PrepareCallCFunction(1, r7); in CallApiFunctionAndReturn()
3260 __ mov(r2, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
3261 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate), in CallApiFunctionAndReturn()
3263 __ LoadRR(r2, r6); in CallApiFunctionAndReturn()
3264 __ b(&leave_exit_frame, Label::kNear); in CallApiFunctionAndReturn()
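The slow path above runs only when the saved limit no longer matches the live one, i.e. the callee grew the handle scope; note how the return value is parked in r6 across the C call and moved back afterwards. The trigger condition, sketched:
  bool NeedsDeleteExtensions(void** saved_limit, void** live_limit) {
    return saved_limit != live_limit;  // CmpP + bne &delete_allocated_handles
  }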
3300 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate()
3303 __ push(context); in Generate()
3306 __ LoadP(context, FieldMemOperand(callee, JSFunction::kContextOffset)); in Generate()
3310 __ push(callee); in Generate()
3313 __ push(call_data); in Generate()
3317 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); in Generate()
3320 __ push(scratch); in Generate()
3322 __ push(scratch); in Generate()
3324 __ mov(scratch, Operand(ExternalReference::isolate_address(masm->isolate()))); in Generate()
3325 __ push(scratch); in Generate()
3327 __ push(holder); in Generate()
3330 __ LoadRR(scratch, sp); in Generate()
3344 __ EnterExitFrame(false, kApiStackSpace); in Generate()
3349 __ AddP(r2, sp, Operand(kFunctionCallbackInfoOffset)); in Generate()
3351 __ StoreP(scratch, MemOperand(r2, 0 * kPointerSize)); in Generate()
3353 __ AddP(ip, scratch, Operand((FCA::kArgsLength - 1 + argc()) * kPointerSize)); in Generate()
3354 __ StoreP(ip, MemOperand(r2, 1 * kPointerSize)); in Generate()
3356 __ LoadImmP(ip, Operand(argc())); in Generate()
3357 __ StoreW(ip, MemOperand(r2, 2 * kPointerSize)); in Generate()
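The three stores above populate a FunctionCallbackInfo beside the exit frame; a sketch of the slots as written at r2 + 0/1/2 * kPointerSize (field names illustrative):
  struct FunctionCallbackInfoSlots {
    void** implicit_args;  // scratch: base of the pushed FCA block
    void** values;         // scratch + (FCA::kArgsLength - 1 + argc) slots
    int length;            // argc, stored 32-bit via StoreW
  };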
3407 __ push(receiver); in Generate()
3409 __ LoadP(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset)); in Generate()
3410 __ push(scratch); in Generate()
3411 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); in Generate()
3412 __ Push(scratch, scratch); in Generate()
3413 __ mov(scratch, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
3414 __ Push(scratch, holder); in Generate()
3415 __ Push(Smi::kZero); // should_throw_on_error -> false in Generate()
3416 __ LoadP(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset)); in Generate()
3417 __ push(scratch); in Generate()
3423 __ LoadRR(r2, sp); // r2 = Handle<Name> in Generate()
3424 __ AddP(r3, r2, Operand(1 * kPointerSize)); // r3 = v8::PCI::args_ in Generate()
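After the pushes above, sp itself addresses the name handle and the following slot begins the PropertyCallbackInfo arguments, which is exactly what the two address computations produce; sketched in C++ (assumes 64-bit pointers):
  #include <cstdint>
  void GetterArgs(uintptr_t sp, uintptr_t* name_handle, uintptr_t* args) {
    *name_handle = sp;           // r2 = sp: Handle<Name>
    *args = sp + sizeof(void*);  // r3 = r2 + kPointerSize: v8::PCI::args_
  }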
3448 __ EnterExitFrame(false, apiStackSpace); in Generate()
3452 __ StoreP(r2, MemOperand(sp, arg0Slot * kPointerSize)); in Generate()
3453 __ AddP(r2, sp, Operand(arg0Slot * kPointerSize)); in Generate()
3458 __ StoreP(r3, MemOperand(sp, accessorInfoSlot * kPointerSize)); in Generate()
3459 __ AddP(r3, sp, Operand(accessorInfoSlot * kPointerSize)); in Generate()
3465 __ LoadP(scratch, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset)); in Generate()
3466 __ LoadP(api_function_address, in Generate()
3476 #undef __