Lines matching the full token "__". Only the matching source line of each statement is listed, so calls that span several source lines appear truncated here.

24 #define __ ACCESS_MASM(masm)  macro
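
Every match below goes through the __ shorthand defined on line 24. This is V8's standard macro-assembler convention: ACCESS_MASM(masm) expands to masm->, so each listed line is a method call on the current MacroAssembler. A minimal sketch of the convention; the MacroAssembler stand-in below is illustrative, not V8's real class:

    #include <cstdio>

    // Stand-in for V8's MacroAssembler; the real class emits machine code.
    struct MacroAssembler {
      void push(int reg) { std::printf("push r%d\n", reg); }
      void Ret() { std::printf("blr\n"); }
    };

    // The convention behind line 24 of the listing:
    #define ACCESS_MASM(masm) masm->
    #define __ ACCESS_MASM(masm)

    void Generate(MacroAssembler* masm) {
      __ push(4);  // expands to masm->push(4);
      __ Ret();    // expands to masm->Ret();
    }

    #undef __
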
27 __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2)); in Generate()
28 __ StorePX(r4, MemOperand(sp, r0)); in Generate()
29 __ push(r4); in Generate()
30 __ push(r5); in Generate()
31 __ addi(r3, r3, Operand(3)); in Generate()
32 __ TailCallRuntime(Runtime::kNewArray); in Generate()
58 __ push(descriptor.GetRegisterParameter(i)); in GenerateLightweightMiss()
60 __ CallExternalReference(miss, param_count); in GenerateLightweightMiss()
63 __ Ret(); in GenerateLightweightMiss()
83 __ push(scratch); in Generate()
89 __ lfd(double_scratch, MemOperand(input_reg, double_offset)); in Generate()
92 __ ConvertDoubleToInt64(double_scratch, in Generate()
100 __ TestIfInt32(result_reg, r0); in Generate()
102 __ TestIfInt32(scratch, result_reg, r0); in Generate()
104 __ beq(&fastpath_done); in Generate()
107 __ Push(scratch_high, scratch_low); in Generate()
111 __ lwz(scratch_high, in Generate()
113 __ lwz(scratch_low, in Generate()
116 __ ExtractBitMask(scratch, scratch_high, HeapNumber::kExponentMask); in Generate()
120 __ subi(scratch, scratch, Operand(HeapNumber::kExponentBias + 1)); in Generate()
125 __ cmpi(scratch, Operand(83)); in Generate()
126 __ bge(&out_of_range); in Generate()
133 __ subfic(scratch, scratch, Operand(51)); in Generate()
134 __ cmpi(scratch, Operand::Zero()); in Generate()
135 __ ble(&only_low); in Generate()
138 __ srw(scratch_low, scratch_low, scratch); in Generate()
142 __ subfic(scratch, scratch, Operand(32)); in Generate()
143 __ ExtractBitMask(result_reg, scratch_high, HeapNumber::kMantissaMask); in Generate()
146 __ oris(result_reg, result_reg, in Generate()
148 __ slw(r0, result_reg, scratch); in Generate()
149 __ orx(result_reg, scratch_low, r0); in Generate()
150 __ b(&negate); in Generate()
152 __ bind(&out_of_range); in Generate()
153 __ mov(result_reg, Operand::Zero()); in Generate()
154 __ b(&done); in Generate()
156 __ bind(&only_low); in Generate()
159 __ neg(scratch, scratch); in Generate()
160 __ slw(result_reg, scratch_low, scratch); in Generate()
162 __ bind(&negate); in Generate()
169 __ srawi(r0, scratch_high, 31); in Generate()
171 __ srdi(r0, r0, Operand(32)); in Generate()
173 __ xor_(result_reg, result_reg, r0); in Generate()
174 __ srwi(r0, scratch_high, Operand(31)); in Generate()
175 __ add(result_reg, result_reg, r0); in Generate()
177 __ bind(&done); in Generate()
178 __ Pop(scratch_high, scratch_low); in Generate()
180 __ bind(&fastpath_done); in Generate()
181 __ pop(scratch); in Generate()
183 __ Ret(); in Generate()
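
Lines 83-183 above are the stub's slow path for truncating a heap number's double value to a 32-bit integer without floating-point help: load the high and low words, extract the biased exponent with ExtractBitMask, shift the mantissa into position, then negate according to the sign bit. A simplified scalar sketch of the same bit manipulation (TruncateDoubleToInt32 is an illustrative name; the real stub also handles the full ECMAScript modulo-2^32 range for exponents up to 83 rather than cutting off early):

    #include <cstdint>
    #include <cstring>

    // Truncate a double toward zero using only integer operations, in the
    // spirit of the exponent/mantissa extraction above. Inputs outside this
    // sketch's simplified range (and NaN/Inf) take the &out_of_range route
    // and yield 0, as the stub's out-of-range path does.
    int32_t TruncateDoubleToInt32(double x) {
      uint64_t bits;
      std::memcpy(&bits, &x, sizeof(bits));
      uint32_t sign = static_cast<uint32_t>(bits >> 63);
      int32_t exponent = static_cast<int32_t>((bits >> 52) & 0x7FF) - 1023;
      if (exponent < 0) return 0;   // |x| < 1 truncates to 0.
      if (exponent > 30) return 0;  // Simplified out-of-range cut-off.
      // Implicit leading 1 plus the stored 52-bit mantissa.
      uint64_t mantissa =
          (bits & ((uint64_t{1} << 52) - 1)) | (uint64_t{1} << 52);
      int32_t magnitude = static_cast<int32_t>(mantissa >> (52 - exponent));
      return sign ? -magnitude : magnitude;
    }
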
194 __ cmp(r3, r4); in EmitIdenticalObjectComparison()
195 __ bne(&not_identical); in EmitIdenticalObjectComparison()
203 __ CompareObjectType(r3, r7, r7, FIRST_JS_RECEIVER_TYPE); in EmitIdenticalObjectComparison()
204 __ bge(slow); in EmitIdenticalObjectComparison()
206 __ cmpi(r7, Operand(SYMBOL_TYPE)); in EmitIdenticalObjectComparison()
207 __ beq(slow); in EmitIdenticalObjectComparison()
209 __ CompareObjectType(r3, r7, r7, HEAP_NUMBER_TYPE); in EmitIdenticalObjectComparison()
210 __ beq(&heap_number); in EmitIdenticalObjectComparison()
213 __ cmpi(r7, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitIdenticalObjectComparison()
214 __ bge(slow); in EmitIdenticalObjectComparison()
216 __ cmpi(r7, Operand(SYMBOL_TYPE)); in EmitIdenticalObjectComparison()
217 __ beq(slow); in EmitIdenticalObjectComparison()
222 __ cmpi(r7, Operand(ODDBALL_TYPE)); in EmitIdenticalObjectComparison()
223 __ bne(&return_equal); in EmitIdenticalObjectComparison()
224 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); in EmitIdenticalObjectComparison()
225 __ cmp(r3, r5); in EmitIdenticalObjectComparison()
226 __ bne(&return_equal); in EmitIdenticalObjectComparison()
229 __ li(r3, Operand(GREATER)); in EmitIdenticalObjectComparison()
232 __ li(r3, Operand(LESS)); in EmitIdenticalObjectComparison()
234 __ Ret(); in EmitIdenticalObjectComparison()
239 __ bind(&return_equal); in EmitIdenticalObjectComparison()
241 __ li(r3, Operand(GREATER)); // Things aren't less than themselves. in EmitIdenticalObjectComparison()
243 __ li(r3, Operand(LESS)); // Things aren't greater than themselves. in EmitIdenticalObjectComparison()
245 __ li(r3, Operand(EQUAL)); // Things are <=, >=, ==, === themselves. in EmitIdenticalObjectComparison()
247 __ Ret(); in EmitIdenticalObjectComparison()
253 __ bind(&heap_number); in EmitIdenticalObjectComparison()
260 __ lwz(r5, FieldMemOperand(r3, HeapNumber::kExponentOffset)); in EmitIdenticalObjectComparison()
263 __ ExtractBitMask(r6, r5, HeapNumber::kExponentMask); in EmitIdenticalObjectComparison()
264 __ cmpli(r6, Operand(0x7ff)); in EmitIdenticalObjectComparison()
265 __ bne(&return_equal); in EmitIdenticalObjectComparison()
268 __ slwi(r5, r5, Operand(HeapNumber::kNonMantissaBitsInTopWord)); in EmitIdenticalObjectComparison()
270 __ lwz(r6, FieldMemOperand(r3, HeapNumber::kMantissaOffset)); in EmitIdenticalObjectComparison()
271 __ orx(r3, r6, r5); in EmitIdenticalObjectComparison()
272 __ cmpi(r3, Operand::Zero()); in EmitIdenticalObjectComparison()
279 __ li(r4, Operand((cond == le) ? GREATER : LESS)); in EmitIdenticalObjectComparison()
280 __ isel(eq, r3, r3, r4); in EmitIdenticalObjectComparison()
283 __ Ret(eq); in EmitIdenticalObjectComparison()
285 __ li(r3, Operand(GREATER)); // NaN <= NaN should fail. in EmitIdenticalObjectComparison()
287 __ li(r3, Operand(LESS)); // NaN >= NaN should fail. in EmitIdenticalObjectComparison()
291 __ Ret(); in EmitIdenticalObjectComparison()
295 __ bind(&not_identical); in EmitIdenticalObjectComparison()
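
In the &heap_number block (lines 253-291), two identical references can still compare unequal: the value may be NaN, and NaN never equals itself. The stub detects NaN from the raw bits by checking that the exponent field is all ones (0x7ff) and that the mantissa bits are non-zero. The same test in portable C++ (IsNaNBits is an illustrative name):

    #include <cstdint>
    #include <cstring>

    // Bit-level NaN test matching the exponent/mantissa checks above:
    // a double is NaN iff its exponent field is all ones and its 52-bit
    // mantissa is non-zero (an all-ones exponent with a zero mantissa is
    // +/-Infinity, which does equal itself).
    bool IsNaNBits(double x) {
      uint64_t bits;
      std::memcpy(&bits, &x, sizeof(bits));
      uint32_t exponent = static_cast<uint32_t>((bits >> 52) & 0x7FF);
      uint64_t mantissa = bits & ((uint64_t{1} << 52) - 1);
      return exponent == 0x7FF && mantissa != 0;
    }
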
306 __ JumpIfSmi(rhs, &rhs_is_smi); in EmitSmiNonsmiComparison()
309 __ CompareObjectType(rhs, r6, r7, HEAP_NUMBER_TYPE); in EmitSmiNonsmiComparison()
316 __ beq(&skip); in EmitSmiNonsmiComparison()
317 __ mov(r3, Operand(NOT_EQUAL)); in EmitSmiNonsmiComparison()
318 __ Ret(); in EmitSmiNonsmiComparison()
319 __ bind(&skip); in EmitSmiNonsmiComparison()
321 __ Ret(ne); in EmitSmiNonsmiComparison()
326 __ bne(slow); in EmitSmiNonsmiComparison()
331 __ SmiToDouble(d7, lhs); in EmitSmiNonsmiComparison()
333 __ lfd(d6, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in EmitSmiNonsmiComparison()
337 __ b(lhs_not_nan); in EmitSmiNonsmiComparison()
339 __ bind(&rhs_is_smi); in EmitSmiNonsmiComparison()
341 __ CompareObjectType(lhs, r7, r7, HEAP_NUMBER_TYPE); in EmitSmiNonsmiComparison()
348 __ beq(&skip); in EmitSmiNonsmiComparison()
349 __ mov(r3, Operand(NOT_EQUAL)); in EmitSmiNonsmiComparison()
350 __ Ret(); in EmitSmiNonsmiComparison()
351 __ bind(&skip); in EmitSmiNonsmiComparison()
353 __ Ret(ne); in EmitSmiNonsmiComparison()
358 __ bne(slow); in EmitSmiNonsmiComparison()
363 __ lfd(d7, FieldMemOperand(lhs, HeapNumber::kValueOffset)); in EmitSmiNonsmiComparison()
365 __ SmiToDouble(d6, rhs); in EmitSmiNonsmiComparison()
382 __ CompareObjectType(rhs, r5, r5, FIRST_JS_RECEIVER_TYPE); in EmitStrictTwoHeapObjectCompare()
383 __ blt(&first_non_object); in EmitStrictTwoHeapObjectCompare()
387 __ bind(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
388 __ Ret(); in EmitStrictTwoHeapObjectCompare()
390 __ bind(&first_non_object); in EmitStrictTwoHeapObjectCompare()
392 __ cmpi(r5, Operand(ODDBALL_TYPE)); in EmitStrictTwoHeapObjectCompare()
393 __ beq(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
395 __ CompareObjectType(lhs, r6, r6, FIRST_JS_RECEIVER_TYPE); in EmitStrictTwoHeapObjectCompare()
396 __ bge(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
399 __ cmpi(r6, Operand(ODDBALL_TYPE)); in EmitStrictTwoHeapObjectCompare()
400 __ beq(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
405 __ orx(r5, r5, r6); in EmitStrictTwoHeapObjectCompare()
406 __ andi(r0, r5, Operand(kIsNotStringMask | kIsNotInternalizedMask)); in EmitStrictTwoHeapObjectCompare()
407 __ beq(&return_not_equal, cr0); in EmitStrictTwoHeapObjectCompare()
418 __ CompareObjectType(rhs, r6, r5, HEAP_NUMBER_TYPE); in EmitCheckForTwoHeapNumbers()
419 __ bne(not_heap_numbers); in EmitCheckForTwoHeapNumbers()
420 __ LoadP(r5, FieldMemOperand(lhs, HeapObject::kMapOffset)); in EmitCheckForTwoHeapNumbers()
421 __ cmp(r5, r6); in EmitCheckForTwoHeapNumbers()
422 __ bne(slow); // First was a heap number, second wasn't. Go slow case. in EmitCheckForTwoHeapNumbers()
426 __ lfd(d6, FieldMemOperand(rhs, HeapNumber::kValueOffset)); in EmitCheckForTwoHeapNumbers()
427 __ lfd(d7, FieldMemOperand(lhs, HeapNumber::kValueOffset)); in EmitCheckForTwoHeapNumbers()
429 __ b(both_loaded_as_doubles); in EmitCheckForTwoHeapNumbers()
444 __ andi(r0, r5, Operand(kIsNotStringMask)); in EmitCheckForInternalizedStringsOrObjects()
445 __ bne(&object_test, cr0); in EmitCheckForInternalizedStringsOrObjects()
446 __ andi(r0, r5, Operand(kIsNotInternalizedMask)); in EmitCheckForInternalizedStringsOrObjects()
447 __ bne(possible_strings, cr0); in EmitCheckForInternalizedStringsOrObjects()
448 __ CompareObjectType(lhs, r6, r6, FIRST_NONSTRING_TYPE); in EmitCheckForInternalizedStringsOrObjects()
449 __ bge(runtime_call); in EmitCheckForInternalizedStringsOrObjects()
450 __ andi(r0, r6, Operand(kIsNotInternalizedMask)); in EmitCheckForInternalizedStringsOrObjects()
451 __ bne(possible_strings, cr0); in EmitCheckForInternalizedStringsOrObjects()
456 __ Ret(); in EmitCheckForInternalizedStringsOrObjects()
458 __ bind(&object_test); in EmitCheckForInternalizedStringsOrObjects()
459 __ LoadP(r5, FieldMemOperand(lhs, HeapObject::kMapOffset)); in EmitCheckForInternalizedStringsOrObjects()
460 __ LoadP(r6, FieldMemOperand(rhs, HeapObject::kMapOffset)); in EmitCheckForInternalizedStringsOrObjects()
461 __ lbz(r7, FieldMemOperand(r5, Map::kBitFieldOffset)); in EmitCheckForInternalizedStringsOrObjects()
462 __ lbz(r8, FieldMemOperand(r6, Map::kBitFieldOffset)); in EmitCheckForInternalizedStringsOrObjects()
463 __ andi(r0, r7, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
464 __ bne(&undetectable, cr0); in EmitCheckForInternalizedStringsOrObjects()
465 __ andi(r0, r8, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
466 __ bne(&return_unequal, cr0); in EmitCheckForInternalizedStringsOrObjects()
468 __ CompareInstanceType(r5, r5, FIRST_JS_RECEIVER_TYPE); in EmitCheckForInternalizedStringsOrObjects()
469 __ blt(runtime_call); in EmitCheckForInternalizedStringsOrObjects()
470 __ CompareInstanceType(r6, r6, FIRST_JS_RECEIVER_TYPE); in EmitCheckForInternalizedStringsOrObjects()
471 __ blt(runtime_call); in EmitCheckForInternalizedStringsOrObjects()
473 __ bind(&return_unequal); in EmitCheckForInternalizedStringsOrObjects()
475 __ Ret(); in EmitCheckForInternalizedStringsOrObjects()
477 __ bind(&undetectable); in EmitCheckForInternalizedStringsOrObjects()
478 __ andi(r0, r8, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
479 __ beq(&return_unequal, cr0); in EmitCheckForInternalizedStringsOrObjects()
484 __ CompareInstanceType(r5, r5, ODDBALL_TYPE); in EmitCheckForInternalizedStringsOrObjects()
485 __ beq(&return_equal); in EmitCheckForInternalizedStringsOrObjects()
486 __ CompareInstanceType(r6, r6, ODDBALL_TYPE); in EmitCheckForInternalizedStringsOrObjects()
487 __ bne(&return_unequal); in EmitCheckForInternalizedStringsOrObjects()
489 __ bind(&return_equal); in EmitCheckForInternalizedStringsOrObjects()
490 __ li(r3, Operand(EQUAL)); in EmitCheckForInternalizedStringsOrObjects()
491 __ Ret(); in EmitCheckForInternalizedStringsOrObjects()
501 __ JumpIfNotSmi(input, fail); in CompareICStub_CheckInputType()
503 __ JumpIfSmi(input, &ok); in CompareICStub_CheckInputType()
504 __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail, in CompareICStub_CheckInputType()
509 __ bind(&ok); in CompareICStub_CheckInputType()
529 __ orx(r5, r4, r3); in GenerateGeneric()
530 __ JumpIfNotSmi(r5, &not_two_smis); in GenerateGeneric()
531 __ SmiUntag(r4); in GenerateGeneric()
532 __ SmiUntag(r3); in GenerateGeneric()
533 __ sub(r3, r4, r3); in GenerateGeneric()
534 __ Ret(); in GenerateGeneric()
535 __ bind(&not_two_smis); in GenerateGeneric()
548 __ and_(r5, lhs, rhs); in GenerateGeneric()
549 __ JumpIfNotSmi(r5, &not_smis); in GenerateGeneric()
560 __ bind(&both_loaded_as_doubles); in GenerateGeneric()
562 __ bind(&lhs_not_nan); in GenerateGeneric()
564 __ fcmpu(d7, d6); in GenerateGeneric()
567 __ bunordered(&nan); in GenerateGeneric()
570 __ li(r4, Operand(GREATER)); in GenerateGeneric()
571 __ li(r5, Operand(LESS)); in GenerateGeneric()
572 __ isel(eq, r3, r0, r4); in GenerateGeneric()
573 __ isel(lt, r3, r5, r3); in GenerateGeneric()
574 __ Ret(); in GenerateGeneric()
576 __ beq(&equal); in GenerateGeneric()
577 __ blt(&less_than); in GenerateGeneric()
578 __ li(r3, Operand(GREATER)); in GenerateGeneric()
579 __ Ret(); in GenerateGeneric()
580 __ bind(&equal); in GenerateGeneric()
581 __ li(r3, Operand(EQUAL)); in GenerateGeneric()
582 __ Ret(); in GenerateGeneric()
583 __ bind(&less_than); in GenerateGeneric()
584 __ li(r3, Operand(LESS)); in GenerateGeneric()
585 __ Ret(); in GenerateGeneric()
588 __ bind(&nan); in GenerateGeneric()
593 __ li(r3, Operand(GREATER)); in GenerateGeneric()
595 __ li(r3, Operand(LESS)); in GenerateGeneric()
597 __ Ret(); in GenerateGeneric()
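
Lines 570-585 materialize the comparison result in r3 (LESS = -1, EQUAL = 0, GREATER = 1 in V8) in two variants: a branchless one built from isel, PPC's integer conditional select, and a beq/blt ladder for cores without it; lines 588-597 then pick the canonical NaN answer for the condition being tested. The two isel lines are equivalent to nested ternaries (CompareResult is an illustrative name):

    // Branchless materialization mirroring
    //   isel(eq, r3, r0, r4);  // r3 = eq ? 0 : GREATER   (r0 holds 0)
    //   isel(lt, r3, r5, r3);  // r3 = lt ? LESS : r3
    int CompareResult(bool eq, bool lt) {
      const int kLess = -1, kEqual = 0, kGreater = 1;
      int r3 = eq ? kEqual : kGreater;
      return lt ? kLess : r3;
    }
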
599 __ bind(&not_smis); in GenerateGeneric()
619 __ bind(&check_for_internalized_strings); in GenerateGeneric()
632 __ bind(&flat_string_check); in GenerateGeneric()
634 __ JumpIfNonSmisNotBothSequentialOneByteStrings(lhs, rhs, r5, r6, &slow); in GenerateGeneric()
636 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, r5, in GenerateGeneric()
645 __ bind(&slow); in GenerateGeneric()
650 __ Push(cp); in GenerateGeneric()
651 __ Call(strict() ? isolate()->builtins()->StrictEqual() in GenerateGeneric()
654 __ Pop(cp); in GenerateGeneric()
658 __ LoadRoot(r4, Heap::kTrueValueRootIndex); in GenerateGeneric()
659 __ sub(r3, r3, r4); in GenerateGeneric()
660 __ Ret(); in GenerateGeneric()
662 __ Push(lhs, rhs); in GenerateGeneric()
670 __ LoadSmiLiteral(r3, Smi::FromInt(ncr)); in GenerateGeneric()
671 __ push(r3); in GenerateGeneric()
675 __ TailCallRuntime(Runtime::kCompare); in GenerateGeneric()
678 __ bind(&miss); in GenerateGeneric()
687 __ mflr(r0); in Generate()
688 __ MultiPush(kJSCallerSaved | r0.bit()); in Generate()
690 __ MultiPushDoubles(kCallerSavedDoubles); in Generate()
697 __ PrepareCallCFunction(argument_count, fp_argument_count, scratch); in Generate()
698 __ mov(r3, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
699 __ CallCFunction(ExternalReference::store_buffer_overflow_function(isolate()), in Generate()
702 __ MultiPopDoubles(kCallerSavedDoubles); in Generate()
704 __ MultiPop(kJSCallerSaved | r0.bit()); in Generate()
705 __ mtlr(r0); in Generate()
706 __ Ret(); in Generate()
711 __ PushSafepointRegisters(); in Generate()
712 __ blr(); in Generate()
717 __ PopSafepointRegisters(); in Generate()
718 __ blr(); in Generate()
735 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent); in Generate()
737 __ lfd(double_exponent, in Generate()
743 __ TryDoubleToInt32Exact(scratch, double_exponent, scratch2, in Generate()
745 __ beq(&int_exponent); in Generate()
747 __ mflr(r0); in Generate()
748 __ push(r0); in Generate()
751 __ PrepareCallCFunction(0, 2, scratch); in Generate()
752 __ MovToFloatParameters(double_base, double_exponent); in Generate()
753 __ CallCFunction( in Generate()
756 __ pop(r0); in Generate()
757 __ mtlr(r0); in Generate()
758 __ MovFromFloatResult(double_result); in Generate()
759 __ b(&done); in Generate()
763 __ bind(&int_exponent); in Generate()
767 __ mr(scratch, exponent); in Generate()
770 __ mr(exponent, scratch); in Generate()
772 __ fmr(double_scratch, double_base); // Back up base. in Generate()
773 __ li(scratch2, Operand(1)); in Generate()
774 __ ConvertIntToDouble(scratch2, double_result); in Generate()
777 __ cmpi(scratch, Operand::Zero()); in Generate()
779 __ neg(scratch2, scratch); in Generate()
780 __ isel(lt, scratch, scratch2, scratch); in Generate()
783 __ bge(&positive_exponent); in Generate()
784 __ neg(scratch, scratch); in Generate()
785 __ bind(&positive_exponent); in Generate()
789 __ bind(&while_true); in Generate()
790 __ andi(scratch2, scratch, Operand(1)); in Generate()
791 __ beq(&no_carry, cr0); in Generate()
792 __ fmul(double_result, double_result, double_scratch); in Generate()
793 __ bind(&no_carry); in Generate()
794 __ ShiftRightImm(scratch, scratch, Operand(1), SetRC); in Generate()
795 __ beq(&loop_end, cr0); in Generate()
796 __ fmul(double_scratch, double_scratch, double_scratch); in Generate()
797 __ b(&while_true); in Generate()
798 __ bind(&loop_end); in Generate()
800 __ cmpi(exponent, Operand::Zero()); in Generate()
801 __ bge(&done); in Generate()
803 __ li(scratch2, Operand(1)); in Generate()
804 __ ConvertIntToDouble(scratch2, double_scratch); in Generate()
805 __ fdiv(double_result, double_scratch, double_result); in Generate()
808 __ fcmpu(double_result, kDoubleRegZero); in Generate()
809 __ bne(&done); in Generate()
812 __ ConvertIntToDouble(exponent, double_exponent); in Generate()
815 __ mflr(r0); in Generate()
816 __ push(r0); in Generate()
819 __ PrepareCallCFunction(0, 2, scratch); in Generate()
820 __ MovToFloatParameters(double_base, double_exponent); in Generate()
821 __ CallCFunction( in Generate()
824 __ pop(r0); in Generate()
825 __ mtlr(r0); in Generate()
826 __ MovFromFloatResult(double_result); in Generate()
828 __ bind(&done); in Generate()
829 __ Ret(); in Generate()
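
The &int_exponent path (lines 763-829) is exponentiation by squaring: walk the exponent bit by bit, multiplying the accumulated result by the current square whenever the low bit is set, then take the reciprocal (the fdiv on line 805) if the exponent was negative. The loop in plain C++ (PowIntExponent is an illustrative name; the stub additionally falls back to the C library call when the fast path's result is suspect):

    // Square-and-multiply loop from &while_true ... &loop_end above.
    double PowIntExponent(double base, int exponent) {
      double result = 1.0;                  // ConvertIntToDouble(1, ...)
      double square = base;                 // fmr double_scratch, double_base
      unsigned e = exponent < 0 ? 0u - static_cast<unsigned>(exponent)
                                : static_cast<unsigned>(exponent);
      while (e != 0) {
        if (e & 1) result *= square;        // fmul double_result, ...
        e >>= 1;                            // ShiftRightImm ..., SetRC
        if (e != 0) square *= square;       // fmul double_scratch, ...
      }
      return exponent < 0 ? 1.0 / result : result;  // the fdiv on line 805
    }
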
889 __ mr(r15, r4); in Generate()
893 __ mr(r4, r5); in Generate()
896 __ ShiftLeftImm(r4, r3, Operand(kPointerSizeLog2)); in Generate()
897 __ add(r4, r4, sp); in Generate()
898 __ subi(r4, r4, Operand(kPointerSize)); in Generate()
915 __ EnterExitFrame(save_doubles(), arg_stack_space, is_builtin_exit() in Generate()
920 __ mr(r14, r3); in Generate()
933 __ mr(r5, r4); in Generate()
934 __ mr(r4, r3); in Generate()
935 __ addi(r3, sp, Operand((kStackFrameExtraParamSlot + 1) * kPointerSize)); in Generate()
940 __ mov(isolate_reg, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
945 __ LoadP(ToRegister(ABI_TOC_REGISTER), MemOperand(r15, kPointerSize)); in Generate()
946 __ LoadP(ip, MemOperand(r15, 0)); // Instruction address in Generate()
949 __ Move(ip, r15); in Generate()
958 __ mov_label_addr(r0, &after_call); in Generate()
959 __ StoreP(r0, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize)); in Generate()
960 __ Call(target); in Generate()
961 __ bind(&after_call); in Generate()
965 if (result_size() > 2) __ LoadP(r5, MemOperand(r3, 2 * kPointerSize)); in Generate()
966 __ LoadP(r4, MemOperand(r3, kPointerSize)); in Generate()
967 __ LoadP(r3, MemOperand(r3)); in Generate()
972 __ CompareRoot(r3, Heap::kExceptionRootIndex); in Generate()
973 __ beq(&exception_returned); in Generate()
982 __ mov(r6, Operand(pending_exception_address)); in Generate()
983 __ LoadP(r6, MemOperand(r6)); in Generate()
984 __ CompareRoot(r6, Heap::kTheHoleValueRootIndex); in Generate()
986 __ beq(&okay); in Generate()
987 __ stop("Unexpected pending exception"); in Generate()
988 __ bind(&okay); in Generate()
1003 __ LeaveExitFrame(save_doubles(), argc, true); in Generate()
1004 __ blr(); in Generate()
1007 __ bind(&exception_returned); in Generate()
1026 __ PrepareCallCFunction(3, 0, r3); in Generate()
1027 __ li(r3, Operand::Zero()); in Generate()
1028 __ li(r4, Operand::Zero()); in Generate()
1029 __ mov(r5, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
1030 __ CallCFunction(find_handler, 3); in Generate()
1034 __ mov(cp, Operand(pending_handler_context_address)); in Generate()
1035 __ LoadP(cp, MemOperand(cp)); in Generate()
1036 __ mov(sp, Operand(pending_handler_sp_address)); in Generate()
1037 __ LoadP(sp, MemOperand(sp)); in Generate()
1038 __ mov(fp, Operand(pending_handler_fp_address)); in Generate()
1039 __ LoadP(fp, MemOperand(fp)); in Generate()
1044 __ cmpi(cp, Operand::Zero()); in Generate()
1045 __ beq(&skip); in Generate()
1046 __ StoreP(cp, MemOperand(fp, StandardFrameConstants::kContextOffset)); in Generate()
1047 __ bind(&skip); in Generate()
1051 __ mov(r4, Operand(pending_handler_code_address)); in Generate()
1052 __ LoadP(r4, MemOperand(r4)); in Generate()
1053 __ mov(r5, Operand(pending_handler_offset_address)); in Generate()
1054 __ LoadP(r5, MemOperand(r5)); in Generate()
1055 __ addi(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start in Generate()
1057 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r4); in Generate()
1059 __ add(ip, r4, r5); in Generate()
1060 __ Jump(ip); in Generate()
1074 __ function_descriptor(); in Generate()
1080 __ mflr(r0); in Generate()
1081 __ StoreP(r0, MemOperand(sp, kStackFrameLRSlot * kPointerSize)); in Generate()
1084 __ MultiPush(kCalleeSaved); in Generate()
1087 __ MultiPushDoubles(kCalleeSavedDoubles); in Generate()
1089 __ LoadDoubleLiteral(kDoubleRegZero, 0.0, r0); in Generate()
1097 __ li(r0, Operand(-1)); // Push a bad frame pointer to fail if it is used. in Generate()
1098 __ push(r0); in Generate()
1100 __ li(kConstantPoolRegister, Operand::Zero()); in Generate()
1101 __ push(kConstantPoolRegister); in Generate()
1104 __ mov(r0, Operand(StackFrame::TypeToMarker(marker))); in Generate()
1105 __ push(r0); in Generate()
1106 __ push(r0); in Generate()
1108 __ mov(r8, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); in Generate()
1109 __ LoadP(r0, MemOperand(r8)); in Generate()
1110 __ push(r0); in Generate()
1113 __ addi(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); in Generate()
1118 __ mov(r8, Operand(ExternalReference(js_entry_sp))); in Generate()
1119 __ LoadP(r9, MemOperand(r8)); in Generate()
1120 __ cmpi(r9, Operand::Zero()); in Generate()
1121 __ bne(&non_outermost_js); in Generate()
1122 __ StoreP(fp, MemOperand(r8)); in Generate()
1123 __ mov(ip, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME)); in Generate()
1125 __ b(&cont); in Generate()
1126 __ bind(&non_outermost_js); in Generate()
1127 __ mov(ip, Operand(StackFrame::INNER_JSENTRY_FRAME)); in Generate()
1128 __ bind(&cont); in Generate()
1129 __ push(ip); // frame-type in Generate()
1133 __ b(&invoke); in Generate()
1135 __ bind(&handler_entry); in Generate()
1141 __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in Generate()
1144 __ StoreP(r3, MemOperand(ip)); in Generate()
1145 __ LoadRoot(r3, Heap::kExceptionRootIndex); in Generate()
1146 __ b(&exit); in Generate()
1149 __ bind(&invoke); in Generate()
1151 __ PushStackHandler(); in Generate()
1170 __ mov(ip, Operand(construct_entry)); in Generate()
1173 __ mov(ip, Operand(entry)); in Generate()
1175 __ LoadP(ip, MemOperand(ip)); // deref address in Generate()
1179 __ addi(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1180 __ mtctr(ip); in Generate()
1181 __ bctrl(); // make the call in Generate()
1184 __ PopStackHandler(); in Generate()
1186 __ bind(&exit); // r3 holds result in Generate()
1189 __ pop(r8); in Generate()
1190 __ cmpi(r8, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME)); in Generate()
1191 __ bne(&non_outermost_js_2); in Generate()
1192 __ mov(r9, Operand::Zero()); in Generate()
1193 __ mov(r8, Operand(ExternalReference(js_entry_sp))); in Generate()
1194 __ StoreP(r9, MemOperand(r8)); in Generate()
1195 __ bind(&non_outermost_js_2); in Generate()
1198 __ pop(r6); in Generate()
1199 __ mov(ip, Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate()))); in Generate()
1200 __ StoreP(r6, MemOperand(ip)); in Generate()
1203 __ addi(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); in Generate()
1206 __ MultiPopDoubles(kCalleeSavedDoubles); in Generate()
1209 __ MultiPop(kCalleeSaved); in Generate()
1212 __ LoadP(r0, MemOperand(sp, kStackFrameLRSlot * kPointerSize)); in Generate()
1213 __ mtlr(r0); in Generate()
1214 __ blr(); in Generate()
1222 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
1259 __ mov(r3, Operand(address_of_regexp_stack_memory_size)); in Generate()
1260 __ LoadP(r3, MemOperand(r3, 0)); in Generate()
1261 __ cmpi(r3, Operand::Zero()); in Generate()
1262 __ beq(&runtime); in Generate()
1265 __ LoadP(r3, MemOperand(sp, kJSRegExpOffset)); in Generate()
1266 __ JumpIfSmi(r3, &runtime); in Generate()
1267 __ CompareObjectType(r3, r4, r4, JS_REGEXP_TYPE); in Generate()
1268 __ bne(&runtime); in Generate()
1271 __ LoadP(regexp_data, FieldMemOperand(r3, JSRegExp::kDataOffset)); in Generate()
1273 __ TestIfSmi(regexp_data, r0); in Generate()
1274 __ Check(ne, kUnexpectedTypeForRegExpDataFixedArrayExpected, cr0); in Generate()
1275 __ CompareObjectType(regexp_data, r3, r3, FIXED_ARRAY_TYPE); in Generate()
1276 __ Check(eq, kUnexpectedTypeForRegExpDataFixedArrayExpected); in Generate()
1281 __ LoadP(r3, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset)); in Generate()
1283 __ CmpSmiLiteral(r3, Smi::FromInt(JSRegExp::IRREGEXP), r0); in Generate()
1284 __ bne(&runtime); in Generate()
1288 __ LoadP(r5, in Generate()
1294 __ SmiToShortArrayOffset(r5, r5); in Generate()
1296 __ cmpli(r5, Operand(Isolate::kJSRegexpStaticOffsetsVectorSize - 2)); in Generate()
1297 __ bgt(&runtime); in Generate()
1300 __ li(r11, Operand::Zero()); in Generate()
1301 __ LoadP(subject, MemOperand(sp, kSubjectOffset)); in Generate()
1302 __ JumpIfSmi(subject, &runtime); in Generate()
1303 __ mr(r6, subject); // Make a copy of the original subject string. in Generate()
1326 __ bind(&check_underlying); in Generate()
1327 __ LoadP(r3, FieldMemOperand(subject, HeapObject::kMapOffset)); in Generate()
1328 __ lbz(r3, FieldMemOperand(r3, Map::kInstanceTypeOffset)); in Generate()
1334 __ andi(r4, r3, Operand(kIsNotStringMask | kStringRepresentationMask | in Generate()
1337 __ beq(&seq_string, cr0); // Go to (4). in Generate()
1346 __ cmpi(r4, Operand(kExternalStringTag)); in Generate()
1347 __ bge(&not_seq_nor_cons); // Go to (5). in Generate()
1351 __ LoadP(r3, FieldMemOperand(subject, ConsString::kSecondOffset)); in Generate()
1352 __ CompareRoot(r3, Heap::kempty_stringRootIndex); in Generate()
1353 __ bne(&runtime); in Generate()
1354 __ LoadP(subject, FieldMemOperand(subject, ConsString::kFirstOffset)); in Generate()
1355 __ b(&check_underlying); in Generate()
1358 __ bind(&seq_string); in Generate()
1364 __ LoadP(r4, MemOperand(sp, kPreviousIndexOffset)); in Generate()
1365 __ JumpIfNotSmi(r4, &runtime); in Generate()
1366 __ LoadP(r6, FieldMemOperand(r6, String::kLengthOffset)); in Generate()
1367 __ cmpl(r6, r4); in Generate()
1368 __ ble(&runtime); in Generate()
1369 __ SmiUntag(r4); in Generate()
1374 __ ExtractBitMask(r6, r3, kStringEncodingMask, SetRC); in Generate()
1375 __ beq(&encoding_type_UC16, cr0); in Generate()
1376 __ LoadP(code, in Generate()
1378 __ b(&br_over); in Generate()
1379 __ bind(&encoding_type_UC16); in Generate()
1380 __ LoadP(code, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset)); in Generate()
1381 __ bind(&br_over); in Generate()
1388 __ JumpIfSmi(code, &runtime); in Generate()
1396 __ IncrementCounter(isolate()->counters()->regexp_entry_native(), 1, r3, r5); in Generate()
1401 __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters); in Generate()
1407 __ mov(r3, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
1408 __ StoreP(r3, MemOperand(sp, (kStackFrameExtraParamSlot + 1) * kPointerSize)); in Generate()
1414 __ li(r10, Operand(1)); in Generate()
1417 __ mov(r3, Operand(address_of_regexp_stack_memory_address)); in Generate()
1418 __ LoadP(r3, MemOperand(r3, 0)); in Generate()
1419 __ mov(r5, Operand(address_of_regexp_stack_memory_size)); in Generate()
1420 __ LoadP(r5, MemOperand(r5, 0)); in Generate()
1421 __ add(r9, r3, r5); in Generate()
1426 __ li(r8, Operand::Zero()); in Generate()
1429 __ mov( in Generate()
1435 __ addi(r18, subject, Operand(SeqString::kHeaderSize - kHeapObjectTag)); in Generate()
1436 __ xori(r6, r6, Operand(1)); in Generate()
1441 __ LoadP(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize)); in Generate()
1446 __ ShiftLeft_(r11, r11, r6); in Generate()
1447 __ add(r11, r18, r11); in Generate()
1448 __ ShiftLeft_(r5, r4, r6); in Generate()
1449 __ add(r5, r11, r5); in Generate()
1451 __ LoadP(r18, FieldMemOperand(subject, String::kLengthOffset)); in Generate()
1452 __ SmiUntag(r18); in Generate()
1453 __ ShiftLeft_(r6, r18, r6); in Generate()
1454 __ add(r6, r11, r6); in Generate()
1460 __ mr(r3, subject); in Generate()
1463 __ addi(code, code, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1468 __ LeaveExitFrame(false, no_reg, true); in Generate()
1476 __ cmpwi(r3, Operand(1)); in Generate()
1479 __ beq(&success); in Generate()
1481 __ cmpwi(r3, Operand(NativeRegExpMacroAssembler::FAILURE)); in Generate()
1482 __ beq(&failure); in Generate()
1483 __ cmpwi(r3, Operand(NativeRegExpMacroAssembler::EXCEPTION)); in Generate()
1485 __ bne(&runtime); in Generate()
1490 __ mov(r4, Operand(isolate()->factory()->the_hole_value())); in Generate()
1491 __ mov(r5, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in Generate()
1493 __ LoadP(r3, MemOperand(r5, 0)); in Generate()
1494 __ cmp(r3, r4); in Generate()
1495 __ beq(&runtime); in Generate()
1498 __ TailCallRuntime(Runtime::kRegExpExecReThrow); in Generate()
1500 __ bind(&failure); in Generate()
1502 __ mov(r3, Operand(isolate()->factory()->null_value())); in Generate()
1503 __ addi(sp, sp, Operand(4 * kPointerSize)); in Generate()
1504 __ Ret(); in Generate()
1507 __ bind(&success); in Generate()
1508 __ LoadP(r4, in Generate()
1513 __ SmiToShortArrayOffset(r4, r4); in Generate()
1514 __ addi(r4, r4, Operand(2)); in Generate()
1517 __ LoadP(last_match_info_elements, MemOperand(sp, kLastMatchInfoOffset)); in Generate()
1518 __ JumpIfSmi(last_match_info_elements, &runtime); in Generate()
1520 __ LoadP(r3, in Generate()
1522 __ CompareRoot(r3, Heap::kFixedArrayMapRootIndex); in Generate()
1523 __ bne(&runtime); in Generate()
1526 __ LoadP( in Generate()
1528 __ addi(r5, r4, Operand(RegExpMatchInfo::kLastMatchOverhead)); in Generate()
1529 __ SmiUntag(r0, r3); in Generate()
1530 __ cmp(r5, r0); in Generate()
1531 __ bgt(&runtime); in Generate()
1536 __ SmiTag(r5, r4); in Generate()
1537 __ StoreP(r5, FieldMemOperand(last_match_info_elements, in Generate()
1541 __ StoreP(subject, FieldMemOperand(last_match_info_elements, in Generate()
1544 __ mr(r5, subject); in Generate()
1545 __ RecordWriteField(last_match_info_elements, in Generate()
1548 __ mr(subject, r5); in Generate()
1549 __ StoreP(subject, FieldMemOperand(last_match_info_elements, in Generate()
1552 __ RecordWriteField(last_match_info_elements, in Generate()
1559 __ mov(r5, Operand(address_of_static_offsets_vector)); in Generate()
1566 __ addi(r3, last_match_info_elements, in Generate()
1569 __ addi(r5, r5, Operand(-kIntSize)); // bias down for lwzu in Generate()
1570 __ mtctr(r4); in Generate()
1571 __ bind(&next_capture); in Generate()
1573 __ lwzu(r6, MemOperand(r5, kIntSize)); in Generate()
1575 __ SmiTag(r6); in Generate()
1576 __ StorePU(r6, MemOperand(r3, kPointerSize)); in Generate()
1577 __ bdnz(&next_capture); in Generate()
1580 __ mr(r3, last_match_info_elements); in Generate()
1581 __ addi(sp, sp, Operand(4 * kPointerSize)); in Generate()
1582 __ Ret(); in Generate()
1585 __ bind(&runtime); in Generate()
1586 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
1590 __ bind(&not_seq_nor_cons); in Generate()
1592 __ bgt(&not_long_external); // Go to (7). in Generate()
1595 __ bind(&external_string); in Generate()
1596 __ LoadP(r3, FieldMemOperand(subject, HeapObject::kMapOffset)); in Generate()
1597 __ lbz(r3, FieldMemOperand(r3, Map::kInstanceTypeOffset)); in Generate()
1602 __ andi(r0, r3, Operand(kIsIndirectStringMask)); in Generate()
1603 __ Assert(eq, kExternalStringExpectedButNotFound, cr0); in Generate()
1605 __ LoadP(subject, in Generate()
1609 __ subi(subject, subject, in Generate()
1611 __ b(&seq_string); // Go to (4). in Generate()
1614 __ bind(&not_long_external); in Generate()
1616 __ andi(r0, r4, Operand(kIsNotStringMask | kShortExternalStringMask)); in Generate()
1617 __ bne(&runtime, cr0); in Generate()
1621 __ cmpi(r4, Operand(kThinStringTag)); in Generate()
1622 __ beq(&thin_string); in Generate()
1624 __ LoadP(r11, FieldMemOperand(subject, SlicedString::kOffsetOffset)); in Generate()
1625 __ SmiUntag(r11); in Generate()
1626 __ LoadP(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); in Generate()
1627 __ b(&check_underlying); // Go to (4). in Generate()
1629 __ bind(&thin_string); in Generate()
1630 __ LoadP(subject, FieldMemOperand(subject, ThinString::kActualOffset)); in Generate()
1631 __ b(&check_underlying); // Go to (4). in Generate()
1644 __ SmiTag(r3); in CallStubInRecordCallTarget()
1645 __ Push(r6, r5, r4, r3); in CallStubInRecordCallTarget()
1646 __ Push(cp); in CallStubInRecordCallTarget()
1648 __ CallStub(stub); in CallStubInRecordCallTarget()
1650 __ Pop(cp); in CallStubInRecordCallTarget()
1651 __ Pop(r6, r5, r4, r3); in CallStubInRecordCallTarget()
1652 __ SmiUntag(r3); in CallStubInRecordCallTarget()
1674 __ SmiToPtrArrayOffset(r8, r6); in GenerateRecordCallTarget()
1675 __ add(r8, r5, r8); in GenerateRecordCallTarget()
1676 __ LoadP(r8, FieldMemOperand(r8, FixedArray::kHeaderSize)); in GenerateRecordCallTarget()
1685 __ LoadP(weak_value, FieldMemOperand(r8, WeakCell::kValueOffset)); in GenerateRecordCallTarget()
1686 __ cmp(r4, weak_value); in GenerateRecordCallTarget()
1687 __ beq(&done); in GenerateRecordCallTarget()
1688 __ CompareRoot(r8, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1689 __ beq(&done); in GenerateRecordCallTarget()
1690 __ LoadP(feedback_map, FieldMemOperand(r8, HeapObject::kMapOffset)); in GenerateRecordCallTarget()
1691 __ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex); in GenerateRecordCallTarget()
1692 __ bne(&check_allocation_site); in GenerateRecordCallTarget()
1695 __ JumpIfSmi(weak_value, &initialize); in GenerateRecordCallTarget()
1696 __ b(&megamorphic); in GenerateRecordCallTarget()
1698 __ bind(&check_allocation_site); in GenerateRecordCallTarget()
1703 __ CompareRoot(feedback_map, Heap::kAllocationSiteMapRootIndex); in GenerateRecordCallTarget()
1704 __ bne(&miss); in GenerateRecordCallTarget()
1707 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8); in GenerateRecordCallTarget()
1708 __ cmp(r4, r8); in GenerateRecordCallTarget()
1709 __ bne(&megamorphic); in GenerateRecordCallTarget()
1710 __ b(&done); in GenerateRecordCallTarget()
1712 __ bind(&miss); in GenerateRecordCallTarget()
1716 __ CompareRoot(r8, Heap::kuninitialized_symbolRootIndex); in GenerateRecordCallTarget()
1717 __ beq(&initialize); in GenerateRecordCallTarget()
1720 __ bind(&megamorphic); in GenerateRecordCallTarget()
1721 __ SmiToPtrArrayOffset(r8, r6); in GenerateRecordCallTarget()
1722 __ add(r8, r5, r8); in GenerateRecordCallTarget()
1723 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1724 __ StoreP(ip, FieldMemOperand(r8, FixedArray::kHeaderSize), r0); in GenerateRecordCallTarget()
1725 __ jmp(&done); in GenerateRecordCallTarget()
1728 __ bind(&initialize); in GenerateRecordCallTarget()
1731 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r8); in GenerateRecordCallTarget()
1732 __ cmp(r4, r8); in GenerateRecordCallTarget()
1733 __ bne(&not_array_function); in GenerateRecordCallTarget()
1740 __ b(&done); in GenerateRecordCallTarget()
1742 __ bind(&not_array_function); in GenerateRecordCallTarget()
1747 __ bind(&done); in GenerateRecordCallTarget()
1750 __ SmiToPtrArrayOffset(r8, r6); in GenerateRecordCallTarget()
1751 __ add(r8, r5, r8); in GenerateRecordCallTarget()
1753 __ LoadP(r7, FieldMemOperand(r8, count_offset)); in GenerateRecordCallTarget()
1754 __ AddSmiLiteral(r7, r7, Smi::FromInt(1), r0); in GenerateRecordCallTarget()
1755 __ StoreP(r7, FieldMemOperand(r8, count_offset), r0); in GenerateRecordCallTarget()
1767 __ JumpIfSmi(r4, &non_function); in Generate()
1769 __ CompareObjectType(r4, r8, r8, JS_FUNCTION_TYPE); in Generate()
1770 __ bne(&non_function); in Generate()
1774 __ SmiToPtrArrayOffset(r8, r6); in Generate()
1775 __ add(r8, r5, r8); in Generate()
1777 __ LoadP(r5, FieldMemOperand(r8, FixedArray::kHeaderSize)); in Generate()
1778 __ LoadP(r8, FieldMemOperand(r5, AllocationSite::kMapOffset)); in Generate()
1779 __ CompareRoot(r8, Heap::kAllocationSiteMapRootIndex); in Generate()
1781 __ LoadRoot(r8, Heap::kUndefinedValueRootIndex); in Generate()
1782 __ isel(eq, r5, r5, r8); in Generate()
1785 __ beq(&feedback_register_initialized); in Generate()
1786 __ LoadRoot(r5, Heap::kUndefinedValueRootIndex); in Generate()
1787 __ bind(&feedback_register_initialized); in Generate()
1790 __ AssertUndefinedOrAllocationSite(r5, r8); in Generate()
1793 __ mr(r6, r4); in Generate()
1797 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset)); in Generate()
1798 __ LoadP(r7, FieldMemOperand(r7, SharedFunctionInfo::kConstructStubOffset)); in Generate()
1799 __ addi(ip, r7, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1800 __ JumpToJSEntry(ip); in Generate()
1802 __ bind(&non_function); in Generate()
1803 __ mr(r6, r4); in Generate()
1804 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate()
1812 __ JumpIfSmi(object_, receiver_not_string_); in GenerateFast()
1815 __ LoadP(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); in GenerateFast()
1816 __ lbz(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); in GenerateFast()
1818 __ andi(r0, result_, Operand(kIsNotStringMask)); in GenerateFast()
1819 __ bne(receiver_not_string_, cr0); in GenerateFast()
1823 __ JumpIfNotSmi(index_, &index_not_smi_); in GenerateFast()
1824 __ bind(&got_smi_index_); in GenerateFast()
1827 __ LoadP(ip, FieldMemOperand(object_, String::kLengthOffset)); in GenerateFast()
1828 __ cmpl(ip, index_); in GenerateFast()
1829 __ ble(index_out_of_range_); in GenerateFast()
1831 __ SmiUntag(index_); in GenerateFast()
1836 __ SmiTag(result_); in GenerateFast()
1837 __ bind(&exit_); in GenerateFast()
1844 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); in GenerateSlow()
1847 __ bind(&index_not_smi_); in GenerateSlow()
1849 __ CheckMap(index_, result_, Heap::kHeapNumberMapRootIndex, index_not_number_, in GenerateSlow()
1853 __ Push(LoadWithVectorDescriptor::VectorRegister(), in GenerateSlow()
1857 __ Push(object_, index_); in GenerateSlow()
1859 __ CallRuntime(Runtime::kNumberToSmi); in GenerateSlow()
1862 __ Move(index_, r3); in GenerateSlow()
1864 __ Pop(LoadWithVectorDescriptor::VectorRegister(), in GenerateSlow()
1867 __ pop(object_); in GenerateSlow()
1870 __ LoadP(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); in GenerateSlow()
1871 __ lbz(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); in GenerateSlow()
1874 __ JumpIfNotSmi(index_, index_out_of_range_); in GenerateSlow()
1876 __ b(&got_smi_index_); in GenerateSlow()
1881 __ bind(&call_runtime_); in GenerateSlow()
1883 __ SmiTag(index_); in GenerateSlow()
1884 __ Push(object_, index_); in GenerateSlow()
1885 __ CallRuntime(Runtime::kStringCharCodeAtRT); in GenerateSlow()
1886 __ Move(result_, r3); in GenerateSlow()
1888 __ b(&exit_); in GenerateSlow()
1890 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); in GenerateSlow()
1902 __ LoadP(length, FieldMemOperand(left, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
1903 __ LoadP(scratch2, FieldMemOperand(right, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
1904 __ cmp(length, scratch2); in GenerateFlatOneByteStringEquals()
1905 __ beq(&check_zero_length); in GenerateFlatOneByteStringEquals()
1906 __ bind(&strings_not_equal); in GenerateFlatOneByteStringEquals()
1907 __ LoadSmiLiteral(r3, Smi::FromInt(NOT_EQUAL)); in GenerateFlatOneByteStringEquals()
1908 __ Ret(); in GenerateFlatOneByteStringEquals()
1912 __ bind(&check_zero_length); in GenerateFlatOneByteStringEquals()
1914 __ cmpi(length, Operand::Zero()); in GenerateFlatOneByteStringEquals()
1915 __ bne(&compare_chars); in GenerateFlatOneByteStringEquals()
1916 __ LoadSmiLiteral(r3, Smi::FromInt(EQUAL)); in GenerateFlatOneByteStringEquals()
1917 __ Ret(); in GenerateFlatOneByteStringEquals()
1920 __ bind(&compare_chars); in GenerateFlatOneByteStringEquals()
1925 __ LoadSmiLiteral(r3, Smi::FromInt(EQUAL)); in GenerateFlatOneByteStringEquals()
1926 __ Ret(); in GenerateFlatOneByteStringEquals()
1935 __ LoadP(scratch1, FieldMemOperand(left, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
1936 __ LoadP(scratch2, FieldMemOperand(right, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
1937 __ sub(scratch3, scratch1, scratch2, LeaveOE, SetRC); in GenerateCompareFlatOneByteStrings()
1940 __ isel(gt, scratch1, scratch2, scratch1, cr0); in GenerateCompareFlatOneByteStrings()
1943 __ ble(&skip, cr0); in GenerateCompareFlatOneByteStrings()
1944 __ mr(scratch1, scratch2); in GenerateCompareFlatOneByteStrings()
1945 __ bind(&skip); in GenerateCompareFlatOneByteStrings()
1949 __ cmpi(min_length, Operand::Zero()); in GenerateCompareFlatOneByteStrings()
1950 __ beq(&compare_lengths); in GenerateCompareFlatOneByteStrings()
1957 __ bind(&compare_lengths); in GenerateCompareFlatOneByteStrings()
1960 __ mr(r3, length_delta); in GenerateCompareFlatOneByteStrings()
1961 __ cmpi(r3, Operand::Zero()); in GenerateCompareFlatOneByteStrings()
1962 __ bind(&result_not_equal); in GenerateCompareFlatOneByteStrings()
1966 __ LoadSmiLiteral(r4, Smi::FromInt(GREATER)); in GenerateCompareFlatOneByteStrings()
1967 __ LoadSmiLiteral(r5, Smi::FromInt(LESS)); in GenerateCompareFlatOneByteStrings()
1968 __ isel(eq, r3, r0, r4); in GenerateCompareFlatOneByteStrings()
1969 __ isel(lt, r3, r5, r3); in GenerateCompareFlatOneByteStrings()
1970 __ Ret(); in GenerateCompareFlatOneByteStrings()
1973 __ ble(&less_equal); in GenerateCompareFlatOneByteStrings()
1974 __ LoadSmiLiteral(r3, Smi::FromInt(GREATER)); in GenerateCompareFlatOneByteStrings()
1975 __ Ret(); in GenerateCompareFlatOneByteStrings()
1976 __ bind(&less_equal); in GenerateCompareFlatOneByteStrings()
1977 __ beq(&equal); in GenerateCompareFlatOneByteStrings()
1978 __ LoadSmiLiteral(r3, Smi::FromInt(LESS)); in GenerateCompareFlatOneByteStrings()
1979 __ bind(&equal); in GenerateCompareFlatOneByteStrings()
1980 __ Ret(); in GenerateCompareFlatOneByteStrings()
1991 __ SmiUntag(length); in GenerateOneByteCharsCompareLoop()
1992 __ addi(scratch1, length, in GenerateOneByteCharsCompareLoop()
1994 __ add(left, left, scratch1); in GenerateOneByteCharsCompareLoop()
1995 __ add(right, right, scratch1); in GenerateOneByteCharsCompareLoop()
1996 __ subfic(length, length, Operand::Zero()); in GenerateOneByteCharsCompareLoop()
2001 __ bind(&loop); in GenerateOneByteCharsCompareLoop()
2002 __ lbzx(scratch1, MemOperand(left, index)); in GenerateOneByteCharsCompareLoop()
2003 __ lbzx(r0, MemOperand(right, index)); in GenerateOneByteCharsCompareLoop()
2004 __ cmp(scratch1, r0); in GenerateOneByteCharsCompareLoop()
2005 __ bne(chars_not_equal); in GenerateOneByteCharsCompareLoop()
2006 __ addi(index, index, Operand(1)); in GenerateOneByteCharsCompareLoop()
2007 __ cmpi(index, Operand::Zero()); in GenerateOneByteCharsCompareLoop()
2008 __ bne(&loop); in GenerateOneByteCharsCompareLoop()
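
The loop above (lines 1991-2008) uses the negative-index idiom: both string pointers are biased past the last byte and the index starts at -length, so a single increment and compare-to-zero drives the loop. Equivalent C++, with raw byte pointers standing in for the heap string bodies (EqualBytes is an illustrative name):

    #include <cstddef>

    // Negative-index equality scan, as in GenerateOneByteCharsCompareLoop:
    // bias both pointers one past the end, walk index from -length to 0.
    bool EqualBytes(const unsigned char* left, const unsigned char* right,
                    size_t length) {
      left += length;                                   // add left, left, ...
      right += length;                                  // add right, right, ...
      std::ptrdiff_t index =
          -static_cast<std::ptrdiff_t>(length);         // subfic length, ..., 0
      while (index != 0) {
        if (left[index] != right[index]) return false;  // lbzx; cmp; bne
        ++index;                                        // addi index, index, 1
      }
      return true;
    }
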
2022 __ Move(r5, isolate()->factory()->undefined_value()); in Generate()
2026 __ TestIfSmi(r5, r0); in Generate()
2027 __ Assert(ne, kExpectedAllocationSite, cr0); in Generate()
2028 __ push(r5); in Generate()
2029 __ LoadP(r5, FieldMemOperand(r5, HeapObject::kMapOffset)); in Generate()
2030 __ LoadRoot(ip, Heap::kAllocationSiteMapRootIndex); in Generate()
2031 __ cmp(r5, ip); in Generate()
2032 __ pop(r5); in Generate()
2033 __ Assert(eq, kExpectedAllocationSite); in Generate()
2039 __ TailCallStub(&stub); in Generate()
2047 __ CheckMap(r4, r5, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); in GenerateBooleans()
2048 __ CheckMap(r3, r6, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); in GenerateBooleans()
2050 __ LoadP(r4, FieldMemOperand(r4, Oddball::kToNumberOffset)); in GenerateBooleans()
2051 __ AssertSmi(r4); in GenerateBooleans()
2052 __ LoadP(r3, FieldMemOperand(r3, Oddball::kToNumberOffset)); in GenerateBooleans()
2053 __ AssertSmi(r3); in GenerateBooleans()
2055 __ sub(r3, r4, r3); in GenerateBooleans()
2056 __ Ret(); in GenerateBooleans()
2058 __ bind(&miss); in GenerateBooleans()
2066 __ orx(r5, r4, r3); in GenerateSmis()
2067 __ JumpIfNotSmi(r5, &miss); in GenerateSmis()
2071 // __ sub(r3, r3, r4, SetCC); in GenerateSmis()
2072 __ sub(r3, r3, r4); in GenerateSmis()
2075 __ SmiUntag(r4); in GenerateSmis()
2076 __ SmiUntag(r3); in GenerateSmis()
2077 __ sub(r3, r4, r3); in GenerateSmis()
2079 __ Ret(); in GenerateSmis()
2081 __ bind(&miss); in GenerateSmis()
2095 __ JumpIfNotSmi(r4, &miss); in GenerateNumbers()
2098 __ JumpIfNotSmi(r3, &miss); in GenerateNumbers()
2105 __ JumpIfSmi(r3, &right_smi); in GenerateNumbers()
2106 __ CheckMap(r3, r5, Heap::kHeapNumberMapRootIndex, &maybe_undefined1, in GenerateNumbers()
2108 __ lfd(d1, FieldMemOperand(r3, HeapNumber::kValueOffset)); in GenerateNumbers()
2109 __ b(&left); in GenerateNumbers()
2110 __ bind(&right_smi); in GenerateNumbers()
2111 __ SmiToDouble(d1, r3); in GenerateNumbers()
2113 __ bind(&left); in GenerateNumbers()
2114 __ JumpIfSmi(r4, &left_smi); in GenerateNumbers()
2115 __ CheckMap(r4, r5, Heap::kHeapNumberMapRootIndex, &maybe_undefined2, in GenerateNumbers()
2117 __ lfd(d0, FieldMemOperand(r4, HeapNumber::kValueOffset)); in GenerateNumbers()
2118 __ b(&done); in GenerateNumbers()
2119 __ bind(&left_smi); in GenerateNumbers()
2120 __ SmiToDouble(d0, r4); in GenerateNumbers()
2122 __ bind(&done); in GenerateNumbers()
2125 __ fcmpu(d0, d1); in GenerateNumbers()
2128 __ bunordered(&unordered); in GenerateNumbers()
2133 __ li(r4, Operand(GREATER)); in GenerateNumbers()
2134 __ li(r5, Operand(LESS)); in GenerateNumbers()
2135 __ isel(eq, r3, r0, r4); in GenerateNumbers()
2136 __ isel(lt, r3, r5, r3); in GenerateNumbers()
2137 __ Ret(); in GenerateNumbers()
2139 __ beq(&equal); in GenerateNumbers()
2140 __ blt(&less_than); in GenerateNumbers()
2142 __ li(r3, Operand(GREATER)); in GenerateNumbers()
2143 __ Ret(); in GenerateNumbers()
2144 __ bind(&equal); in GenerateNumbers()
2145 __ li(r3, Operand(EQUAL)); in GenerateNumbers()
2146 __ Ret(); in GenerateNumbers()
2147 __ bind(&less_than); in GenerateNumbers()
2148 __ li(r3, Operand(LESS)); in GenerateNumbers()
2149 __ Ret(); in GenerateNumbers()
2152 __ bind(&unordered); in GenerateNumbers()
2153 __ bind(&generic_stub); in GenerateNumbers()
2156 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); in GenerateNumbers()
2158 __ bind(&maybe_undefined1); in GenerateNumbers()
2160 __ CompareRoot(r3, Heap::kUndefinedValueRootIndex); in GenerateNumbers()
2161 __ bne(&miss); in GenerateNumbers()
2162 __ JumpIfSmi(r4, &unordered); in GenerateNumbers()
2163 __ CompareObjectType(r4, r5, r5, HEAP_NUMBER_TYPE); in GenerateNumbers()
2164 __ bne(&maybe_undefined2); in GenerateNumbers()
2165 __ b(&unordered); in GenerateNumbers()
2168 __ bind(&maybe_undefined2); in GenerateNumbers()
2170 __ CompareRoot(r4, Heap::kUndefinedValueRootIndex); in GenerateNumbers()
2171 __ beq(&unordered); in GenerateNumbers()
2174 __ bind(&miss); in GenerateNumbers()
2190 __ JumpIfEitherSmi(left, right, &miss); in GenerateInternalizedStrings()
2193 __ LoadP(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2194 __ LoadP(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2195 __ lbz(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2196 __ lbz(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2198 __ orx(tmp1, tmp1, tmp2); in GenerateInternalizedStrings()
2199 __ andi(r0, tmp1, Operand(kIsNotStringMask | kIsNotInternalizedMask)); in GenerateInternalizedStrings()
2200 __ bne(&miss, cr0); in GenerateInternalizedStrings()
2203 __ cmp(left, right); in GenerateInternalizedStrings()
2204 __ bne(&not_equal); in GenerateInternalizedStrings()
2210 __ LoadSmiLiteral(r3, Smi::FromInt(EQUAL)); in GenerateInternalizedStrings()
2211 __ bind(&not_equal); in GenerateInternalizedStrings()
2212 __ Ret(); in GenerateInternalizedStrings()
2214 __ bind(&miss); in GenerateInternalizedStrings()
2231 __ JumpIfEitherSmi(left, right, &miss); in GenerateUniqueNames()
2235 __ LoadP(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateUniqueNames()
2236 __ LoadP(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateUniqueNames()
2237 __ lbz(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2238 __ lbz(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2240 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss); in GenerateUniqueNames()
2241 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss); in GenerateUniqueNames()
2244 __ cmp(left, right); in GenerateUniqueNames()
2245 __ bne(&miss); in GenerateUniqueNames()
2251 __ LoadSmiLiteral(r3, Smi::FromInt(EQUAL)); in GenerateUniqueNames()
2252 __ Ret(); in GenerateUniqueNames()
2254 __ bind(&miss); in GenerateUniqueNames()
2274 __ JumpIfEitherSmi(left, right, &miss); in GenerateStrings()
2278 __ LoadP(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateStrings()
2279 __ LoadP(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateStrings()
2280 __ lbz(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateStrings()
2281 __ lbz(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateStrings()
2283 __ orx(tmp3, tmp1, tmp2); in GenerateStrings()
2284 __ andi(r0, tmp3, Operand(kIsNotStringMask)); in GenerateStrings()
2285 __ bne(&miss, cr0); in GenerateStrings()
2288 __ cmp(left, right); in GenerateStrings()
2291 __ bne(&not_identical); in GenerateStrings()
2292 __ LoadSmiLiteral(r3, Smi::FromInt(EQUAL)); in GenerateStrings()
2293 __ Ret(); in GenerateStrings()
2294 __ bind(&not_identical); in GenerateStrings()
2304 __ orx(tmp3, tmp1, tmp2); in GenerateStrings()
2305 __ andi(r0, tmp3, Operand(kIsNotInternalizedMask)); in GenerateStrings()
2309 __ Ret(eq, cr0); in GenerateStrings()
2314 __ JumpIfBothInstanceTypesAreNotSequentialOneByte(tmp1, tmp2, tmp3, tmp4, in GenerateStrings()
2327 __ bind(&runtime); in GenerateStrings()
2331 __ Push(left, right); in GenerateStrings()
2332 __ CallRuntime(Runtime::kStringEqual); in GenerateStrings()
2334 __ LoadRoot(r4, Heap::kTrueValueRootIndex); in GenerateStrings()
2335 __ sub(r3, r3, r4); in GenerateStrings()
2336 __ Ret(); in GenerateStrings()
2338 __ Push(left, right); in GenerateStrings()
2339 __ TailCallRuntime(Runtime::kStringCompare); in GenerateStrings()
2342 __ bind(&miss); in GenerateStrings()
2350 __ and_(r5, r4, r3); in GenerateReceivers()
2351 __ JumpIfSmi(r5, &miss); in GenerateReceivers()
2354 __ CompareObjectType(r3, r5, r5, FIRST_JS_RECEIVER_TYPE); in GenerateReceivers()
2355 __ blt(&miss); in GenerateReceivers()
2356 __ CompareObjectType(r4, r5, r5, FIRST_JS_RECEIVER_TYPE); in GenerateReceivers()
2357 __ blt(&miss); in GenerateReceivers()
2360 __ sub(r3, r3, r4); in GenerateReceivers()
2361 __ Ret(); in GenerateReceivers()
2363 __ bind(&miss); in GenerateReceivers()
2371 __ and_(r5, r4, r3); in GenerateKnownReceivers()
2372 __ JumpIfSmi(r5, &miss); in GenerateKnownReceivers()
2373 __ GetWeakValue(r7, cell); in GenerateKnownReceivers()
2374 __ LoadP(r5, FieldMemOperand(r3, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2375 __ LoadP(r6, FieldMemOperand(r4, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2376 __ cmp(r5, r7); in GenerateKnownReceivers()
2377 __ bne(&miss); in GenerateKnownReceivers()
2378 __ cmp(r6, r7); in GenerateKnownReceivers()
2379 __ bne(&miss); in GenerateKnownReceivers()
2382 __ sub(r3, r3, r4); in GenerateKnownReceivers()
2383 __ Ret(); in GenerateKnownReceivers()
2386 __ LoadSmiLiteral(r5, Smi::FromInt(GREATER)); in GenerateKnownReceivers()
2388 __ LoadSmiLiteral(r5, Smi::FromInt(LESS)); in GenerateKnownReceivers()
2390 __ Push(r4, r3, r5); in GenerateKnownReceivers()
2391 __ TailCallRuntime(Runtime::kCompare); in GenerateKnownReceivers()
2394 __ bind(&miss); in GenerateKnownReceivers()
2403 __ Push(r4, r3); in GenerateMiss()
2404 __ Push(r4, r3); in GenerateMiss()
2405 __ LoadSmiLiteral(r0, Smi::FromInt(op())); in GenerateMiss()
2406 __ push(r0); in GenerateMiss()
2407 __ CallRuntime(Runtime::kCompareIC_Miss); in GenerateMiss()
2409 __ addi(r5, r3, Operand(Code::kHeaderSize - kHeapObjectTag)); in GenerateMiss()
2411 __ Pop(r4, r3); in GenerateMiss()
2414 __ JumpToJSEntry(r5); in GenerateMiss()
2422 __ mflr(r0); in Generate()
2423 __ StoreP(r0, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize)); in Generate()
2424 __ Call(ip); // Call the C++ function. in Generate()
2425 __ LoadP(r0, MemOperand(sp, kStackFrameExtraParamSlot * kPointerSize)); in Generate()
2426 __ mtlr(r0); in Generate()
2427 __ blr(); in Generate()
2434 __ LoadP(ToRegister(ABI_TOC_REGISTER), MemOperand(target, kPointerSize)); in GenerateCall()
2435 __ LoadP(ip, MemOperand(target, 0)); // Instruction address in GenerateCall()
2439 __ Move(ip, target); in GenerateCall()
2443 __ mov(r0, Operand(code, RelocInfo::CODE_TARGET)); in GenerateCall()
2444 __ Call(r0); // Call the stub. in GenerateCall()
2462 __ LoadP(index, FieldMemOperand(properties, kCapacityOffset)); in GenerateNegativeLookup()
2463 __ subi(index, index, Operand(1)); in GenerateNegativeLookup()
2464 __ LoadSmiLiteral( in GenerateNegativeLookup()
2466 __ and_(index, index, ip); in GenerateNegativeLookup()
2470 __ ShiftLeftImm(ip, index, Operand(1)); in GenerateNegativeLookup()
2471 __ add(index, index, ip); // index *= 3. in GenerateNegativeLookup()
2476 __ SmiToPtrArrayOffset(ip, index); in GenerateNegativeLookup()
2477 __ add(tmp, properties, ip); in GenerateNegativeLookup()
2478 __ LoadP(entity_name, FieldMemOperand(tmp, kElementsStartOffset)); in GenerateNegativeLookup()
2481 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex); in GenerateNegativeLookup()
2482 __ cmp(entity_name, tmp); in GenerateNegativeLookup()
2483 __ beq(done); in GenerateNegativeLookup()
2486 __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex); in GenerateNegativeLookup()
2489 __ Cmpi(entity_name, Operand(Handle<Name>(name)), r0); in GenerateNegativeLookup()
2490 __ beq(miss); in GenerateNegativeLookup()
2493 __ cmp(entity_name, tmp); in GenerateNegativeLookup()
2494 __ beq(&good); in GenerateNegativeLookup()
2497 __ LoadP(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset)); in GenerateNegativeLookup()
2498 __ lbz(entity_name, FieldMemOperand(entity_name, Map::kInstanceTypeOffset)); in GenerateNegativeLookup()
2499 __ JumpIfNotUniqueNameInstanceType(entity_name, miss); in GenerateNegativeLookup()
2500 __ bind(&good); in GenerateNegativeLookup()
2503 __ LoadP(properties, in GenerateNegativeLookup()
2510 __ mflr(r0); in GenerateNegativeLookup()
2511 __ MultiPush(spill_mask); in GenerateNegativeLookup()
2513 __ LoadP(r3, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); in GenerateNegativeLookup()
2514 __ mov(r4, Operand(Handle<Name>(name))); in GenerateNegativeLookup()
2516 __ CallStub(&stub); in GenerateNegativeLookup()
2517 __ cmpi(r3, Operand::Zero()); in GenerateNegativeLookup()
2519 __ MultiPop(spill_mask); // MultiPop does not touch condition flags in GenerateNegativeLookup()
2520 __ mtlr(r0); in GenerateNegativeLookup()
2522 __ beq(done); in GenerateNegativeLookup()
2523 __ bne(miss); in GenerateNegativeLookup()
2550 __ LoadP(mask, FieldMemOperand(dictionary, kCapacityOffset)); in Generate()
2551 __ SmiUntag(mask); in Generate()
2552 __ subi(mask, mask, Operand(1)); in Generate()
2554 __ lwz(hash, FieldMemOperand(key, Name::kHashFieldOffset)); in Generate()
2556 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex); in Generate()
2567 __ addi(index, hash, in Generate()
2570 __ mr(index, hash); in Generate()
2572 __ srwi(r0, index, Operand(Name::kHashShift)); in Generate()
2573 __ and_(index, mask, r0); in Generate()
2577 __ ShiftLeftImm(scratch, index, Operand(1)); in Generate()
2578 __ add(index, index, scratch); // index *= 3. in Generate()
2580 __ ShiftLeftImm(scratch, index, Operand(kPointerSizeLog2)); in Generate()
2581 __ add(index, dictionary, scratch); in Generate()
2582 __ LoadP(entry_key, FieldMemOperand(index, kElementsStartOffset)); in Generate()
2585 __ cmp(entry_key, undefined); in Generate()
2586 __ beq(&not_in_dictionary); in Generate()
2589 __ cmp(entry_key, key); in Generate()
2590 __ beq(&in_dictionary); in Generate()
2594 __ LoadP(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset)); in Generate()
2595 __ lbz(entry_key, FieldMemOperand(entry_key, Map::kInstanceTypeOffset)); in Generate()
2596 __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary); in Generate()
2600 __ bind(&maybe_in_dictionary); in Generate()
2605 __ li(result, Operand::Zero()); in Generate()
2606 __ Ret(); in Generate()
2609 __ bind(&in_dictionary); in Generate()
2610 __ li(result, Operand(1)); in Generate()
2611 __ Ret(); in Generate()
2613 __ bind(&not_in_dictionary); in Generate()
2614 __ li(result, Operand::Zero()); in Generate()
2615 __ Ret(); in Generate()
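// The dictionary probe loop above returns 1 in the result register when
// the key is found and 0 when it is provably absent; an inconclusive
// probe (&maybe_in_dictionary) returns 0 for positive lookups and appears
// to fall through to the "found" answer for negative ones, failing safe
// in both directions. A minimal, self-contained C++ sketch of the probe
// arithmetic, with assumed constants (Name::kHashShift == 2, three slots
// per entry) and hypothetical helper names:
#include <cstdint>

constexpr uint32_t kAssumedHashShift = 2;  // stand-in for Name::kHashShift

// mask == capacity - 1, capacity being a power of two.
inline uint32_t ProbeElementIndex(uint32_t hash_field, uint32_t probe_offset,
                                  uint32_t mask) {
  // The probe offset is pre-shifted so a single shift-and-mask extracts
  // the bucket, mirroring the addi/srwi/and_ sequence above.
  uint32_t bucket =
      ((hash_field + (probe_offset << kAssumedHashShift)) >>
       kAssumedHashShift) & mask;
  return bucket * 3;  // each entry spans three slots: key, value, details
}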
2644 __ crclr(Assembler::encode_crbit(cr2, CR_LT)); in Generate()
2645 __ blt(&skip_to_incremental_noncompacting, cr2); in Generate()
2646 __ blt(&skip_to_incremental_compacting, cr2); in Generate()
2649 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in Generate()
2652 __ Ret(); in Generate()
2654 __ bind(&skip_to_incremental_noncompacting); in Generate()
2657 __ bind(&skip_to_incremental_compacting); in Generate()
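// Patchable dispatch: with cr2.LT cleared by crclr, both blt instructions
// fall through and the stub merely updates the remembered set (or just
// returns). When incremental marking starts, the runtime presumably
// patches these first instructions so control reaches the non-compacting
// or compacting incremental path bound below.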
2672 __ LoadP(regs_.scratch0(), MemOperand(regs_.address(), 0)); in GenerateIncremental()
2673 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value. in GenerateIncremental()
2676 __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(), in GenerateIncremental()
2685 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in GenerateIncremental()
2688 __ bind(&dont_need_remembered_set); in GenerateIncremental()
2695 __ Ret(); in GenerateIncremental()
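// Remembered-set filter: a value that is not in new space, or an object
// that itself is, needs no remembered-set entry, so control skips to
// &dont_need_remembered_set and only the incremental-marking check runs
// before the final Ret.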
2702 __ PrepareCallCFunction(argument_count, regs_.scratch0()); in InformIncrementalMarker()
2707 __ mr(address, regs_.address()); in InformIncrementalMarker()
2708 __ mr(r3, regs_.object()); in InformIncrementalMarker()
2709 __ mr(r4, address); in InformIncrementalMarker()
2710 __ mov(r5, Operand(ExternalReference::isolate_address(isolate()))); in InformIncrementalMarker()
2713 __ CallCFunction( in InformIncrementalMarker()
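// The C call above appears to use the (object, slot_address, isolate)
// convention in r3..r5; the slot address is staged through a spare
// register first in case regs_.address() aliases r3.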
2729 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black); in CheckNeedsToInformIncrementalMarker()
2733 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in CheckNeedsToInformIncrementalMarker()
2736 __ Ret(); in CheckNeedsToInformIncrementalMarker()
2739 __ bind(&on_black); in CheckNeedsToInformIncrementalMarker()
2742 __ LoadP(regs_.scratch0(), MemOperand(regs_.address(), 0)); in CheckNeedsToInformIncrementalMarker()
2747 __ CheckPageFlag(regs_.scratch0(), // Contains value. in CheckNeedsToInformIncrementalMarker()
2752 __ CheckPageFlag(regs_.object(), in CheckNeedsToInformIncrementalMarker()
2757 __ bind(&ensure_not_white); in CheckNeedsToInformIncrementalMarker()
2762 __ Push(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
2763 __ JumpIfWhite(regs_.scratch0(), // The value. in CheckNeedsToInformIncrementalMarker()
2768 __ Pop(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
2772 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in CheckNeedsToInformIncrementalMarker()
2775 __ Ret(); in CheckNeedsToInformIncrementalMarker()
2778 __ bind(&need_incremental_pop_scratch); in CheckNeedsToInformIncrementalMarker()
2779 __ Pop(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
2781 __ bind(&need_incremental); in CheckNeedsToInformIncrementalMarker()
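// Color dispatch: a non-black object needs no marker notification, so the
// stub updates the remembered set (or returns) right away. For black
// objects, page flags on the value and on the object decide whether
// marking can be skipped; otherwise the object and address registers are
// spilled, and a value that is still white pops them and falls through to
// &need_incremental, which hands off to the incremental marker.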
2789 __ Call(ces.GetCode(), RelocInfo::CODE_TARGET); in Generate()
2792 __ LoadP(r4, MemOperand(fp, parameter_count_offset)); in Generate()
2794 __ addi(r4, r4, Operand(1)); in Generate()
2797 __ slwi(r4, r4, Operand(kPointerSizeLog2)); in Generate()
2798 __ add(sp, sp, r4); in Generate()
2799 __ Ret(); in Generate()
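// Stub-failure epilogue: after the CEntry call returns, the argument
// count is reloaded from the trampoline frame, bumped by one for the
// receiver (the addi is presumably guarded by JS-function stub mode),
// scaled to bytes, and added to sp to pop the arguments before returning.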
2811 __ mflr(r0); in MaybeCallEntryHook()
2812 __ Push(r0, ip); in MaybeCallEntryHook()
2813 __ CallStub(&stub); in MaybeCallEntryHook()
2814 __ Pop(r0, ip); in MaybeCallEntryHook()
2815 __ mtlr(r0); in MaybeCallEntryHook()
2833 __ mflr(ip); in Generate()
2834 __ MultiPush(kSavedRegs | ip.bit()); in Generate()
2837 __ subi(r3, ip, Operand(kReturnAddressDistanceFromFunctionStart)); in Generate()
2841 __ addi(r4, sp, Operand((kNumSavedRegs + 1) * kPointerSize)); in Generate()
2846 __ mr(r15, sp); in Generate()
2848 __ ClearRightImm(sp, sp, Operand(WhichPowerOf2(frame_alignment))); in Generate()
2862 __ mov(r5, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
2865 __ mov(ip, Operand(entry_hook)); in Generate()
2868 __ LoadP(ToRegister(ABI_TOC_REGISTER), MemOperand(ip, kPointerSize)); in Generate()
2869 __ LoadP(ip, MemOperand(ip, 0)); in Generate()
2874 __ li(r0, Operand::Zero()); in Generate()
2875 __ StorePU(r0, MemOperand(sp, -kNumRequiredStackFrameSlots * kPointerSize)); in Generate()
2877 __ Call(ip); in Generate()
2879 __ addi(sp, sp, Operand(kNumRequiredStackFrameSlots * kPointerSize)); in Generate()
2883 __ mr(sp, r15); in Generate()
2887 __ MultiPop(kSavedRegs | ip.bit()); in Generate()
2888 __ mtlr(ip); in Generate()
2889 __ Ret(); in Generate()
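// The profiler entry hook is called with (function_address,
// return_address[, isolate]): r3 backs up a fixed distance from the saved
// lr to the function start, r4 points at the caller's return-address slot
// above the spilled registers, and sp is realigned (its old value parked
// in r15) before a minimal PPC frame is carved out with StorePU for the
// C call and released again afterwards.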
2898 __ TailCallStub(&stub); in CreateArrayDispatch()
2904 __ Cmpi(r6, Operand(kind), r0); in CreateArrayDispatch()
2906 __ TailCallStub(&stub, eq); in CreateArrayDispatch()
2910 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatch()
2934 __ andi(r0, r6, Operand(1)); in CreateArrayDispatchOneArgument()
2935 __ bne(&normal_sequence, cr0); in CreateArrayDispatchOneArgument()
2939 __ LoadP(r8, MemOperand(sp, 0)); in CreateArrayDispatchOneArgument()
2940 __ cmpi(r8, Operand::Zero()); in CreateArrayDispatchOneArgument()
2941 __ beq(&normal_sequence); in CreateArrayDispatchOneArgument()
2949 __ TailCallStub(&stub_holey); in CreateArrayDispatchOneArgument()
2951 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
2954 __ TailCallStub(&stub); in CreateArrayDispatchOneArgument()
2958 __ addi(r6, r6, Operand(1)); in CreateArrayDispatchOneArgument()
2961 __ LoadP(r8, FieldMemOperand(r5, 0)); in CreateArrayDispatchOneArgument()
2962 __ CompareRoot(r8, Heap::kAllocationSiteMapRootIndex); in CreateArrayDispatchOneArgument()
2963 __ Assert(eq, kExpectedAllocationSite); in CreateArrayDispatchOneArgument()
2970 __ LoadP(r7, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset)); in CreateArrayDispatchOneArgument()
2971 __ AddSmiLiteral(r7, r7, Smi::FromInt(kFastElementsKindPackedToHoley), r0); in CreateArrayDispatchOneArgument()
2972 __ StoreP(r7, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset), in CreateArrayDispatchOneArgument()
2975 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
2980 __ mov(r0, Operand(kind)); in CreateArrayDispatchOneArgument()
2981 __ cmp(r6, r0); in CreateArrayDispatchOneArgument()
2983 __ TailCallStub(&stub, eq); in CreateArrayDispatchOneArgument()
2987 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatchOneArgument()
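// One-argument array dispatch: a kind whose low bit is set is already
// holey and proceeds to &normal_sequence, as does a zero-length argument.
// Otherwise the kind is bumped to its holey variant and the transition is
// recorded in the AllocationSite's transition_info as a Smi delta
// (kFastElementsKindPackedToHoley), leaving the field's upper bits
// intact, before the matching single-argument stub is tail-called.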
3029 __ cmpi(r3, Operand::Zero()); in GenerateDispatchToArrayStub()
3030 __ bne(&not_zero_case); in GenerateDispatchToArrayStub()
3033 __ bind(&not_zero_case); in GenerateDispatchToArrayStub()
3034 __ cmpi(r3, Operand(1)); in GenerateDispatchToArrayStub()
3035 __ bgt(&not_one_case); in GenerateDispatchToArrayStub()
3038 __ bind(&not_one_case); in GenerateDispatchToArrayStub()
3040 __ TailCallStub(&stub); in GenerateDispatchToArrayStub()
3059 __ LoadP(r7, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3061 __ TestIfSmi(r7, r0); in Generate()
3062 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0); in Generate()
3063 __ CompareObjectType(r7, r7, r8, MAP_TYPE); in Generate()
3064 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); in Generate()
3067 __ AssertUndefinedOrAllocationSite(r5, r7); in Generate()
3071 __ LoadP(cp, FieldMemOperand(r4, JSFunction::kContextOffset)); in Generate()
3074 __ cmp(r6, r4); in Generate()
3075 __ bne(&subclassing); in Generate()
3079 __ CompareRoot(r5, Heap::kUndefinedValueRootIndex); in Generate()
3080 __ beq(&no_info); in Generate()
3082 __ LoadP(r6, FieldMemOperand(r5, AllocationSite::kTransitionInfoOffset)); in Generate()
3083 __ SmiUntag(r6); in Generate()
3085 __ And(r6, r6, Operand(AllocationSite::ElementsKindBits::kMask)); in Generate()
3088 __ bind(&no_info); in Generate()
3091 __ bind(&subclassing); in Generate()
3092 __ ShiftLeftImm(r0, r3, Operand(kPointerSizeLog2)); in Generate()
3093 __ StorePX(r4, MemOperand(sp, r0)); in Generate()
3094 __ addi(r3, r3, Operand(3)); in Generate()
3095 __ Push(r6, r5); in Generate()
3096 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate())); in Generate()
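// Array constructor entry: debug code asserts a sane initial map and that
// r5 holds undefined or an AllocationSite; when a site is present, the
// elements kind is decoded from its transition_info. The &subclassing
// path (new.target differs from the constructor) stores the function into
// the receiver slot, pushes new.target and the site, bumps the argument
// count by 3, and defers to Runtime::kNewArray.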
3102 __ cmpli(r3, Operand(1)); in GenerateCase()
3105 __ TailCallStub(&stub0, lt); in GenerateCase()
3108 __ TailCallStub(&stubN, gt); in GenerateCase()
3113 __ LoadP(r6, MemOperand(sp, 0)); in GenerateCase()
3114 __ cmpi(r6, Operand::Zero()); in GenerateCase()
3118 __ TailCallStub(&stub1_holey, ne); in GenerateCase()
3122 __ TailCallStub(&stub1); in GenerateCase()
3139 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3141 __ TestIfSmi(r6, r0); in Generate()
3142 __ Assert(ne, kUnexpectedInitialMapForArrayFunction, cr0); in Generate()
3143 __ CompareObjectType(r6, r6, r7, MAP_TYPE); in Generate()
3144 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); in Generate()
3148 __ LoadP(r6, FieldMemOperand(r4, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
3150 __ lbz(r6, FieldMemOperand(r6, Map::kBitField2Offset)); in Generate()
3152 __ DecodeField<Map::ElementsKindBits>(r6); in Generate()
3156 __ cmpi(r6, Operand(FAST_ELEMENTS)); in Generate()
3157 __ beq(&done); in Generate()
3158 __ cmpi(r6, Operand(FAST_HOLEY_ELEMENTS)); in Generate()
3159 __ Assert(eq, kInvalidElementsKindForInternalArrayOrInternalPackedArray); in Generate()
3160 __ bind(&done); in Generate()
3164 __ cmpi(r6, Operand(FAST_ELEMENTS)); in Generate()
3165 __ beq(&fast_elements_case); in Generate()
3168 __ bind(&fast_elements_case); in Generate()
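// InternalArray dispatch: the elements kind is decoded from the initial
// map's bit field 2 and, under debug code, asserted to be FAST_ELEMENTS
// or FAST_HOLEY_ELEMENTS; the packed case branches off to its own stub
// set and the holey case falls through to the other.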
3201 __ mov(scratch, Operand(ExternalReference::is_profiling_address(isolate))); in CallApiFunctionAndReturn()
3202 __ lbz(scratch, MemOperand(scratch, 0)); in CallApiFunctionAndReturn()
3203 __ cmpi(scratch, Operand::Zero()); in CallApiFunctionAndReturn()
3206 __ mov(scratch, Operand(thunk_ref)); in CallApiFunctionAndReturn()
3207 __ isel(eq, scratch, function_address, scratch); in CallApiFunctionAndReturn()
3211 __ beq(&profiler_disabled); in CallApiFunctionAndReturn()
3212 __ mov(scratch, Operand(thunk_ref)); in CallApiFunctionAndReturn()
3213 __ b(&end_profiler_check); in CallApiFunctionAndReturn()
3214 __ bind(&profiler_disabled); in CallApiFunctionAndReturn()
3215 __ mr(scratch, function_address); in CallApiFunctionAndReturn()
3216 __ bind(&end_profiler_check); in CallApiFunctionAndReturn()
3224 __ mov(r17, Operand(next_address)); in CallApiFunctionAndReturn()
3225 __ LoadP(r14, MemOperand(r17, kNextOffset)); in CallApiFunctionAndReturn()
3226 __ LoadP(r15, MemOperand(r17, kLimitOffset)); in CallApiFunctionAndReturn()
3227 __ lwz(r16, MemOperand(r17, kLevelOffset)); in CallApiFunctionAndReturn()
3228 __ addi(r16, r16, Operand(1)); in CallApiFunctionAndReturn()
3229 __ stw(r16, MemOperand(r17, kLevelOffset)); in CallApiFunctionAndReturn()
3233 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
3234 __ PrepareCallCFunction(1, r3); in CallApiFunctionAndReturn()
3235 __ mov(r3, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
3236 __ CallCFunction(ExternalReference::log_enter_external_function(isolate), in CallApiFunctionAndReturn()
3238 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
3249 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
3250 __ PrepareCallCFunction(1, r3); in CallApiFunctionAndReturn()
3251 __ mov(r3, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
3252 __ CallCFunction(ExternalReference::log_leave_external_function(isolate), in CallApiFunctionAndReturn()
3254 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
3263 __ LoadP(r3, return_value_operand); in CallApiFunctionAndReturn()
3264 __ bind(&return_value_loaded); in CallApiFunctionAndReturn()
3267 __ StoreP(r14, MemOperand(r17, kNextOffset)); in CallApiFunctionAndReturn()
3268 if (__ emit_debug_code()) { in CallApiFunctionAndReturn()
3269 __ lwz(r4, MemOperand(r17, kLevelOffset)); in CallApiFunctionAndReturn()
3270 __ cmp(r4, r16); in CallApiFunctionAndReturn()
3271 __ Check(eq, kUnexpectedLevelAfterReturnFromApiCall); in CallApiFunctionAndReturn()
3273 __ subi(r16, r16, Operand(1)); in CallApiFunctionAndReturn()
3274 __ stw(r16, MemOperand(r17, kLevelOffset)); in CallApiFunctionAndReturn()
3275 __ LoadP(r0, MemOperand(r17, kLimitOffset)); in CallApiFunctionAndReturn()
3276 __ cmp(r15, r0); in CallApiFunctionAndReturn()
3277 __ bne(&delete_allocated_handles); in CallApiFunctionAndReturn()
3280 __ bind(&leave_exit_frame); in CallApiFunctionAndReturn()
3283 __ LoadP(cp, *context_restore_operand); in CallApiFunctionAndReturn()
3287 __ lwz(r14, *stack_space_operand); in CallApiFunctionAndReturn()
3289 __ mov(r14, Operand(stack_space)); in CallApiFunctionAndReturn()
3291 __ LeaveExitFrame(false, r14, !restore_context, stack_space_operand != NULL); in CallApiFunctionAndReturn()
3294 __ LoadRoot(r14, Heap::kTheHoleValueRootIndex); in CallApiFunctionAndReturn()
3295 __ mov(r15, Operand(ExternalReference::scheduled_exception_address(isolate))); in CallApiFunctionAndReturn()
3296 __ LoadP(r15, MemOperand(r15)); in CallApiFunctionAndReturn()
3297 __ cmp(r14, r15); in CallApiFunctionAndReturn()
3298 __ bne(&promote_scheduled_exception); in CallApiFunctionAndReturn()
3300 __ blr(); in CallApiFunctionAndReturn()
3303 __ bind(&promote_scheduled_exception); in CallApiFunctionAndReturn()
3304 __ TailCallRuntime(Runtime::kPromoteScheduledException); in CallApiFunctionAndReturn()
3307 __ bind(&delete_allocated_handles); in CallApiFunctionAndReturn()
3308 __ StoreP(r15, MemOperand(r17, kLimitOffset)); in CallApiFunctionAndReturn()
3309 __ mr(r14, r3); in CallApiFunctionAndReturn()
3310 __ PrepareCallCFunction(1, r15); in CallApiFunctionAndReturn()
3311 __ mov(r3, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
3312 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate), in CallApiFunctionAndReturn()
3314 __ mr(r3, r14); in CallApiFunctionAndReturn()
3315 __ b(&leave_exit_frame); in CallApiFunctionAndReturn()
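// CallApiFunctionAndReturn brackets a direct API call with hand-rolled
// HandleScope bookkeeping: r14/r15/r16 capture next, limit, and level
// before the call, and the epilogue restores them, deleting handle-scope
// extensions if the limit moved and promoting any scheduled exception. A
// rough C++ analogue of that epilogue, with assumed field names (a
// sketch, not the real HandleScopeData):
struct HandleScopeData {
  void** next;
  void** limit;
  int level;
};

// void Epilogue(HandleScopeData* data, void** saved_next,
//               void** saved_limit, Isolate* isolate) {
//   data->next = saved_next;          // StoreP(r14, kNextOffset)
//   data->level--;                    // subi/stw on kLevelOffset
//   if (data->limit != saved_limit) { // cmp r15 / bne
//     data->limit = saved_limit;      // &delete_allocated_handles
//     DeleteHandleScopeExtensions(isolate);  // result preserved in r14
//   }
//   if (*scheduled_exception != the_hole) PromoteScheduledException();
// }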
3351 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate()
3354 __ push(context); in Generate()
3357 __ LoadP(context, FieldMemOperand(callee, JSFunction::kContextOffset)); in Generate()
3361 __ push(callee); in Generate()
3364 __ push(call_data); in Generate()
3368 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); in Generate()
3371 __ push(scratch); in Generate()
3373 __ push(scratch); in Generate()
3375 __ mov(scratch, Operand(ExternalReference::isolate_address(masm->isolate()))); in Generate()
3376 __ push(scratch); in Generate()
3378 __ push(holder); in Generate()
3381 __ mr(scratch, sp); in Generate()
3395 __ EnterExitFrame(false, kApiStackSpace); in Generate()
3400 __ addi(r3, sp, Operand(kFunctionCallbackInfoOffset)); in Generate()
3402 __ StoreP(scratch, MemOperand(r3, 0 * kPointerSize)); in Generate()
3404 __ addi(ip, scratch, Operand((FCA::kArgsLength - 1 + argc()) * kPointerSize)); in Generate()
3405 __ StoreP(ip, MemOperand(r3, 1 * kPointerSize)); in Generate()
3407 __ li(ip, Operand(argc())); in Generate()
3408 __ stw(ip, MemOperand(r3, 2 * kPointerSize)); in Generate()
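// The FunctionCallbackInfo handed to the API function lives in the exit
// frame: slot 0 gets implicit_args (the FCA block built on the stack
// above, its base captured in scratch), slot 1 the address of the first
// argument (FCA::kArgsLength - 1 + argc slots past the base), and slot 2
// the argument count; r3 then acts as the FunctionCallbackInfo reference.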
3459 __ push(receiver); in Generate()
3461 __ LoadP(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset)); in Generate()
3462 __ push(scratch); in Generate()
3463 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); in Generate()
3464 __ Push(scratch, scratch); in Generate()
3465 __ mov(scratch, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
3466 __ Push(scratch, holder); in Generate()
3467 __ Push(Smi::kZero); // should_throw_on_error -> false in Generate()
3468 __ LoadP(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset)); in Generate()
3469 __ push(scratch); in Generate()
3475 __ mr(r3, sp); // r3 = Handle<Name> in Generate()
3476 __ addi(r4, r3, Operand(1 * kPointerSize)); // r4 = v8::PropertyCallbackInfo::args_ in Generate()
3500 __ EnterExitFrame(false, apiStackSpace); in Generate()
3504 __ StoreP(r3, MemOperand(sp, arg0Slot * kPointerSize)); in Generate()
3505 __ addi(r3, sp, Operand(arg0Slot * kPointerSize)); in Generate()
3510 __ StoreP(r4, MemOperand(sp, accessorInfoSlot * kPointerSize)); in Generate()
3511 __ addi(r4, sp, Operand(accessorInfoSlot * kPointerSize)); in Generate()
3517 __ LoadP(scratch, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset)); in Generate()
3518 __ LoadP(api_function_address, in Generate()
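// The getter stub lays the stack out to match PropertyCallbackArguments:
// receiver, data, two undefineds (return value and its default), isolate,
// holder, should_throw_on_error (false), and finally the name, so that sp
// doubles as a Handle<Name> and sp + kPointerSize as the args_ array. On
// ABIs that pass these indirectly, both are spilled into exit-frame slots
// (arg0Slot / accessorInfoSlot) before the getter's address is loaded
// from AccessorInfo::kJsGetterOffset.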
3528 #undef __