
Lines matching full:__

Each entry below shows the source line number, the matched statement, and the enclosing function.

25 #define __ ACCESS_MASM(masm)  macro
28 __ lsl(r5, r0, Operand(kPointerSizeLog2)); in Generate()
29 __ str(r1, MemOperand(sp, r5)); in Generate()
30 __ Push(r1); in Generate()
31 __ Push(r2); in Generate()
32 __ add(r0, r0, Operand(3)); in Generate()
33 __ TailCallRuntime(Runtime::kNewArray); in Generate()
63 __ push(descriptor.GetRegisterParameter(i)); in GenerateLightweightMiss()
65 __ CallExternalReference(miss, param_count); in GenerateLightweightMiss()
68 __ Ret(); in GenerateLightweightMiss()
89 __ Push(scratch_high, scratch_low, scratch); in Generate()
93 __ vldr(double_scratch, MemOperand(input_reg, double_offset)); in Generate()
94 __ vmov(scratch_low, scratch_high, double_scratch); in Generate()
97 __ vcvt_s32_f64(double_scratch.low(), double_scratch); in Generate()
98 __ vmov(result_reg, double_scratch.low()); in Generate()
101 __ sub(scratch, result_reg, Operand(1)); in Generate()
102 __ cmp(scratch, Operand(0x7ffffffe)); in Generate()
103 __ b(lt, &done); in Generate()
108 __ ldm(ia, input_reg, scratch_low.bit() | scratch_high.bit()); in Generate()
110 __ ldr(scratch_low, MemOperand(input_reg, double_offset)); in Generate()
111 __ ldr(scratch_high, MemOperand(input_reg, double_offset + kIntSize)); in Generate()
115 __ Ubfx(scratch, scratch_high, in Generate()
120 __ sub(scratch, scratch, Operand(HeapNumber::kExponentBias + 1)); in Generate()
125 __ cmp(scratch, Operand(83)); in Generate()
126 __ b(ge, &out_of_range); in Generate()
133 __ rsb(scratch, scratch, Operand(51), SetCC); in Generate()
134 __ b(ls, &only_low); in Generate()
137 __ mov(scratch_low, Operand(scratch_low, LSR, scratch)); in Generate()
141 __ rsb(scratch, scratch, Operand(32)); in Generate()
142 __ Ubfx(result_reg, scratch_high, in Generate()
145 __ orr(result_reg, result_reg, in Generate()
147 __ orr(result_reg, scratch_low, Operand(result_reg, LSL, scratch)); in Generate()
148 __ b(&negate); in Generate()
150 __ bind(&out_of_range); in Generate()
151 __ mov(result_reg, Operand::Zero()); in Generate()
152 __ b(&done); in Generate()
154 __ bind(&only_low); in Generate()
157 __ rsb(scratch, scratch, Operand::Zero()); in Generate()
158 __ mov(result_reg, Operand(scratch_low, LSL, scratch)); in Generate()
160 __ bind(&negate); in Generate()
167 __ eor(result_reg, result_reg, Operand(scratch_high, ASR, 31)); in Generate()
168 __ add(result_reg, result_reg, Operand(scratch_high, LSR, 31)); in Generate()
170 __ bind(&done); in Generate()
172 __ Pop(scratch_high, scratch_low, scratch); in Generate()
173 __ Ret(); in Generate()
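The DoubleToIStub lines above implement ECMAScript's ToInt32 truncation in integer registers: Ubfx extracts the exponent, values whose low 32 integer bits must be zero (an unbiased exponent of 84 or more) short-circuit to zero at &out_of_range, the mantissa words are shifted into place, and the eor/add pair at &negate applies the sign. A minimal portable sketch of the same computation, assuming only the IEEE-754 double layout (names are illustrative, not V8's):

    #include <cstdint>
    #include <cstring>

    // Sketch: truncate a double to int32 modulo 2^32 (ES ToInt32) using only
    // integer operations, mirroring the exponent/mantissa handling above.
    int32_t DoubleToInt32Bits(double input) {
      uint64_t bits;
      std::memcpy(&bits, &input, sizeof(bits));
      int biased_exponent = static_cast<int>((bits >> 52) & 0x7FF);
      // Scale factor for the 53-bit significand: 1075 = exponent bias (1023)
      // plus the mantissa width (52).
      int exponent = biased_exponent - 1075;
      uint64_t significand = bits & ((uint64_t{1} << 52) - 1);
      if (biased_exponent != 0) significand |= uint64_t{1} << 52;  // implicit bit
      uint64_t magnitude;
      if (exponent >= 32) {
        magnitude = 0;  // low 32 bits all zero; also maps NaN/Infinity to 0
      } else if (exponent <= -53) {
        magnitude = 0;  // |input| < 1 truncates to 0
      } else if (exponent < 0) {
        magnitude = significand >> -exponent;  // drop the fractional bits
      } else {
        magnitude = significand << exponent;   // only the low 32 bits survive
      }
      uint32_t result = static_cast<uint32_t>(magnitude);
      // Two's-complement negate when the sign bit is set, like the eor/add
      // sequence at the &negate label.
      if (bits >> 63) result = ~result + 1u;
      return static_cast<int32_t>(result);
    }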
184 __ cmp(r0, r1); in EmitIdenticalObjectComparison()
185 __ b(ne, &not_identical); in EmitIdenticalObjectComparison()
193 __ CompareObjectType(r0, r4, r4, FIRST_JS_RECEIVER_TYPE); in EmitIdenticalObjectComparison()
194 __ b(ge, slow); in EmitIdenticalObjectComparison()
196 __ cmp(r4, Operand(SYMBOL_TYPE)); in EmitIdenticalObjectComparison()
197 __ b(eq, slow); in EmitIdenticalObjectComparison()
199 __ CompareObjectType(r0, r4, r4, HEAP_NUMBER_TYPE); in EmitIdenticalObjectComparison()
200 __ b(eq, &heap_number); in EmitIdenticalObjectComparison()
203 __ cmp(r4, Operand(FIRST_JS_RECEIVER_TYPE)); in EmitIdenticalObjectComparison()
204 __ b(ge, slow); in EmitIdenticalObjectComparison()
206 __ cmp(r4, Operand(SYMBOL_TYPE)); in EmitIdenticalObjectComparison()
207 __ b(eq, slow); in EmitIdenticalObjectComparison()
212 __ cmp(r4, Operand(ODDBALL_TYPE)); in EmitIdenticalObjectComparison()
213 __ b(ne, &return_equal); in EmitIdenticalObjectComparison()
214 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); in EmitIdenticalObjectComparison()
215 __ cmp(r0, r2); in EmitIdenticalObjectComparison()
216 __ b(ne, &return_equal); in EmitIdenticalObjectComparison()
219 __ mov(r0, Operand(GREATER)); in EmitIdenticalObjectComparison()
222 __ mov(r0, Operand(LESS)); in EmitIdenticalObjectComparison()
224 __ Ret(); in EmitIdenticalObjectComparison()
229 __ bind(&return_equal); in EmitIdenticalObjectComparison()
231 __ mov(r0, Operand(GREATER)); // Things aren't less than themselves. in EmitIdenticalObjectComparison()
233 __ mov(r0, Operand(LESS)); // Things aren't greater than themselves. in EmitIdenticalObjectComparison()
235 __ mov(r0, Operand(EQUAL)); // Things are <=, >=, ==, === themselves. in EmitIdenticalObjectComparison()
237 __ Ret(); in EmitIdenticalObjectComparison()
243 __ bind(&heap_number); in EmitIdenticalObjectComparison()
250 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); in EmitIdenticalObjectComparison()
252 __ Sbfx(r3, r2, HeapNumber::kExponentShift, HeapNumber::kExponentBits); in EmitIdenticalObjectComparison()
254 __ cmp(r3, Operand(-1)); in EmitIdenticalObjectComparison()
255 __ b(ne, &return_equal); in EmitIdenticalObjectComparison()
258 __ mov(r2, Operand(r2, LSL, HeapNumber::kNonMantissaBitsInTopWord)); in EmitIdenticalObjectComparison()
260 __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset)); in EmitIdenticalObjectComparison()
261 __ orr(r0, r3, Operand(r2), SetCC); in EmitIdenticalObjectComparison()
268 __ Ret(eq); in EmitIdenticalObjectComparison()
270 __ mov(r0, Operand(GREATER)); // NaN <= NaN should fail. in EmitIdenticalObjectComparison()
272 __ mov(r0, Operand(LESS)); // NaN >= NaN should fail. in EmitIdenticalObjectComparison()
275 __ Ret(); in EmitIdenticalObjectComparison()
279 __ bind(&not_identical); in EmitIdenticalObjectComparison()
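EmitIdenticalObjectComparison returns EQUAL for identical operands except when the operand is a heap number holding NaN, since NaN compares unequal even to itself; the Sbfx/orr sequence above tests for an all-ones exponent with a non-zero mantissa. A minimal sketch of that bit test (plain IEEE-754 layout, illustrative names):

    #include <cstdint>
    #include <cstring>

    // Sketch: NaN iff the exponent bits are all ones and the mantissa is
    // non-zero. Infinity shares the exponent pattern but has a zero mantissa,
    // so identical infinities still compare EQUAL.
    bool IsNaNBits(double value) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof(bits));
      uint32_t exponent = static_cast<uint32_t>(bits >> 52) & 0x7FF;
      uint64_t mantissa = bits & ((uint64_t{1} << 52) - 1);
      return exponent == 0x7FF && mantissa != 0;
    }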
294 __ JumpIfSmi(rhs, &rhs_is_smi); in EmitSmiNonsmiComparison()
297 __ CompareObjectType(rhs, r4, r4, HEAP_NUMBER_TYPE); in EmitSmiNonsmiComparison()
303 __ mov(r0, Operand(NOT_EQUAL), LeaveCC, ne); in EmitSmiNonsmiComparison()
305 __ Ret(ne); in EmitSmiNonsmiComparison()
309 __ b(ne, slow); in EmitSmiNonsmiComparison()
314 __ SmiToDouble(d7, lhs); in EmitSmiNonsmiComparison()
316 __ vldr(d6, rhs, HeapNumber::kValueOffset - kHeapObjectTag); in EmitSmiNonsmiComparison()
320 __ jmp(lhs_not_nan); in EmitSmiNonsmiComparison()
322 __ bind(&rhs_is_smi); in EmitSmiNonsmiComparison()
324 __ CompareObjectType(lhs, r4, r4, HEAP_NUMBER_TYPE); in EmitSmiNonsmiComparison()
330 __ mov(r0, Operand(NOT_EQUAL), LeaveCC, ne); in EmitSmiNonsmiComparison()
332 __ Ret(ne); in EmitSmiNonsmiComparison()
336 __ b(ne, slow); in EmitSmiNonsmiComparison()
341 __ vldr(d7, lhs, HeapNumber::kValueOffset - kHeapObjectTag); in EmitSmiNonsmiComparison()
343 __ SmiToDouble(d6, rhs); in EmitSmiNonsmiComparison()
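The vldr operands above subtract kHeapObjectTag from HeapNumber::kValueOffset because V8 heap pointers carry a tag in their low bit; every FieldMemOperand in this file performs the same correction. A one-line sketch of the addressing rule (the helper name is illustrative):

    #include <cstdint>

    const intptr_t kHeapObjectTag = 1;  // heap pointers have the low bit set

    // Sketch: the real address of a field at byte_offset inside a tagged object.
    inline uint8_t* FieldAddress(uintptr_t tagged_pointer, int byte_offset) {
      return reinterpret_cast<uint8_t*>(tagged_pointer - kHeapObjectTag +
                                        byte_offset);
    }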
362 __ CompareObjectType(rhs, r2, r2, FIRST_JS_RECEIVER_TYPE); in EmitStrictTwoHeapObjectCompare()
363 __ b(lt, &first_non_object); in EmitStrictTwoHeapObjectCompare()
367 __ bind(&return_not_equal); in EmitStrictTwoHeapObjectCompare()
368 __ Ret(); in EmitStrictTwoHeapObjectCompare()
370 __ bind(&first_non_object); in EmitStrictTwoHeapObjectCompare()
372 __ cmp(r2, Operand(ODDBALL_TYPE)); in EmitStrictTwoHeapObjectCompare()
373 __ b(eq, &return_not_equal); in EmitStrictTwoHeapObjectCompare()
375 __ CompareObjectType(lhs, r3, r3, FIRST_JS_RECEIVER_TYPE); in EmitStrictTwoHeapObjectCompare()
376 __ b(ge, &return_not_equal); in EmitStrictTwoHeapObjectCompare()
379 __ cmp(r3, Operand(ODDBALL_TYPE)); in EmitStrictTwoHeapObjectCompare()
380 __ b(eq, &return_not_equal); in EmitStrictTwoHeapObjectCompare()
385 __ orr(r2, r2, Operand(r3)); in EmitStrictTwoHeapObjectCompare()
386 __ tst(r2, Operand(kIsNotStringMask | kIsNotInternalizedMask)); in EmitStrictTwoHeapObjectCompare()
387 __ b(eq, &return_not_equal); in EmitStrictTwoHeapObjectCompare()
401 __ CompareObjectType(rhs, r3, r2, HEAP_NUMBER_TYPE); in EmitCheckForTwoHeapNumbers()
402 __ b(ne, not_heap_numbers); in EmitCheckForTwoHeapNumbers()
403 __ ldr(r2, FieldMemOperand(lhs, HeapObject::kMapOffset)); in EmitCheckForTwoHeapNumbers()
404 __ cmp(r2, r3); in EmitCheckForTwoHeapNumbers()
405 __ b(ne, slow); // First was a heap number, second wasn't. Go slow case. in EmitCheckForTwoHeapNumbers()
409 __ vldr(d6, rhs, HeapNumber::kValueOffset - kHeapObjectTag); in EmitCheckForTwoHeapNumbers()
410 __ vldr(d7, lhs, HeapNumber::kValueOffset - kHeapObjectTag); in EmitCheckForTwoHeapNumbers()
411 __ jmp(both_loaded_as_doubles); in EmitCheckForTwoHeapNumbers()
428 __ tst(r2, Operand(kIsNotStringMask)); in EmitCheckForInternalizedStringsOrObjects()
429 __ b(ne, &object_test); in EmitCheckForInternalizedStringsOrObjects()
430 __ tst(r2, Operand(kIsNotInternalizedMask)); in EmitCheckForInternalizedStringsOrObjects()
431 __ b(ne, possible_strings); in EmitCheckForInternalizedStringsOrObjects()
432 __ CompareObjectType(lhs, r3, r3, FIRST_NONSTRING_TYPE); in EmitCheckForInternalizedStringsOrObjects()
433 __ b(ge, runtime_call); in EmitCheckForInternalizedStringsOrObjects()
434 __ tst(r3, Operand(kIsNotInternalizedMask)); in EmitCheckForInternalizedStringsOrObjects()
435 __ b(ne, possible_strings); in EmitCheckForInternalizedStringsOrObjects()
440 __ Ret(); in EmitCheckForInternalizedStringsOrObjects()
442 __ bind(&object_test); in EmitCheckForInternalizedStringsOrObjects()
443 __ ldr(r2, FieldMemOperand(lhs, HeapObject::kMapOffset)); in EmitCheckForInternalizedStringsOrObjects()
444 __ ldr(r3, FieldMemOperand(rhs, HeapObject::kMapOffset)); in EmitCheckForInternalizedStringsOrObjects()
445 __ ldrb(r4, FieldMemOperand(r2, Map::kBitFieldOffset)); in EmitCheckForInternalizedStringsOrObjects()
446 __ ldrb(r5, FieldMemOperand(r3, Map::kBitFieldOffset)); in EmitCheckForInternalizedStringsOrObjects()
447 __ tst(r4, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
448 __ b(ne, &undetectable); in EmitCheckForInternalizedStringsOrObjects()
449 __ tst(r5, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
450 __ b(ne, &return_unequal); in EmitCheckForInternalizedStringsOrObjects()
452 __ CompareInstanceType(r2, r2, FIRST_JS_RECEIVER_TYPE); in EmitCheckForInternalizedStringsOrObjects()
453 __ b(lt, runtime_call); in EmitCheckForInternalizedStringsOrObjects()
454 __ CompareInstanceType(r3, r3, FIRST_JS_RECEIVER_TYPE); in EmitCheckForInternalizedStringsOrObjects()
455 __ b(lt, runtime_call); in EmitCheckForInternalizedStringsOrObjects()
457 __ bind(&return_unequal); in EmitCheckForInternalizedStringsOrObjects()
459 __ Ret(); in EmitCheckForInternalizedStringsOrObjects()
461 __ bind(&undetectable); in EmitCheckForInternalizedStringsOrObjects()
462 __ tst(r5, Operand(1 << Map::kIsUndetectable)); in EmitCheckForInternalizedStringsOrObjects()
463 __ b(eq, &return_unequal); in EmitCheckForInternalizedStringsOrObjects()
468 __ CompareInstanceType(r2, r2, ODDBALL_TYPE); in EmitCheckForInternalizedStringsOrObjects()
469 __ b(eq, &return_equal); in EmitCheckForInternalizedStringsOrObjects()
470 __ CompareInstanceType(r3, r3, ODDBALL_TYPE); in EmitCheckForInternalizedStringsOrObjects()
471 __ b(ne, &return_unequal); in EmitCheckForInternalizedStringsOrObjects()
473 __ bind(&return_equal); in EmitCheckForInternalizedStringsOrObjects()
474 __ mov(r0, Operand(EQUAL)); in EmitCheckForInternalizedStringsOrObjects()
475 __ Ret(); in EmitCheckForInternalizedStringsOrObjects()
485 __ JumpIfNotSmi(input, fail); in CompareICStub_CheckInputType()
487 __ JumpIfSmi(input, &ok); in CompareICStub_CheckInputType()
488 __ CheckMap(input, scratch, Heap::kHeapNumberMapRootIndex, fail, in CompareICStub_CheckInputType()
493 __ bind(&ok); in CompareICStub_CheckInputType()
513 __ orr(r2, r1, r0); in GenerateGeneric()
514 __ JumpIfNotSmi(r2, &not_two_smis); in GenerateGeneric()
515 __ mov(r1, Operand(r1, ASR, 1)); in GenerateGeneric()
516 __ sub(r0, r1, Operand(r0, ASR, 1)); in GenerateGeneric()
517 __ Ret(); in GenerateGeneric()
518 __ bind(&not_two_smis); in GenerateGeneric()
531 __ and_(r2, lhs, Operand(rhs)); in GenerateGeneric()
532 __ JumpIfNotSmi(r2, &not_smis); in GenerateGeneric()
543 __ bind(&both_loaded_as_doubles); in GenerateGeneric()
545 __ bind(&lhs_not_nan); in GenerateGeneric()
547 __ VFPCompareAndSetFlags(d7, d6); in GenerateGeneric()
549 __ b(vs, &nan); in GenerateGeneric()
550 __ mov(r0, Operand(EQUAL), LeaveCC, eq); in GenerateGeneric()
551 __ mov(r0, Operand(LESS), LeaveCC, lt); in GenerateGeneric()
552 __ mov(r0, Operand(GREATER), LeaveCC, gt); in GenerateGeneric()
553 __ Ret(); in GenerateGeneric()
555 __ bind(&nan); in GenerateGeneric()
560 __ mov(r0, Operand(GREATER)); in GenerateGeneric()
562 __ mov(r0, Operand(LESS)); in GenerateGeneric()
564 __ Ret(); in GenerateGeneric()
566 __ bind(&not_smis); in GenerateGeneric()
589 __ bind(&check_for_internalized_strings); in GenerateGeneric()
602 __ bind(&flat_string_check); in GenerateGeneric()
604 __ JumpIfNonSmisNotBothSequentialOneByteStrings(lhs, rhs, r2, r3, &slow); in GenerateGeneric()
606 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, r2, in GenerateGeneric()
616 __ bind(&slow); in GenerateGeneric()
621 __ Push(cp); in GenerateGeneric()
622 __ Call(strict() ? isolate()->builtins()->StrictEqual() in GenerateGeneric()
625 __ Pop(cp); in GenerateGeneric()
629 __ LoadRoot(r1, Heap::kTrueValueRootIndex); in GenerateGeneric()
630 __ sub(r0, r0, r1); in GenerateGeneric()
631 __ Ret(); in GenerateGeneric()
633 __ Push(lhs, rhs); in GenerateGeneric()
641 __ mov(r0, Operand(Smi::FromInt(ncr))); in GenerateGeneric()
642 __ push(r0); in GenerateGeneric()
646 __ TailCallRuntime(Runtime::kCompare); in GenerateGeneric()
649 __ bind(&miss); in GenerateGeneric()
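The two-smi fast path at source lines 513-517 exploits the 32-bit smi encoding: a smi is the integer shifted left one bit with a zero tag bit, so ASR #1 untags and a single subtraction yields the negative/zero/positive result the comparison protocol expects. A sketch, assuming the 32-bit tagging scheme:

    #include <cstdint>

    // Sketch: 32-bit smis store the value in the upper 31 bits, tag bit 0.
    inline bool IsSmi(int32_t tagged) { return (tagged & 1) == 0; }

    // Untag both operands (arithmetic shift, universal on real compilers) and
    // subtract: the sign of the result encodes LESS / EQUAL / GREATER.
    int32_t CompareTwoSmis(int32_t lhs_tagged, int32_t rhs_tagged) {
      return (lhs_tagged >> 1) - (rhs_tagged >> 1);
    }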
658 __ stm(db_w, sp, kCallerSaved | lr.bit()); in Generate()
663 __ SaveFPRegs(sp, scratch); in Generate()
669 __ PrepareCallCFunction(argument_count, fp_argument_count, scratch); in Generate()
670 __ mov(r0, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
671 __ CallCFunction( in Generate()
675 __ RestoreFPRegs(sp, scratch); in Generate()
677 __ ldm(ia_w, sp, kCallerSaved | pc.bit()); // Also pop pc to get Ret(0). in Generate()
694 __ UntagAndJumpIfSmi(scratch, exponent, &int_exponent); in Generate()
696 __ vldr(double_exponent, in Generate()
702 __ TryDoubleToInt32Exact(scratch, double_exponent, double_scratch); in Generate()
703 __ b(eq, &int_exponent); in Generate()
705 __ push(lr); in Generate()
708 __ PrepareCallCFunction(0, 2, scratch); in Generate()
709 __ MovToFloatParameters(double_base, double_exponent); in Generate()
710 __ CallCFunction( in Generate()
713 __ pop(lr); in Generate()
714 __ MovFromFloatResult(double_result); in Generate()
715 __ b(&done); in Generate()
719 __ bind(&int_exponent); in Generate()
723 __ mov(scratch, exponent); in Generate()
726 __ mov(exponent, scratch); in Generate()
728 __ vmov(double_scratch, double_base); // Back up base. in Generate()
729 __ vmov(double_result, 1.0, scratch2); in Generate()
732 __ cmp(scratch, Operand::Zero()); in Generate()
733 __ rsb(scratch, scratch, Operand::Zero(), LeaveCC, mi); in Generate()
736 __ bind(&while_true); in Generate()
737 __ mov(scratch, Operand(scratch, LSR, 1), SetCC); in Generate()
738 __ vmul(double_result, double_result, double_scratch, cs); in Generate()
739 __ vmul(double_scratch, double_scratch, double_scratch, ne); in Generate()
740 __ b(ne, &while_true); in Generate()
742 __ cmp(exponent, Operand::Zero()); in Generate()
743 __ b(ge, &done); in Generate()
744 __ vmov(double_scratch, 1.0, scratch); in Generate()
745 __ vdiv(double_result, double_scratch, double_result); in Generate()
748 __ VFPCompareAndSetFlags(double_result, 0.0); in Generate()
749 __ b(ne, &done); in Generate()
752 __ vmov(single_scratch, exponent); in Generate()
753 __ vcvt_f64_s32(double_exponent, single_scratch); in Generate()
756 __ push(lr); in Generate()
759 __ PrepareCallCFunction(0, 2, scratch); in Generate()
760 __ MovToFloatParameters(double_base, double_exponent); in Generate()
761 __ CallCFunction(ExternalReference::power_double_double_function(isolate()), in Generate()
764 __ pop(lr); in Generate()
765 __ MovFromFloatResult(double_result); in Generate()
767 __ bind(&done); in Generate()
768 __ Ret(); in Generate()
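The &while_true loop in MathPowStub is square-and-multiply over the exponent's bits: the LSR #1 with SetCC shifts a bit into the carry flag, the conditional vmul (cs) multiplies the result by the current power, and the second vmul (ne) squares it while bits remain; a negative exponent is fixed up afterwards with a reciprocal, and the stub re-checks the result against 0.0 and falls back to the C pow implementation when it is zero. An equivalent sketch:

    // Sketch: exponentiation by squaring, mirroring the conditional-vmul loop.
    double PowIntExponent(double base, int exponent) {
      unsigned bits = exponent < 0 ? 0u - static_cast<unsigned>(exponent)
                                   : static_cast<unsigned>(exponent);
      double result = 1.0;
      double power = base;  // holds base^(2^i) across iterations
      while (bits != 0) {
        if (bits & 1) result *= power;  // the 'cs' multiply
        power *= power;                 // the 'ne' squaring
        bits >>= 1;
      }
      return exponent < 0 ? 1.0 / result : result;
    }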
815 __ mov(r5, Operand(r1)); in Generate()
819 __ mov(r1, Operand(r2)); in Generate()
822 __ add(r1, sp, Operand(r0, LSL, kPointerSizeLog2)); in Generate()
823 __ sub(r1, r1, Operand(kPointerSize)); in Generate()
828 __ EnterExitFrame(save_doubles(), 0, is_builtin_exit() in Generate()
833 __ mov(r4, Operand(r0)); in Generate()
846 __ tst(sp, Operand(frame_alignment_mask)); in Generate()
847 __ b(eq, &alignment_as_expected); in Generate()
849 __ stop("Unexpected alignment"); in Generate()
850 __ bind(&alignment_as_expected); in Generate()
859 __ mov(r2, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
867 __ sub(sp, sp, Operand(result_stack_size)); in Generate()
870 __ mov(r3, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
871 __ mov(r2, Operand(r1)); in Generate()
872 __ mov(r1, Operand(r0)); in Generate()
873 __ mov(r0, Operand(sp)); in Generate()
886 __ add(lr, pc, Operand(4)); in Generate()
887 __ str(lr, MemOperand(sp, result_stack_size)); in Generate()
888 __ Call(r5); in Generate()
893 __ ldr(r2, MemOperand(sp, 2 * kPointerSize)); in Generate()
894 __ ldr(r1, MemOperand(sp, 1 * kPointerSize)); in Generate()
895 __ ldr(r0, MemOperand(sp, 0 * kPointerSize)); in Generate()
901 __ CompareRoot(r0, Heap::kExceptionRootIndex); in Generate()
902 __ b(eq, &exception_returned); in Generate()
910 __ mov(r3, Operand(pending_exception_address)); in Generate()
911 __ ldr(r3, MemOperand(r3)); in Generate()
912 __ CompareRoot(r3, Heap::kTheHoleValueRootIndex); in Generate()
914 __ b(eq, &okay); in Generate()
915 __ stop("Unexpected pending exception"); in Generate()
916 __ bind(&okay); in Generate()
931 __ LeaveExitFrame(save_doubles(), argc, true); in Generate()
932 __ mov(pc, lr); in Generate()
935 __ bind(&exception_returned); in Generate()
954 __ PrepareCallCFunction(3, 0, r0); in Generate()
955 __ mov(r0, Operand(0)); in Generate()
956 __ mov(r1, Operand(0)); in Generate()
957 __ mov(r2, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
958 __ CallCFunction(find_handler, 3); in Generate()
962 __ mov(cp, Operand(pending_handler_context_address)); in Generate()
963 __ ldr(cp, MemOperand(cp)); in Generate()
964 __ mov(sp, Operand(pending_handler_sp_address)); in Generate()
965 __ ldr(sp, MemOperand(sp)); in Generate()
966 __ mov(fp, Operand(pending_handler_fp_address)); in Generate()
967 __ ldr(fp, MemOperand(fp)); in Generate()
971 __ cmp(cp, Operand(0)); in Generate()
972 __ str(cp, MemOperand(fp, StandardFrameConstants::kContextOffset), ne); in Generate()
976 __ mov(r1, Operand(pending_handler_code_address)); in Generate()
977 __ ldr(r1, MemOperand(r1)); in Generate()
978 __ mov(r2, Operand(pending_handler_offset_address)); in Generate()
979 __ ldr(r2, MemOperand(r2)); in Generate()
980 __ add(r1, r1, Operand(Code::kHeaderSize - kHeapObjectTag)); // Code start in Generate()
982 __ LoadConstantPoolPointerRegisterFromCodeTargetAddress(r1); in Generate()
984 __ add(pc, r1, r2); in Generate()
1002 __ stm(db_w, sp, kCalleeSaved | lr.bit()); in Generate()
1005 __ vstm(db_w, sp, kFirstCalleeSavedDoubleReg, kLastCalleeSavedDoubleReg); in Generate()
1007 __ vmov(kDoubleRegZero, 0.0); in Generate()
1018 __ ldr(r4, MemOperand(sp, offset_to_argv)); in Generate()
1028 __ mov(r8, Operand::Zero()); in Generate()
1030 __ mov(r7, Operand(StackFrame::TypeToMarker(marker))); in Generate()
1031 __ mov(r6, Operand(StackFrame::TypeToMarker(marker))); in Generate()
1032 __ mov(r5, in Generate()
1034 __ ldr(r5, MemOperand(r5)); in Generate()
1035 __ mov(ip, Operand(-1)); // Push a bad frame pointer to fail if it is used. in Generate()
1036 __ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | in Generate()
1041 __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); in Generate()
1046 __ mov(r5, Operand(ExternalReference(js_entry_sp))); in Generate()
1047 __ ldr(r6, MemOperand(r5)); in Generate()
1048 __ cmp(r6, Operand::Zero()); in Generate()
1049 __ b(ne, &non_outermost_js); in Generate()
1050 __ str(fp, MemOperand(r5)); in Generate()
1051 __ mov(ip, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME)); in Generate()
1053 __ b(&cont); in Generate()
1054 __ bind(&non_outermost_js); in Generate()
1055 __ mov(ip, Operand(StackFrame::INNER_JSENTRY_FRAME)); in Generate()
1056 __ bind(&cont); in Generate()
1057 __ push(ip); in Generate()
1061 __ jmp(&invoke); in Generate()
1068 __ bind(&handler_entry); in Generate()
1074 __ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in Generate()
1077 __ str(r0, MemOperand(ip)); in Generate()
1078 __ LoadRoot(r0, Heap::kExceptionRootIndex); in Generate()
1079 __ b(&exit); in Generate()
1082 __ bind(&invoke); in Generate()
1084 __ PushStackHandler(); in Generate()
1103 __ mov(ip, Operand(construct_entry)); in Generate()
1106 __ mov(ip, Operand(entry)); in Generate()
1108 __ ldr(ip, MemOperand(ip)); // deref address in Generate()
1109 __ add(ip, ip, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1112 __ Call(ip); in Generate()
1115 __ PopStackHandler(); in Generate()
1117 __ bind(&exit); // r0 holds result in Generate()
1120 __ pop(r5); in Generate()
1121 __ cmp(r5, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME)); in Generate()
1122 __ b(ne, &non_outermost_js_2); in Generate()
1123 __ mov(r6, Operand::Zero()); in Generate()
1124 __ mov(r5, Operand(ExternalReference(js_entry_sp))); in Generate()
1125 __ str(r6, MemOperand(r5)); in Generate()
1126 __ bind(&non_outermost_js_2); in Generate()
1129 __ pop(r3); in Generate()
1130 __ mov(ip, in Generate()
1132 __ str(r3, MemOperand(ip)); in Generate()
1135 __ add(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset)); in Generate()
1140 __ mov(lr, Operand(pc)); in Generate()
1145 __ vldm(ia_w, sp, kFirstCalleeSavedDoubleReg, kLastCalleeSavedDoubleReg); in Generate()
1147 __ ldm(ia_w, sp, kCalleeSaved | pc.bit()); in Generate()
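JSEntryStub tracks whether this is the outermost transition from C++ into JavaScript: if the per-isolate js_entry_sp slot is still zero, the frame pointer is stored there and an OUTERMOST_JSENTRY_FRAME marker is pushed; nested entries push INNER_JSENTRY_FRAME, and only the outermost frame clears the slot on exit. A sketch of that bookkeeping, with a plain pointer standing in for the isolate slot:

    #include <cstdint>

    enum JSEntryMarker { INNER_JSENTRY_FRAME, OUTERMOST_JSENTRY_FRAME };

    // Sketch: record the outermost entry frame in js_entry_sp.
    JSEntryMarker EnterJS(uintptr_t* js_entry_sp, uintptr_t fp) {
      if (*js_entry_sp == 0) {
        *js_entry_sp = fp;             // first entry: remember our frame
        return OUTERMOST_JSENTRY_FRAME;
      }
      return INNER_JSENTRY_FRAME;      // nested entry: leave the slot alone
    }

    void LeaveJS(uintptr_t* js_entry_sp, JSEntryMarker marker) {
      if (marker == OUTERMOST_JSENTRY_FRAME) *js_entry_sp = 0;
    }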
1155 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
1184 __ mov(r0, Operand(address_of_regexp_stack_memory_size)); in Generate()
1185 __ ldr(r0, MemOperand(r0, 0)); in Generate()
1186 __ cmp(r0, Operand::Zero()); in Generate()
1187 __ b(eq, &runtime); in Generate()
1190 __ ldr(r0, MemOperand(sp, kJSRegExpOffset)); in Generate()
1191 __ JumpIfSmi(r0, &runtime); in Generate()
1192 __ CompareObjectType(r0, r1, r1, JS_REGEXP_TYPE); in Generate()
1193 __ b(ne, &runtime); in Generate()
1196 __ ldr(regexp_data, FieldMemOperand(r0, JSRegExp::kDataOffset)); in Generate()
1198 __ SmiTst(regexp_data); in Generate()
1199 __ Check(ne, kUnexpectedTypeForRegExpDataFixedArrayExpected); in Generate()
1200 __ CompareObjectType(regexp_data, r0, r0, FIXED_ARRAY_TYPE); in Generate()
1201 __ Check(eq, kUnexpectedTypeForRegExpDataFixedArrayExpected); in Generate()
1206 __ ldr(r0, FieldMemOperand(regexp_data, JSRegExp::kDataTagOffset)); in Generate()
1207 __ cmp(r0, Operand(Smi::FromInt(JSRegExp::IRREGEXP))); in Generate()
1208 __ b(ne, &runtime); in Generate()
1212 __ ldr(r2, in Generate()
1220 __ cmp(r2, Operand(Isolate::kJSRegexpStaticOffsetsVectorSize - 2)); in Generate()
1221 __ b(hi, &runtime); in Generate()
1224 __ mov(r9, Operand::Zero()); in Generate()
1225 __ ldr(subject, MemOperand(sp, kSubjectOffset)); in Generate()
1226 __ JumpIfSmi(subject, &runtime); in Generate()
1227 __ mov(r3, subject); // Make a copy of the original subject string. in Generate()
1250 __ bind(&check_underlying); in Generate()
1251 __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset)); in Generate()
1252 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset)); in Generate()
1255 __ and_(r1, in Generate()
1262 __ b(eq, &seq_string); // Go to (4). in Generate()
1270 __ cmp(r1, Operand(kExternalStringTag)); in Generate()
1271 __ b(ge, &not_seq_nor_cons); // Go to (5). in Generate()
1275 __ ldr(r0, FieldMemOperand(subject, ConsString::kSecondOffset)); in Generate()
1276 __ CompareRoot(r0, Heap::kempty_stringRootIndex); in Generate()
1277 __ b(ne, &runtime); in Generate()
1278 __ ldr(subject, FieldMemOperand(subject, ConsString::kFirstOffset)); in Generate()
1279 __ jmp(&check_underlying); in Generate()
1282 __ bind(&seq_string); in Generate()
1288 __ ldr(r1, MemOperand(sp, kPreviousIndexOffset)); in Generate()
1289 __ JumpIfNotSmi(r1, &runtime); in Generate()
1290 __ ldr(r3, FieldMemOperand(r3, String::kLengthOffset)); in Generate()
1291 __ cmp(r3, Operand(r1)); in Generate()
1292 __ b(ls, &runtime); in Generate()
1293 __ SmiUntag(r1); in Generate()
1297 __ and_(r0, r0, Operand(kStringEncodingMask)); in Generate()
1298 __ mov(r3, Operand(r0, ASR, 3), SetCC); in Generate()
1299 __ ldr(r6, FieldMemOperand(regexp_data, JSRegExp::kDataOneByteCodeOffset), in Generate()
1301 __ ldr(r6, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset), eq); in Generate()
1308 __ JumpIfSmi(r6, &runtime); in Generate()
1316 __ IncrementCounter(isolate()->counters()->regexp_entry_native(), 1, r0, r2); in Generate()
1321 __ EnterExitFrame(false, kRegExpExecuteArguments - kParameterRegisters); in Generate()
1327 __ mov(r0, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
1328 __ str(r0, MemOperand(sp, 5 * kPointerSize)); in Generate()
1331 __ mov(r0, Operand(1)); in Generate()
1332 __ str(r0, MemOperand(sp, 4 * kPointerSize)); in Generate()
1335 __ mov(r0, Operand(address_of_regexp_stack_memory_address)); in Generate()
1336 __ ldr(r0, MemOperand(r0, 0)); in Generate()
1337 __ mov(r2, Operand(address_of_regexp_stack_memory_size)); in Generate()
1338 __ ldr(r2, MemOperand(r2, 0)); in Generate()
1339 __ add(r0, r0, Operand(r2)); in Generate()
1340 __ str(r0, MemOperand(sp, 3 * kPointerSize)); in Generate()
1344 __ mov(r0, Operand::Zero()); in Generate()
1345 __ str(r0, MemOperand(sp, 2 * kPointerSize)); in Generate()
1348 __ mov(r0, in Generate()
1351 __ str(r0, MemOperand(sp, 1 * kPointerSize)); in Generate()
1355 __ add(r7, subject, Operand(SeqString::kHeaderSize - kHeapObjectTag)); in Generate()
1356 __ eor(r3, r3, Operand(1)); in Generate()
1361 __ ldr(subject, MemOperand(fp, kSubjectOffset + 2 * kPointerSize)); in Generate()
1366 __ add(r9, r7, Operand(r9, LSL, r3)); in Generate()
1367 __ add(r2, r9, Operand(r1, LSL, r3)); in Generate()
1369 __ ldr(r7, FieldMemOperand(subject, String::kLengthOffset)); in Generate()
1370 __ SmiUntag(r7); in Generate()
1371 __ add(r3, r9, Operand(r7, LSL, r3)); in Generate()
1377 __ mov(r0, subject); in Generate()
1380 __ add(r6, r6, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1384 __ LeaveExitFrame(false, no_reg, true); in Generate()
1394 __ cmp(r0, Operand(1)); in Generate()
1397 __ b(eq, &success); in Generate()
1399 __ cmp(r0, Operand(NativeRegExpMacroAssembler::FAILURE)); in Generate()
1400 __ b(eq, &failure); in Generate()
1401 __ cmp(r0, Operand(NativeRegExpMacroAssembler::EXCEPTION)); in Generate()
1403 __ b(ne, &runtime); in Generate()
1408 __ mov(r1, Operand(isolate()->factory()->the_hole_value())); in Generate()
1409 __ mov(r2, Operand(ExternalReference(Isolate::kPendingExceptionAddress, in Generate()
1411 __ ldr(r0, MemOperand(r2, 0)); in Generate()
1412 __ cmp(r0, r1); in Generate()
1413 __ b(eq, &runtime); in Generate()
1416 __ TailCallRuntime(Runtime::kRegExpExecReThrow); in Generate()
1418 __ bind(&failure); in Generate()
1420 __ mov(r0, Operand(isolate()->factory()->null_value())); in Generate()
1421 __ add(sp, sp, Operand(4 * kPointerSize)); in Generate()
1422 __ Ret(); in Generate()
1425 __ bind(&success); in Generate()
1426 __ ldr(r1, in Generate()
1432 __ add(r1, r1, Operand(2)); // r1 was a smi. in Generate()
1435 __ ldr(last_match_info_elements, MemOperand(sp, kLastMatchInfoOffset)); in Generate()
1436 __ JumpIfSmi(last_match_info_elements, &runtime); in Generate()
1438 __ ldr(r0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset)); in Generate()
1439 __ CompareRoot(r0, Heap::kFixedArrayMapRootIndex); in Generate()
1440 __ b(ne, &runtime); in Generate()
1443 __ ldr(r0, in Generate()
1445 __ add(r2, r1, Operand(RegExpMatchInfo::kLastMatchOverhead)); in Generate()
1446 __ cmp(r2, Operand::SmiUntag(r0)); in Generate()
1447 __ b(gt, &runtime); in Generate()
1452 __ SmiTag(r2, r1); in Generate()
1453 __ str(r2, FieldMemOperand(last_match_info_elements, in Generate()
1456 __ str(subject, FieldMemOperand(last_match_info_elements, in Generate()
1458 __ mov(r2, subject); in Generate()
1459 __ RecordWriteField(last_match_info_elements, in Generate()
1462 __ mov(subject, r2); in Generate()
1463 __ str(subject, FieldMemOperand(last_match_info_elements, in Generate()
1465 __ RecordWriteField(last_match_info_elements, in Generate()
1472 __ mov(r2, Operand(address_of_static_offsets_vector)); in Generate()
1479 __ add(r0, last_match_info_elements, in Generate()
1481 __ bind(&next_capture); in Generate()
1482 __ sub(r1, r1, Operand(1), SetCC); in Generate()
1483 __ b(mi, &done); in Generate()
1485 __ ldr(r3, MemOperand(r2, kPointerSize, PostIndex)); in Generate()
1487 __ SmiTag(r3); in Generate()
1488 __ str(r3, MemOperand(r0, kPointerSize, PostIndex)); in Generate()
1489 __ jmp(&next_capture); in Generate()
1490 __ bind(&done); in Generate()
1493 __ mov(r0, last_match_info_elements); in Generate()
1494 __ add(sp, sp, Operand(4 * kPointerSize)); in Generate()
1495 __ Ret(); in Generate()
1498 __ bind(&runtime); in Generate()
1499 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
1503 __ bind(&not_seq_nor_cons); in Generate()
1505 __ b(gt, &not_long_external); // Go to (7). in Generate()
1508 __ bind(&external_string); in Generate()
1509 __ ldr(r0, FieldMemOperand(subject, HeapObject::kMapOffset)); in Generate()
1510 __ ldrb(r0, FieldMemOperand(r0, Map::kInstanceTypeOffset)); in Generate()
1514 __ tst(r0, Operand(kIsIndirectStringMask)); in Generate()
1515 __ Assert(eq, kExternalStringExpectedButNotFound); in Generate()
1517 __ ldr(subject, in Generate()
1521 __ sub(subject, in Generate()
1524 __ jmp(&seq_string); // Go to (4). in Generate()
1527 __ bind(&not_long_external); in Generate()
1529 __ tst(r1, Operand(kIsNotStringMask | kShortExternalStringMask)); in Generate()
1530 __ b(ne, &runtime); in Generate()
1534 __ cmp(r1, Operand(kThinStringTag)); in Generate()
1535 __ b(eq, &thin_string); in Generate()
1537 __ ldr(r9, FieldMemOperand(subject, SlicedString::kOffsetOffset)); in Generate()
1538 __ SmiUntag(r9); in Generate()
1539 __ ldr(subject, FieldMemOperand(subject, SlicedString::kParentOffset)); in Generate()
1540 __ jmp(&check_underlying); // Go to (4). in Generate()
1542 __ bind(&thin_string); in Generate()
1543 __ ldr(subject, FieldMemOperand(subject, ThinString::kActualOffset)); in Generate()
1544 __ jmp(&check_underlying); // Go to (4). in Generate()
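On a successful match, the &next_capture loop above walks the static offsets vector the irregexp engine filled in and stores each capture boundary into the last-match-info array as a smi. A sketch of that copy, assuming the 32-bit smi tag (value << 1); unmatched captures arrive as -1 and tag the same way:

    #include <cstdint>

    // Sketch: copy raw capture offsets into the match-info array, smi-tagged.
    void CopyCaptures(const int32_t* offsets_vector, int32_t* match_info,
                      int capture_register_count) {
      for (int i = 0; i < capture_register_count; ++i) {
        match_info[i] = static_cast<int32_t>(
            static_cast<uint32_t>(offsets_vector[i]) << 1);  // SmiTag
      }
    }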
1557 __ SmiTag(r0); in CallStubInRecordCallTarget()
1558 __ Push(r3, r2, r1, r0); in CallStubInRecordCallTarget()
1559 __ Push(cp); in CallStubInRecordCallTarget()
1561 __ CallStub(stub); in CallStubInRecordCallTarget()
1563 __ Pop(cp); in CallStubInRecordCallTarget()
1564 __ Pop(r3, r2, r1, r0); in CallStubInRecordCallTarget()
1565 __ SmiUntag(r0); in CallStubInRecordCallTarget()
1585 __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3)); in GenerateRecordCallTarget()
1586 __ ldr(r5, FieldMemOperand(r5, FixedArray::kHeaderSize)); in GenerateRecordCallTarget()
1595 __ ldr(weak_value, FieldMemOperand(r5, WeakCell::kValueOffset)); in GenerateRecordCallTarget()
1596 __ cmp(r1, weak_value); in GenerateRecordCallTarget()
1597 __ b(eq, &done); in GenerateRecordCallTarget()
1598 __ CompareRoot(r5, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1599 __ b(eq, &done); in GenerateRecordCallTarget()
1600 __ ldr(feedback_map, FieldMemOperand(r5, HeapObject::kMapOffset)); in GenerateRecordCallTarget()
1601 __ CompareRoot(feedback_map, Heap::kWeakCellMapRootIndex); in GenerateRecordCallTarget()
1602 __ b(ne, &check_allocation_site); in GenerateRecordCallTarget()
1605 __ JumpIfSmi(weak_value, &initialize); in GenerateRecordCallTarget()
1606 __ jmp(&megamorphic); in GenerateRecordCallTarget()
1608 __ bind(&check_allocation_site); in GenerateRecordCallTarget()
1613 __ CompareRoot(feedback_map, Heap::kAllocationSiteMapRootIndex); in GenerateRecordCallTarget()
1614 __ b(ne, &miss); in GenerateRecordCallTarget()
1617 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5); in GenerateRecordCallTarget()
1618 __ cmp(r1, r5); in GenerateRecordCallTarget()
1619 __ b(ne, &megamorphic); in GenerateRecordCallTarget()
1620 __ jmp(&done); in GenerateRecordCallTarget()
1622 __ bind(&miss); in GenerateRecordCallTarget()
1626 __ CompareRoot(r5, Heap::kuninitialized_symbolRootIndex); in GenerateRecordCallTarget()
1627 __ b(eq, &initialize); in GenerateRecordCallTarget()
1630 __ bind(&megamorphic); in GenerateRecordCallTarget()
1631 __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3)); in GenerateRecordCallTarget()
1632 __ LoadRoot(ip, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1633 __ str(ip, FieldMemOperand(r5, FixedArray::kHeaderSize)); in GenerateRecordCallTarget()
1634 __ jmp(&done); in GenerateRecordCallTarget()
1637 __ bind(&initialize); in GenerateRecordCallTarget()
1640 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r5); in GenerateRecordCallTarget()
1641 __ cmp(r1, r5); in GenerateRecordCallTarget()
1642 __ b(ne, &not_array_function); in GenerateRecordCallTarget()
1649 __ b(&done); in GenerateRecordCallTarget()
1651 __ bind(&not_array_function); in GenerateRecordCallTarget()
1655 __ bind(&done); in GenerateRecordCallTarget()
1658 __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3)); in GenerateRecordCallTarget()
1659 __ add(r5, r5, Operand(FixedArray::kHeaderSize + kPointerSize)); in GenerateRecordCallTarget()
1660 __ ldr(r4, FieldMemOperand(r5, 0)); in GenerateRecordCallTarget()
1661 __ add(r4, r4, Operand(Smi::FromInt(1))); in GenerateRecordCallTarget()
1662 __ str(r4, FieldMemOperand(r5, 0)); in GenerateRecordCallTarget()
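GenerateRecordCallTarget implements a monotonic feedback-state machine for a call site: an uninitialized slot becomes monomorphic (a WeakCell holding the one target seen, or an AllocationSite when that target is the Array function), and any second, different target degrades the slot to the megamorphic sentinel for good; the final add/ldr/str sequence also bumps the slot's call count. A sketch of the transitions (illustrative types, ignoring the Array special case):

    enum class CallFeedback { kUninitialized, kMonomorphic, kMegamorphic };

    struct FeedbackSlot {
      CallFeedback state = CallFeedback::kUninitialized;
      const void* target = nullptr;  // stands in for the WeakCell's value
    };

    // Sketch: record one observed call target; transitions are one-way.
    void RecordCallTarget(FeedbackSlot* slot, const void* function) {
      switch (slot->state) {
        case CallFeedback::kUninitialized:  // first target: go monomorphic
          slot->state = CallFeedback::kMonomorphic;
          slot->target = function;
          break;
        case CallFeedback::kMonomorphic:    // a second, different target: give up
          if (slot->target != function) slot->state = CallFeedback::kMegamorphic;
          break;
        case CallFeedback::kMegamorphic:    // terminal state
          break;
      }
    }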
1673 __ JumpIfSmi(r1, &non_function); in Generate()
1675 __ CompareObjectType(r1, r5, r5, JS_FUNCTION_TYPE); in Generate()
1676 __ b(ne, &non_function); in Generate()
1680 __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3)); in Generate()
1683 __ ldr(r2, FieldMemOperand(r5, FixedArray::kHeaderSize)); in Generate()
1684 __ ldr(r5, FieldMemOperand(r2, AllocationSite::kMapOffset)); in Generate()
1685 __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex); in Generate()
1686 __ b(eq, &feedback_register_initialized); in Generate()
1687 __ LoadRoot(r2, Heap::kUndefinedValueRootIndex); in Generate()
1688 __ bind(&feedback_register_initialized); in Generate()
1690 __ AssertUndefinedOrAllocationSite(r2, r5); in Generate()
1693 __ mov(r3, r1); in Generate()
1697 __ ldr(r4, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset)); in Generate()
1698 __ ldr(r4, FieldMemOperand(r4, SharedFunctionInfo::kConstructStubOffset)); in Generate()
1699 __ add(pc, r4, Operand(Code::kHeaderSize - kHeapObjectTag)); in Generate()
1701 __ bind(&non_function); in Generate()
1702 __ mov(r3, r1); in Generate()
1703 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate()
1710 __ JumpIfSmi(object_, receiver_not_string_); in GenerateFast()
1713 __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); in GenerateFast()
1714 __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); in GenerateFast()
1716 __ tst(result_, Operand(kIsNotStringMask)); in GenerateFast()
1717 __ b(ne, receiver_not_string_); in GenerateFast()
1721 __ JumpIfNotSmi(index_, &index_not_smi_); in GenerateFast()
1722 __ bind(&got_smi_index_); in GenerateFast()
1725 __ ldr(ip, FieldMemOperand(object_, String::kLengthOffset)); in GenerateFast()
1726 __ cmp(ip, Operand(index_)); in GenerateFast()
1727 __ b(ls, index_out_of_range_); in GenerateFast()
1729 __ SmiUntag(index_); in GenerateFast()
1737 __ SmiTag(result_); in GenerateFast()
1738 __ bind(&exit_); in GenerateFast()
1745 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); in GenerateSlow()
1748 __ bind(&index_not_smi_); in GenerateSlow()
1750 __ CheckMap(index_, in GenerateSlow()
1757 __ Push(LoadWithVectorDescriptor::VectorRegister(), in GenerateSlow()
1761 __ Push(object_, index_); in GenerateSlow()
1763 __ CallRuntime(Runtime::kNumberToSmi); in GenerateSlow()
1766 __ Move(index_, r0); in GenerateSlow()
1768 __ Pop(LoadWithVectorDescriptor::VectorRegister(), in GenerateSlow()
1771 __ pop(object_); in GenerateSlow()
1774 __ ldr(result_, FieldMemOperand(object_, HeapObject::kMapOffset)); in GenerateSlow()
1775 __ ldrb(result_, FieldMemOperand(result_, Map::kInstanceTypeOffset)); in GenerateSlow()
1778 __ JumpIfNotSmi(index_, index_out_of_range_); in GenerateSlow()
1780 __ jmp(&got_smi_index_); in GenerateSlow()
1785 __ bind(&call_runtime_); in GenerateSlow()
1787 __ SmiTag(index_); in GenerateSlow()
1788 __ Push(object_, index_); in GenerateSlow()
1789 __ CallRuntime(Runtime::kStringCharCodeAtRT); in GenerateSlow()
1790 __ Move(result_, r0); in GenerateSlow()
1792 __ jmp(&exit_); in GenerateSlow()
1794 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); in GenerateSlow()
1804 __ ldr(length, FieldMemOperand(left, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
1805 __ ldr(scratch2, FieldMemOperand(right, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
1806 __ cmp(length, scratch2); in GenerateFlatOneByteStringEquals()
1807 __ b(eq, &check_zero_length); in GenerateFlatOneByteStringEquals()
1808 __ bind(&strings_not_equal); in GenerateFlatOneByteStringEquals()
1809 __ mov(r0, Operand(Smi::FromInt(NOT_EQUAL))); in GenerateFlatOneByteStringEquals()
1810 __ Ret(); in GenerateFlatOneByteStringEquals()
1814 __ bind(&check_zero_length); in GenerateFlatOneByteStringEquals()
1816 __ cmp(length, Operand::Zero()); in GenerateFlatOneByteStringEquals()
1817 __ b(ne, &compare_chars); in GenerateFlatOneByteStringEquals()
1818 __ mov(r0, Operand(Smi::FromInt(EQUAL))); in GenerateFlatOneByteStringEquals()
1819 __ Ret(); in GenerateFlatOneByteStringEquals()
1822 __ bind(&compare_chars); in GenerateFlatOneByteStringEquals()
1827 __ mov(r0, Operand(Smi::FromInt(EQUAL))); in GenerateFlatOneByteStringEquals()
1828 __ Ret(); in GenerateFlatOneByteStringEquals()
1837 __ ldr(scratch1, FieldMemOperand(left, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
1838 __ ldr(scratch2, FieldMemOperand(right, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
1839 __ sub(scratch3, scratch1, Operand(scratch2), SetCC); in GenerateCompareFlatOneByteStrings()
1841 __ mov(scratch1, scratch2, LeaveCC, gt); in GenerateCompareFlatOneByteStrings()
1844 __ cmp(min_length, Operand::Zero()); in GenerateCompareFlatOneByteStrings()
1845 __ b(eq, &compare_lengths); in GenerateCompareFlatOneByteStrings()
1852 __ bind(&compare_lengths); in GenerateCompareFlatOneByteStrings()
1855 __ mov(r0, Operand(length_delta), SetCC); in GenerateCompareFlatOneByteStrings()
1856 __ bind(&result_not_equal); in GenerateCompareFlatOneByteStrings()
1859 __ mov(r0, Operand(Smi::FromInt(GREATER)), LeaveCC, gt); in GenerateCompareFlatOneByteStrings()
1860 __ mov(r0, Operand(Smi::FromInt(LESS)), LeaveCC, lt); in GenerateCompareFlatOneByteStrings()
1861 __ Ret(); in GenerateCompareFlatOneByteStrings()
1871 __ SmiUntag(length); in GenerateOneByteCharsCompareLoop()
1872 __ add(scratch1, length, in GenerateOneByteCharsCompareLoop()
1874 __ add(left, left, Operand(scratch1)); in GenerateOneByteCharsCompareLoop()
1875 __ add(right, right, Operand(scratch1)); in GenerateOneByteCharsCompareLoop()
1876 __ rsb(length, length, Operand::Zero()); in GenerateOneByteCharsCompareLoop()
1881 __ bind(&loop); in GenerateOneByteCharsCompareLoop()
1882 __ ldrb(scratch1, MemOperand(left, index)); in GenerateOneByteCharsCompareLoop()
1883 __ ldrb(scratch2, MemOperand(right, index)); in GenerateOneByteCharsCompareLoop()
1884 __ cmp(scratch1, scratch2); in GenerateOneByteCharsCompareLoop()
1885 __ b(ne, chars_not_equal); in GenerateOneByteCharsCompareLoop()
1886 __ add(index, index, Operand(1), SetCC); in GenerateOneByteCharsCompareLoop()
1887 __ b(ne, &loop); in GenerateOneByteCharsCompareLoop()
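GenerateOneByteCharsCompareLoop addresses both strings from one past their ends and counts a negative index up to zero, so the add with SetCC both advances the cursor and terminates the loop without a separate comparison. The same shape in portable code (equality-only sketch; the real loop exits to chars_not_equal with the flags still set so the caller can derive LESS/GREATER):

    #include <cstddef>
    #include <cstdint>

    // Sketch: compare two one-byte string bodies using a negative index.
    bool OneByteCharsEqual(const uint8_t* left, const uint8_t* right,
                           size_t length) {
      const uint8_t* left_end = left + length;
      const uint8_t* right_end = right + length;
      for (ptrdiff_t i = -static_cast<ptrdiff_t>(length); i != 0; ++i) {
        if (left_end[i] != right_end[i]) return false;
      }
      return true;
    }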
1901 __ Move(r2, isolate()->factory()->undefined_value()); in Generate()
1905 __ tst(r2, Operand(kSmiTagMask)); in Generate()
1906 __ Assert(ne, kExpectedAllocationSite); in Generate()
1907 __ push(r2); in Generate()
1908 __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset)); in Generate()
1909 __ LoadRoot(ip, Heap::kAllocationSiteMapRootIndex); in Generate()
1910 __ cmp(r2, ip); in Generate()
1911 __ pop(r2); in Generate()
1912 __ Assert(eq, kExpectedAllocationSite); in Generate()
1918 __ TailCallStub(&stub); in Generate()
1926 __ CheckMap(r1, r2, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); in GenerateBooleans()
1927 __ CheckMap(r0, r3, Heap::kBooleanMapRootIndex, &miss, DO_SMI_CHECK); in GenerateBooleans()
1929 __ ldr(r1, FieldMemOperand(r1, Oddball::kToNumberOffset)); in GenerateBooleans()
1930 __ AssertSmi(r1); in GenerateBooleans()
1931 __ ldr(r0, FieldMemOperand(r0, Oddball::kToNumberOffset)); in GenerateBooleans()
1932 __ AssertSmi(r0); in GenerateBooleans()
1934 __ sub(r0, r1, r0); in GenerateBooleans()
1935 __ Ret(); in GenerateBooleans()
1937 __ bind(&miss); in GenerateBooleans()
1945 __ orr(r2, r1, r0); in GenerateSmis()
1946 __ JumpIfNotSmi(r2, &miss); in GenerateSmis()
1950 __ sub(r0, r0, r1, SetCC); in GenerateSmis()
1953 __ SmiUntag(r1); in GenerateSmis()
1954 __ sub(r0, r1, Operand::SmiUntag(r0)); in GenerateSmis()
1956 __ Ret(); in GenerateSmis()
1958 __ bind(&miss); in GenerateSmis()
1971 __ JumpIfNotSmi(r1, &miss); in GenerateNumbers()
1974 __ JumpIfNotSmi(r0, &miss); in GenerateNumbers()
1981 __ JumpIfSmi(r0, &right_smi); in GenerateNumbers()
1982 __ CheckMap(r0, r2, Heap::kHeapNumberMapRootIndex, &maybe_undefined1, in GenerateNumbers()
1984 __ sub(r2, r0, Operand(kHeapObjectTag)); in GenerateNumbers()
1985 __ vldr(d1, r2, HeapNumber::kValueOffset); in GenerateNumbers()
1986 __ b(&left); in GenerateNumbers()
1987 __ bind(&right_smi); in GenerateNumbers()
1988 __ SmiToDouble(d1, r0); in GenerateNumbers()
1990 __ bind(&left); in GenerateNumbers()
1991 __ JumpIfSmi(r1, &left_smi); in GenerateNumbers()
1992 __ CheckMap(r1, r2, Heap::kHeapNumberMapRootIndex, &maybe_undefined2, in GenerateNumbers()
1994 __ sub(r2, r1, Operand(kHeapObjectTag)); in GenerateNumbers()
1995 __ vldr(d0, r2, HeapNumber::kValueOffset); in GenerateNumbers()
1996 __ b(&done); in GenerateNumbers()
1997 __ bind(&left_smi); in GenerateNumbers()
1998 __ SmiToDouble(d0, r1); in GenerateNumbers()
2000 __ bind(&done); in GenerateNumbers()
2002 __ VFPCompareAndSetFlags(d0, d1); in GenerateNumbers()
2005 __ b(vs, &unordered); in GenerateNumbers()
2008 __ mov(r0, Operand(EQUAL), LeaveCC, eq); in GenerateNumbers()
2009 __ mov(r0, Operand(LESS), LeaveCC, lt); in GenerateNumbers()
2010 __ mov(r0, Operand(GREATER), LeaveCC, gt); in GenerateNumbers()
2011 __ Ret(); in GenerateNumbers()
2013 __ bind(&unordered); in GenerateNumbers()
2014 __ bind(&generic_stub); in GenerateNumbers()
2017 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); in GenerateNumbers()
2019 __ bind(&maybe_undefined1); in GenerateNumbers()
2021 __ CompareRoot(r0, Heap::kUndefinedValueRootIndex); in GenerateNumbers()
2022 __ b(ne, &miss); in GenerateNumbers()
2023 __ JumpIfSmi(r1, &unordered); in GenerateNumbers()
2024 __ CompareObjectType(r1, r2, r2, HEAP_NUMBER_TYPE); in GenerateNumbers()
2025 __ b(ne, &maybe_undefined2); in GenerateNumbers()
2026 __ jmp(&unordered); in GenerateNumbers()
2029 __ bind(&maybe_undefined2); in GenerateNumbers()
2031 __ CompareRoot(r1, Heap::kUndefinedValueRootIndex); in GenerateNumbers()
2032 __ b(eq, &unordered); in GenerateNumbers()
2035 __ bind(&miss); in GenerateNumbers()
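The number path converts both sides to doubles (smis via SmiToDouble, heap numbers by loading their payload), compares once, and materializes the result with three conditional movs; the vs branch catches the unordered case (a NaN) and defers to the generic stub, and the maybe_undefined labels give undefined the same unordered treatment. The decision table as a sketch:

    // Sketch: the -1 / 0 / +1 protocol produced by the conditional movs above.
    int CompareDoubles(double lhs, double rhs) {
      if (lhs == rhs) return 0;   // EQUAL
      if (lhs < rhs) return -1;   // LESS
      if (lhs > rhs) return 1;    // GREATER
      return 1;  // unordered (NaN): the stub jumps to a fallback here instead
    }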
2051 __ JumpIfEitherSmi(left, right, &miss); in GenerateInternalizedStrings()
2054 __ ldr(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2055 __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2056 __ ldrb(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2057 __ ldrb(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2059 __ orr(tmp1, tmp1, Operand(tmp2)); in GenerateInternalizedStrings()
2060 __ tst(tmp1, Operand(kIsNotStringMask | kIsNotInternalizedMask)); in GenerateInternalizedStrings()
2061 __ b(ne, &miss); in GenerateInternalizedStrings()
2064 __ cmp(left, right); in GenerateInternalizedStrings()
2070 __ mov(r0, Operand(Smi::FromInt(EQUAL)), LeaveCC, eq); in GenerateInternalizedStrings()
2071 __ Ret(); in GenerateInternalizedStrings()
2073 __ bind(&miss); in GenerateInternalizedStrings()
2090 __ JumpIfEitherSmi(left, right, &miss); in GenerateUniqueNames()
2094 __ ldr(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateUniqueNames()
2095 __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateUniqueNames()
2096 __ ldrb(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2097 __ ldrb(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2099 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss); in GenerateUniqueNames()
2100 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss); in GenerateUniqueNames()
2103 __ cmp(left, right); in GenerateUniqueNames()
2109 __ mov(r0, Operand(Smi::FromInt(EQUAL)), LeaveCC, eq); in GenerateUniqueNames()
2110 __ Ret(); in GenerateUniqueNames()
2112 __ bind(&miss); in GenerateUniqueNames()
2132 __ JumpIfEitherSmi(left, right, &miss); in GenerateStrings()
2136 __ ldr(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); in GenerateStrings()
2137 __ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); in GenerateStrings()
2138 __ ldrb(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateStrings()
2139 __ ldrb(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateStrings()
2141 __ orr(tmp3, tmp1, tmp2); in GenerateStrings()
2142 __ tst(tmp3, Operand(kIsNotStringMask)); in GenerateStrings()
2143 __ b(ne, &miss); in GenerateStrings()
2146 __ cmp(left, right); in GenerateStrings()
2149 __ mov(r0, Operand(Smi::FromInt(EQUAL)), LeaveCC, eq); in GenerateStrings()
2150 __ Ret(eq); in GenerateStrings()
2160 __ orr(tmp3, tmp1, Operand(tmp2)); in GenerateStrings()
2161 __ tst(tmp3, Operand(kIsNotInternalizedMask)); in GenerateStrings()
2165 __ Ret(eq); in GenerateStrings()
2170 __ JumpIfBothInstanceTypesAreNotSequentialOneByte(tmp1, tmp2, tmp3, tmp4, in GenerateStrings()
2183 __ bind(&runtime); in GenerateStrings()
2187 __ Push(left, right); in GenerateStrings()
2188 __ CallRuntime(Runtime::kStringEqual); in GenerateStrings()
2190 __ LoadRoot(r1, Heap::kTrueValueRootIndex); in GenerateStrings()
2191 __ sub(r0, r0, r1); in GenerateStrings()
2192 __ Ret(); in GenerateStrings()
2194 __ Push(left, right); in GenerateStrings()
2195 __ TailCallRuntime(Runtime::kStringCompare); in GenerateStrings()
2198 __ bind(&miss); in GenerateStrings()
2206 __ and_(r2, r1, Operand(r0)); in GenerateReceivers()
2207 __ JumpIfSmi(r2, &miss); in GenerateReceivers()
2210 __ CompareObjectType(r0, r2, r2, FIRST_JS_RECEIVER_TYPE); in GenerateReceivers()
2211 __ b(lt, &miss); in GenerateReceivers()
2212 __ CompareObjectType(r1, r2, r2, FIRST_JS_RECEIVER_TYPE); in GenerateReceivers()
2213 __ b(lt, &miss); in GenerateReceivers()
2216 __ sub(r0, r0, Operand(r1)); in GenerateReceivers()
2217 __ Ret(); in GenerateReceivers()
2219 __ bind(&miss); in GenerateReceivers()
2227 __ and_(r2, r1, Operand(r0)); in GenerateKnownReceivers()
2228 __ JumpIfSmi(r2, &miss); in GenerateKnownReceivers()
2229 __ GetWeakValue(r4, cell); in GenerateKnownReceivers()
2230 __ ldr(r2, FieldMemOperand(r0, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2231 __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2232 __ cmp(r2, r4); in GenerateKnownReceivers()
2233 __ b(ne, &miss); in GenerateKnownReceivers()
2234 __ cmp(r3, r4); in GenerateKnownReceivers()
2235 __ b(ne, &miss); in GenerateKnownReceivers()
2238 __ sub(r0, r0, Operand(r1)); in GenerateKnownReceivers()
2239 __ Ret(); in GenerateKnownReceivers()
2242 __ mov(r2, Operand(Smi::FromInt(GREATER))); in GenerateKnownReceivers()
2244 __ mov(r2, Operand(Smi::FromInt(LESS))); in GenerateKnownReceivers()
2246 __ Push(r1, r0, r2); in GenerateKnownReceivers()
2247 __ TailCallRuntime(Runtime::kCompare); in GenerateKnownReceivers()
2250 __ bind(&miss); in GenerateKnownReceivers()
2259 __ Push(r1, r0); in GenerateMiss()
2260 __ Push(lr, r1, r0); in GenerateMiss()
2261 __ mov(ip, Operand(Smi::FromInt(op()))); in GenerateMiss()
2262 __ push(ip); in GenerateMiss()
2263 __ CallRuntime(Runtime::kCompareIC_Miss); in GenerateMiss()
2265 __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag)); in GenerateMiss()
2267 __ pop(lr); in GenerateMiss()
2268 __ Pop(r1, r0); in GenerateMiss()
2271 __ Jump(r2); in GenerateMiss()
2278 __ str(lr, MemOperand(sp, 0)); in Generate()
2279 __ blx(ip); // Call the C++ function. in Generate()
2280 __ ldr(pc, MemOperand(sp, 0)); in Generate()
2288 __ Move(ip, target); in GenerateCall()
2289 __ mov(lr, Operand(code, RelocInfo::CODE_TARGET)); in GenerateCall()
2290 __ blx(lr); // Call the stub. in GenerateCall()
2312 __ ldr(index, FieldMemOperand(properties, kCapacityOffset)); in GenerateNegativeLookup()
2313 __ sub(index, index, Operand(1)); in GenerateNegativeLookup()
2314 __ and_(index, index, Operand( in GenerateNegativeLookup()
2319 __ add(index, index, Operand(index, LSL, 1)); // index *= 3. in GenerateNegativeLookup()
2325 __ add(tmp, properties, Operand(index, LSL, 1)); in GenerateNegativeLookup()
2326 __ ldr(entity_name, FieldMemOperand(tmp, kElementsStartOffset)); in GenerateNegativeLookup()
2329 __ LoadRoot(tmp, Heap::kUndefinedValueRootIndex); in GenerateNegativeLookup()
2330 __ cmp(entity_name, tmp); in GenerateNegativeLookup()
2331 __ b(eq, done); in GenerateNegativeLookup()
2334 __ LoadRoot(tmp, Heap::kTheHoleValueRootIndex); in GenerateNegativeLookup()
2337 __ cmp(entity_name, Operand(Handle<Name>(name))); in GenerateNegativeLookup()
2338 __ b(eq, miss); in GenerateNegativeLookup()
2341 __ cmp(entity_name, tmp); in GenerateNegativeLookup()
2342 __ b(eq, &good); in GenerateNegativeLookup()
2345 __ ldr(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset)); in GenerateNegativeLookup()
2346 __ ldrb(entity_name, in GenerateNegativeLookup()
2348 __ JumpIfNotUniqueNameInstanceType(entity_name, miss); in GenerateNegativeLookup()
2349 __ bind(&good); in GenerateNegativeLookup()
2352 __ ldr(properties, in GenerateNegativeLookup()
2360 __ stm(db_w, sp, spill_mask); in GenerateNegativeLookup()
2361 __ ldr(r0, FieldMemOperand(receiver, JSObject::kPropertiesOffset)); in GenerateNegativeLookup()
2362 __ mov(r1, Operand(Handle<Name>(name))); in GenerateNegativeLookup()
2364 __ CallStub(&stub); in GenerateNegativeLookup()
2365 __ cmp(r0, Operand::Zero()); in GenerateNegativeLookup()
2366 __ ldm(ia_w, sp, spill_mask); in GenerateNegativeLookup()
2368 __ b(eq, done); in GenerateNegativeLookup()
2369 __ b(ne, miss); in GenerateNegativeLookup()
2395 __ ldr(mask, FieldMemOperand(dictionary, kCapacityOffset)); in Generate()
2396 __ SmiUntag(mask); in Generate()
2397 __ sub(mask, mask, Operand(1)); in Generate()
2399 __ ldr(hash, FieldMemOperand(key, Name::kHashFieldOffset)); in Generate()
2401 __ LoadRoot(undefined, Heap::kUndefinedValueRootIndex); in Generate()
2412 __ add(index, hash, Operand( in Generate()
2415 __ mov(index, Operand(hash)); in Generate()
2417 __ and_(index, mask, Operand(index, LSR, Name::kHashShift)); in Generate()
2421 __ add(index, index, Operand(index, LSL, 1)); // index *= 3. in Generate()
2424 __ add(index, dictionary, Operand(index, LSL, 2)); in Generate()
2425 __ ldr(entry_key, FieldMemOperand(index, kElementsStartOffset)); in Generate()
2428 __ cmp(entry_key, Operand(undefined)); in Generate()
2429 __ b(eq, &not_in_dictionary); in Generate()
2432 __ cmp(entry_key, Operand(key)); in Generate()
2433 __ b(eq, &in_dictionary); in Generate()
2437 __ ldr(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset)); in Generate()
2438 __ ldrb(entry_key, in Generate()
2440 __ JumpIfNotUniqueNameInstanceType(entry_key, &maybe_in_dictionary); in Generate()
2444 __ bind(&maybe_in_dictionary); in Generate()
2449 __ mov(result, Operand::Zero()); in Generate()
2450 __ Ret(); in Generate()
2453 __ bind(&in_dictionary); in Generate()
2454 __ mov(result, Operand(1)); in Generate()
2455 __ Ret(); in Generate()
2457 __ bind(&not_in_dictionary); in Generate()
2458 __ mov(result, Operand::Zero()); in Generate()
2459 __ Ret(); in Generate()
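The probe loop above is open-addressed lookup in a NameDictionary: capacity is a power of two, so capacity - 1 serves as the mask; each logical entry is a (key, value, details) triple, hence the "index *= 3" via add-with-LSL; and successive probes use quadratic (triangular-number) offsets, which the generator bakes in as per-probe constants. A sketch of the probing scheme, assuming the table always retains an empty (undefined) slot, which the load factor guarantees:

    #include <cstdint>

    const int kEntrySize = 3;  // each entry is (key, value, details)

    // Sketch: quadratic probing over a power-of-two table; returns the slot
    // index of `key`, or -1 when an empty (undefined) slot is hit first.
    int FindEntry(const uint32_t* keys, uint32_t capacity, uint32_t hash,
                  uint32_t key, uint32_t undefined_sentinel) {
      uint32_t mask = capacity - 1;
      uint32_t index = hash & mask;
      for (uint32_t i = 0;; ++i) {
        uint32_t slot = index * kEntrySize;
        if (keys[slot] == undefined_sentinel) return -1;  // not present
        if (keys[slot] == key) return static_cast<int>(slot);
        index = (index + i + 1) & mask;  // cumulative offsets 1, 3, 6, ...
      }
    }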
2490 __ b(&skip_to_incremental_noncompacting); in Generate()
2491 __ b(&skip_to_incremental_compacting); in Generate()
2495 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in Generate()
2498 __ Ret(); in Generate()
2500 __ bind(&skip_to_incremental_noncompacting); in Generate()
2503 __ bind(&skip_to_incremental_compacting); in Generate()
2521 __ ldr(regs_.scratch0(), MemOperand(regs_.address(), 0)); in GenerateIncremental()
2522 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value. in GenerateIncremental()
2526 __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(), in GenerateIncremental()
2535 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in GenerateIncremental()
2538 __ bind(&dont_need_remembered_set); in GenerateIncremental()
2545 __ Ret(); in GenerateIncremental()
2552 __ PrepareCallCFunction(argument_count, regs_.scratch0()); in InformIncrementalMarker()
2557 __ Move(address, regs_.address()); in InformIncrementalMarker()
2558 __ Move(r0, regs_.object()); in InformIncrementalMarker()
2559 __ Move(r1, address); in InformIncrementalMarker()
2560 __ mov(r2, Operand(ExternalReference::isolate_address(isolate()))); in InformIncrementalMarker()
2563 __ CallCFunction( in InformIncrementalMarker()
2580 __ JumpIfBlack(regs_.object(), regs_.scratch0(), regs_.scratch1(), &on_black); in CheckNeedsToInformIncrementalMarker()
2584 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in CheckNeedsToInformIncrementalMarker()
2587 __ Ret(); in CheckNeedsToInformIncrementalMarker()
2590 __ bind(&on_black); in CheckNeedsToInformIncrementalMarker()
2593 __ ldr(regs_.scratch0(), MemOperand(regs_.address(), 0)); in CheckNeedsToInformIncrementalMarker()
2598 __ CheckPageFlag(regs_.scratch0(), // Contains value. in CheckNeedsToInformIncrementalMarker()
2604 __ CheckPageFlag(regs_.object(), in CheckNeedsToInformIncrementalMarker()
2610 __ bind(&ensure_not_white); in CheckNeedsToInformIncrementalMarker()
2615 __ Push(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
2616 __ JumpIfWhite(regs_.scratch0(), // The value. in CheckNeedsToInformIncrementalMarker()
2621 __ Pop(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
2625 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in CheckNeedsToInformIncrementalMarker()
2628 __ Ret(); in CheckNeedsToInformIncrementalMarker()
2631 __ bind(&need_incremental_pop_scratch); in CheckNeedsToInformIncrementalMarker()
2632 __ Pop(regs_.object(), regs_.address()); in CheckNeedsToInformIncrementalMarker()
2634 __ bind(&need_incremental); in CheckNeedsToInformIncrementalMarker()
2642 __ Call(ces.GetCode(), RelocInfo::CODE_TARGET); in Generate()
2645 __ ldr(r1, MemOperand(fp, parameter_count_offset)); in Generate()
2647 __ add(r1, r1, Operand(1)); in Generate()
2650 __ mov(r1, Operand(r1, LSL, kPointerSizeLog2)); in Generate()
2651 __ add(sp, sp, r1); in Generate()
2652 __ Ret(); in Generate()
2661 __ push(lr); in MaybeCallEntryHook()
2662 __ CallStub(&stub); in MaybeCallEntryHook()
2663 __ pop(lr); in MaybeCallEntryHook()
2687 __ stm(db_w, sp, kSavedRegs | lr.bit()); in Generate()
2690 __ sub(r0, lr, Operand(kReturnAddressDistanceFromFunctionStart)); in Generate()
2694 __ add(r1, sp, Operand(kNumSavedRegs * kPointerSize)); in Generate()
2699 __ mov(r5, sp); in Generate()
2701 __ and_(sp, sp, Operand(-frame_alignment)); in Generate()
2707 __ mov(ip, Operand(entry_hook)); in Generate()
2712 __ mov(r2, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
2715 __ mov(ip, Operand(ExternalReference(&dispatcher, in Generate()
2719 __ Call(ip); in Generate()
2723 __ mov(sp, r5); in Generate()
2727 __ ldm(ia_w, sp, kSavedRegs | pc.bit()); in Generate()
2736 __ TailCallStub(&stub); in CreateArrayDispatch()
2742 __ cmp(r3, Operand(kind)); in CreateArrayDispatch()
2744 __ TailCallStub(&stub, eq); in CreateArrayDispatch()
2748 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatch()
2772 __ tst(r3, Operand(1)); in CreateArrayDispatchOneArgument()
2773 __ b(ne, &normal_sequence); in CreateArrayDispatchOneArgument()
2777 __ ldr(r5, MemOperand(sp, 0)); in CreateArrayDispatchOneArgument()
2778 __ cmp(r5, Operand::Zero()); in CreateArrayDispatchOneArgument()
2779 __ b(eq, &normal_sequence); in CreateArrayDispatchOneArgument()
2788 __ TailCallStub(&stub_holey); in CreateArrayDispatchOneArgument()
2790 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
2794 __ TailCallStub(&stub); in CreateArrayDispatchOneArgument()
2798 __ add(r3, r3, Operand(1)); in CreateArrayDispatchOneArgument()
2801 __ ldr(r5, FieldMemOperand(r2, 0)); in CreateArrayDispatchOneArgument()
2802 __ CompareRoot(r5, Heap::kAllocationSiteMapRootIndex); in CreateArrayDispatchOneArgument()
2803 __ Assert(eq, kExpectedAllocationSite); in CreateArrayDispatchOneArgument()
2810 __ ldr(r4, FieldMemOperand(r2, AllocationSite::kTransitionInfoOffset)); in CreateArrayDispatchOneArgument()
2811 __ add(r4, r4, Operand(Smi::FromInt(kFastElementsKindPackedToHoley))); in CreateArrayDispatchOneArgument()
2812 __ str(r4, FieldMemOperand(r2, AllocationSite::kTransitionInfoOffset)); in CreateArrayDispatchOneArgument()
2814 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
2819 __ cmp(r3, Operand(kind)); in CreateArrayDispatchOneArgument()
2821 __ TailCallStub(&stub, eq); in CreateArrayDispatchOneArgument()
2825 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatchOneArgument()
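CreateArrayDispatchOneArgument leans on the layout of the fast ElementsKind values: each holey kind is assumed to sit immediately after its packed counterpart, so the tst against 1 (2772) detects an already-holey kind and the add at 2798 performs the packed-to-holey transition, with the same delta folded into the AllocationSite's transition info as a Smi (2810-2812). A sketch of that enumeration assumption:

    // Assumed kind layout: holey = packed + 1, so the low bit marks holeyness.
    enum ElementsKindModel {
      FAST_SMI_ELEMENTS = 0, FAST_HOLEY_SMI_ELEMENTS = 1,
      FAST_ELEMENTS = 2, FAST_HOLEY_ELEMENTS = 3
    };
    inline ElementsKindModel ToHoley(ElementsKindModel packed) {
      return static_cast<ElementsKindModel>(packed + 1);
    }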
2869 __ tst(r0, r0); in GenerateDispatchToArrayStub()
2870 __ b(ne, &not_zero_case); in GenerateDispatchToArrayStub()
2873 __ bind(&not_zero_case); in GenerateDispatchToArrayStub()
2874 __ cmp(r0, Operand(1)); in GenerateDispatchToArrayStub()
2875 __ b(gt, &not_one_case); in GenerateDispatchToArrayStub()
2878 __ bind(&not_one_case); in GenerateDispatchToArrayStub()
2880 __ TailCallStub(&stub); in GenerateDispatchToArrayStub()
2899 __ ldr(r4, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
2901 __ tst(r4, Operand(kSmiTagMask)); in Generate()
2902 __ Assert(ne, kUnexpectedInitialMapForArrayFunction); in Generate()
2903 __ CompareObjectType(r4, r4, r5, MAP_TYPE); in Generate()
2904 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); in Generate()
2907 __ AssertUndefinedOrAllocationSite(r2, r4); in Generate()
2911 __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); in Generate()
2914 __ cmp(r3, r1); in Generate()
2915 __ b(ne, &subclassing); in Generate()
2919 __ CompareRoot(r2, Heap::kUndefinedValueRootIndex); in Generate()
2920 __ b(eq, &no_info); in Generate()
2922 __ ldr(r3, FieldMemOperand(r2, AllocationSite::kTransitionInfoOffset)); in Generate()
2923 __ SmiUntag(r3); in Generate()
2925 __ and_(r3, r3, Operand(AllocationSite::ElementsKindBits::kMask)); in Generate()
2928 __ bind(&no_info); in Generate()
2931 __ bind(&subclassing); in Generate()
2932 __ str(r1, MemOperand(sp, r0, LSL, kPointerSizeLog2)); in Generate()
2933 __ add(r0, r0, Operand(3)); in Generate()
2934 __ Push(r3, r2); in Generate()
2935 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate())); in Generate()
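In the subclassing path the stub overwrites the receiver slot with the constructor (2932) and pushes new.target and the allocation site, so the count handed to Runtime::kNewArray grows by exactly those three values (2933). Read as arithmetic:

    // The runtime receives the JS arguments plus three extras: the
    // constructor (written over the receiver slot), new.target, and the
    // allocation site.
    inline int RuntimeArgc(int js_argc) { return js_argc + 3; }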
2941 __ cmp(r0, Operand(1)); in GenerateCase()
2944 __ TailCallStub(&stub0, lo); in GenerateCase()
2947 __ TailCallStub(&stubN, hi); in GenerateCase()
2952 __ ldr(r3, MemOperand(sp, 0)); in GenerateCase()
2953 __ cmp(r3, Operand::Zero()); in GenerateCase()
2957 __ TailCallStub(&stub1_holey, ne); in GenerateCase()
2961 __ TailCallStub(&stub1); in GenerateCase()
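The peek at sp[0] in GenerateCase (2952-2953) chooses between the packed and holey one-argument stubs: new Array(n) with nonzero n starts life as n holes, so only a zero length may keep the packed kind. As a predicate, with the Smi argument simplified to an untagged int:

    // new Array(n): any nonzero requested length forces the holey kind,
    // because all n fresh elements start out as holes.
    inline bool NeedsHoleyStub(int requested_length) {
      return requested_length != 0;
    }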
2978 __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
2980 __ tst(r3, Operand(kSmiTagMask)); in Generate()
2981 __ Assert(ne, kUnexpectedInitialMapForArrayFunction); in Generate()
2982 __ CompareObjectType(r3, r3, r4, MAP_TYPE); in Generate()
2983 __ Assert(eq, kUnexpectedInitialMapForArrayFunction); in Generate()
2987 __ ldr(r3, FieldMemOperand(r1, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
2990 __ ldr(r3, FieldMemOperand(r3, Map::kBitField2Offset)); in Generate()
2992 __ DecodeField<Map::ElementsKindBits>(r3); in Generate()
2996 __ cmp(r3, Operand(FAST_ELEMENTS)); in Generate()
2997 __ b(eq, &done); in Generate()
2998 __ cmp(r3, Operand(FAST_HOLEY_ELEMENTS)); in Generate()
2999 __ Assert(eq, in Generate()
3001 __ bind(&done); in Generate()
3005 __ cmp(r3, Operand(FAST_ELEMENTS)); in Generate()
3006 __ b(eq, &fast_elements_case); in Generate()
3009 __ bind(&fast_elements_case); in Generate()
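In InternalArrayConstructorStub, lines 2987-2992 load the constructor's initial map and decode the elements kind out of the map's second bit field; DecodeField is, generically, a shift and a mask. A standalone model, with the field's shift and width left as parameters rather than V8's actual values:

    // Generic bitfield decode: shift the field down, then mask to its width.
    inline unsigned DecodeBits(unsigned bits, unsigned shift, unsigned width) {
      return (bits >> shift) & ((1u << width) - 1u);
    }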
3042 __ mov(r9, Operand(ExternalReference::is_profiling_address(isolate))); in CallApiFunctionAndReturn()
3043 __ ldrb(r9, MemOperand(r9, 0)); in CallApiFunctionAndReturn()
3044 __ cmp(r9, Operand(0)); in CallApiFunctionAndReturn()
3045 __ b(eq, &profiler_disabled); in CallApiFunctionAndReturn()
3048 __ mov(r3, Operand(thunk_ref)); in CallApiFunctionAndReturn()
3049 __ jmp(&end_profiler_check); in CallApiFunctionAndReturn()
3051 __ bind(&profiler_disabled); in CallApiFunctionAndReturn()
3052 __ Move(r3, function_address); in CallApiFunctionAndReturn()
3053 __ bind(&end_profiler_check); in CallApiFunctionAndReturn()
3056 __ mov(r9, Operand(next_address)); in CallApiFunctionAndReturn()
3057 __ ldr(r4, MemOperand(r9, kNextOffset)); in CallApiFunctionAndReturn()
3058 __ ldr(r5, MemOperand(r9, kLimitOffset)); in CallApiFunctionAndReturn()
3059 __ ldr(r6, MemOperand(r9, kLevelOffset)); in CallApiFunctionAndReturn()
3060 __ add(r6, r6, Operand(1)); in CallApiFunctionAndReturn()
3061 __ str(r6, MemOperand(r9, kLevelOffset)); in CallApiFunctionAndReturn()
3065 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
3066 __ PrepareCallCFunction(1, r0); in CallApiFunctionAndReturn()
3067 __ mov(r0, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
3068 __ CallCFunction(ExternalReference::log_enter_external_function(isolate), in CallApiFunctionAndReturn()
3070 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
3081 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
3082 __ PrepareCallCFunction(1, r0); in CallApiFunctionAndReturn()
3083 __ mov(r0, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
3084 __ CallCFunction(ExternalReference::log_leave_external_function(isolate), in CallApiFunctionAndReturn()
3086 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
3095 __ ldr(r0, return_value_operand); in CallApiFunctionAndReturn()
3096 __ bind(&return_value_loaded); in CallApiFunctionAndReturn()
3099 __ str(r4, MemOperand(r9, kNextOffset)); in CallApiFunctionAndReturn()
3100 if (__ emit_debug_code()) { in CallApiFunctionAndReturn()
3101 __ ldr(r1, MemOperand(r9, kLevelOffset)); in CallApiFunctionAndReturn()
3102 __ cmp(r1, r6); in CallApiFunctionAndReturn()
3103 __ Check(eq, kUnexpectedLevelAfterReturnFromApiCall); in CallApiFunctionAndReturn()
3105 __ sub(r6, r6, Operand(1)); in CallApiFunctionAndReturn()
3106 __ str(r6, MemOperand(r9, kLevelOffset)); in CallApiFunctionAndReturn()
3107 __ ldr(ip, MemOperand(r9, kLimitOffset)); in CallApiFunctionAndReturn()
3108 __ cmp(r5, ip); in CallApiFunctionAndReturn()
3109 __ b(ne, &delete_allocated_handles); in CallApiFunctionAndReturn()
3112 __ bind(&leave_exit_frame); in CallApiFunctionAndReturn()
3115 __ ldr(cp, *context_restore_operand); in CallApiFunctionAndReturn()
3119 __ ldr(r4, *stack_space_operand); in CallApiFunctionAndReturn()
3121 __ mov(r4, Operand(stack_space)); in CallApiFunctionAndReturn()
3123 __ LeaveExitFrame(false, r4, !restore_context, stack_space_operand != NULL); in CallApiFunctionAndReturn()
3126 __ LoadRoot(r4, Heap::kTheHoleValueRootIndex); in CallApiFunctionAndReturn()
3127 __ mov(ip, Operand(ExternalReference::scheduled_exception_address(isolate))); in CallApiFunctionAndReturn()
3128 __ ldr(r5, MemOperand(ip)); in CallApiFunctionAndReturn()
3129 __ cmp(r4, r5); in CallApiFunctionAndReturn()
3130 __ b(ne, &promote_scheduled_exception); in CallApiFunctionAndReturn()
3132 __ mov(pc, lr); in CallApiFunctionAndReturn()
3135 __ bind(&promote_scheduled_exception); in CallApiFunctionAndReturn()
3136 __ TailCallRuntime(Runtime::kPromoteScheduledException); in CallApiFunctionAndReturn()
3139 __ bind(&delete_allocated_handles); in CallApiFunctionAndReturn()
3140 __ str(r5, MemOperand(r9, kLimitOffset)); in CallApiFunctionAndReturn()
3141 __ mov(r4, r0); in CallApiFunctionAndReturn()
3142 __ PrepareCallCFunction(1, r5); in CallApiFunctionAndReturn()
3143 __ mov(r0, Operand(ExternalReference::isolate_address(isolate))); in CallApiFunctionAndReturn()
3144 __ CallCFunction(ExternalReference::delete_handle_scope_extensions(isolate), in CallApiFunctionAndReturn()
3146 __ mov(r0, r4); in CallApiFunctionAndReturn()
3147 __ jmp(&leave_exit_frame); in CallApiFunctionAndReturn()
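Most of CallApiFunctionAndReturn is HandleScope bookkeeping: next, limit, and level are loaded before the call (3057-3059), level is bumped (3060-3061), and on return next is restored, level is dropped, and a changed limit diverts to the path that deletes the callee's extra handle blocks (3139-3146). A minimal C++ model of that protocol, with hypothetical field names:

    // HandleScope bookkeeping around an API call (names hypothetical).
    struct HandleScopeData {
      void** next;
      void** limit;
      int level;
    };

    template <typename Fn>
    void* CallApiModel(HandleScopeData* hs, Fn api_call) {
      void** saved_next = hs->next;    // ldr r4 (3057)
      void** saved_limit = hs->limit;  // ldr r5 (3058)
      hs->level++;                     // add/str (3060-3061)
      void* result = api_call();
      hs->next = saved_next;           // str r4 (3099)
      hs->level--;                     // sub/str (3105-3106)
      if (hs->limit != saved_limit) {  // cmp r5, ip (3108)
        hs->limit = saved_limit;       // str r5 (3140); the real stub then
                                       // calls the delete-extensions helper.
      }
      return result;
    }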
3183 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate()
3186 __ push(context); in Generate()
3189 __ ldr(context, FieldMemOperand(callee, JSFunction::kContextOffset)); in Generate()
3193 __ push(callee); in Generate()
3196 __ push(call_data); in Generate()
3200 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); in Generate()
3203 __ push(scratch); in Generate()
3205 __ push(scratch); in Generate()
3207 __ mov(scratch, Operand(ExternalReference::isolate_address(masm->isolate()))); in Generate()
3208 __ push(scratch); in Generate()
3210 __ push(holder); in Generate()
3213 __ mov(scratch, sp); in Generate()
3220 __ EnterExitFrame(false, kApiStackSpace); in Generate()
3225 __ add(r0, sp, Operand(1 * kPointerSize)); in Generate()
3227 __ str(scratch, MemOperand(r0, 0 * kPointerSize)); in Generate()
3229 __ add(ip, scratch, Operand((FCA::kArgsLength - 1 + argc()) * kPointerSize)); in Generate()
3230 __ str(ip, MemOperand(r0, 1 * kPointerSize)); in Generate()
3232 __ mov(ip, Operand(argc())); in Generate()
3233 __ str(ip, MemOperand(r0, 2 * kPointerSize)); in Generate()
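The three stores at 3227-3233 fill in the words a v8::FunctionCallbackInfo reads next to the exit frame: a pointer to the implicit-arguments block just pushed, a pointer to the last explicit argument, and the argument count. The address arithmetic for the middle word, with kArgsLength standing for the size of the implicit block (the layout is inferred from the pushes above, not quoted from the header):

    // values = implicit_args + (kArgsLength - 1 + argc) slots.
    inline void** ValuesPointer(void** implicit_args, int k_args_length, int argc) {
      return implicit_args + (k_args_length - 1 + argc);
    }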
3281 __ push(receiver); in Generate()
3283 __ ldr(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset)); in Generate()
3284 __ push(scratch); in Generate()
3285 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); in Generate()
3286 __ Push(scratch, scratch); in Generate()
3287 __ mov(scratch, Operand(ExternalReference::isolate_address(isolate()))); in Generate()
3288 __ Push(scratch, holder); in Generate()
3289 __ Push(Smi::kZero); // should_throw_on_error -> false in Generate()
3290 __ ldr(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset)); in Generate()
3291 __ push(scratch); in Generate()
3296 __ mov(r0, sp); // r0 = Handle<Name> in Generate()
3297 __ add(r1, r0, Operand(1 * kPointerSize)); // r1 = v8::PCI::args_ in Generate()
3301 __ EnterExitFrame(false, kApiStackSpace); in Generate()
3305 __ str(r1, MemOperand(sp, 1 * kPointerSize)); in Generate()
3306 __ add(r1, sp, Operand(1 * kPointerSize)); // r1 = v8::PropertyCallbackInfo& in Generate()
3311 __ ldr(scratch, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset)); in Generate()
3312 __ ldr(api_function_address, in Generate()
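The mov of sp into r0 at 3296 works because a v8::Handle is essentially a pointer to a slot holding the object pointer, and the property name was pushed last (3290-3291), so it sits exactly at sp[0]; the PropertyCallbackInfo arguments then begin one word higher, which is what the add at 3297 computes. A toy model of that handle representation, an assumption about the API's internals kept deliberately minimal:

    // A handle as "pointer to a slot": dereferencing reads the slot.
    template <typename T>
    struct HandleModel {
      T** location;                      // here: the stack slot at sp
      T* operator*() const { return *location; }
    };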
3322 #undef __