
Lines Matching refs:__

39 #define __ ACCESS_MASM(masm)  macro
44 __ SmiTest(rax); in Generate()
45 __ j(not_zero, &check_heap_number, Label::kNear); in Generate()
46 __ Ret(); in Generate()
48 __ bind(&check_heap_number); in Generate()
49 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), in Generate()
51 __ j(not_equal, &call_builtin, Label::kNear); in Generate()
52 __ Ret(); in Generate()
54 __ bind(&call_builtin); in Generate()
55 __ pop(rcx); // Pop return address. in Generate()
56 __ push(rax); in Generate()
57 __ push(rcx); // Push return address. in Generate()
58 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION); in Generate()
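
Lines 44-58 above are the ToNumber fast path: a value that is already a smi or a heap number is returned unchanged, and anything else falls through to the TO_NUMBER builtin. A minimal sketch of the same control flow, with an illustrative stand-in type and helper (not V8 API):

    // Sketch only; Value and CallToNumberBuiltin are invented stand-ins.
    struct Value { bool is_smi; bool is_heap_number; };
    Value CallToNumberBuiltin(Value v);   // assumed slow path (TO_NUMBER builtin)
    Value ToNumberFastPath(Value v) {
      if (v.is_smi) return v;             // 44-46: smis are already numbers
      if (v.is_heap_number) return v;     // 48-52: map check against heap-number map
      return CallToNumberBuiltin(v);      // 54-58: push arg, invoke builtin
    }
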
66 __ AllocateInNewSpace(JSFunction::kSize, rax, rbx, rcx, &gc, TAG_OBJECT); in Generate()
69 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); in Generate()
77 __ movq(rcx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); in Generate()
78 __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset)); in Generate()
79 __ movq(rcx, Operand(rcx, Context::SlotOffset(map_index))); in Generate()
80 __ movq(FieldOperand(rax, JSObject::kMapOffset), rcx); in Generate()
84 __ LoadRoot(rbx, Heap::kEmptyFixedArrayRootIndex); in Generate()
85 __ LoadRoot(rcx, Heap::kTheHoleValueRootIndex); in Generate()
86 __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex); in Generate()
87 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), rbx); in Generate()
88 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rbx); in Generate()
89 __ movq(FieldOperand(rax, JSFunction::kPrototypeOrInitialMapOffset), rcx); in Generate()
90 __ movq(FieldOperand(rax, JSFunction::kSharedFunctionInfoOffset), rdx); in Generate()
91 __ movq(FieldOperand(rax, JSFunction::kContextOffset), rsi); in Generate()
92 __ movq(FieldOperand(rax, JSFunction::kLiteralsOffset), rbx); in Generate()
93 __ movq(FieldOperand(rax, JSFunction::kNextFunctionLinkOffset), rdi); in Generate()
97 __ movq(rdx, FieldOperand(rdx, SharedFunctionInfo::kCodeOffset)); in Generate()
98 __ lea(rdx, FieldOperand(rdx, Code::kHeaderSize)); in Generate()
99 __ movq(FieldOperand(rax, JSFunction::kCodeEntryOffset), rdx); in Generate()
103 __ ret(1 * kPointerSize); in Generate()
106 __ bind(&gc); in Generate()
107 __ pop(rcx); // Temporarily remove return address. in Generate()
108 __ pop(rdx); in Generate()
109 __ push(rsi); in Generate()
110 __ push(rdx); in Generate()
111 __ PushRoot(Heap::kFalseValueRootIndex); in Generate()
112 __ push(rcx); // Restore return address. in Generate()
113 __ TailCallRuntime(Runtime::kNewClosure, 3, 1); in Generate()
121 __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize, in Generate()
125 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); in Generate()
128 __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex); in Generate()
129 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); in Generate()
130 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); in Generate()
133 __ Set(rbx, 0); // Set to NULL. in Generate()
134 __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx); in Generate()
135 __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rsi); in Generate()
136 __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx); in Generate()
139 __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); in Generate()
140 __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_INDEX)), rbx); in Generate()
143 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); in Generate()
145 __ movq(Operand(rax, Context::SlotOffset(i)), rbx); in Generate()
149 __ movq(rsi, rax); in Generate()
150 __ ret(1 * kPointerSize); in Generate()
153 __ bind(&gc); in Generate()
154 __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1); in Generate()
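
FastNewContextStub (lines 121-154) allocates a function context as a FixedArray in new space, stamps the function-context map and length, wires the closure, previous-context, extension and global slots, and fills the remaining slots with undefined; on allocation failure it tail-calls Runtime::kNewFunctionContext. A hedged sketch of the slot initialization, with illustrative index constants (the real ones are the Context::*_INDEX values):

    // Illustrative only; index values here are stand-ins, not V8's.
    enum { kClosure = 0, kPrevious = 1, kExtension = 2, kGlobal = 3, kMinSlots = 4 };
    void InitFunctionContext(void** slots, int length, void* closure,
                             void* previous, void* global, void* undefined) {
      slots[kClosure] = closure;       // 134
      slots[kPrevious] = previous;     // 135
      slots[kExtension] = 0;           // 133, 136: rbx was set to 0 (NULL)
      slots[kGlobal] = global;         // 139-140: copied from the old context
      for (int i = kMinSlots; i < length; i++) slots[i] = undefined;  // 143-145
    }
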
167 __ AllocateInNewSpace(FixedArray::SizeFor(length), in Generate()
171 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); in Generate()
174 __ movq(rbx, Operand(rsp, 2 * kPointerSize)); in Generate()
177 __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex); in Generate()
178 __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister); in Generate()
179 __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length)); in Generate()
186 __ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear); in Generate()
189 __ cmpq(rcx, Immediate(0)); in Generate()
190 __ Assert(equal, message); in Generate()
192 __ movq(rcx, GlobalObjectOperand()); in Generate()
193 __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalContextOffset)); in Generate()
194 __ movq(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX)); in Generate()
195 __ bind(&after_sentinel); in Generate()
198 __ movq(ContextOperand(rax, Context::CLOSURE_INDEX), rcx); in Generate()
199 __ movq(ContextOperand(rax, Context::PREVIOUS_INDEX), rsi); in Generate()
200 __ movq(ContextOperand(rax, Context::EXTENSION_INDEX), rbx); in Generate()
203 __ movq(rbx, ContextOperand(rsi, Context::GLOBAL_INDEX)); in Generate()
204 __ movq(ContextOperand(rax, Context::GLOBAL_INDEX), rbx); in Generate()
207 __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex); in Generate()
209 __ movq(ContextOperand(rax, i + Context::MIN_CONTEXT_SLOTS), rbx); in Generate()
213 __ movq(rsi, rax); in Generate()
214 __ ret(2 * kPointerSize); in Generate()
217 __ bind(&gc); in Generate()
218 __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1); in Generate()
243 __ AllocateInNewSpace(size, rax, rbx, rdx, fail, TAG_OBJECT); in GenerateFastCloneShallowArrayCommon()
248 __ movq(rbx, FieldOperand(rcx, i)); in GenerateFastCloneShallowArrayCommon()
249 __ movq(FieldOperand(rax, i), rbx); in GenerateFastCloneShallowArrayCommon()
256 __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset)); in GenerateFastCloneShallowArrayCommon()
257 __ lea(rdx, Operand(rax, JSArray::kSize)); in GenerateFastCloneShallowArrayCommon()
258 __ movq(FieldOperand(rax, JSArray::kElementsOffset), rdx); in GenerateFastCloneShallowArrayCommon()
263 __ movq(rbx, FieldOperand(rcx, i)); in GenerateFastCloneShallowArrayCommon()
264 __ movq(FieldOperand(rdx, i), rbx); in GenerateFastCloneShallowArrayCommon()
270 __ movq(rbx, FieldOperand(rcx, i)); in GenerateFastCloneShallowArrayCommon()
271 __ movq(FieldOperand(rdx, i), rbx); in GenerateFastCloneShallowArrayCommon()
274 __ movsd(xmm0, FieldOperand(rcx, i)); in GenerateFastCloneShallowArrayCommon()
275 __ movsd(FieldOperand(rdx, i), xmm0); in GenerateFastCloneShallowArrayCommon()
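
GenerateFastCloneShallowArrayCommon (lines 243-275) performs the clone as raw word copies: first the JSArray header fields, then, when the array has elements, a second pass over the backing store placed immediately after the new object, using pointer-sized moves for tagged elements and movsd for double elements. The raw copies need no write barriers because the clone is freshly allocated in new space. Conceptually (an illustrative sketch, not V8 code):

    #include <string.h>
    // Shallow clone = bitwise copy of header plus backing store.
    void ShallowCloneArray(char* clone, const char* boilerplate,
                           size_t header_size, const char* elements,
                           size_t elements_size) {
      memcpy(clone, boilerplate, header_size);          // 248-249: JSArray fields
      char* clone_elements = clone + header_size;       // 257-258: elements follow header
      memcpy(clone_elements, elements, elements_size);  // 263-275: unrolled in the stub
    }
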
292 __ movq(rcx, Operand(rsp, 3 * kPointerSize)); in Generate()
293 __ movq(rax, Operand(rsp, 2 * kPointerSize)); in Generate()
295 __ movq(rcx, in Generate()
297 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex); in Generate()
299 __ j(equal, &slow_case); in Generate()
306 __ movq(rbx, FieldOperand(rcx, JSArray::kElementsOffset)); in Generate()
307 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), in Generate()
309 __ j(not_equal, &check_fast_elements); in Generate()
312 __ ret(3 * kPointerSize); in Generate()
314 __ bind(&check_fast_elements); in Generate()
315 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), in Generate()
317 __ j(not_equal, &double_elements); in Generate()
320 __ ret(3 * kPointerSize); in Generate()
322 __ bind(&double_elements); in Generate()
341 __ push(rcx); in Generate()
342 __ movq(rcx, FieldOperand(rcx, JSArray::kElementsOffset)); in Generate()
343 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), in Generate()
345 __ Assert(equal, message); in Generate()
346 __ pop(rcx); in Generate()
350 __ ret(3 * kPointerSize); in Generate()
352 __ bind(&slow_case); in Generate()
353 __ TailCallRuntime(Runtime::kCreateArrayLiteralShallow, 3, 1); in Generate()
368 __ movq(rcx, Operand(rsp, 4 * kPointerSize)); in Generate()
369 __ movq(rax, Operand(rsp, 3 * kPointerSize)); in Generate()
371 __ movq(rcx, in Generate()
373 __ CompareRoot(rcx, Heap::kUndefinedValueRootIndex); in Generate()
374 __ j(equal, &slow_case); in Generate()
379 __ movq(rax, FieldOperand(rcx, HeapObject::kMapOffset)); in Generate()
380 __ movzxbq(rax, FieldOperand(rax, Map::kInstanceSizeOffset)); in Generate()
381 __ cmpq(rax, Immediate(size >> kPointerSizeLog2)); in Generate()
382 __ j(not_equal, &slow_case); in Generate()
386 __ AllocateInNewSpace(size, rax, rbx, rdx, &slow_case, TAG_OBJECT); in Generate()
388 __ movq(rbx, FieldOperand(rcx, i)); in Generate()
389 __ movq(FieldOperand(rax, i), rbx); in Generate()
393 __ ret(4 * kPointerSize); in Generate()
395 __ bind(&slow_case); in Generate()
396 __ TailCallRuntime(Runtime::kCreateObjectLiteralShallow, 4, 1); in Generate()
410 __ movq(argument, Operand(rsp, 1 * kPointerSize)); in Generate()
426 __ JumpIfNotSmi(argument, &not_smi, Label::kNear); in Generate()
429 __ movq(tos_, argument); in Generate()
431 __ ret(1 * kPointerSize); in Generate()
432 __ bind(&not_smi); in Generate()
435 __ JumpIfSmi(argument, &patch, Label::kNear); in Generate()
439 __ movq(map, FieldOperand(argument, HeapObject::kMapOffset)); in Generate()
442 __ testb(FieldOperand(map, Map::kBitFieldOffset), in Generate()
446 __ j(zero, &not_undetectable, Label::kNear); in Generate()
447 __ Set(tos_, 0); in Generate()
448 __ ret(1 * kPointerSize); in Generate()
449 __ bind(&not_undetectable); in Generate()
456 __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE); in Generate()
457 __ j(below, &not_js_object, Label::kNear); in Generate()
460 __ Set(tos_, 1); in Generate()
462 __ ret(1 * kPointerSize); in Generate()
463 __ bind(&not_js_object); in Generate()
469 __ CmpInstanceType(map, FIRST_NONSTRING_TYPE); in Generate()
470 __ j(above_equal, &not_string, Label::kNear); in Generate()
471 __ movq(tos_, FieldOperand(argument, String::kLengthOffset)); in Generate()
472 __ ret(1 * kPointerSize); // the string length is OK as the return value in Generate()
473 __ bind(&not_string); in Generate()
479 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); in Generate()
480 __ j(not_equal, &not_heap_number, Label::kNear); in Generate()
481 __ xorps(xmm0, xmm0); in Generate()
482 __ ucomisd(xmm0, FieldOperand(argument, HeapNumber::kValueOffset)); in Generate()
483 __ j(zero, &false_result, Label::kNear); in Generate()
486 __ Set(tos_, 1); in Generate()
488 __ ret(1 * kPointerSize); in Generate()
489 __ bind(&false_result); in Generate()
490 __ Set(tos_, 0); in Generate()
491 __ ret(1 * kPointerSize); in Generate()
492 __ bind(&not_heap_number); in Generate()
495 __ bind(&patch); in Generate()
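
ToBooleanStub::Generate (lines 410-495) encodes the ES5 ToBoolean ladder: non-zero smis are true, undetectable objects are false, spec objects are true, strings are true iff non-empty, and heap numbers are false exactly for +-0 and NaN (the ucomisd/j(zero) pair catches both, since ucomisd sets the zero flag for equal and for unordered operands). A hedged sketch with invented predicate names:

    struct Value;  // illustrative stand-in for a tagged V8 value
    bool IsSmi(const Value*);          int SmiValue(const Value*);
    bool IsUndetectable(const Value*); bool IsSpecObject(const Value*);
    bool IsString(const Value*);       int StringLength(const Value*);
    bool IsHeapNumber(const Value*);   double HeapNumberValue(const Value*);
    bool ToBooleanSketch(const Value* v) {
      if (IsSmi(v)) return SmiValue(v) != 0;         // 426-431
      if (IsUndetectable(v)) return false;           // 442-448
      if (IsSpecObject(v)) return true;              // 456-462
      if (IsString(v)) return StringLength(v) != 0;  // 469-472
      if (IsHeapNumber(v)) {
        double d = HeapNumberValue(v);
        return d == d && d != 0.0;                   // 481-491: NaN and +-0 are false
      }
      return false;  // 495: the real stub patches itself for unseen types
    }
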
501 __ PushCallerSaved(save_doubles_); in Generate()
503 __ PrepareCallCFunction(argument_count); in Generate()
505 __ LoadAddress(rcx, ExternalReference::isolate_address()); in Generate()
507 __ LoadAddress(rdi, ExternalReference::isolate_address()); in Generate()
511 __ CallCFunction( in Generate()
514 __ PopCallerSaved(save_doubles_); in Generate()
515 __ ret(0); in Generate()
527 __ CompareRoot(argument, value); in CheckOddball()
528 __ j(not_equal, &different_value, Label::kNear); in CheckOddball()
531 __ Set(tos_, 0); in CheckOddball()
535 __ Set(tos_, 1); in CheckOddball()
537 __ ret(1 * kPointerSize); in CheckOddball()
538 __ bind(&different_value); in CheckOddball()
544 __ pop(rcx); // Get return address, operand is now on top of stack. in GenerateTypeTransition()
545 __ Push(Smi::FromInt(tos_.code())); in GenerateTypeTransition()
546 __ Push(Smi::FromInt(types_.ToByte())); in GenerateTypeTransition()
547 __ push(rcx); // Push return address. in GenerateTypeTransition()
550 __ TailCallExternalReference( in GenerateTypeTransition()
612 __ movq(double_value, FieldOperand(source, HeapNumber::kValueOffset)); in IntegerConvert()
614 __ xorl(result, result); in IntegerConvert()
615 __ movq(xmm0, double_value); // Save copy in xmm0 in case we need it there. in IntegerConvert()
618 __ lea(double_exponent, Operand(double_value, double_value, times_1, 0)); in IntegerConvert()
619 __ shr(double_exponent, Immediate(64 - HeapNumber::kExponentBits)); in IntegerConvert()
620 __ subl(double_exponent, Immediate(HeapNumber::kExponentBias)); in IntegerConvert()
622 __ cmpl(double_exponent, Immediate(63)); in IntegerConvert()
623 __ j(above_equal, &exponent_63_plus, Label::kNear); in IntegerConvert()
625 __ cvttsd2siq(result, xmm0); in IntegerConvert()
626 __ jmp(&done, Label::kNear); in IntegerConvert()
628 __ bind(&exponent_63_plus); in IntegerConvert()
630 __ cmpl(double_exponent, Immediate(83)); in IntegerConvert()
633 __ j(above, &done, Label::kNear); in IntegerConvert()
640 __ addq(double_value, double_value); // Move sign bit to carry. in IntegerConvert()
641 __ sbbl(result, result); // And convert carry to -1 in result register. in IntegerConvert()
643 __ addl(double_value, result); in IntegerConvert()
648 __ xorl(double_value, result); in IntegerConvert()
652 __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1)); in IntegerConvert()
653 __ shll_cl(double_value); in IntegerConvert()
654 __ movl(result, double_value); in IntegerConvert()
657 __ xorl(result, double_value); in IntegerConvert()
658 __ leal(rcx, Operand(double_exponent, -HeapNumber::kMantissaBits - 1)); in IntegerConvert()
659 __ shll_cl(result); in IntegerConvert()
662 __ bind(&done); in IntegerConvert()
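
IntegerConvert (lines 612-662) is ToInt32 on raw IEEE 754 bits: extract the biased exponent; below 63 a single cvttsd2siq truncation is exact; between 63 and 83 the mantissa is shifted into the low 32 bits by hand, with a sign fix-up via the add/sbb/xor trick; above 83 every bit of the low 32-bit word is zero, so the answer is 0. A portable C sketch of the same arithmetic, assuming IEEE 754 doubles:

    #include <stdint.h>
    #include <string.h>
    // Sketch of ToInt32, i.e. truncation modulo 2^32 (illustrative).
    int32_t ToInt32Bits(double d) {
      uint64_t bits;
      memcpy(&bits, &d, sizeof bits);
      int exponent = (int)((bits >> 52) & 0x7FF) - 1023;     // 618-620
      if (exponent < 63) return (int32_t)(int64_t)d;         // 622-625: cvttsd2siq
      if (exponent > 83) return 0;                           // 630-633: low 32 bits gone
      uint64_t mantissa = (bits & ((1ULL << 52) - 1)) | (1ULL << 52);
      uint32_t magnitude = (uint32_t)(mantissa << (exponent - 52));  // 652-653
      uint32_t sign = (uint32_t)-(int32_t)(bits >> 63);      // 640-641: 0 or 0xFFFFFFFF
      return (int32_t)((magnitude ^ sign) - sign);           // 643-657: negate if signed
    }
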
685 __ pop(rcx); // Save return address. in GenerateTypeTransition()
687 __ push(rax); // the operand in GenerateTypeTransition()
688 __ Push(Smi::FromInt(op_)); in GenerateTypeTransition()
689 __ Push(Smi::FromInt(mode_)); in GenerateTypeTransition()
690 __ Push(Smi::FromInt(operand_type_)); in GenerateTypeTransition()
692 __ push(rcx); // Push return address. in GenerateTypeTransition()
696 __ TailCallExternalReference( in GenerateTypeTransition()
719 __ bind(&slow); in GenerateSmiStubSub()
727 __ bind(&non_smi); in GenerateSmiStubBitNot()
738 __ JumpIfNotSmi(rax, non_smi, non_smi_near); in GenerateSmiCodeSub()
739 __ SmiNeg(rax, rax, &done, Label::kNear); in GenerateSmiCodeSub()
740 __ jmp(slow, slow_near); in GenerateSmiCodeSub()
741 __ bind(&done); in GenerateSmiCodeSub()
742 __ ret(0); in GenerateSmiCodeSub()
749 __ JumpIfNotSmi(rax, non_smi, non_smi_near); in GenerateSmiCodeBitNot()
750 __ SmiNot(rax, rax); in GenerateSmiCodeBitNot()
751 __ ret(0); in GenerateSmiCodeBitNot()
773 __ bind(&non_smi); in GenerateHeapNumberStubSub()
775 __ bind(&slow); in GenerateHeapNumberStubSub()
777 __ bind(&call_builtin); in GenerateHeapNumberStubSub()
786 __ bind(&non_smi); in GenerateHeapNumberStubBitNot()
788 __ bind(&slow); in GenerateHeapNumberStubBitNot()
796 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), in GenerateHeapNumberCodeSub()
798 __ j(not_equal, slow); in GenerateHeapNumberCodeSub()
802 __ Set(kScratchRegister, 0x01); in GenerateHeapNumberCodeSub()
803 __ shl(kScratchRegister, Immediate(63)); in GenerateHeapNumberCodeSub()
804 __ xor_(FieldOperand(rax, HeapNumber::kValueOffset), kScratchRegister); in GenerateHeapNumberCodeSub()
809 __ AllocateHeapNumber(rcx, rbx, &slow_allocate_heapnumber); in GenerateHeapNumberCodeSub()
810 __ jmp(&heapnumber_allocated); in GenerateHeapNumberCodeSub()
812 __ bind(&slow_allocate_heapnumber); in GenerateHeapNumberCodeSub()
815 __ push(rax); in GenerateHeapNumberCodeSub()
816 __ CallRuntime(Runtime::kNumberAlloc, 0); in GenerateHeapNumberCodeSub()
817 __ movq(rcx, rax); in GenerateHeapNumberCodeSub()
818 __ pop(rax); in GenerateHeapNumberCodeSub()
820 __ bind(&heapnumber_allocated); in GenerateHeapNumberCodeSub()
824 __ movq(rdx, FieldOperand(rax, HeapNumber::kValueOffset)); in GenerateHeapNumberCodeSub()
825 __ Set(kScratchRegister, 0x01); in GenerateHeapNumberCodeSub()
826 __ shl(kScratchRegister, Immediate(63)); in GenerateHeapNumberCodeSub()
827 __ xor_(rdx, kScratchRegister); // Flip sign. in GenerateHeapNumberCodeSub()
828 __ movq(FieldOperand(rcx, HeapNumber::kValueOffset), rdx); in GenerateHeapNumberCodeSub()
829 __ movq(rax, rcx); in GenerateHeapNumberCodeSub()
831 __ ret(0); in GenerateHeapNumberCodeSub()
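
GenerateHeapNumberCodeSub negates a double with one bit operation: XOR with 1 << 63 flips the IEEE 754 sign bit, done in place at lines 802-804 when overwriting is allowed, or into a freshly allocated HeapNumber at 809-829. A small C sketch of the bit flip, assuming IEEE 754 layout:

    #include <stdint.h>
    #include <string.h>
    // Negation by sign-bit flip; also correct for +-0, NaN and infinities.
    double NegateByBitFlip(double d) {
      uint64_t bits;
      memcpy(&bits, &d, sizeof bits);
      bits ^= 1ULL << 63;              // 802-803 / 825-827: Set(..., 1); shl(..., 63)
      memcpy(&d, &bits, sizeof bits);
      return d;
    }
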
838 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset), in GenerateHeapNumberCodeBitNot()
840 __ j(not_equal, slow); in GenerateHeapNumberCodeBitNot()
846 __ notl(rax); in GenerateHeapNumberCodeBitNot()
847 __ Integer32ToSmi(rax, rax); in GenerateHeapNumberCodeBitNot()
848 __ ret(0); in GenerateHeapNumberCodeBitNot()
870 __ bind(&non_smi); in GenerateGenericStubSub()
872 __ bind(&slow); in GenerateGenericStubSub()
880 __ bind(&non_smi); in GenerateGenericStubBitNot()
882 __ bind(&slow); in GenerateGenericStubBitNot()
889 __ pop(rcx); // pop return address in GenerateGenericCodeFallback()
890 __ push(rax); in GenerateGenericCodeFallback()
891 __ push(rcx); // push return address in GenerateGenericCodeFallback()
894 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION); in GenerateGenericCodeFallback()
897 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION); in GenerateGenericCodeFallback()
920 __ pop(rcx); // Save return address. in GenerateTypeTransition()
921 __ push(rdx); in GenerateTypeTransition()
922 __ push(rax); in GenerateTypeTransition()
926 __ Push(Smi::FromInt(MinorKey())); in GenerateTypeTransition()
927 __ Push(Smi::FromInt(op_)); in GenerateTypeTransition()
928 __ Push(Smi::FromInt(operands_type_)); in GenerateTypeTransition()
930 __ push(rcx); // Push return address. in GenerateTypeTransition()
934 __ TailCallExternalReference( in GenerateTypeTransition()
1020 __ JumpIfNotBothSmi(left, right, &not_smis); in GenerateSmiCode()
1024 __ bind(&smi_values); in GenerateSmiCode()
1030 __ SmiAdd(right, right, left, &use_fp_on_smis); // ADD is commutative. in GenerateSmiCode()
1034 __ SmiSub(left, left, right, &use_fp_on_smis); in GenerateSmiCode()
1035 __ movq(rax, left); in GenerateSmiCode()
1040 __ SmiMul(right, right, left, &use_fp_on_smis); // MUL is commutative. in GenerateSmiCode()
1047 __ movq(rbx, rax); in GenerateSmiCode()
1048 __ movq(rcx, rdx); in GenerateSmiCode()
1049 __ SmiDiv(rax, left, right, &use_fp_on_smis); in GenerateSmiCode()
1056 __ movq(rbx, rax); in GenerateSmiCode()
1057 __ movq(rcx, rdx); in GenerateSmiCode()
1058 __ SmiMod(rax, left, right, &use_fp_on_smis); in GenerateSmiCode()
1063 __ SmiOrIfSmis(right, right, left, &not_smis); // BIT_OR is commutative. in GenerateSmiCode()
1068 __ SmiXor(right, right, left); // BIT_XOR is commutative. in GenerateSmiCode()
1073 __ SmiAnd(right, right, left); // BIT_AND is commutative. in GenerateSmiCode()
1077 __ SmiShiftLeft(left, left, right); in GenerateSmiCode()
1078 __ movq(rax, left); in GenerateSmiCode()
1082 __ SmiShiftArithmeticRight(left, left, right); in GenerateSmiCode()
1083 __ movq(rax, left); in GenerateSmiCode()
1087 __ SmiShiftLogicalRight(left, left, right, &use_fp_on_smis); in GenerateSmiCode()
1088 __ movq(rax, left); in GenerateSmiCode()
1096 __ ret(0); in GenerateSmiCode()
1102 __ bind(&use_fp_on_smis); in GenerateSmiCode()
1105 __ movq(rdx, rcx); in GenerateSmiCode()
1106 __ movq(rax, rbx); in GenerateSmiCode()
1110 __ AllocateHeapNumber(rcx, rbx, slow); in GenerateSmiCode()
1113 __ SmiToInteger32(left, left); in GenerateSmiCode()
1114 __ cvtqsi2sd(xmm0, left); in GenerateSmiCode()
1118 case Token::ADD: __ addsd(xmm0, xmm1); break; in GenerateSmiCode()
1119 case Token::SUB: __ subsd(xmm0, xmm1); break; in GenerateSmiCode()
1120 case Token::MUL: __ mulsd(xmm0, xmm1); break; in GenerateSmiCode()
1121 case Token::DIV: __ divsd(xmm0, xmm1); break; in GenerateSmiCode()
1125 __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0); in GenerateSmiCode()
1126 __ movq(rax, rcx); in GenerateSmiCode()
1127 __ ret(0); in GenerateSmiCode()
1129 __ jmp(&fail); in GenerateSmiCode()
1138 __ bind(&not_smis); in GenerateSmiCode()
1142 __ jmp(&smi_values); in GenerateSmiCode()
1143 __ bind(&fail); in GenerateSmiCode()
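
GenerateSmiCode (lines 1020-1143) tries every operator with tagged-integer helpers (SmiAdd, SmiSub, SmiMul, ...) that branch to use_fp_on_smis on overflow or other failure; that slow path reloads the operands, redoes the arithmetic in xmm registers, and boxes the result in a fresh HeapNumber. The shape of the ADD case, as a hedged sketch with invented helpers:

    #include <stdint.h>
    struct Value;                         // stand-in for a tagged value
    Value* MakeSmi(int32_t n);            // assumed tagging helper
    Value* AllocateHeapNumber(double d);  // assumed boxing helper
    Value* SmiAddFastPath(int32_t left, int32_t right) {
      int64_t sum = (int64_t)left + right;                    // 1030: SmiAdd
      if (sum == (int32_t)sum) return MakeSmi((int32_t)sum);  // fits: stay on smi path
      return AllocateHeapNumber((double)left + (double)right);  // 1102-1127: use_fp_on_smis
    }
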
1158 case Token::ADD: __ addsd(xmm0, xmm1); break; in GenerateFloatingPointCode()
1159 case Token::SUB: __ subsd(xmm0, xmm1); break; in GenerateFloatingPointCode()
1160 case Token::MUL: __ mulsd(xmm0, xmm1); break; in GenerateFloatingPointCode()
1161 case Token::DIV: __ divsd(xmm0, xmm1); break; in GenerateFloatingPointCode()
1165 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0); in GenerateFloatingPointCode()
1166 __ ret(0); in GenerateFloatingPointCode()
1171 __ jmp(allocation_failure); in GenerateFloatingPointCode()
1182 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); in GenerateFloatingPointCode()
1186 case Token::BIT_OR: __ orl(rax, rcx); break; in GenerateFloatingPointCode()
1187 case Token::BIT_AND: __ andl(rax, rcx); break; in GenerateFloatingPointCode()
1188 case Token::BIT_XOR: __ xorl(rax, rcx); break; in GenerateFloatingPointCode()
1189 case Token::SAR: __ sarl_cl(rax); break; in GenerateFloatingPointCode()
1190 case Token::SHL: __ shll_cl(rax); break; in GenerateFloatingPointCode()
1192 __ shrl_cl(rax); in GenerateFloatingPointCode()
1195 __ testl(rax, rax); in GenerateFloatingPointCode()
1196 __ j(negative, &non_smi_shr_result); in GenerateFloatingPointCode()
1203 __ Integer32ToSmi(rax, rax); in GenerateFloatingPointCode()
1204 __ Ret(); in GenerateFloatingPointCode()
1210 __ bind(&non_smi_shr_result); in GenerateFloatingPointCode()
1212 __ movl(rbx, rax); // rbx holds result value (uint32 value as int64). in GenerateFloatingPointCode()
1216 __ AllocateInNewSpace(HeapNumber::kSize, in GenerateFloatingPointCode()
1224 __ AbortIfNotRootValue(heap_number_map, in GenerateFloatingPointCode()
1228 __ movq(FieldOperand(rax, HeapObject::kMapOffset), in GenerateFloatingPointCode()
1230 __ cvtqsi2sd(xmm0, rbx); in GenerateFloatingPointCode()
1231 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0); in GenerateFloatingPointCode()
1232 __ Ret(); in GenerateFloatingPointCode()
1234 __ bind(&allocation_failed); in GenerateFloatingPointCode()
1237 __ Integer32ToSmi(rax, rcx); in GenerateFloatingPointCode()
1238 __ Integer32ToSmi(rdx, rbx); in GenerateFloatingPointCode()
1239 __ jmp(allocation_failure); in GenerateFloatingPointCode()
1247 __ Abort("Unexpected fall-through in " in GenerateFloatingPointCode()
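
In GenerateFloatingPointCode the bitwise operators work on int32 values; SHR is the special case because its result is a uint32, and a result with the top bit set is not a valid smi payload. Hence the testl/j(negative) check at 1195-1196: small results are tagged directly, large ones are boxed as a HeapNumber via cvtqsi2sd on the zero-extended value (1210-1231). A hedged sketch, reusing the invented helpers from the previous sketch:

    #include <stdint.h>
    struct Value;
    Value* MakeSmi(int32_t n);            // assumed helpers, as above
    Value* AllocateHeapNumber(double d);
    Value* ShrResult(uint32_t value) {
      if ((int32_t)value >= 0) return MakeSmi((int32_t)value);  // 1195-1203
      return AllocateHeapNumber((double)value);  // 1210-1231: box the big uint32
    }
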
1262 __ JumpIfSmi(left, &left_not_string, Label::kNear); in GenerateStringAddCode()
1263 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx); in GenerateStringAddCode()
1264 __ j(above_equal, &left_not_string, Label::kNear); in GenerateStringAddCode()
1267 __ TailCallStub(&string_add_left_stub); in GenerateStringAddCode()
1270 __ bind(&left_not_string); in GenerateStringAddCode()
1271 __ JumpIfSmi(right, &call_runtime, Label::kNear); in GenerateStringAddCode()
1272 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, rcx); in GenerateStringAddCode()
1273 __ j(above_equal, &call_runtime, Label::kNear); in GenerateStringAddCode()
1277 __ TailCallStub(&string_add_right_stub); in GenerateStringAddCode()
1280 __ bind(&call_runtime); in GenerateStringAddCode()
1288 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION); in GenerateCallRuntimeCode()
1291 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION); in GenerateCallRuntimeCode()
1294 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION); in GenerateCallRuntimeCode()
1297 __ InvokeBuiltin(Builtins::DIV, JUMP_FUNCTION); in GenerateCallRuntimeCode()
1300 __ InvokeBuiltin(Builtins::MOD, JUMP_FUNCTION); in GenerateCallRuntimeCode()
1303 __ InvokeBuiltin(Builtins::BIT_OR, JUMP_FUNCTION); in GenerateCallRuntimeCode()
1306 __ InvokeBuiltin(Builtins::BIT_AND, JUMP_FUNCTION); in GenerateCallRuntimeCode()
1309 __ InvokeBuiltin(Builtins::BIT_XOR, JUMP_FUNCTION); in GenerateCallRuntimeCode()
1312 __ InvokeBuiltin(Builtins::SAR, JUMP_FUNCTION); in GenerateCallRuntimeCode()
1315 __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION); in GenerateCallRuntimeCode()
1318 __ InvokeBuiltin(Builtins::SHR, JUMP_FUNCTION); in GenerateCallRuntimeCode()
1343 __ bind(&call_runtime); in GenerateSmiStub()
1371 __ JumpIfSmi(left, &call_runtime); in GenerateBothStringStub()
1372 __ CmpObjectType(left, FIRST_NONSTRING_TYPE, rcx); in GenerateBothStringStub()
1373 __ j(above_equal, &call_runtime); in GenerateBothStringStub()
1376 __ JumpIfSmi(right, &call_runtime); in GenerateBothStringStub()
1377 __ CmpObjectType(right, FIRST_NONSTRING_TYPE, rcx); in GenerateBothStringStub()
1378 __ j(above_equal, &call_runtime); in GenerateBothStringStub()
1382 __ TailCallStub(&string_add_stub); in GenerateBothStringStub()
1384 __ bind(&call_runtime); in GenerateBothStringStub()
1400 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex); in GenerateOddballStub()
1401 __ j(not_equal, &check, Label::kNear); in GenerateOddballStub()
1403 __ xor_(rdx, rdx); in GenerateOddballStub()
1405 __ LoadRoot(rdx, Heap::kNanValueRootIndex); in GenerateOddballStub()
1407 __ jmp(&done, Label::kNear); in GenerateOddballStub()
1408 __ bind(&check); in GenerateOddballStub()
1409 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); in GenerateOddballStub()
1410 __ j(not_equal, &done, Label::kNear); in GenerateOddballStub()
1412 __ xor_(rax, rax); in GenerateOddballStub()
1414 __ LoadRoot(rax, Heap::kNanValueRootIndex); in GenerateOddballStub()
1416 __ bind(&done); in GenerateOddballStub()
1426 __ bind(&not_number); in GenerateHeapNumberStub()
1429 __ bind(&gc_required); in GenerateHeapNumberStub()
1441 __ bind(&call_string_add_or_runtime); in GenerateGeneric()
1446 __ bind(&call_runtime); in GenerateGeneric()
1459 __ JumpIfNotSmi(rdx, &skip_allocation); in GenerateHeapResultAllocation()
1462 __ AllocateHeapNumber(rbx, rcx, alloc_failure); in GenerateHeapResultAllocation()
1465 __ movq(rdx, rbx); in GenerateHeapResultAllocation()
1466 __ bind(&skip_allocation); in GenerateHeapResultAllocation()
1468 __ movq(rax, rdx); in GenerateHeapResultAllocation()
1474 __ JumpIfNotSmi(rax, &skip_allocation); in GenerateHeapResultAllocation()
1479 __ AllocateHeapNumber(rbx, rcx, alloc_failure); in GenerateHeapResultAllocation()
1482 __ movq(rax, rbx); in GenerateHeapResultAllocation()
1483 __ bind(&skip_allocation); in GenerateHeapResultAllocation()
1491 __ pop(rcx); in GenerateRegisterArgsPush()
1492 __ push(rdx); in GenerateRegisterArgsPush()
1493 __ push(rax); in GenerateRegisterArgsPush()
1494 __ push(rcx); in GenerateRegisterArgsPush()
1519 __ movq(rax, Operand(rsp, kPointerSize)); in Generate()
1520 __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear); in Generate()
1523 __ SmiToInteger32(rax, rax); in Generate()
1524 __ subq(rsp, Immediate(kDoubleSize)); in Generate()
1525 __ cvtlsi2sd(xmm1, rax); in Generate()
1526 __ movsd(Operand(rsp, 0), xmm1); in Generate()
1527 __ movq(rbx, xmm1); in Generate()
1528 __ movq(rdx, xmm1); in Generate()
1529 __ fld_d(Operand(rsp, 0)); in Generate()
1530 __ addq(rsp, Immediate(kDoubleSize)); in Generate()
1531 __ jmp(&loaded, Label::kNear); in Generate()
1533 __ bind(&input_not_smi); in Generate()
1535 __ LoadRoot(rbx, Heap::kHeapNumberMapRootIndex); in Generate()
1536 __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); in Generate()
1537 __ j(not_equal, &runtime_call); in Generate()
1540 __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset)); in Generate()
1541 __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset)); in Generate()
1542 __ movq(rdx, rbx); in Generate()
1544 __ bind(&loaded); in Generate()
1546 __ movq(rbx, xmm1); in Generate()
1547 __ movq(rdx, xmm1); in Generate()
1559 __ sar(rdx, Immediate(32)); in Generate()
1560 __ xorl(rdx, rbx); in Generate()
1561 __ movl(rcx, rdx); in Generate()
1562 __ movl(rax, rdx); in Generate()
1563 __ movl(rdi, rdx); in Generate()
1564 __ sarl(rdx, Immediate(8)); in Generate()
1565 __ sarl(rcx, Immediate(16)); in Generate()
1566 __ sarl(rax, Immediate(24)); in Generate()
1567 __ xorl(rcx, rdx); in Generate()
1568 __ xorl(rax, rdi); in Generate()
1569 __ xorl(rcx, rax); in Generate()
1571 __ andl(rcx, Immediate(TranscendentalCache::SubCache::kCacheSize - 1)); in Generate()
1578 __ movq(rax, cache_array); in Generate()
1581 __ movq(rax, Operand(rax, cache_array_index)); in Generate()
1584 __ testq(rax, rax); in Generate()
1585 __ j(zero, &runtime_call_clear_stack); // Only clears stack if TAGGED. in Generate()
1603 __ addl(rcx, rcx); in Generate()
1604 __ lea(rcx, Operand(rax, rcx, times_8, 0)); in Generate()
1607 __ cmpq(rbx, Operand(rcx, 0)); in Generate()
1608 __ j(not_equal, &cache_miss, Label::kNear); in Generate()
1611 __ IncrementCounter(counters->transcendental_cache_hit(), 1); in Generate()
1612 __ movq(rax, Operand(rcx, 2 * kIntSize)); in Generate()
1614 __ fstp(0); // Clear FPU stack. in Generate()
1615 __ ret(kPointerSize); in Generate()
1617 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); in Generate()
1618 __ Ret(); in Generate()
1621 __ bind(&cache_miss); in Generate()
1622 __ IncrementCounter(counters->transcendental_cache_miss(), 1); in Generate()
1625 __ AllocateHeapNumber(rax, rdi, &runtime_call_clear_stack); in Generate()
1627 __ AllocateHeapNumber(rax, rdi, &skip_cache); in Generate()
1628 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1); in Generate()
1629 __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset)); in Generate()
1632 __ movq(Operand(rcx, 0), rbx); in Generate()
1633 __ movq(Operand(rcx, 2 * kIntSize), rax); in Generate()
1634 __ fstp_d(FieldOperand(rax, HeapNumber::kValueOffset)); in Generate()
1636 __ ret(kPointerSize); in Generate()
1638 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); in Generate()
1639 __ Ret(); in Generate()
1642 __ bind(&skip_cache); in Generate()
1643 __ subq(rsp, Immediate(kDoubleSize)); in Generate()
1644 __ movsd(Operand(rsp, 0), xmm1); in Generate()
1645 __ fld_d(Operand(rsp, 0)); in Generate()
1647 __ fstp_d(Operand(rsp, 0)); in Generate()
1648 __ movsd(xmm1, Operand(rsp, 0)); in Generate()
1649 __ addq(rsp, Immediate(kDoubleSize)); in Generate()
1655 __ Push(Smi::FromInt(2 * kDoubleSize)); in Generate()
1656 __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); in Generate()
1658 __ Ret(); in Generate()
1663 __ bind(&runtime_call_clear_stack); in Generate()
1664 __ fstp(0); in Generate()
1665 __ bind(&runtime_call); in Generate()
1666 __ TailCallExternalReference( in Generate()
1669 __ bind(&runtime_call_clear_stack); in Generate()
1670 __ bind(&runtime_call); in Generate()
1671 __ AllocateHeapNumber(rax, rdi, &skip_cache); in Generate()
1672 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1); in Generate()
1675 __ push(rax); in Generate()
1676 __ CallRuntime(RuntimeFunction(), 1); in Generate()
1678 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); in Generate()
1679 __ Ret(); in Generate()
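
The cache lookup in TranscendentalCacheStub::Generate hashes the input double's 64 bits down to a table index: fold the high word into the low word, xor together byte-shifted copies, and mask with kCacheSize - 1 (lines 1559-1571); each entry stores the input bits next to a pointer to the cached HeapNumber result (1607, 1632-1633). A C sketch of the index computation, using arithmetic shifts as the sarl instructions do:

    #include <stdint.h>
    #include <string.h>
    // Sketch of the cache index hash (cache_size must be a power of two).
    int CacheIndex(double input, int cache_size) {
      uint64_t bits;
      memcpy(&bits, &input, sizeof bits);
      int32_t s = (int32_t)((uint32_t)bits ^ (uint32_t)(bits >> 32));  // 1559-1560
      int32_t h = ((s >> 16) ^ (s >> 8)) ^ ((s >> 24) ^ s);            // 1561-1569
      return (int)((uint32_t)h & (uint32_t)(cache_size - 1));          // 1571
    }
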
1715 __ movq(rdi, rbx); in GenerateOperation()
1717 __ shr(rdi, Immediate(HeapNumber::kMantissaBits)); in GenerateOperation()
1719 __ andl(rdi, Immediate((1 << HeapNumber::kExponentBits) - 1)); in GenerateOperation()
1721 __ cmpl(rdi, Immediate(supported_exponent_limit)); in GenerateOperation()
1722 __ j(below, &in_range); in GenerateOperation()
1724 __ cmpl(rdi, Immediate(0x7ff)); in GenerateOperation()
1726 __ j(not_equal, &non_nan_result, Label::kNear); in GenerateOperation()
1728 __ fstp(0); in GenerateOperation()
1730 __ subq(rsp, Immediate(kPointerSize)); in GenerateOperation()
1731 __ movl(Operand(rsp, 4), Immediate(0x7ff80000)); in GenerateOperation()
1732 __ movl(Operand(rsp, 0), Immediate(0x00000000)); in GenerateOperation()
1733 __ fld_d(Operand(rsp, 0)); in GenerateOperation()
1734 __ addq(rsp, Immediate(kPointerSize)); in GenerateOperation()
1735 __ jmp(&done); in GenerateOperation()
1737 __ bind(&non_nan_result); in GenerateOperation()
1740 __ movq(rdi, rax); // Save rax before using fnstsw_ax. in GenerateOperation()
1741 __ fldpi(); in GenerateOperation()
1742 __ fadd(0); in GenerateOperation()
1743 __ fld(1); in GenerateOperation()
1747 __ fwait(); in GenerateOperation()
1748 __ fnstsw_ax(); in GenerateOperation()
1750 __ testl(rax, Immediate(5)); // #IO and #ZD flags of FPU status word. in GenerateOperation()
1751 __ j(zero, &no_exceptions); in GenerateOperation()
1752 __ fnclex(); in GenerateOperation()
1753 __ bind(&no_exceptions); in GenerateOperation()
1759 __ bind(&partial_remainder_loop); in GenerateOperation()
1760 __ fprem1(); in GenerateOperation()
1761 __ fwait(); in GenerateOperation()
1762 __ fnstsw_ax(); in GenerateOperation()
1763 __ testl(rax, Immediate(0x400)); // Check C2 bit of FPU status word. in GenerateOperation()
1766 __ j(not_zero, &partial_remainder_loop); in GenerateOperation()
1769 __ fstp(2); in GenerateOperation()
1771 __ fstp(0); in GenerateOperation()
1773 __ movq(rax, rdi); // Restore rax, pointer to the new HeapNumber. in GenerateOperation()
1774 __ bind(&in_range); in GenerateOperation()
1777 __ fsin(); in GenerateOperation()
1780 __ fcos(); in GenerateOperation()
1785 __ fptan(); in GenerateOperation()
1786 __ fstp(0); // Pop FP register stack. in GenerateOperation()
1791 __ bind(&done); in GenerateOperation()
1794 __ fldln2(); in GenerateOperation()
1795 __ fxch(); in GenerateOperation()
1796 __ fyl2x(); in GenerateOperation()
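
GenerateOperation guards the x87 fsin/fcos/fptan instructions, which are only defined for |x| < 2^63: the exponent test at 1717-1722 takes the direct path for small arguments, exponent 0x7ff (NaN or infinity) loads a NaN result outright (1724-1734), and other large finite inputs are reduced modulo 2*pi by the fprem1 loop (1759-1766) before the trig instruction runs. Conceptually, under IEEE semantics (fprem1 computes the IEEE remainder, like C's remainder()):

    #include <math.h>
    // Sketch of the reduction; not the FPU microcode.
    double GuardedSin(double x) {
      if (x != x || x - x != 0.0) return NAN;     // 1724-1734: NaN/Inf in, NaN out
      double reduced = remainder(x, 2.0 * M_PI);  // 1759-1766: fprem1 loop
      return sin(reduced);                        // 1777: fsin on the reduced argument
    }
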
1810 __ JumpIfNotSmi(rdx, &rdx_is_object); in LoadNumbersAsIntegers()
1811 __ SmiToInteger32(rdx, rdx); in LoadNumbersAsIntegers()
1812 __ JumpIfSmi(rax, &rax_is_smi); in LoadNumbersAsIntegers()
1814 __ bind(&rax_is_object); in LoadNumbersAsIntegers()
1816 __ jmp(&done); in LoadNumbersAsIntegers()
1818 __ bind(&rdx_is_object); in LoadNumbersAsIntegers()
1820 __ JumpIfNotSmi(rax, &rax_is_object); in LoadNumbersAsIntegers()
1821 __ bind(&rax_is_smi); in LoadNumbersAsIntegers()
1822 __ SmiToInteger32(rcx, rax); in LoadNumbersAsIntegers()
1824 __ bind(&done); in LoadNumbersAsIntegers()
1825 __ movl(rax, rdx); in LoadNumbersAsIntegers()
1840 __ JumpIfNotSmi(rdx, &arg1_is_object); in LoadAsIntegers()
1841 __ SmiToInteger32(r8, rdx); in LoadAsIntegers()
1842 __ jmp(&load_arg2); in LoadAsIntegers()
1845 __ bind(&check_undefined_arg1); in LoadAsIntegers()
1846 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex); in LoadAsIntegers()
1847 __ j(not_equal, conversion_failure); in LoadAsIntegers()
1848 __ Set(r8, 0); in LoadAsIntegers()
1849 __ jmp(&load_arg2); in LoadAsIntegers()
1851 __ bind(&arg1_is_object); in LoadAsIntegers()
1852 __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), heap_number_map); in LoadAsIntegers()
1853 __ j(not_equal, &check_undefined_arg1); in LoadAsIntegers()
1858 __ bind(&load_arg2); in LoadAsIntegers()
1860 __ JumpIfNotSmi(rax, &arg2_is_object); in LoadAsIntegers()
1861 __ SmiToInteger32(rcx, rax); in LoadAsIntegers()
1862 __ jmp(&done); in LoadAsIntegers()
1865 __ bind(&check_undefined_arg2); in LoadAsIntegers()
1866 __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); in LoadAsIntegers()
1867 __ j(not_equal, conversion_failure); in LoadAsIntegers()
1868 __ Set(rcx, 0); in LoadAsIntegers()
1869 __ jmp(&done); in LoadAsIntegers()
1871 __ bind(&arg2_is_object); in LoadAsIntegers()
1872 __ cmpq(FieldOperand(rax, HeapObject::kMapOffset), heap_number_map); in LoadAsIntegers()
1873 __ j(not_equal, &check_undefined_arg2); in LoadAsIntegers()
1876 __ bind(&done); in LoadAsIntegers()
1877 __ movl(rax, r8); in LoadAsIntegers()
1882 __ SmiToInteger32(kScratchRegister, rdx); in LoadSSE2SmiOperands()
1883 __ cvtlsi2sd(xmm0, kScratchRegister); in LoadSSE2SmiOperands()
1884 __ SmiToInteger32(kScratchRegister, rax); in LoadSSE2SmiOperands()
1885 __ cvtlsi2sd(xmm1, kScratchRegister); in LoadSSE2SmiOperands()
1892 __ JumpIfSmi(rdx, &load_smi_rdx); in LoadSSE2NumberOperands()
1893 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); in LoadSSE2NumberOperands()
1895 __ JumpIfSmi(rax, &load_smi_rax); in LoadSSE2NumberOperands()
1896 __ bind(&load_nonsmi_rax); in LoadSSE2NumberOperands()
1897 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); in LoadSSE2NumberOperands()
1898 __ jmp(&done); in LoadSSE2NumberOperands()
1900 __ bind(&load_smi_rdx); in LoadSSE2NumberOperands()
1901 __ SmiToInteger32(kScratchRegister, rdx); in LoadSSE2NumberOperands()
1902 __ cvtlsi2sd(xmm0, kScratchRegister); in LoadSSE2NumberOperands()
1903 __ JumpIfNotSmi(rax, &load_nonsmi_rax); in LoadSSE2NumberOperands()
1905 __ bind(&load_smi_rax); in LoadSSE2NumberOperands()
1906 __ SmiToInteger32(kScratchRegister, rax); in LoadSSE2NumberOperands()
1907 __ cvtlsi2sd(xmm1, kScratchRegister); in LoadSSE2NumberOperands()
1909 __ bind(&done); in LoadSSE2NumberOperands()
1917 __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex); in LoadSSE2UnknownOperands()
1918 __ JumpIfSmi(rdx, &load_smi_rdx); in LoadSSE2UnknownOperands()
1919 __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), rcx); in LoadSSE2UnknownOperands()
1920 __ j(not_equal, not_numbers); // Argument in rdx is not a number. in LoadSSE2UnknownOperands()
1921 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); in LoadSSE2UnknownOperands()
1923 __ JumpIfSmi(rax, &load_smi_rax); in LoadSSE2UnknownOperands()
1925 __ bind(&load_nonsmi_rax); in LoadSSE2UnknownOperands()
1926 __ cmpq(FieldOperand(rax, HeapObject::kMapOffset), rcx); in LoadSSE2UnknownOperands()
1927 __ j(not_equal, not_numbers); in LoadSSE2UnknownOperands()
1928 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); in LoadSSE2UnknownOperands()
1929 __ jmp(&done); in LoadSSE2UnknownOperands()
1931 __ bind(&load_smi_rdx); in LoadSSE2UnknownOperands()
1932 __ SmiToInteger32(kScratchRegister, rdx); in LoadSSE2UnknownOperands()
1933 __ cvtlsi2sd(xmm0, kScratchRegister); in LoadSSE2UnknownOperands()
1934 __ JumpIfNotSmi(rax, &load_nonsmi_rax); in LoadSSE2UnknownOperands()
1936 __ bind(&load_smi_rax); in LoadSSE2UnknownOperands()
1937 __ SmiToInteger32(kScratchRegister, rax); in LoadSSE2UnknownOperands()
1938 __ cvtlsi2sd(xmm1, kScratchRegister); in LoadSSE2UnknownOperands()
1939 __ bind(&done); in LoadSSE2UnknownOperands()
1955 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); in NumbersToSmis()
1958 __ JumpIfSmi(first, &first_smi, Label::kNear); in NumbersToSmis()
1959 __ cmpq(FieldOperand(first, HeapObject::kMapOffset), heap_number_map); in NumbersToSmis()
1960 __ j(not_equal, on_not_smis); in NumbersToSmis()
1962 __ movsd(xmm0, FieldOperand(first, HeapNumber::kValueOffset)); in NumbersToSmis()
1963 __ movq(scratch2, xmm0); in NumbersToSmis()
1964 __ cvttsd2siq(smi_result, xmm0); in NumbersToSmis()
1967 __ cvtlsi2sd(xmm1, smi_result); in NumbersToSmis()
1968 __ movq(kScratchRegister, xmm1); in NumbersToSmis()
1969 __ cmpq(scratch2, kScratchRegister); in NumbersToSmis()
1970 __ j(not_equal, on_not_smis); in NumbersToSmis()
1971 __ Integer32ToSmi(first, smi_result); in NumbersToSmis()
1973 __ JumpIfSmi(second, (on_success != NULL) ? on_success : &done); in NumbersToSmis()
1974 __ bind(&first_smi); in NumbersToSmis()
1977 __ AbortIfSmi(second); in NumbersToSmis()
1979 __ cmpq(FieldOperand(second, HeapObject::kMapOffset), heap_number_map); in NumbersToSmis()
1980 __ j(not_equal, on_not_smis); in NumbersToSmis()
1982 __ movsd(xmm0, FieldOperand(second, HeapNumber::kValueOffset)); in NumbersToSmis()
1983 __ movq(scratch2, xmm0); in NumbersToSmis()
1984 __ cvttsd2siq(smi_result, xmm0); in NumbersToSmis()
1985 __ cvtlsi2sd(xmm1, smi_result); in NumbersToSmis()
1986 __ movq(kScratchRegister, xmm1); in NumbersToSmis()
1987 __ cmpq(scratch2, kScratchRegister); in NumbersToSmis()
1988 __ j(not_equal, on_not_smis); in NumbersToSmis()
1989 __ Integer32ToSmi(second, smi_result); in NumbersToSmis()
1991 __ jmp(on_success); in NumbersToSmis()
1993 __ bind(&done); in NumbersToSmis()
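
NumbersToSmis demotes a heap number to a smi only when the value survives a round trip: truncate with cvttsd2siq, widen back with cvtlsi2sd, and compare the raw bit patterns (1962-1970 and 1982-1988). Comparing bits rather than values is what rejects -0 as well as fractions, NaN, and out-of-range doubles. A C sketch (the explicit range guard stands in for cvttsd2siq's "integer indefinite" encoding, which the bit compare would likewise reject):

    #include <stdint.h>
    #include <string.h>
    // Sketch: does this double hold exactly an int32 (and not -0)?
    bool DoubleIsInt32(double d, int32_t* out) {
      if (!(d >= INT32_MIN && d <= INT32_MAX)) return false;  // guard for plain C
      int32_t truncated = (int32_t)d;       // 1964: cvttsd2siq
      double back = (double)truncated;      // 1967: cvtlsi2sd
      uint64_t a, b;
      memcpy(&a, &d, sizeof a);
      memcpy(&b, &back, sizeof b);
      if (a != b) return false;             // 1969-1970: bit compare rejects -0 too
      *out = truncated;
      return true;
    }
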
2015 __ movq(scratch, Immediate(1)); in Generate()
2016 __ cvtlsi2sd(double_result, scratch); in Generate()
2023 __ movq(base, Operand(rsp, 2 * kPointerSize)); in Generate()
2024 __ movq(exponent, Operand(rsp, 1 * kPointerSize)); in Generate()
2025 __ JumpIfSmi(base, &base_is_smi, Label::kNear); in Generate()
2026 __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset), in Generate()
2028 __ j(not_equal, &call_runtime); in Generate()
2030 __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset)); in Generate()
2031 __ jmp(&unpack_exponent, Label::kNear); in Generate()
2033 __ bind(&base_is_smi); in Generate()
2034 __ SmiToInteger32(base, base); in Generate()
2035 __ cvtlsi2sd(double_base, base); in Generate()
2036 __ bind(&unpack_exponent); in Generate()
2038 __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear); in Generate()
2039 __ SmiToInteger32(exponent, exponent); in Generate()
2040 __ jmp(&int_exponent); in Generate()
2042 __ bind(&exponent_not_smi); in Generate()
2043 __ CompareRoot(FieldOperand(exponent, HeapObject::kMapOffset), in Generate()
2045 __ j(not_equal, &call_runtime); in Generate()
2046 __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset)); in Generate()
2048 __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear); in Generate()
2049 __ SmiToInteger32(exponent, exponent); in Generate()
2050 __ jmp(&int_exponent); in Generate()
2052 __ bind(&exponent_not_smi); in Generate()
2053 __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset)); in Generate()
2059 __ cvttsd2si(exponent, double_exponent); in Generate()
2061 __ cmpl(exponent, Immediate(0x80000000u)); in Generate()
2062 __ j(equal, &call_runtime); in Generate()
2063 __ cvtlsi2sd(double_scratch, exponent); in Generate()
2065 __ ucomisd(double_exponent, double_scratch); in Generate()
2066 __ j(equal, &int_exponent); in Generate()
2075 __ movq(scratch, V8_UINT64_C(0x3FE0000000000000), RelocInfo::NONE); in Generate()
2076 __ movq(double_scratch, scratch); in Generate()
2078 __ ucomisd(double_scratch, double_exponent); in Generate()
2079 __ j(not_equal, &not_plus_half, Label::kNear); in Generate()
2085 __ movq(scratch, V8_UINT64_C(0xFFF0000000000000), RelocInfo::NONE); in Generate()
2086 __ movq(double_scratch, scratch); in Generate()
2087 __ ucomisd(double_scratch, double_base); in Generate()
2090 __ j(not_equal, &continue_sqrt, Label::kNear); in Generate()
2091 __ j(carry, &continue_sqrt, Label::kNear); in Generate()
2094 __ xorps(double_result, double_result); in Generate()
2095 __ subsd(double_result, double_scratch); in Generate()
2096 __ jmp(&done); in Generate()
2098 __ bind(&continue_sqrt); in Generate()
2100 __ xorps(double_scratch, double_scratch); in Generate()
2101 __ addsd(double_scratch, double_base); // Convert -0 to 0. in Generate()
2102 __ sqrtsd(double_result, double_scratch); in Generate()
2103 __ jmp(&done); in Generate()
2106 __ bind(&not_plus_half); in Generate()
2108 __ subsd(double_scratch, double_result); in Generate()
2110 __ ucomisd(double_scratch, double_exponent); in Generate()
2111 __ j(not_equal, &fast_power, Label::kNear); in Generate()
2117 __ movq(scratch, V8_UINT64_C(0xFFF0000000000000), RelocInfo::NONE); in Generate()
2118 __ movq(double_scratch, scratch); in Generate()
2119 __ ucomisd(double_scratch, double_base); in Generate()
2122 __ j(not_equal, &continue_rsqrt, Label::kNear); in Generate()
2123 __ j(carry, &continue_rsqrt, Label::kNear); in Generate()
2126 __ xorps(double_result, double_result); in Generate()
2127 __ jmp(&done); in Generate()
2129 __ bind(&continue_rsqrt); in Generate()
2131 __ xorps(double_exponent, double_exponent); in Generate()
2132 __ addsd(double_exponent, double_base); // Convert -0 to +0. in Generate()
2133 __ sqrtsd(double_exponent, double_exponent); in Generate()
2134 __ divsd(double_result, double_exponent); in Generate()
2135 __ jmp(&done); in Generate()
2140 __ bind(&fast_power); in Generate()
2141 __ fnclex(); // Clear flags to catch exceptions later. in Generate()
2143 __ subq(rsp, Immediate(kDoubleSize)); in Generate()
2144 __ movsd(Operand(rsp, 0), double_exponent); in Generate()
2145 __ fld_d(Operand(rsp, 0)); // E in Generate()
2146 __ movsd(Operand(rsp, 0), double_base); in Generate()
2147 __ fld_d(Operand(rsp, 0)); // B, E in Generate()
2152 __ fyl2x(); // X in Generate()
2153 __ fld(0); // X, X in Generate()
2154 __ frndint(); // rnd(X), X in Generate()
2155 __ fsub(1); // rnd(X), X-rnd(X) in Generate()
2156 __ fxch(1); // X - rnd(X), rnd(X) in Generate()
2158 __ f2xm1(); // 2^(X-rnd(X)) - 1, rnd(X) in Generate()
2159 __ fld1(); // 1, 2^(X-rnd(X)) - 1, rnd(X) in Generate()
2160 __ faddp(1); // 1, 2^(X-rnd(X)), rnd(X) in Generate()
2162 __ fscale(); // 2^X, rnd(X) in Generate()
2163 __ fstp(1); in Generate()
2165 __ fnstsw_ax(); in Generate()
2166 __ testb(rax, Immediate(0x5F)); // Check for all but precision exception. in Generate()
2167 __ j(not_zero, &fast_power_failed, Label::kNear); in Generate()
2168 __ fstp_d(Operand(rsp, 0)); in Generate()
2169 __ movsd(double_result, Operand(rsp, 0)); in Generate()
2170 __ addq(rsp, Immediate(kDoubleSize)); in Generate()
2171 __ jmp(&done); in Generate()
2173 __ bind(&fast_power_failed); in Generate()
2174 __ fninit(); in Generate()
2175 __ addq(rsp, Immediate(kDoubleSize)); in Generate()
2176 __ jmp(&call_runtime); in Generate()
2180 __ bind(&int_exponent); in Generate()
2183 __ movq(scratch, exponent); // Back up exponent. in Generate()
2184 __ movsd(double_scratch, double_base); // Back up base. in Generate()
2185 __ movsd(double_scratch2, double_result); // Load double_exponent with 1. in Generate()
2189 __ testl(scratch, scratch); in Generate()
2190 __ j(positive, &no_neg, Label::kNear); in Generate()
2191 __ negl(scratch); in Generate()
2192 __ bind(&no_neg); in Generate()
2194 __ bind(&while_true); in Generate()
2195 __ shrl(scratch, Immediate(1)); in Generate()
2196 __ j(not_carry, &no_multiply, Label::kNear); in Generate()
2197 __ mulsd(double_result, double_scratch); in Generate()
2198 __ bind(&no_multiply); in Generate()
2200 __ mulsd(double_scratch, double_scratch); in Generate()
2201 __ j(not_zero, &while_true); in Generate()
2204 __ testl(exponent, exponent); in Generate()
2205 __ j(greater, &done); in Generate()
2206 __ divsd(double_scratch2, double_result); in Generate()
2207 __ movsd(double_result, double_scratch2); in Generate()
2210 __ xorps(double_scratch2, double_scratch2); in Generate()
2211 __ ucomisd(double_scratch2, double_result); in Generate()
2215 __ j(not_equal, &done); in Generate()
2216 __ cvtlsi2sd(double_exponent, exponent); in Generate()
2222 __ bind(&call_runtime); in Generate()
2223 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); in Generate()
2227 __ bind(&done); in Generate()
2228 __ AllocateHeapNumber(rax, rcx, &call_runtime); in Generate()
2229 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result); in Generate()
2230 __ IncrementCounter(counters->math_pow(), 1); in Generate()
2231 __ ret(2 * kPointerSize); in Generate()
2233 __ bind(&call_runtime); in Generate()
2235 __ movsd(xmm0, double_base); in Generate()
2239 __ PrepareCallCFunction(2); in Generate()
2240 __ CallCFunction( in Generate()
2244 __ movsd(double_result, xmm0); in Generate()
2246 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); in Generate()
2248 __ bind(&done); in Generate()
2249 __ IncrementCounter(counters->math_pow(), 1); in Generate()
2250 __ ret(0); in Generate()
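
The int_exponent path of MathPowStub (lines 2183-2207) is exponentiation by squaring: shift the absolute exponent right one bit per iteration, multiply the running result by the current power whenever a one bit falls out, and take the reciprocal at the end for negative exponents. A hedged C sketch:

    // Binary exponentiation, as in the int_exponent path (illustrative).
    double PowInt(double base, int exponent) {
      double result = 1.0;                          // 2015-2016: start at 1
      unsigned e = exponent < 0 ? 0u - (unsigned)exponent
                                : (unsigned)exponent;  // 2189-2191: |exponent|
      for (double b = base; e != 0; e >>= 1) {      // 2194-2201: loop over bits
        if (e & 1) result *= b;                     // 2195-2197: bit set -> multiply
        b *= b;                                     // 2200: square every round
      }
      if (exponent < 0) result = 1.0 / result;      // 2204-2207: reciprocal
      return result;
    }
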
2265 __ JumpIfNotSmi(rdx, &slow); in GenerateReadElement()
2272 __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); in GenerateReadElement()
2273 __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset), in GenerateReadElement()
2275 __ j(equal, &adaptor); in GenerateReadElement()
2280 __ cmpq(rdx, rax); in GenerateReadElement()
2281 __ j(above_equal, &slow); in GenerateReadElement()
2285 __ lea(rbx, Operand(rbp, index.reg, index.scale, 0)); in GenerateReadElement()
2287 __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement)); in GenerateReadElement()
2288 __ Ret(); in GenerateReadElement()
2293 __ bind(&adaptor); in GenerateReadElement()
2294 __ movq(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset)); in GenerateReadElement()
2295 __ cmpq(rdx, rcx); in GenerateReadElement()
2296 __ j(above_equal, &slow); in GenerateReadElement()
2300 __ lea(rbx, Operand(rbx, index.reg, index.scale, 0)); in GenerateReadElement()
2302 __ movq(rax, Operand(rbx, index.reg, index.scale, kDisplacement)); in GenerateReadElement()
2303 __ Ret(); in GenerateReadElement()
2307 __ bind(&slow); in GenerateReadElement()
2308 __ pop(rbx); // Return address. in GenerateReadElement()
2309 __ push(rdx); in GenerateReadElement()
2310 __ push(rbx); in GenerateReadElement()
2311 __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1); in GenerateReadElement()
2327 __ SmiToInteger64(rbx, Operand(rsp, 1 * kPointerSize)); in GenerateNewNonStrictFast()
2333 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); in GenerateNewNonStrictFast()
2334 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); in GenerateNewNonStrictFast()
2335 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); in GenerateNewNonStrictFast()
2336 __ j(equal, &adaptor_frame); in GenerateNewNonStrictFast()
2339 __ movq(rcx, rbx); in GenerateNewNonStrictFast()
2340 __ jmp(&try_allocate, Label::kNear); in GenerateNewNonStrictFast()
2343 __ bind(&adaptor_frame); in GenerateNewNonStrictFast()
2344 __ SmiToInteger64(rcx, in GenerateNewNonStrictFast()
2347 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, in GenerateNewNonStrictFast()
2349 __ movq(Operand(rsp, 2 * kPointerSize), rdx); in GenerateNewNonStrictFast()
2354 __ cmpq(rbx, rcx); in GenerateNewNonStrictFast()
2355 __ j(less_equal, &try_allocate, Label::kNear); in GenerateNewNonStrictFast()
2356 __ movq(rbx, rcx); in GenerateNewNonStrictFast()
2358 __ bind(&try_allocate); in GenerateNewNonStrictFast()
2365 __ xor_(r8, r8); in GenerateNewNonStrictFast()
2366 __ testq(rbx, rbx); in GenerateNewNonStrictFast()
2367 __ j(zero, &no_parameter_map, Label::kNear); in GenerateNewNonStrictFast()
2368 __ lea(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize)); in GenerateNewNonStrictFast()
2369 __ bind(&no_parameter_map); in GenerateNewNonStrictFast()
2372 __ lea(r8, Operand(r8, rcx, times_pointer_size, FixedArray::kHeaderSize)); in GenerateNewNonStrictFast()
2375 __ addq(r8, Immediate(Heap::kArgumentsObjectSize)); in GenerateNewNonStrictFast()
2378 __ AllocateInNewSpace(r8, rax, rdx, rdi, &runtime, TAG_OBJECT); in GenerateNewNonStrictFast()
2384 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); in GenerateNewNonStrictFast()
2385 __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset)); in GenerateNewNonStrictFast()
2386 __ testq(rbx, rbx); in GenerateNewNonStrictFast()
2387 __ j(not_zero, &has_mapped_parameters, Label::kNear); in GenerateNewNonStrictFast()
2390 __ movq(rdi, Operand(rdi, Context::SlotOffset(kIndex))); in GenerateNewNonStrictFast()
2391 __ jmp(&copy, Label::kNear); in GenerateNewNonStrictFast()
2394 __ bind(&has_mapped_parameters); in GenerateNewNonStrictFast()
2395 __ movq(rdi, Operand(rdi, Context::SlotOffset(kAliasedIndex))); in GenerateNewNonStrictFast()
2396 __ bind(&copy); in GenerateNewNonStrictFast()
2404 __ movq(rdx, FieldOperand(rdi, i)); in GenerateNewNonStrictFast()
2405 __ movq(FieldOperand(rax, i), rdx); in GenerateNewNonStrictFast()
2410 __ movq(rdx, Operand(rsp, 3 * kPointerSize)); in GenerateNewNonStrictFast()
2411 __ movq(FieldOperand(rax, JSObject::kHeaderSize + in GenerateNewNonStrictFast()
2418 __ Integer32ToSmi(rcx, rcx); in GenerateNewNonStrictFast()
2419 __ movq(FieldOperand(rax, JSObject::kHeaderSize + in GenerateNewNonStrictFast()
2426 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize)); in GenerateNewNonStrictFast()
2427 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi); in GenerateNewNonStrictFast()
2436 __ testq(rbx, rbx); in GenerateNewNonStrictFast()
2437 __ j(zero, &skip_parameter_map); in GenerateNewNonStrictFast()
2439 __ LoadRoot(kScratchRegister, Heap::kNonStrictArgumentsElementsMapRootIndex); in GenerateNewNonStrictFast()
2441 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister); in GenerateNewNonStrictFast()
2442 __ Integer64PlusConstantToSmi(r9, rbx, 2); in GenerateNewNonStrictFast()
2443 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), r9); in GenerateNewNonStrictFast()
2444 __ movq(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi); in GenerateNewNonStrictFast()
2445 __ lea(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize)); in GenerateNewNonStrictFast()
2446 __ movq(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9); in GenerateNewNonStrictFast()
2459 __ Integer32ToSmi(r9, rbx); in GenerateNewNonStrictFast()
2460 __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS)); in GenerateNewNonStrictFast()
2461 __ addq(r8, Operand(rsp, 1 * kPointerSize)); in GenerateNewNonStrictFast()
2462 __ subq(r8, r9); in GenerateNewNonStrictFast()
2463 __ Move(r11, factory->the_hole_value()); in GenerateNewNonStrictFast()
2464 __ movq(rdx, rdi); in GenerateNewNonStrictFast()
2465 __ lea(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize)); in GenerateNewNonStrictFast()
2471 __ jmp(&parameters_test, Label::kNear); in GenerateNewNonStrictFast()
2473 __ bind(&parameters_loop); in GenerateNewNonStrictFast()
2474 __ SmiSubConstant(r9, r9, Smi::FromInt(1)); in GenerateNewNonStrictFast()
2475 __ SmiToInteger64(kScratchRegister, r9); in GenerateNewNonStrictFast()
2476 __ movq(FieldOperand(rdx, kScratchRegister, in GenerateNewNonStrictFast()
2480 __ movq(FieldOperand(rdi, kScratchRegister, in GenerateNewNonStrictFast()
2484 __ SmiAddConstant(r8, r8, Smi::FromInt(1)); in GenerateNewNonStrictFast()
2485 __ bind(&parameters_test); in GenerateNewNonStrictFast()
2486 __ SmiTest(r9); in GenerateNewNonStrictFast()
2487 __ j(not_zero, &parameters_loop, Label::kNear); in GenerateNewNonStrictFast()
2489 __ bind(&skip_parameter_map); in GenerateNewNonStrictFast()
2494 __ Move(FieldOperand(rdi, FixedArray::kMapOffset), in GenerateNewNonStrictFast()
2496 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); in GenerateNewNonStrictFast()
2499 __ movq(r8, rbx); in GenerateNewNonStrictFast()
2500 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); in GenerateNewNonStrictFast()
2502 __ SmiToInteger64(rcx, rcx); in GenerateNewNonStrictFast()
2503 __ lea(kScratchRegister, Operand(r8, times_pointer_size, 0)); in GenerateNewNonStrictFast()
2504 __ subq(rdx, kScratchRegister); in GenerateNewNonStrictFast()
2505 __ jmp(&arguments_test, Label::kNear); in GenerateNewNonStrictFast()
2507 __ bind(&arguments_loop); in GenerateNewNonStrictFast()
2508 __ subq(rdx, Immediate(kPointerSize)); in GenerateNewNonStrictFast()
2509 __ movq(r9, Operand(rdx, 0)); in GenerateNewNonStrictFast()
2510 __ movq(FieldOperand(rdi, r8, in GenerateNewNonStrictFast()
2514 __ addq(r8, Immediate(1)); in GenerateNewNonStrictFast()
2516 __ bind(&arguments_test); in GenerateNewNonStrictFast()
2517 __ cmpq(r8, rcx); in GenerateNewNonStrictFast()
2518 __ j(less, &arguments_loop, Label::kNear); in GenerateNewNonStrictFast()
2521 __ ret(3 * kPointerSize); in GenerateNewNonStrictFast()
2525 __ bind(&runtime); in GenerateNewNonStrictFast()
2526 __ Integer32ToSmi(rcx, rcx); in GenerateNewNonStrictFast()
2527 __ movq(Operand(rsp, 1 * kPointerSize), rcx); // Patch argument count. in GenerateNewNonStrictFast()
2528 __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1); in GenerateNewNonStrictFast()
2540 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); in GenerateNewNonStrictSlow()
2541 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); in GenerateNewNonStrictSlow()
2542 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); in GenerateNewNonStrictSlow()
2543 __ j(not_equal, &runtime); in GenerateNewNonStrictSlow()
2546 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); in GenerateNewNonStrictSlow()
2547 __ movq(Operand(rsp, 1 * kPointerSize), rcx); in GenerateNewNonStrictSlow()
2548 __ SmiToInteger64(rcx, rcx); in GenerateNewNonStrictSlow()
2549 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, in GenerateNewNonStrictSlow()
2551 __ movq(Operand(rsp, 2 * kPointerSize), rdx); in GenerateNewNonStrictSlow()
2553 __ bind(&runtime); in GenerateNewNonStrictSlow()
2554 __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1); in GenerateNewNonStrictSlow()
2566 __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); in GenerateNewStrict()
2567 __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset)); in GenerateNewStrict()
2568 __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); in GenerateNewStrict()
2569 __ j(equal, &adaptor_frame); in GenerateNewStrict()
2572 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); in GenerateNewStrict()
2573 __ SmiToInteger64(rcx, rcx); in GenerateNewStrict()
2574 __ jmp(&try_allocate); in GenerateNewStrict()
2577 __ bind(&adaptor_frame); in GenerateNewStrict()
2578 __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset)); in GenerateNewStrict()
2579 __ movq(Operand(rsp, 1 * kPointerSize), rcx); in GenerateNewStrict()
2580 __ SmiToInteger64(rcx, rcx); in GenerateNewStrict()
2581 __ lea(rdx, Operand(rdx, rcx, times_pointer_size, in GenerateNewStrict()
2583 __ movq(Operand(rsp, 2 * kPointerSize), rdx); in GenerateNewStrict()
2588 __ bind(&try_allocate); in GenerateNewStrict()
2589 __ testq(rcx, rcx); in GenerateNewStrict()
2590 __ j(zero, &add_arguments_object, Label::kNear); in GenerateNewStrict()
2591 __ lea(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize)); in GenerateNewStrict()
2592 __ bind(&add_arguments_object); in GenerateNewStrict()
2593 __ addq(rcx, Immediate(Heap::kArgumentsObjectSizeStrict)); in GenerateNewStrict()
2596 __ AllocateInNewSpace(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT); in GenerateNewStrict()
2599 __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX))); in GenerateNewStrict()
2600 __ movq(rdi, FieldOperand(rdi, GlobalObject::kGlobalContextOffset)); in GenerateNewStrict()
2603 __ movq(rdi, Operand(rdi, offset)); in GenerateNewStrict()
2607 __ movq(rbx, FieldOperand(rdi, i)); in GenerateNewStrict()
2608 __ movq(FieldOperand(rax, i), rbx); in GenerateNewStrict()
2613 __ movq(rcx, Operand(rsp, 1 * kPointerSize)); in GenerateNewStrict()
2614 __ movq(FieldOperand(rax, JSObject::kHeaderSize + in GenerateNewStrict()
2620 __ testq(rcx, rcx); in GenerateNewStrict()
2621 __ j(zero, &done); in GenerateNewStrict()
2624 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); in GenerateNewStrict()
2628 __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSizeStrict)); in GenerateNewStrict()
2629 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi); in GenerateNewStrict()
2630 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex); in GenerateNewStrict()
2631 __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister); in GenerateNewStrict()
2634 __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx); in GenerateNewStrict()
2636 __ SmiToInteger64(rcx, rcx); in GenerateNewStrict()
2640 __ bind(&loop); in GenerateNewStrict()
2641 __ movq(rbx, Operand(rdx, -1 * kPointerSize)); // Skip receiver. in GenerateNewStrict()
2642 __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), rbx); in GenerateNewStrict()
2643 __ addq(rdi, Immediate(kPointerSize)); in GenerateNewStrict()
2644 __ subq(rdx, Immediate(kPointerSize)); in GenerateNewStrict()
2645 __ decq(rcx); in GenerateNewStrict()
2646 __ j(not_zero, &loop); in GenerateNewStrict()
2649 __ bind(&done); in GenerateNewStrict()
2650 __ ret(3 * kPointerSize); in GenerateNewStrict()
2653 __ bind(&runtime); in GenerateNewStrict()
2654 __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1); in GenerateNewStrict()
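
The try_allocate sequence above sizes the strict arguments object and its backing store as one contiguous allocation; when there are no arguments, the FixedArray is omitted entirely. A small C++ sketch of that size computation (the last two parameters stand in for Heap::kArgumentsObjectSizeStrict and FixedArray::kHeaderSize):

    #include <cstddef>

    size_t StrictArgumentsAllocationSize(size_t argc,
                                         size_t pointer_size,
                                         size_t arguments_object_size,
                                         size_t fixed_array_header_size) {
      // j(zero, &add_arguments_object): skip the elements entirely for argc == 0.
      size_t elements =
          (argc == 0) ? 0 : argc * pointer_size + fixed_array_header_size;
      return elements + arguments_object_size;
    }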
2663 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); in Generate()
2685 __ Load(kScratchRegister, address_of_regexp_stack_memory_size); in Generate()
2686 __ testq(kScratchRegister, kScratchRegister); in Generate()
2687 __ j(zero, &runtime); in Generate()
2690 __ movq(rax, Operand(rsp, kJSRegExpOffset)); in Generate()
2691 __ JumpIfSmi(rax, &runtime); in Generate()
2692 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister); in Generate()
2693 __ j(not_equal, &runtime); in Generate()
2695 __ movq(rax, FieldOperand(rax, JSRegExp::kDataOffset)); in Generate()
2698 __ Check(NegateCondition(is_smi), in Generate()
2700 __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister); in Generate()
2701 __ Check(equal, "Unexpected type for RegExp data, FixedArray expected"); in Generate()
2706 __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset)); in Generate()
2707 __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP)); in Generate()
2708 __ j(not_equal, &runtime); in Generate()
2712 __ SmiToInteger32(rdx, in Generate()
2715 __ leal(rdx, Operand(rdx, rdx, times_1, 2)); in Generate()
2717 __ cmpl(rdx, Immediate(OffsetsVector::kStaticOffsetsVectorSize)); in Generate()
2718 __ j(above, &runtime); in Generate()
2723 __ movq(rdi, Operand(rsp, kSubjectOffset)); in Generate()
2724 __ JumpIfSmi(rdi, &runtime); in Generate()
2726 __ j(NegateCondition(is_string), &runtime); in Generate()
2733 __ movq(rbx, Operand(rsp, kPreviousIndexOffset)); in Generate()
2734 __ JumpIfNotSmi(rbx, &runtime); in Generate()
2735 __ SmiCompare(rbx, FieldOperand(rdi, String::kLengthOffset)); in Generate()
2736 __ j(above_equal, &runtime); in Generate()
2741 __ movq(rdi, Operand(rsp, kLastMatchInfoOffset)); in Generate()
2742 __ JumpIfSmi(rdi, &runtime); in Generate()
2743 __ CmpObjectType(rdi, JS_ARRAY_TYPE, kScratchRegister); in Generate()
2744 __ j(not_equal, &runtime); in Generate()
2746 __ movq(rbx, FieldOperand(rdi, JSArray::kElementsOffset)); in Generate()
2747 __ movq(rdi, FieldOperand(rbx, HeapObject::kMapOffset)); in Generate()
2748 __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset), in Generate()
2750 __ j(not_equal, &runtime); in Generate()
2754 __ SmiToInteger32(rdi, FieldOperand(rbx, FixedArray::kLengthOffset)); in Generate()
2755 __ addl(rdx, Immediate(RegExpImpl::kLastMatchOverhead)); in Generate()
2756 __ cmpl(rdx, rdi); in Generate()
2757 __ j(greater, &runtime); in Generate()
2760 __ Set(r14, 0); in Generate()
2764 __ movq(rdi, Operand(rsp, kSubjectOffset)); in Generate()
2766 __ movq(r15, rdi); in Generate()
2767 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); in Generate()
2768 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); in Generate()
2770 __ andb(rbx, Immediate(kIsNotStringMask | in Generate()
2775 __ j(zero, &seq_two_byte_string, Label::kNear); in Generate()
2779 __ andb(rbx, Immediate(kIsNotStringMask | in Generate()
2782 __ j(zero, &seq_ascii_string, Label::kNear); in Generate()
2796 __ cmpq(rbx, Immediate(kExternalStringTag)); in Generate()
2797 __ j(less, &cons_string, Label::kNear); in Generate()
2798 __ j(equal, &external_string); in Generate()
2802 __ testb(rbx, Immediate(kIsNotStringMask | kShortExternalStringMask)); in Generate()
2803 __ j(not_zero, &runtime); in Generate()
2806 __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset)); in Generate()
2807 __ movq(rdi, FieldOperand(rdi, SlicedString::kParentOffset)); in Generate()
2811 __ jmp(&check_encoding, Label::kNear); in Generate()
2813 __ bind(&cons_string); in Generate()
2814 __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset), in Generate()
2816 __ j(not_equal, &runtime); in Generate()
2817 __ movq(rdi, FieldOperand(rdi, ConsString::kFirstOffset)); in Generate()
2821 __ bind(&check_encoding); in Generate()
2822 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); in Generate()
2823 __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset), in Generate()
2826 __ j(zero, &seq_two_byte_string, Label::kNear); in Generate()
2828 __ testb(FieldOperand(rbx, Map::kInstanceTypeOffset), in Generate()
2830 __ j(not_zero, &external_string); in Generate()
2832 __ bind(&seq_ascii_string); in Generate()
2835 __ movq(r11, FieldOperand(rax, JSRegExp::kDataAsciiCodeOffset)); in Generate()
2836 __ Set(rcx, 1); // Type is ASCII. in Generate()
2837 __ jmp(&check_code, Label::kNear); in Generate()
2839 __ bind(&seq_two_byte_string); in Generate()
2842 __ movq(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset)); in Generate()
2843 __ Set(rcx, 0); // Type is two-byte. in Generate()
2845 __ bind(&check_code); in Generate()
2849 __ JumpIfSmi(r11, &runtime); in Generate()
2856 __ SmiToInteger64(rbx, Operand(rsp, kPreviousIndexOffset)); in Generate()
2864 __ IncrementCounter(counters->regexp_entry_native(), 1); in Generate()
2870 __ EnterApiExitFrame(argument_slots_on_stack); in Generate()
2875 __ LoadAddress(kScratchRegister, ExternalReference::isolate_address()); in Generate()
2876 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize), in Generate()
2880 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kPointerSize), in Generate()
2884 __ movq(kScratchRegister, address_of_regexp_stack_memory_address); in Generate()
2885 __ movq(r9, Operand(kScratchRegister, 0)); in Generate()
2886 __ movq(kScratchRegister, address_of_regexp_stack_memory_size); in Generate()
2887 __ addq(r9, Operand(kScratchRegister, 0)); in Generate()
2890 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kPointerSize), r9); in Generate()
2894 __ LoadAddress(r8, in Generate()
2898 __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kPointerSize), r8); in Generate()
2923 __ movq(arg2, rbx); in Generate()
2930 __ addq(rbx, r14); in Generate()
2931 __ SmiToInteger32(arg3, FieldOperand(r15, String::kLengthOffset)); in Generate()
2932 __ addq(r14, arg3); // Using arg3 as scratch. in Generate()
2937 __ testb(rcx, rcx); // Last use of rcx as encoding of subject string. in Generate()
2938 __ j(zero, &setup_two_byte, Label::kNear); in Generate()
2939 __ lea(arg4, FieldOperand(rdi, r14, times_1, SeqAsciiString::kHeaderSize)); in Generate()
2940 __ lea(arg3, FieldOperand(rdi, rbx, times_1, SeqAsciiString::kHeaderSize)); in Generate()
2941 __ jmp(&setup_rest, Label::kNear); in Generate()
2942 __ bind(&setup_two_byte); in Generate()
2943 __ lea(arg4, FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize)); in Generate()
2944 __ lea(arg3, FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize)); in Generate()
2945 __ bind(&setup_rest); in Generate()
2952 __ movq(arg1, r15); in Generate()
2955 __ addq(r11, Immediate(Code::kHeaderSize - kHeapObjectTag)); in Generate()
2956 __ call(r11); in Generate()
2958 __ LeaveApiExitFrame(); in Generate()
2963 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::SUCCESS)); in Generate()
2964 __ j(equal, &success, Label::kNear); in Generate()
2965 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION)); in Generate()
2966 __ j(equal, &exception); in Generate()
2967 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE)); in Generate()
2970 __ j(not_equal, &runtime); in Generate()
2973 __ LoadRoot(rax, Heap::kNullValueRootIndex); in Generate()
2974 __ ret(4 * kPointerSize); in Generate()
2977 __ bind(&success); in Generate()
2978 __ movq(rax, Operand(rsp, kJSRegExpOffset)); in Generate()
2979 __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset)); in Generate()
2980 __ SmiToInteger32(rax, in Generate()
2983 __ leal(rdx, Operand(rax, rax, times_1, 2)); in Generate()
2987 __ movq(rax, Operand(rsp, kLastMatchInfoOffset)); in Generate()
2988 __ movq(rbx, FieldOperand(rax, JSArray::kElementsOffset)); in Generate()
2993 __ Integer32ToSmi(kScratchRegister, rdx); in Generate()
2994 __ movq(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset), in Generate()
2997 __ movq(rax, Operand(rsp, kSubjectOffset)); in Generate()
2998 __ movq(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax); in Generate()
2999 __ RecordWriteField(rbx, in Generate()
3004 __ movq(rax, Operand(rsp, kSubjectOffset)); in Generate()
3005 __ movq(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax); in Generate()
3006 __ RecordWriteField(rbx, in Generate()
3013 __ LoadAddress(rcx, in Generate()
3022 __ bind(&next_capture); in Generate()
3023 __ subq(rdx, Immediate(1)); in Generate()
3024 __ j(negative, &done, Label::kNear); in Generate()
3026 __ movl(rdi, Operand(rcx, rdx, times_int_size, 0)); in Generate()
3027 __ Integer32ToSmi(rdi, rdi); in Generate()
3029 __ movq(FieldOperand(rbx, in Generate()
3034 __ jmp(&next_capture); in Generate()
3035 __ bind(&done); in Generate()
3038 __ movq(rax, Operand(rsp, kLastMatchInfoOffset)); in Generate()
3039 __ ret(4 * kPointerSize); in Generate()
3041 __ bind(&exception); in Generate()
3050 __ movq(rax, pending_exception_operand); in Generate()
3051 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex); in Generate()
3052 __ cmpq(rax, rdx); in Generate()
3053 __ j(equal, &runtime); in Generate()
3054 __ movq(pending_exception_operand, rdx); in Generate()
3056 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex); in Generate()
3058 __ j(equal, &termination_exception, Label::kNear); in Generate()
3059 __ Throw(rax); in Generate()
3061 __ bind(&termination_exception); in Generate()
3062 __ ThrowUncatchable(rax); in Generate()
3067 __ bind(&external_string); in Generate()
3068 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); in Generate()
3069 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); in Generate()
3073 __ testb(rbx, Immediate(kIsIndirectStringMask)); in Generate()
3074 __ Assert(zero, "external string expected, but not found"); in Generate()
3076 __ movq(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset)); in Generate()
3079 __ subq(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); in Generate()
3081 __ testb(rbx, Immediate(kStringEncodingMask)); in Generate()
3082 __ j(not_zero, &seq_ascii_string); in Generate()
3083 __ jmp(&seq_two_byte_string); in Generate()
3086 __ bind(&runtime); in Generate()
3087 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1); in Generate()
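
On success, the next_capture loop above converts the raw 32-bit offsets produced by the native matcher into Smis and stores them into the last-match-info array, iterating from the last capture register down. A sketch in C++, assuming the x64 Smi encoding of this era (the 32-bit value held in the upper half of the word):

    #include <cstdint>

    void StoreCaptures(const int32_t* offsets_vector,   // rcx
                       int64_t register_count,          // rdx
                       int64_t* last_match_captures) {  // rbx + kFirstCaptureOffset
      for (int64_t i = register_count - 1; i >= 0; --i) {  // j(negative, &done)
        // movl + Integer32ToSmi: tag the raw offset as a Smi before storing.
        last_match_captures[i] = static_cast<int64_t>(offsets_vector[i]) << 32;
      }
    }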
3096 __ movq(r8, Operand(rsp, kPointerSize * 3)); in Generate()
3097 __ JumpIfNotSmi(r8, &slowcase); in Generate()
3098 __ SmiToInteger32(rbx, r8); in Generate()
3099 __ cmpl(rbx, Immediate(kMaxInlineLength)); in Generate()
3100 __ j(above, &slowcase); in Generate()
3107 __ AllocateInNewSpace(JSRegExpResult::kSize + FixedArray::kHeaderSize, in Generate()
3120 __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_INDEX)); in Generate()
3121 __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalContextOffset)); in Generate()
3122 __ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX)); in Generate()
3123 __ movq(FieldOperand(rax, HeapObject::kMapOffset), rdx); in Generate()
3126 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex); in Generate()
3127 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister); in Generate()
3130 __ lea(rcx, Operand(rax, JSRegExpResult::kSize)); in Generate()
3131 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx); in Generate()
3134 __ movq(r8, Operand(rsp, kPointerSize * 1)); in Generate()
3135 __ movq(FieldOperand(rax, JSRegExpResult::kInputOffset), r8); in Generate()
3136 __ movq(r8, Operand(rsp, kPointerSize * 2)); in Generate()
3137 __ movq(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8); in Generate()
3138 __ movq(r8, Operand(rsp, kPointerSize * 3)); in Generate()
3139 __ movq(FieldOperand(rax, JSArray::kLengthOffset), r8); in Generate()
3147 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex); in Generate()
3148 __ movq(FieldOperand(rcx, HeapObject::kMapOffset), kScratchRegister); in Generate()
3150 __ Integer32ToSmi(rdx, rbx); in Generate()
3151 __ movq(FieldOperand(rcx, FixedArray::kLengthOffset), rdx); in Generate()
3153 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex); in Generate()
3154 __ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize)); in Generate()
3161 __ testl(rbx, rbx); in Generate()
3162 __ bind(&loop); in Generate()
3163 __ j(less_equal, &done); // Jump if rbx is negative or zero. in Generate()
3164 __ subl(rbx, Immediate(1)); in Generate()
3165 __ movq(Operand(rcx, rbx, times_pointer_size, 0), rdx); in Generate()
3166 __ jmp(&loop); in Generate()
3168 __ bind(&done); in Generate()
3169 __ ret(3 * kPointerSize); in Generate()
3171 __ bind(&slowcase); in Generate()
3172 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1); in Generate()
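
The loop above initializes the new result's elements with the-hole, counting the index down from the length to zero; the j(less_equal, &done) at the loop top consumes the flags set by the preceding testl/subl. Equivalent C++, with the_hole as a placeholder sentinel:

    #include <cstdint>

    void FillWithHole(uint64_t* elements, int32_t length, uint64_t the_hole) {
      for (int32_t i = length - 1; i >= 0; --i) {  // subl(rbx, Immediate(1))
        elements[i] = the_hole;  // movq(Operand(rcx, rbx, times_pointer_size, 0), rdx)
      }
    }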
3189 __ LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex); in GenerateLookupNumberStringCache()
3193 __ SmiToInteger32( in GenerateLookupNumberStringCache()
3195 __ shrl(mask, Immediate(1)); in GenerateLookupNumberStringCache()
3196 __ subq(mask, Immediate(1)); // Make mask. in GenerateLookupNumberStringCache()
3206 __ JumpIfSmi(object, &is_smi); in GenerateLookupNumberStringCache()
3207 __ CheckMap(object, in GenerateLookupNumberStringCache()
3213 __ movl(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4)); in GenerateLookupNumberStringCache()
3214 __ xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset)); in GenerateLookupNumberStringCache()
3219 __ movq(probe, in GenerateLookupNumberStringCache()
3224 __ JumpIfSmi(probe, not_found); in GenerateLookupNumberStringCache()
3225 __ movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset)); in GenerateLookupNumberStringCache()
3226 __ movsd(xmm1, FieldOperand(probe, HeapNumber::kValueOffset)); in GenerateLookupNumberStringCache()
3227 __ ucomisd(xmm0, xmm1); in GenerateLookupNumberStringCache()
3228 __ j(parity_even, not_found); // Bail out if NaN is involved. in GenerateLookupNumberStringCache()
3229 __ j(not_equal, not_found); // The cache did not contain this value. in GenerateLookupNumberStringCache()
3230 __ jmp(&load_result_from_cache); in GenerateLookupNumberStringCache()
3233 __ bind(&is_smi); in GenerateLookupNumberStringCache()
3234 __ SmiToInteger32(scratch, object); in GenerateLookupNumberStringCache()
3239 __ cmpq(object, in GenerateLookupNumberStringCache()
3244 __ j(not_equal, not_found); in GenerateLookupNumberStringCache()
3247 __ bind(&load_result_from_cache); in GenerateLookupNumberStringCache()
3248 __ movq(result, in GenerateLookupNumberStringCache()
3254 __ IncrementCounter(counters->number_to_string_native(), 1); in GenerateLookupNumberStringCache()
3261 __ and_(hash, mask); in GenerateConvertHashCodeToIndex()
3266 __ shl(hash, Immediate(kPointerSizeLog2 + 1)); in GenerateConvertHashCodeToIndex()
3273 __ movq(rbx, Operand(rsp, kPointerSize)); in Generate()
3277 __ ret(1 * kPointerSize); in Generate()
3279 __ bind(&runtime); in Generate()
3281 __ TailCallRuntime(Runtime::kNumberToStringSkipCache, 1, 1); in Generate()
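
The number-string cache probed above is a flat FixedArray of (number, string) pairs, so the mask is half the array length minus one, and the final index is scaled by two pointer-sized slots per entry; a heap number hashes by xoring the two 32-bit halves of its bit pattern. A hedged C++ sketch of both pieces:

    #include <cstdint>
    #include <cstring>

    // movl of the high half xor'd with the low half of the double's bits.
    uint32_t HeapNumberHash(double value) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof bits);
      return static_cast<uint32_t>(bits) ^ static_cast<uint32_t>(bits >> 32);
    }

    // GenerateConvertHashCodeToIndex: and_(hash, mask) then shl by
    // kPointerSizeLog2 + 1, i.e. two pointer-sized slots per cache entry.
    int64_t CacheEntryIndex(uint32_t hash, uint32_t cache_length_in_slots) {
      uint32_t mask = cache_length_in_slots / 2 - 1;  // shrl(1); subq(1)
      return static_cast<int64_t>(hash & mask) * 2;
    }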
3302 __ JumpIfNotBothSmi(rax, rdx, &non_smi); in Generate()
3303 __ subq(rdx, rax); in Generate()
3304 __ j(no_overflow, &smi_done); in Generate()
3305 __ not_(rdx); // Correct sign in case of overflow. rdx cannot be 0 here. in Generate()
3306 __ bind(&smi_done); in Generate()
3307 __ movq(rax, rdx); in Generate()
3308 __ ret(0); in Generate()
3309 __ bind(&non_smi); in Generate()
3312 __ JumpIfNotSmi(rdx, &ok); in Generate()
3313 __ JumpIfNotSmi(rax, &ok); in Generate()
3314 __ Abort("CompareStub: smi operands"); in Generate()
3315 __ bind(&ok); in Generate()
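
The smi fast path above compares by subtraction; when the subtraction overflows, the sign of the result is exactly inverted, and since only the sign is inspected afterwards a bitwise NOT repairs it. A worked example shrunk to 8 bits so the overflow is easy to see:

    #include <cstdint>
    #include <cstdio>

    // Demonstration only. b - a overflows, flipping the sign; ~x (== -x - 1)
    // flips it back. Only the sign of the result is consumed by the caller,
    // and with real smis (low tag bits all zero) the NOT can never yield 0.
    int main() {
      int8_t a = 100, b = -100;
      int8_t diff = static_cast<int8_t>(b - a);   // true value -200 wraps to 56
      int8_t fixed = static_cast<int8_t>(~diff);  // -57: sign is correct again
      std::printf("wrapped=%d fixed=%d\n", diff, fixed);
      return 0;
    }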
3326 __ cmpq(rax, rdx); in Generate()
3327 __ j(not_equal, &not_identical, Label::kNear); in Generate()
3333 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex); in Generate()
3334 __ j(not_equal, &check_for_nan, Label::kNear); in Generate()
3335 __ Set(rax, NegativeComparisonResult(cc_)); in Generate()
3336 __ ret(0); in Generate()
3337 __ bind(&check_for_nan); in Generate()
3346 __ Set(rax, EQUAL); in Generate()
3347 __ ret(0); in Generate()
3351 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), in Generate()
3353 __ j(equal, &heap_number, Label::kNear); in Generate()
3356 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); in Generate()
3357 __ j(above_equal, &not_identical, Label::kNear); in Generate()
3359 __ Set(rax, EQUAL); in Generate()
3360 __ ret(0); in Generate()
3362 __ bind(&heap_number); in Generate()
3367 __ Set(rax, EQUAL); in Generate()
3368 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); in Generate()
3369 __ ucomisd(xmm0, xmm0); in Generate()
3370 __ setcc(parity_even, rax); in Generate()
3373 __ neg(rax); in Generate()
3375 __ ret(0); in Generate()
3378 __ bind(&not_identical); in Generate()
3393 __ SelectNonSmi(rbx, rax, rdx, &not_smis); in Generate()
3396 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), in Generate()
3399 __ j(equal, &slow); in Generate()
3401 __ movq(rax, rbx); in Generate()
3402 __ ret(0); in Generate()
3404 __ bind(&not_smis); in Generate()
3414 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); in Generate()
3415 __ j(below, &first_non_object, Label::kNear); in Generate()
3419 __ bind(&return_not_equal); in Generate()
3420 __ ret(0); in Generate()
3422 __ bind(&first_non_object); in Generate()
3424 __ CmpInstanceType(rcx, ODDBALL_TYPE); in Generate()
3425 __ j(equal, &return_not_equal); in Generate()
3427 __ CmpObjectType(rdx, FIRST_SPEC_OBJECT_TYPE, rcx); in Generate()
3428 __ j(above_equal, &return_not_equal); in Generate()
3431 __ CmpInstanceType(rcx, ODDBALL_TYPE); in Generate()
3432 __ j(equal, &return_not_equal); in Generate()
3436 __ bind(&slow); in Generate()
3444 __ xorl(rax, rax); in Generate()
3445 __ xorl(rcx, rcx); in Generate()
3446 __ ucomisd(xmm0, xmm1); in Generate()
3449 __ j(parity_even, &unordered, Label::kNear); in Generate()
3451 __ setcc(above, rax); in Generate()
3452 __ setcc(below, rcx); in Generate()
3453 __ subq(rax, rcx); in Generate()
3454 __ ret(0); in Generate()
3458 __ bind(&unordered); in Generate()
3461 __ Set(rax, 1); in Generate()
3463 __ Set(rax, -1); in Generate()
3465 __ ret(0); in Generate()
3468 __ bind(&non_number_comparison); in Generate()
3480 __ ret(0); in Generate()
3483 __ bind(&check_for_strings); in Generate()
3485 __ JumpIfNotBothSequentialAsciiStrings( in Generate()
3506 __ Abort("Unexpected fall-through from string comparison"); in Generate()
3509 __ bind(&check_unequal_objects); in Generate()
3520 __ lea(rcx, Operand(rax, rdx, times_1, 0)); in Generate()
3521 __ testb(rcx, Immediate(kSmiTagMask)); in Generate()
3522 __ j(not_zero, &not_both_objects, Label::kNear); in Generate()
3523 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx); in Generate()
3524 __ j(below, &not_both_objects, Label::kNear); in Generate()
3525 __ CmpObjectType(rdx, FIRST_SPEC_OBJECT_TYPE, rcx); in Generate()
3526 __ j(below, &not_both_objects, Label::kNear); in Generate()
3527 __ testb(FieldOperand(rbx, Map::kBitFieldOffset), in Generate()
3529 __ j(zero, &return_unequal, Label::kNear); in Generate()
3530 __ testb(FieldOperand(rcx, Map::kBitFieldOffset), in Generate()
3532 __ j(zero, &return_unequal, Label::kNear); in Generate()
3535 __ Set(rax, EQUAL); in Generate()
3536 __ bind(&return_unequal); in Generate()
3539 __ ret(0); in Generate()
3540 __ bind(&not_both_objects); in Generate()
3544 __ pop(rcx); in Generate()
3545 __ push(rdx); in Generate()
3546 __ push(rax); in Generate()
3554 __ Push(Smi::FromInt(NegativeComparisonResult(cc_))); in Generate()
3558 __ push(rcx); in Generate()
3562 __ InvokeBuiltin(builtin, JUMP_FUNCTION); in Generate()
3570 __ JumpIfSmi(object, label); in BranchIfNonSymbol()
3571 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset)); in BranchIfNonSymbol()
3572 __ movzxbq(scratch, in BranchIfNonSymbol()
3577 __ testb(scratch, Immediate(kIsSymbolMask)); in BranchIfNonSymbol()
3578 __ j(zero, label); in BranchIfNonSymbol()
3583 __ TailCallRuntime(Runtime::kStackGuard, 0, 1); in Generate()
3588 __ TailCallRuntime(Runtime::kInterrupt, 0, 1); in Generate()
3602 __ movq(rcx, FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset)); in GenerateRecordCallTarget()
3606 __ cmpq(rcx, rdi); in GenerateRecordCallTarget()
3607 __ j(equal, &done, Label::kNear); in GenerateRecordCallTarget()
3608 __ Cmp(rcx, TypeFeedbackCells::MegamorphicSentinel(isolate)); in GenerateRecordCallTarget()
3609 __ j(equal, &done, Label::kNear); in GenerateRecordCallTarget()
3613 __ Cmp(rcx, TypeFeedbackCells::UninitializedSentinel(isolate)); in GenerateRecordCallTarget()
3614 __ j(equal, &initialize, Label::kNear); in GenerateRecordCallTarget()
3617 __ Move(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset), in GenerateRecordCallTarget()
3619 __ jmp(&done, Label::kNear); in GenerateRecordCallTarget()
3622 __ bind(&initialize); in GenerateRecordCallTarget()
3623 __ movq(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset), rdi); in GenerateRecordCallTarget()
3626 __ bind(&done); in GenerateRecordCallTarget()
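
GenerateRecordCallTarget above implements a three-state feedback cell: an uninitialized cell goes monomorphic on the first observed target, a second distinct target collapses it to the megamorphic sentinel, and matching or already-megamorphic cells are left alone. A C++ sketch with placeholder sentinel values:

    #include <cstdint>

    enum : uintptr_t { kUninitializedSentinel = 1, kMegamorphicSentinel = 3 };

    void RecordCallTarget(uintptr_t* cell, uintptr_t function) {
      uintptr_t value = *cell;
      if (value == function || value == kMegamorphicSentinel) return;  // &done
      if (value == kUninitializedSentinel) {
        *cell = function;              // &initialize: cache the first target
      } else {
        *cell = kMegamorphicSentinel;  // a different target was seen
      }
    }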
3642 __ movq(rax, Operand(rsp, (argc_ + 1) * kPointerSize)); in Generate()
3644 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); in Generate()
3645 __ j(not_equal, &call, Label::kNear); in Generate()
3647 __ movq(rbx, GlobalObjectOperand()); in Generate()
3648 __ movq(rbx, FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); in Generate()
3649 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rbx); in Generate()
3650 __ bind(&call); in Generate()
3654 __ JumpIfSmi(rdi, &non_function); in Generate()
3656 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); in Generate()
3657 __ j(not_equal, &slow); in Generate()
3664 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex); in Generate()
3665 __ j(equal, &call_as_function); in Generate()
3666 __ InvokeFunction(rdi, in Generate()
3671 __ bind(&call_as_function); in Generate()
3673 __ InvokeFunction(rdi, in Generate()
3680 __ bind(&slow); in Generate()
3682 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); in Generate()
3683 __ j(not_equal, &non_function); in Generate()
3684 __ pop(rcx); in Generate()
3685 __ push(rdi); // Put proxy as additional argument under return address. in Generate()
3686 __ push(rcx); in Generate()
3687 __ Set(rax, argc_ + 1); in Generate()
3688 __ Set(rbx, 0); in Generate()
3689 __ SetCallKind(rcx, CALL_AS_METHOD); in Generate()
3690 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); in Generate()
3694 __ jmp(adaptor, RelocInfo::CODE_TARGET); in Generate()
3699 __ bind(&non_function); in Generate()
3700 __ movq(Operand(rsp, (argc_ + 1) * kPointerSize), rdi); in Generate()
3701 __ Set(rax, argc_); in Generate()
3702 __ Set(rbx, 0); in Generate()
3703 __ SetCallKind(rcx, CALL_AS_METHOD); in Generate()
3704 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); in Generate()
3707 __ Jump(adaptor, RelocInfo::CODE_TARGET); in Generate()
3718 __ JumpIfSmi(rdi, &non_function_call); in Generate()
3720 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); in Generate()
3721 __ j(not_equal, &slow); in Generate()
3728 __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); in Generate()
3729 __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kConstructStubOffset)); in Generate()
3730 __ lea(rbx, FieldOperand(rbx, Code::kHeaderSize)); in Generate()
3731 __ jmp(rbx); in Generate()
3737 __ bind(&slow); in Generate()
3738 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); in Generate()
3739 __ j(not_equal, &non_function_call); in Generate()
3740 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR); in Generate()
3741 __ jmp(&do_call); in Generate()
3743 __ bind(&non_function_call); in Generate()
3744 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); in Generate()
3745 __ bind(&do_call); in Generate()
3747 __ Set(rbx, 0); in Generate()
3748 __ SetCallKind(rcx, CALL_AS_METHOD); in Generate()
3749 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(), in Generate()
3809 __ CheckStackAlignment(); in GenerateCore()
3818 __ movq(rcx, rax); in GenerateCore()
3820 __ movq(rdi, rax); in GenerateCore()
3822 __ movq(kScratchRegister, in GenerateCore()
3825 __ call(kScratchRegister); in GenerateCore()
3832 __ incl(scope_depth_operand); in GenerateCore()
3839 __ movq(StackSpaceOperand(0), r14); // argc. in GenerateCore()
3840 __ movq(StackSpaceOperand(1), r15); // argv. in GenerateCore()
3844 __ lea(rcx, StackSpaceOperand(0)); in GenerateCore()
3845 __ LoadAddress(rdx, ExternalReference::isolate_address()); in GenerateCore()
3849 __ lea(rcx, StackSpaceOperand(2)); in GenerateCore()
3851 __ lea(rdx, StackSpaceOperand(0)); in GenerateCore()
3852 __ LoadAddress(r8, ExternalReference::isolate_address()); in GenerateCore()
3857 __ movq(rdi, r14); // argc. in GenerateCore()
3858 __ movq(rsi, r15); // argv. in GenerateCore()
3859 __ movq(rdx, ExternalReference::isolate_address()); in GenerateCore()
3861 __ call(rbx); in GenerateCore()
3866 __ decl(scope_depth_operand); in GenerateCore()
3879 __ movq(rax, Operand(rsp, 6 * kPointerSize)); in GenerateCore()
3880 __ movq(rdx, Operand(rsp, 7 * kPointerSize)); in GenerateCore()
3883 __ lea(rcx, Operand(rax, 1)); in GenerateCore()
3885 __ testl(rcx, Immediate(kFailureTagMask)); in GenerateCore()
3886 __ j(zero, &failure_returned); in GenerateCore()
3889 __ LeaveExitFrame(save_doubles_); in GenerateCore()
3890 __ ret(0); in GenerateCore()
3893 __ bind(&failure_returned); in GenerateCore()
3898 __ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize)); in GenerateCore()
3899 __ j(zero, &retry, Label::kNear); in GenerateCore()
3902 __ movq(kScratchRegister, Failure::OutOfMemoryException(), RelocInfo::NONE); in GenerateCore()
3903 __ cmpq(rax, kScratchRegister); in GenerateCore()
3904 __ j(equal, throw_out_of_memory_exception); in GenerateCore()
3911 __ movq(rax, pending_exception_operand); in GenerateCore()
3912 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex); in GenerateCore()
3913 __ movq(pending_exception_operand, rdx); in GenerateCore()
3917 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex); in GenerateCore()
3918 __ j(equal, throw_termination_exception); in GenerateCore()
3921 __ jmp(throw_normal_exception); in GenerateCore()
3924 __ bind(&retry); in GenerateCore()
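
The failure check above relies on the tagging scheme of this era: failure pointers end in binary 11, so adding one clears the low bits, and the failure type occupies the next bit field, with type zero meaning retry-after-GC. A hedged C++ rendering of both tests:

    #include <cstdint>

    // Constants assumed from this era: kFailureTagSize == 2, mask == 3,
    // kFailureTypeTagSize == 2, RETRY_AFTER_GC == 0. Treat as illustrative.
    constexpr uint64_t kFailureTagSize = 2;
    constexpr uint64_t kFailureTagMask = (1 << kFailureTagSize) - 1;
    constexpr uint64_t kFailureTypeTagSize = 2;

    bool IsFailure(uint64_t value) {
      return ((value + 1) & kFailureTagMask) == 0;  // lea(rcx, [rax+1]); testl
    }

    bool IsRetryAfterGC(uint64_t failure) {
      uint64_t type_bits = ((1 << kFailureTypeTagSize) - 1) << kFailureTagSize;
      return (failure & type_bits) == 0;            // j(zero, &retry)
    }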
3946 __ EnterExitFrame(arg_stack_space, save_doubles_); in Generate()
3982 __ movq(rax, failure, RelocInfo::NONE); in Generate()
3990 __ bind(&throw_out_of_memory_exception); in Generate()
3995 __ Set(rax, static_cast<int64_t>(false)); in Generate()
3996 __ Store(external_caught, rax); in Generate()
4001 __ movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE); in Generate()
4002 __ Store(pending_exception, rax); in Generate()
4005 __ bind(&throw_termination_exception); in Generate()
4006 __ ThrowUncatchable(rax); in Generate()
4008 __ bind(&throw_normal_exception); in Generate()
4009 __ Throw(rax); in Generate()
4019 __ push(rbp); in GenerateBody()
4020 __ movq(rbp, rsp); in GenerateBody()
4027 __ movq(kScratchRegister, in GenerateBody()
4030 __ push(kScratchRegister); // context slot in GenerateBody()
4031 __ push(kScratchRegister); // function slot in GenerateBody()
4033 __ push(r12); in GenerateBody()
4034 __ push(r13); in GenerateBody()
4035 __ push(r14); in GenerateBody()
4036 __ push(r15); in GenerateBody()
4038 __ push(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI. in GenerateBody()
4039 __ push(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI. in GenerateBody()
4041 __ push(rbx); in GenerateBody()
4047 __ InitializeSmiConstantRegister(); in GenerateBody()
4048 __ InitializeRootRegister(); in GenerateBody()
4057 __ push(c_entry_fp_operand); in GenerateBody()
4062 __ Load(rax, js_entry_sp); in GenerateBody()
4063 __ testq(rax, rax); in GenerateBody()
4064 __ j(not_zero, &not_outermost_js); in GenerateBody()
4065 __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); in GenerateBody()
4066 __ movq(rax, rbp); in GenerateBody()
4067 __ Store(js_entry_sp, rax); in GenerateBody()
4069 __ jmp(&cont); in GenerateBody()
4070 __ bind(&not_outermost_js); in GenerateBody()
4071 __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)); in GenerateBody()
4072 __ bind(&cont); in GenerateBody()
4076 __ jmp(&invoke); in GenerateBody()
4077 __ bind(&handler_entry); in GenerateBody()
4083 __ Store(pending_exception, rax); in GenerateBody()
4084 __ movq(rax, Failure::Exception(), RelocInfo::NONE); in GenerateBody()
4085 __ jmp(&exit); in GenerateBody()
4089 __ bind(&invoke); in GenerateBody()
4090 __ PushTryHandler(StackHandler::JS_ENTRY, 0); in GenerateBody()
4093 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex); in GenerateBody()
4094 __ Store(pending_exception, rax); in GenerateBody()
4097 __ push(Immediate(0)); // receiver in GenerateBody()
4107 __ Load(rax, construct_entry); in GenerateBody()
4110 __ Load(rax, entry); in GenerateBody()
4112 __ lea(kScratchRegister, FieldOperand(rax, Code::kHeaderSize)); in GenerateBody()
4113 __ call(kScratchRegister); in GenerateBody()
4116 __ PopTryHandler(); in GenerateBody()
4118 __ bind(&exit); in GenerateBody()
4120 __ pop(rbx); in GenerateBody()
4121 __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)); in GenerateBody()
4122 __ j(not_equal, &not_outermost_js_2); in GenerateBody()
4123 __ movq(kScratchRegister, js_entry_sp); in GenerateBody()
4124 __ movq(Operand(kScratchRegister, 0), Immediate(0)); in GenerateBody()
4125 __ bind(&not_outermost_js_2); in GenerateBody()
4129 __ pop(c_entry_fp_operand); in GenerateBody()
4133 __ pop(rbx); in GenerateBody()
4136 __ pop(rsi); in GenerateBody()
4137 __ pop(rdi); in GenerateBody()
4139 __ pop(r15); in GenerateBody()
4140 __ pop(r14); in GenerateBody()
4141 __ pop(r13); in GenerateBody()
4142 __ pop(r12); in GenerateBody()
4143 __ addq(rsp, Immediate(2 * kPointerSize)); // remove markers in GenerateBody()
4146 __ pop(rbp); in GenerateBody()
4147 __ ret(0); in GenerateBody()
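
The JS entry bookkeeping above tracks the outermost entry into JavaScript: js_entry_sp is set (and a distinguishing Smi marker pushed) only when it was previously null, and it is cleared on exit only when the popped marker says this frame was the outermost one. A C++ sketch:

    #include <cstdint>

    enum Marker { INNER_JSENTRY_FRAME, OUTERMOST_JSENTRY_FRAME };

    Marker EnterJS(uint64_t*& js_entry_sp, uint64_t* frame_pointer) {
      if (js_entry_sp == nullptr) {      // testq(rax, rax); j(not_zero, ...)
        js_entry_sp = frame_pointer;     // Store(js_entry_sp, rax)
        return OUTERMOST_JSENTRY_FRAME;
      }
      return INNER_JSENTRY_FRAME;
    }

    void ExitJS(uint64_t*& js_entry_sp, Marker popped) {
      if (popped == OUTERMOST_JSENTRY_FRAME) {  // Cmp(rbx, Smi::FromInt(...))
        js_entry_sp = nullptr;                  // clear the saved entry sp
      }
    }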
4185 __ movq(rax, Operand(rsp, 2 * kPointerSize + extra_stack_space)); in Generate()
4186 __ JumpIfSmi(rax, &slow); in Generate()
4189 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax); in Generate()
4190 __ j(below, &slow); in Generate()
4191 __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE); in Generate()
4192 __ j(above, &slow); in Generate()
4195 __ movq(rdx, Operand(rsp, 1 * kPointerSize + extra_stack_space)); in Generate()
4203 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); in Generate()
4204 __ j(not_equal, &miss, Label::kNear); in Generate()
4205 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex); in Generate()
4206 __ j(not_equal, &miss, Label::kNear); in Generate()
4207 __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); in Generate()
4208 __ ret(2 * kPointerSize); in Generate()
4209 __ bind(&miss); in Generate()
4212 __ TryGetFunctionPrototype(rdx, rbx, &slow, true); in Generate()
4215 __ JumpIfSmi(rbx, &slow); in Generate()
4216 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, kScratchRegister); in Generate()
4217 __ j(below, &slow); in Generate()
4218 __ CmpInstanceType(kScratchRegister, LAST_SPEC_OBJECT_TYPE); in Generate()
4219 __ j(above, &slow); in Generate()
4226 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); in Generate()
4227 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); in Generate()
4230 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); in Generate()
4231 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); in Generate()
4233 __ movl(rdi, Immediate(kWordBeforeMapCheckValue)); in Generate()
4234 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi); in Generate()
4235 __ Assert(equal, "InstanceofStub unexpected call site cache (check)."); in Generate()
4237 __ movq(kScratchRegister, in Generate()
4239 __ movq(Operand(kScratchRegister, 0), rax); in Generate()
4242 __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset)); in Generate()
4246 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex); in Generate()
4247 __ bind(&loop); in Generate()
4248 __ cmpq(rcx, rbx); in Generate()
4249 __ j(equal, &is_instance, Label::kNear); in Generate()
4250 __ cmpq(rcx, kScratchRegister); in Generate()
4253 __ j(equal, &is_not_instance, Label::kNear); in Generate()
4254 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset)); in Generate()
4255 __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset)); in Generate()
4256 __ jmp(&loop); in Generate()
4258 __ bind(&is_instance); in Generate()
4260 __ xorl(rax, rax); in Generate()
4263 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); in Generate()
4270 __ movl(rax, Immediate(true_offset)); in Generate()
4271 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); in Generate()
4272 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); in Generate()
4273 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); in Generate()
4275 __ movl(rax, Immediate(kWordBeforeResultValue)); in Generate()
4276 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); in Generate()
4277 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)."); in Generate()
4279 __ Set(rax, 0); in Generate()
4281 __ ret(2 * kPointerSize + extra_stack_space); in Generate()
4283 __ bind(&is_not_instance); in Generate()
4286 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); in Generate()
4293 __ movl(rax, Immediate(false_offset)); in Generate()
4294 __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); in Generate()
4295 __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); in Generate()
4296 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); in Generate()
4298 __ movl(rax, Immediate(kWordBeforeResultValue)); in Generate()
4299 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax); in Generate()
4300 __ Assert(equal, "InstanceofStub unexpected call site cache (mov)."); in Generate()
4303 __ ret(2 * kPointerSize + extra_stack_space); in Generate()
4306 __ bind(&slow); in Generate()
4309 __ pop(rcx); in Generate()
4310 __ pop(rax); in Generate()
4311 __ push(rcx); in Generate()
4313 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION); in Generate()
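
The heart of the instanceof stub above is a plain prototype-chain walk, with the answer (plus the function/map pair) memoized in root-table cache slots so a repeated test at the same call site can return immediately. The walk itself, as a C++ sketch over stand-in heap types:

    // Sketch only: Object/Map are stand-ins for the real tagged heap layout.
    struct Map;
    struct Object { Map* map; };
    struct Map { Object* prototype; };

    bool IsInstance(Object* object, Object* function_prototype,
                    Object* null_value) {
      Object* current = object->map->prototype;
      for (;;) {
        if (current == function_prototype) return true;  // j(equal, &is_instance)
        if (current == null_value) return false;    // j(equal, &is_not_instance)
        current = current->map->prototype;          // follow one prototype link
      }
    }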
4372 __ JumpIfSmi(object_, receiver_not_string_); in GenerateFast()
4375 __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset)); in GenerateFast()
4376 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); in GenerateFast()
4378 __ testb(result_, Immediate(kIsNotStringMask)); in GenerateFast()
4379 __ j(not_zero, receiver_not_string_); in GenerateFast()
4382 __ JumpIfNotSmi(index_, &index_not_smi_); in GenerateFast()
4383 __ bind(&got_smi_index_); in GenerateFast()
4386 __ SmiCompare(index_, FieldOperand(object_, String::kLengthOffset)); in GenerateFast()
4387 __ j(above_equal, index_out_of_range_); in GenerateFast()
4389 __ SmiToInteger32(index_, index_); in GenerateFast()
4394 __ Integer32ToSmi(result_, result_); in GenerateFast()
4395 __ bind(&exit_); in GenerateFast()
4402 __ Abort("Unexpected fallthrough to CharCodeAt slow case"); in GenerateSlow()
4406 __ bind(&index_not_smi_); in GenerateSlow()
4408 __ CheckMap(index_, in GenerateSlow()
4413 __ push(object_); in GenerateSlow()
4414 __ push(index_); // Consumed by runtime conversion function. in GenerateSlow()
4416 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1); in GenerateSlow()
4420 __ CallRuntime(Runtime::kNumberToSmi, 1); in GenerateSlow()
4425 __ movq(index_, rax); in GenerateSlow()
4427 __ pop(object_); in GenerateSlow()
4429 __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset)); in GenerateSlow()
4430 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); in GenerateSlow()
4433 __ JumpIfNotSmi(index_, index_out_of_range_); in GenerateSlow()
4435 __ jmp(&got_smi_index_); in GenerateSlow()
4440 __ bind(&call_runtime_); in GenerateSlow()
4442 __ push(object_); in GenerateSlow()
4443 __ Integer32ToSmi(index_, index_); in GenerateSlow()
4444 __ push(index_); in GenerateSlow()
4445 __ CallRuntime(Runtime::kStringCharCodeAt, 2); in GenerateSlow()
4447 __ movq(result_, rax); in GenerateSlow()
4450 __ jmp(&exit_); in GenerateSlow()
4452 __ Abort("Unexpected fallthrough from CharCodeAt slow case"); in GenerateSlow()
4461 __ JumpIfNotSmi(code_, &slow_case_); in GenerateFast()
4462 __ SmiCompare(code_, Smi::FromInt(String::kMaxAsciiCharCode)); in GenerateFast()
4463 __ j(above, &slow_case_); in GenerateFast()
4465 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex); in GenerateFast()
4467 __ movq(result_, FieldOperand(result_, index.reg, index.scale, in GenerateFast()
4469 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex); in GenerateFast()
4470 __ j(equal, &slow_case_); in GenerateFast()
4471 __ bind(&exit_); in GenerateFast()
4478 __ Abort("Unexpected fallthrough to CharFromCode slow case"); in GenerateSlow()
4480 __ bind(&slow_case_); in GenerateSlow()
4482 __ push(code_); in GenerateSlow()
4483 __ CallRuntime(Runtime::kCharFromCode, 1); in GenerateSlow()
4485 __ movq(result_, rax); in GenerateSlow()
4488 __ jmp(&exit_); in GenerateSlow()
4490 __ Abort("Unexpected fallthrough from CharFromCode slow case"); in GenerateSlow()
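
The char-from-code fast path above serves codes up to String::kMaxAsciiCharCode from a cache of prebuilt one-character strings; an undefined slot, a non-smi code, or an out-of-range code falls through to the runtime. A sketch:

    #include <cstdint>

    struct String;  // stand-in for the heap string type

    // Returns nullptr where the stub jumps to &slow_case_.
    String* CharFromCodeFast(uint32_t code, String* const* cache,
                             String* undefined_sentinel,
                             uint32_t max_ascii_char_code) {
      if (code > max_ascii_char_code) return nullptr;    // j(above, &slow_case_)
      String* result = cache[code];
      if (result == undefined_sentinel) return nullptr;  // j(equal, &slow_case_)
      return result;
    }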
4516 __ movq(rax, Operand(rsp, 2 * kPointerSize)); // First argument (left). in Generate()
4517 __ movq(rdx, Operand(rsp, 1 * kPointerSize)); // Second argument (right). in Generate()
4521 __ JumpIfSmi(rax, &call_runtime); in Generate()
4522 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8); in Generate()
4523 __ j(above_equal, &call_runtime); in Generate()
4526 __ JumpIfSmi(rdx, &call_runtime); in Generate()
4527 __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, r9); in Generate()
4528 __ j(above_equal, &call_runtime); in Generate()
4550 __ movq(rcx, FieldOperand(rdx, String::kLengthOffset)); in Generate()
4551 __ SmiTest(rcx); in Generate()
4552 __ j(not_zero, &second_not_zero_length, Label::kNear); in Generate()
4555 __ IncrementCounter(counters->string_add_native(), 1); in Generate()
4556 __ ret(2 * kPointerSize); in Generate()
4557 __ bind(&second_not_zero_length); in Generate()
4558 __ movq(rbx, FieldOperand(rax, String::kLengthOffset)); in Generate()
4559 __ SmiTest(rbx); in Generate()
4560 __ j(not_zero, &both_not_zero_length, Label::kNear); in Generate()
4562 __ movq(rax, rdx); in Generate()
4563 __ IncrementCounter(counters->string_add_native(), 1); in Generate()
4564 __ ret(2 * kPointerSize); in Generate()
4574 __ bind(&both_not_zero_length); in Generate()
4579 __ movq(r8, FieldOperand(rax, HeapObject::kMapOffset)); in Generate()
4580 __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset)); in Generate()
4583 __ movzxbl(r8, FieldOperand(r8, Map::kInstanceTypeOffset)); in Generate()
4584 __ movzxbl(r9, FieldOperand(r9, Map::kInstanceTypeOffset)); in Generate()
4588 __ SmiAdd(rbx, rbx, rcx); in Generate()
4591 __ SmiCompare(rbx, Smi::FromInt(2)); in Generate()
4592 __ j(not_equal, &longer_than_two); in Generate()
4595 __ JumpIfBothInstanceTypesAreNotSequentialAscii(r8, r9, rbx, rcx, in Generate()
4599 __ movzxbq(rbx, FieldOperand(rax, SeqAsciiString::kHeaderSize)); in Generate()
4600 __ movzxbq(rcx, FieldOperand(rdx, SeqAsciiString::kHeaderSize)); in Generate()
4607 __ IncrementCounter(counters->string_add_native(), 1); in Generate()
4608 __ ret(2 * kPointerSize); in Generate()
4610 __ bind(&make_two_character_string); in Generate()
4611 __ Set(rdi, 2); in Generate()
4612 __ AllocateAsciiString(rax, rdi, r8, r9, r11, &call_runtime); in Generate()
4616 __ movzxbq(rcx, FieldOperand(rdx, SeqAsciiString::kHeaderSize)); in Generate()
4617 __ shll(rcx, Immediate(kBitsPerByte)); in Generate()
4618 __ orl(rbx, rcx); in Generate()
4620 __ movw(FieldOperand(rax, SeqAsciiString::kHeaderSize), rbx); in Generate()
4621 __ IncrementCounter(counters->string_add_native(), 1); in Generate()
4622 __ ret(2 * kPointerSize); in Generate()
4624 __ bind(&longer_than_two); in Generate()
4626 __ SmiCompare(rbx, Smi::FromInt(ConsString::kMinLength)); in Generate()
4627 __ j(below, &string_add_flat_result); in Generate()
4630 __ SmiCompare(rbx, Smi::FromInt(String::kMaxLength)); in Generate()
4631 __ j(above, &call_runtime); in Generate()
4641 __ movl(rcx, r8); in Generate()
4642 __ and_(rcx, r9); in Generate()
4645 __ testl(rcx, Immediate(kStringEncodingMask)); in Generate()
4646 __ j(zero, &non_ascii); in Generate()
4647 __ bind(&ascii_data); in Generate()
4649 __ AllocateAsciiConsString(rcx, rdi, no_reg, &call_runtime); in Generate()
4650 __ bind(&allocated); in Generate()
4652 __ movq(FieldOperand(rcx, ConsString::kLengthOffset), rbx); in Generate()
4653 __ movq(FieldOperand(rcx, ConsString::kHashFieldOffset), in Generate()
4655 __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax); in Generate()
4656 __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx); in Generate()
4657 __ movq(rax, rcx); in Generate()
4658 __ IncrementCounter(counters->string_add_native(), 1); in Generate()
4659 __ ret(2 * kPointerSize); in Generate()
4660 __ bind(&non_ascii); in Generate()
4666 __ testb(rcx, Immediate(kAsciiDataHintMask)); in Generate()
4667 __ j(not_zero, &ascii_data); in Generate()
4668 __ xor_(r8, r9); in Generate()
4670 __ andb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag)); in Generate()
4671 __ cmpb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag)); in Generate()
4672 __ j(equal, &ascii_data); in Generate()
4674 __ AllocateTwoByteConsString(rcx, rdi, no_reg, &call_runtime); in Generate()
4675 __ jmp(&allocated); in Generate()
4688 __ bind(&string_add_flat_result); in Generate()
4690 __ SmiToInteger32(r14, FieldOperand(rax, SeqString::kLengthOffset)); in Generate()
4693 __ testb(r8, Immediate(kStringRepresentationMask)); in Generate()
4694 __ j(zero, &first_is_sequential, Label::kNear); in Generate()
4697 __ testb(r8, Immediate(kShortExternalStringMask)); in Generate()
4698 __ j(not_zero, &call_runtime); in Generate()
4699 __ movq(rcx, FieldOperand(rax, ExternalString::kResourceDataOffset)); in Generate()
4700 __ jmp(&first_prepared, Label::kNear); in Generate()
4701 __ bind(&first_is_sequential); in Generate()
4703 __ lea(rcx, FieldOperand(rax, SeqAsciiString::kHeaderSize)); in Generate()
4704 __ bind(&first_prepared); in Generate()
4707 __ xorl(r8, r9); in Generate()
4708 __ testb(r8, Immediate(kStringEncodingMask)); in Generate()
4709 __ j(not_zero, &call_runtime); in Generate()
4711 __ SmiToInteger32(r15, FieldOperand(rdx, SeqString::kLengthOffset)); in Generate()
4714 __ testb(r9, Immediate(kStringRepresentationMask)); in Generate()
4715 __ j(zero, &second_is_sequential, Label::kNear); in Generate()
4718 __ testb(r9, Immediate(kShortExternalStringMask)); in Generate()
4719 __ j(not_zero, &call_runtime); in Generate()
4720 __ movq(rdx, FieldOperand(rdx, ExternalString::kResourceDataOffset)); in Generate()
4721 __ jmp(&second_prepared, Label::kNear); in Generate()
4722 __ bind(&second_is_sequential); in Generate()
4724 __ lea(rdx, FieldOperand(rdx, SeqAsciiString::kHeaderSize)); in Generate()
4725 __ bind(&second_prepared); in Generate()
4731 __ SmiToInteger32(rbx, rbx); in Generate()
4732 __ testb(r9, Immediate(kStringEncodingMask)); in Generate()
4733 __ j(zero, &non_ascii_string_add_flat_result); in Generate()
4735 __ bind(&make_flat_ascii_string); in Generate()
4737 __ AllocateAsciiString(rax, rbx, rdi, r8, r9, &call_runtime); in Generate()
4740 __ lea(rbx, FieldOperand(rax, SeqAsciiString::kHeaderSize)); in Generate()
4749 __ IncrementCounter(counters->string_add_native(), 1); in Generate()
4750 __ ret(2 * kPointerSize); in Generate()
4752 __ bind(&non_ascii_string_add_flat_result); in Generate()
4754 __ AllocateTwoByteString(rax, rbx, rdi, r8, r9, &call_runtime); in Generate()
4757 __ lea(rbx, FieldOperand(rax, SeqTwoByteString::kHeaderSize)); in Generate()
4766 __ IncrementCounter(counters->string_add_native(), 1); in Generate()
4767 __ ret(2 * kPointerSize); in Generate()
4770 __ bind(&call_runtime); in Generate()
4771 __ TailCallRuntime(Runtime::kStringAdd, 2, 1); in Generate()
4774 __ bind(&call_builtin); in Generate()
4775 __ InvokeBuiltin(builtin_id, JUMP_FUNCTION); in Generate()
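
The two-character fast path above packs both ASCII bytes into one 16-bit value, second character in the high byte, so a single movw lays them out in string order on a little-endian machine. The same packing in C++:

    #include <cstdint>
    #include <cstring>

    void StoreTwoAsciiChars(uint8_t* dest, uint8_t first, uint8_t second) {
      // shll(rcx, Immediate(kBitsPerByte)); orl(rbx, rcx); movw(...)
      uint16_t packed = static_cast<uint16_t>(first | (second << 8));
      std::memcpy(dest, &packed, sizeof packed);  // little-endian: first byte first
    }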
4789 __ JumpIfSmi(arg, &not_string); in GenerateConvertArgument()
4790 __ CmpObjectType(arg, FIRST_NONSTRING_TYPE, scratch1); in GenerateConvertArgument()
4791 __ j(below, &done); in GenerateConvertArgument()
4795 __ bind(&not_string); in GenerateConvertArgument()
4804 __ movq(arg, scratch1); in GenerateConvertArgument()
4805 __ movq(Operand(rsp, stack_offset), arg); in GenerateConvertArgument()
4806 __ jmp(&done); in GenerateConvertArgument()
4809 __ bind(&not_cached); in GenerateConvertArgument()
4810 __ JumpIfSmi(arg, slow); in GenerateConvertArgument()
4811 __ CmpObjectType(arg, JS_VALUE_TYPE, scratch1); // map -> scratch1. in GenerateConvertArgument()
4812 __ j(not_equal, slow); in GenerateConvertArgument()
4813 __ testb(FieldOperand(scratch1, Map::kBitField2Offset), in GenerateConvertArgument()
4815 __ j(zero, slow); in GenerateConvertArgument()
4816 __ movq(arg, FieldOperand(arg, JSValue::kValueOffset)); in GenerateConvertArgument()
4817 __ movq(Operand(rsp, stack_offset), arg); in GenerateConvertArgument()
4819 __ bind(&done); in GenerateConvertArgument()
4829 __ bind(&loop); in GenerateCopyCharacters()
4833 __ movb(kScratchRegister, Operand(src, 0)); in GenerateCopyCharacters()
4834 __ movb(Operand(dest, 0), kScratchRegister); in GenerateCopyCharacters()
4835 __ incq(src); in GenerateCopyCharacters()
4836 __ incq(dest); in GenerateCopyCharacters()
4838 __ movzxwl(kScratchRegister, Operand(src, 0)); in GenerateCopyCharacters()
4839 __ movw(Operand(dest, 0), kScratchRegister); in GenerateCopyCharacters()
4840 __ addq(src, Immediate(2)); in GenerateCopyCharacters()
4841 __ addq(dest, Immediate(2)); in GenerateCopyCharacters()
4843 __ decl(count); in GenerateCopyCharacters()
4844 __ j(not_zero, &loop); in GenerateCopyCharacters()
4863 __ testl(count, count); in GenerateCopyCharactersREP()
4864 __ j(zero, &done, Label::kNear); in GenerateCopyCharactersREP()
4869 __ addl(count, count); in GenerateCopyCharactersREP()
4874 __ testl(count, Immediate(~7)); in GenerateCopyCharactersREP()
4875 __ j(zero, &last_bytes, Label::kNear); in GenerateCopyCharactersREP()
4878 __ movl(kScratchRegister, count); in GenerateCopyCharactersREP()
4879 __ shr(count, Immediate(3)); // Number of quadwords to copy. in GenerateCopyCharactersREP()
4880 __ repmovsq(); in GenerateCopyCharactersREP()
4883 __ movl(count, kScratchRegister); in GenerateCopyCharactersREP()
4884 __ and_(count, Immediate(7)); in GenerateCopyCharactersREP()
4887 __ bind(&last_bytes); in GenerateCopyCharactersREP()
4888 __ testl(count, count); in GenerateCopyCharactersREP()
4889 __ j(zero, &done, Label::kNear); in GenerateCopyCharactersREP()
4893 __ bind(&loop); in GenerateCopyCharactersREP()
4894 __ movb(kScratchRegister, Operand(src, 0)); in GenerateCopyCharactersREP()
4895 __ movb(Operand(dest, 0), kScratchRegister); in GenerateCopyCharactersREP()
4896 __ incq(src); in GenerateCopyCharactersREP()
4897 __ incq(dest); in GenerateCopyCharactersREP()
4898 __ decl(count); in GenerateCopyCharactersREP()
4899 __ j(not_zero, &loop); in GenerateCopyCharactersREP()
4901 __ bind(&done); in GenerateCopyCharactersREP()
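
GenerateCopyCharactersREP above widens the character count to a byte count for two-byte strings, bulk-copies whole quadwords with rep movsq, then finishes the remaining zero to seven bytes in a byte loop. Equivalent C++:

    #include <cstdint>
    #include <cstring>

    void CopyCharactersRep(uint8_t* dest, const uint8_t* src,
                           uint32_t count, bool ascii) {
      uint32_t bytes = ascii ? count : count * 2;  // addl(count, count)
      uint32_t quads = bytes >> 3;                 // shr(count, Immediate(3))
      std::memcpy(dest, src, quads * 8u);          // repmovsq()
      dest += quads * 8u;
      src += quads * 8u;
      for (uint32_t i = 0; i < (bytes & 7u); ++i) {  // and_(count, Immediate(7))
        dest[i] = src[i];                            // trailing byte loop
      }
    }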
4918 __ leal(scratch, Operand(c1, -'0')); in GenerateTwoCharacterSymbolTableProbe()
4919 __ cmpl(scratch, Immediate(static_cast<int>('9' - '0'))); in GenerateTwoCharacterSymbolTableProbe()
4920 __ j(above, &not_array_index, Label::kNear); in GenerateTwoCharacterSymbolTableProbe()
4921 __ leal(scratch, Operand(c2, -'0')); in GenerateTwoCharacterSymbolTableProbe()
4922 __ cmpl(scratch, Immediate(static_cast<int>('9' - '0'))); in GenerateTwoCharacterSymbolTableProbe()
4923 __ j(below_equal, not_found); in GenerateTwoCharacterSymbolTableProbe()
4925 __ bind(&not_array_index); in GenerateTwoCharacterSymbolTableProbe()
4934 __ shl(c2, Immediate(kBitsPerByte)); in GenerateTwoCharacterSymbolTableProbe()
4935 __ orl(chars, c2); in GenerateTwoCharacterSymbolTableProbe()
4942 __ LoadRoot(symbol_table, Heap::kSymbolTableRootIndex); in GenerateTwoCharacterSymbolTableProbe()
4946 __ SmiToInteger32(mask, in GenerateTwoCharacterSymbolTableProbe()
4948 __ decl(mask); in GenerateTwoCharacterSymbolTableProbe()
4967 __ movl(scratch, hash); in GenerateTwoCharacterSymbolTableProbe()
4969 __ addl(scratch, Immediate(SymbolTable::GetProbeOffset(i))); in GenerateTwoCharacterSymbolTableProbe()
4971 __ andl(scratch, mask); in GenerateTwoCharacterSymbolTableProbe()
4975 __ movq(candidate, in GenerateTwoCharacterSymbolTableProbe()
4983 __ CmpObjectType(candidate, ODDBALL_TYPE, map); in GenerateTwoCharacterSymbolTableProbe()
4984 __ j(not_equal, &is_string, Label::kNear); in GenerateTwoCharacterSymbolTableProbe()
4986 __ CompareRoot(candidate, Heap::kUndefinedValueRootIndex); in GenerateTwoCharacterSymbolTableProbe()
4987 __ j(equal, not_found); in GenerateTwoCharacterSymbolTableProbe()
4990 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex); in GenerateTwoCharacterSymbolTableProbe()
4991 __ cmpq(kScratchRegister, candidate); in GenerateTwoCharacterSymbolTableProbe()
4992 __ Assert(equal, "oddball in symbol table is not undefined or the hole"); in GenerateTwoCharacterSymbolTableProbe()
4994 __ jmp(&next_probe[i]); in GenerateTwoCharacterSymbolTableProbe()
4996 __ bind(&is_string); in GenerateTwoCharacterSymbolTableProbe()
4999 __ SmiCompare(FieldOperand(candidate, String::kLengthOffset), in GenerateTwoCharacterSymbolTableProbe()
5001 __ j(not_equal, &next_probe[i]); in GenerateTwoCharacterSymbolTableProbe()
5008 __ movzxbl(temp, FieldOperand(map, Map::kInstanceTypeOffset)); in GenerateTwoCharacterSymbolTableProbe()
5009 __ JumpIfInstanceTypeIsNotSequentialAscii( in GenerateTwoCharacterSymbolTableProbe()
5013 __ movl(temp, FieldOperand(candidate, SeqAsciiString::kHeaderSize)); in GenerateTwoCharacterSymbolTableProbe()
5014 __ andl(temp, Immediate(0x0000ffff)); in GenerateTwoCharacterSymbolTableProbe()
5015 __ cmpl(chars, temp); in GenerateTwoCharacterSymbolTableProbe()
5016 __ j(equal, &found_in_symbol_table); in GenerateTwoCharacterSymbolTableProbe()
5017 __ bind(&next_probe[i]); in GenerateTwoCharacterSymbolTableProbe()
5021 __ jmp(not_found); in GenerateTwoCharacterSymbolTableProbe()
5025 __ bind(&found_in_symbol_table); in GenerateTwoCharacterSymbolTableProbe()
5027 __ movq(rax, result); in GenerateTwoCharacterSymbolTableProbe()
5037 __ LoadRoot(scratch, Heap::kHashSeedRootIndex); in GenerateHashInit()
5038 __ SmiToInteger32(scratch, scratch); in GenerateHashInit()
5039 __ addl(scratch, character); in GenerateHashInit()
5040 __ movl(hash, scratch); in GenerateHashInit()
5041 __ shll(scratch, Immediate(10)); in GenerateHashInit()
5042 __ addl(hash, scratch); in GenerateHashInit()
5044 __ movl(scratch, hash); in GenerateHashInit()
5045 __ shrl(scratch, Immediate(6)); in GenerateHashInit()
5046 __ xorl(hash, scratch); in GenerateHashInit()
5055 __ addl(hash, character); in GenerateHashAddCharacter()
5057 __ movl(scratch, hash); in GenerateHashAddCharacter()
5058 __ shll(scratch, Immediate(10)); in GenerateHashAddCharacter()
5059 __ addl(hash, scratch); in GenerateHashAddCharacter()
5061 __ movl(scratch, hash); in GenerateHashAddCharacter()
5062 __ shrl(scratch, Immediate(6)); in GenerateHashAddCharacter()
5063 __ xorl(hash, scratch); in GenerateHashAddCharacter()
5071 __ leal(hash, Operand(hash, hash, times_8, 0)); in GenerateHashGetHash()
5073 __ movl(scratch, hash); in GenerateHashGetHash()
5074 __ shrl(scratch, Immediate(11)); in GenerateHashGetHash()
5075 __ xorl(hash, scratch); in GenerateHashGetHash()
5077 __ movl(scratch, hash); in GenerateHashGetHash()
5078 __ shll(scratch, Immediate(15)); in GenerateHashGetHash()
5079 __ addl(hash, scratch); in GenerateHashGetHash()
5081 __ andl(hash, Immediate(String::kHashBitMask)); in GenerateHashGetHash()
5085 __ j(not_zero, &hash_not_zero); in GenerateHashGetHash()
5086 __ Set(hash, StringHasher::kZeroHash); in GenerateHashGetHash()
5087 __ bind(&hash_not_zero); in GenerateHashGetHash()
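
The three hash helpers above are V8's seeded one-at-a-time string hash: seed plus first character, an add-shift/xor-shift mix per character, and a three-step finalization masked to the hash-bit field with a fixed nonzero fallback. A C++ rendering of the listed shift sequence (the 30-bit mask and the fallback value 27 match this era's constants but should be treated as illustrative):

    #include <cstdint>

    constexpr uint32_t kHashBitMask = (1u << 30) - 1;  // String::kHashBitMask
    constexpr uint32_t kZeroHash = 27;                 // StringHasher::kZeroHash

    uint32_t HashInit(uint32_t seed, uint32_t c) {  // GenerateHashInit
      uint32_t hash = seed + c;
      hash += hash << 10;
      hash ^= hash >> 6;
      return hash;
    }

    uint32_t HashAddCharacter(uint32_t hash, uint32_t c) {  // GenerateHashAddCharacter
      hash += c;
      hash += hash << 10;
      hash ^= hash >> 6;
      return hash;
    }

    uint32_t HashGetHash(uint32_t hash) {  // GenerateHashGetHash
      hash += hash << 3;   // leal(hash, Operand(hash, hash, times_8, 0))
      hash ^= hash >> 11;
      hash += hash << 15;
      hash &= kHashBitMask;
      return hash == 0 ? kZeroHash : hash;  // never return a zero hash
    }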
5105 __ movq(rax, Operand(rsp, kStringOffset)); in Generate()
5107 __ testl(rax, Immediate(kSmiTagMask)); in Generate()
5108 __ j(zero, &runtime); in Generate()
5110 __ j(NegateCondition(is_string), &runtime); in Generate()
5116 __ movq(rcx, Operand(rsp, kToOffset)); in Generate()
5117 __ movq(rdx, Operand(rsp, kFromOffset)); in Generate()
5118 __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime); in Generate()
5120 __ SmiSub(rcx, rcx, rdx); // Overflow doesn't happen. in Generate()
5121 __ cmpq(FieldOperand(rax, String::kLengthOffset), rcx); in Generate()
5123 __ j(not_equal, &not_original_string, Label::kNear); in Generate()
5125 __ IncrementCounter(counters->sub_string_native(), 1); in Generate()
5126 __ ret(kArgumentsSize); in Generate()
5127 __ bind(&not_original_string); in Generate()
5131 __ SmiToInteger32(rcx, rcx); in Generate()
5132 __ cmpl(rcx, Immediate(2)); in Generate()
5133 __ j(greater, &result_longer_than_two); in Generate()
5134 __ j(less, &runtime); in Generate()
5141 __ JumpIfInstanceTypeIsNotSequentialAscii(rbx, rbx, &runtime); in Generate()
5144 __ SmiToInteger32(rdx, rdx); // From index is no longer smi. in Generate()
5145 __ movzxbq(rbx, FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize)); in Generate()
5146 __ movzxbq(rdi, in Generate()
5153 __ IncrementCounter(counters->sub_string_native(), 1); in Generate()
5154 __ ret(3 * kPointerSize); in Generate()
5156 __ bind(&make_two_character_string); in Generate()
5158 __ movzxwq(rbx, FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize)); in Generate()
5159 __ AllocateAsciiString(rax, rcx, r11, r14, r15, &runtime); in Generate()
5160 __ movw(FieldOperand(rax, SeqAsciiString::kHeaderSize), rbx); in Generate()
5161 __ IncrementCounter(counters->sub_string_native(), 1); in Generate()
5162 __ ret(3 * kPointerSize); in Generate()
5164 __ bind(&result_longer_than_two); in Generate()
5175 __ testb(rbx, Immediate(kIsIndirectStringMask)); in Generate()
5176 __ j(zero, &seq_or_external_string, Label::kNear); in Generate()
5178 __ testb(rbx, Immediate(kSlicedNotConsMask)); in Generate()
5179 __ j(not_zero, &sliced_string, Label::kNear); in Generate()
5182 __ CompareRoot(FieldOperand(rax, ConsString::kSecondOffset), in Generate()
5184 __ j(not_equal, &runtime); in Generate()
5185 __ movq(rdi, FieldOperand(rax, ConsString::kFirstOffset)); in Generate()
5187 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); in Generate()
5188 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); in Generate()
5189 __ jmp(&underlying_unpacked, Label::kNear); in Generate()
5191 __ bind(&sliced_string); in Generate()
5193 __ addq(rdx, FieldOperand(rax, SlicedString::kOffsetOffset)); in Generate()
5194 __ movq(rdi, FieldOperand(rax, SlicedString::kParentOffset)); in Generate()
5196 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); in Generate()
5197 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); in Generate()
5198 __ jmp(&underlying_unpacked, Label::kNear); in Generate()
5200 __ bind(&seq_or_external_string); in Generate()
5202 __ movq(rdi, rax); in Generate()
5204 __ bind(&underlying_unpacked); in Generate()
5214 __ cmpq(rcx, Immediate(SlicedString::kMinLength)); in Generate()
5216 __ j(less, &copy_routine); in Generate()
5225 __ testb(rbx, Immediate(kStringEncodingMask)); in Generate()
5226 __ j(zero, &two_byte_slice, Label::kNear); in Generate()
5227 __ AllocateAsciiSlicedString(rax, rbx, r14, &runtime); in Generate()
5228 __ jmp(&set_slice_header, Label::kNear); in Generate()
5229 __ bind(&two_byte_slice); in Generate()
5230 __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime); in Generate()
5231 __ bind(&set_slice_header); in Generate()
5232 __ Integer32ToSmi(rcx, rcx); in Generate()
5233 __ movq(FieldOperand(rax, SlicedString::kLengthOffset), rcx); in Generate()
5234 __ movq(FieldOperand(rax, SlicedString::kHashFieldOffset), in Generate()
5236 __ movq(FieldOperand(rax, SlicedString::kParentOffset), rdi); in Generate()
5237 __ movq(FieldOperand(rax, SlicedString::kOffsetOffset), rdx); in Generate()
5238 __ IncrementCounter(counters->sub_string_native(), 1); in Generate()
5239 __ ret(kArgumentsSize); in Generate()
5241 __ bind(&copy_routine); in Generate()
5253 __ testb(rbx, Immediate(kExternalStringTag)); in Generate()
5254 __ j(zero, &sequential_string); in Generate()
5259 __ testb(rbx, Immediate(kShortExternalStringMask)); in Generate()
5260 __ j(not_zero, &runtime); in Generate()
5261 __ movq(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset)); in Generate()
5264 __ subq(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); in Generate()
5266 __ bind(&sequential_string); in Generate()
5268 __ testb(rbx, Immediate(kStringEncodingMask)); in Generate()
5269 __ j(zero, &two_byte_sequential); in Generate()
5272 __ AllocateAsciiString(rax, rcx, r11, r14, r15, &runtime); in Generate()
5276 __ movq(r14, rsi); // rsi used by following code. in Generate()
5279 __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale, in Generate()
5283 __ lea(rdi, FieldOperand(rax, SeqAsciiString::kHeaderSize)); in Generate()
5291 __ movq(rsi, r14); // Restore rsi. in Generate()
5292 __ IncrementCounter(counters->sub_string_native(), 1); in Generate()
5293 __ ret(kArgumentsSize); in Generate()
5295 __ bind(&two_byte_sequential); in Generate()
5297 __ AllocateTwoByteString(rax, rcx, r11, r14, r15, &runtime); in Generate()
5301 __ movq(r14, rsi); // rsi used by following code. in Generate()
5304 __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale, in Generate()
5308 __ lea(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize)); in Generate()
5316 __ movq(rsi, r14); // Restore rsi. in Generate()
5317 __ IncrementCounter(counters->sub_string_native(), 1); in Generate()
5318 __ ret(kArgumentsSize); in Generate()
5321 __ bind(&runtime); in Generate()
5322 __ TailCallRuntime(Runtime::kSubString, 3, 1); in Generate()
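Taken together, SubString has three fast exits before Runtime::kSubString: a range covering the whole string returns it unchanged, two-character results go through a dedicated short-string path, and longer results either become a SlicedString over the unpacked underlying string or are flat-copied. A rough C++ sketch of the slice fast path; the field layout is illustrative and the minimum length of 13 is an assumption about SlicedString::kMinLength:

    #include <cstddef>

    // Shape of the object built in the set_slice_header block above:
    // a slice records where its characters live instead of copying them.
    struct SlicedStringSketch {
      const void* parent;  // kParentOffset: the unpacked underlying string
      size_t offset;       // kOffsetOffset: start position within the parent
      size_t length;       // kLengthOffset: stored smi-tagged in the real object
    };

    // Short results are flat-copied so later scans stay cheap;
    // 13 is an assumed value for SlicedString::kMinLength.
    bool ShouldMakeSlice(size_t result_length) {
      return result_length >= 13;  // cmpq(rcx, Immediate(kMinLength)); j(less)
    }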
5335 __ movq(length, FieldOperand(left, String::kLengthOffset)); in GenerateFlatAsciiStringEquals()
5336 __ SmiCompare(length, FieldOperand(right, String::kLengthOffset)); in GenerateFlatAsciiStringEquals()
5337 __ j(equal, &check_zero_length, Label::kNear); in GenerateFlatAsciiStringEquals()
5338 __ Move(rax, Smi::FromInt(NOT_EQUAL)); in GenerateFlatAsciiStringEquals()
5339 __ ret(0); in GenerateFlatAsciiStringEquals()
5343 __ bind(&check_zero_length); in GenerateFlatAsciiStringEquals()
5345 __ SmiTest(length); in GenerateFlatAsciiStringEquals()
5346 __ j(not_zero, &compare_chars, Label::kNear); in GenerateFlatAsciiStringEquals()
5347 __ Move(rax, Smi::FromInt(EQUAL)); in GenerateFlatAsciiStringEquals()
5348 __ ret(0); in GenerateFlatAsciiStringEquals()
5351 __ bind(&compare_chars); in GenerateFlatAsciiStringEquals()
5357 __ Move(rax, Smi::FromInt(EQUAL)); in GenerateFlatAsciiStringEquals()
5358 __ ret(0); in GenerateFlatAsciiStringEquals()
5361 __ bind(&strings_not_equal); in GenerateFlatAsciiStringEquals()
5362 __ Move(rax, Smi::FromInt(NOT_EQUAL)); in GenerateFlatAsciiStringEquals()
5363 __ ret(0); in GenerateFlatAsciiStringEquals()
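The equality helper reduces to: unequal lengths can never match, two empty strings always do, and everything else is decided by the character loop. A compact C++ rendering, with memcmp standing in for GenerateAsciiCharsCompareLoop:

    #include <cstring>

    // Mirrors GenerateFlatAsciiStringEquals on raw ASCII buffers.
    bool FlatAsciiEquals(const char* left, size_t left_len,
                         const char* right, size_t right_len) {
      if (left_len != right_len) return false;  // Smi::FromInt(NOT_EQUAL)
      if (left_len == 0) return true;           // check_zero_length: EQUAL
      return std::memcmp(left, right, left_len) == 0;  // compare_chars
    }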
5379 __ movq(scratch1, FieldOperand(left, String::kLengthOffset)); in GenerateCompareFlatAsciiStrings()
5380 __ movq(scratch4, scratch1); in GenerateCompareFlatAsciiStrings()
5381 __ SmiSub(scratch4, in GenerateCompareFlatAsciiStrings()
5387 __ j(less, &left_shorter, Label::kNear); in GenerateCompareFlatAsciiStrings()
5391 __ SmiSub(scratch1, scratch1, length_difference); in GenerateCompareFlatAsciiStrings()
5392 __ bind(&left_shorter); in GenerateCompareFlatAsciiStrings()
5398 __ SmiTest(min_length); in GenerateCompareFlatAsciiStrings()
5399 __ j(zero, &compare_lengths, Label::kNear); in GenerateCompareFlatAsciiStrings()
5408 __ bind(&compare_lengths); in GenerateCompareFlatAsciiStrings()
5409 __ SmiTest(length_difference); in GenerateCompareFlatAsciiStrings()
5410 __ j(not_zero, &result_not_equal, Label::kNear); in GenerateCompareFlatAsciiStrings()
5413 __ Move(rax, Smi::FromInt(EQUAL)); in GenerateCompareFlatAsciiStrings()
5414 __ ret(0); in GenerateCompareFlatAsciiStrings()
5417 __ bind(&result_not_equal); in GenerateCompareFlatAsciiStrings()
5419 __ j(greater, &result_greater, Label::kNear); in GenerateCompareFlatAsciiStrings()
5422 __ Move(rax, Smi::FromInt(LESS)); in GenerateCompareFlatAsciiStrings()
5423 __ ret(0); in GenerateCompareFlatAsciiStrings()
5426 __ bind(&result_greater); in GenerateCompareFlatAsciiStrings()
5427 __ Move(rax, Smi::FromInt(GREATER)); in GenerateCompareFlatAsciiStrings()
5428 __ ret(0); in GenerateCompareFlatAsciiStrings()
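The ordering variant compares only the first min(left, right) characters and lets the length difference break ties, which is exactly the LESS/EQUAL/GREATER smi returned in rax above. A sketch under the same scheme:

    #include <algorithm>
    #include <cstring>

    // Mirrors GenerateCompareFlatAsciiStrings; -1/0/+1 stand for the
    // LESS/EQUAL/GREATER smis the stub returns.
    int FlatAsciiCompare(const char* left, size_t left_len,
                         const char* right, size_t right_len) {
      size_t min_length = std::min(left_len, right_len);  // left_shorter logic
      if (min_length != 0) {
        int r = std::memcmp(left, right, min_length);     // character loop
        if (r != 0) return r < 0 ? -1 : 1;                // result_not_equal
      }
      if (left_len == right_len) return 0;                // length_difference zero
      return left_len < right_len ? -1 : 1;               // shorter sorts first
    }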
5443 __ SmiToInteger32(length, length); in GenerateAsciiCharsCompareLoop()
5444 __ lea(left, in GenerateAsciiCharsCompareLoop()
5446 __ lea(right, in GenerateAsciiCharsCompareLoop()
5448 __ neg(length); in GenerateAsciiCharsCompareLoop()
5453 __ bind(&loop); in GenerateAsciiCharsCompareLoop()
5454 __ movb(scratch, Operand(left, index, times_1, 0)); in GenerateAsciiCharsCompareLoop()
5455 __ cmpb(scratch, Operand(right, index, times_1, 0)); in GenerateAsciiCharsCompareLoop()
5456 __ j(not_equal, chars_not_equal, near_jump); in GenerateAsciiCharsCompareLoop()
5457 __ incq(index); in GenerateAsciiCharsCompareLoop()
5458 __ j(not_zero, &loop); in GenerateAsciiCharsCompareLoop()
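The loop above uses a classic assembler idiom: both base pointers are advanced one past the last character, the index starts at -length, and incq walks it up toward zero, so the sign test doubles as the loop exit. In C++ terms:

    // Negative-index loop idiom from GenerateAsciiCharsCompareLoop.
    bool AsciiCharsEqual(const char* left, const char* right, long length) {
      left += length;                                    // lea(left, ...end)
      right += length;                                   // lea(right, ...end)
      for (long index = -length; index != 0; index++) {  // neg(length); incq
        if (left[index] != right[index]) return false;   // movb/cmpb; j(not_equal)
      }
      return true;
    }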
5470 __ movq(rdx, Operand(rsp, 2 * kPointerSize)); // left in Generate()
5471 __ movq(rax, Operand(rsp, 1 * kPointerSize)); // right in Generate()
5475 __ cmpq(rdx, rax); in Generate()
5476 __ j(not_equal, &not_same, Label::kNear); in Generate()
5477 __ Move(rax, Smi::FromInt(EQUAL)); in Generate()
5479 __ IncrementCounter(counters->string_compare_native(), 1); in Generate()
5480 __ ret(2 * kPointerSize); in Generate()
5482 __ bind(&not_same); in Generate()
5485 __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime); in Generate()
5488 __ IncrementCounter(counters->string_compare_native(), 1); in Generate()
5490 __ pop(rcx); in Generate()
5491 __ addq(rsp, Immediate(2 * kPointerSize)); in Generate()
5492 __ push(rcx); in Generate()
5497 __ bind(&runtime); in Generate()
5498 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); in Generate()
5505 __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear); in GenerateSmis()
5509 __ subq(rax, rdx); in GenerateSmis()
5512 __ subq(rdx, rax); in GenerateSmis()
5513 __ j(no_overflow, &done, Label::kNear); in GenerateSmis()
5515 __ SmiNot(rdx, rdx); in GenerateSmis()
5516 __ bind(&done); in GenerateSmis()
5517 __ movq(rax, rdx); in GenerateSmis()
5519 __ ret(0); in GenerateSmis()
5521 __ bind(&miss); in GenerateSmis()
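GenerateSmis leans on the smi encoding directly: the 64-bit difference of two tagged smis already has the right sign unless the subtraction overflows, and in that case flipping the bits restores the sign. A C++ sketch using the GCC/Clang overflow builtin; note the real SmiNot complements only the payload bits so the result stays a valid smi:

    #include <cstdint>

    // Mirrors the subq / j(no_overflow) / SmiNot sequence in GenerateSmis.
    int64_t CompareTaggedSmis(int64_t left, int64_t right) {
      int64_t diff;
      if (__builtin_sub_overflow(left, right, &diff)) {
        diff = ~diff;  // overflow flipped the sign; complement flips it back
      }
      return diff;     // only the sign matters: <0 LESS, 0 EQUAL, >0 GREATER
    }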
5533 __ j(either_smi, &generic_stub, Label::kNear); in GenerateHeapNumbers()
5535 __ CmpObjectType(rax, HEAP_NUMBER_TYPE, rcx); in GenerateHeapNumbers()
5536 __ j(not_equal, &maybe_undefined1, Label::kNear); in GenerateHeapNumbers()
5537 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx); in GenerateHeapNumbers()
5538 __ j(not_equal, &maybe_undefined2, Label::kNear); in GenerateHeapNumbers()
5541 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); in GenerateHeapNumbers()
5542 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); in GenerateHeapNumbers()
5545 __ ucomisd(xmm0, xmm1); in GenerateHeapNumbers()
5548 __ j(parity_even, &unordered, Label::kNear); in GenerateHeapNumbers()
5552 __ movl(rax, Immediate(0)); in GenerateHeapNumbers()
5553 __ movl(rcx, Immediate(0)); in GenerateHeapNumbers()
5554 __ setcc(above, rax); // Add one to zero if carry clear and not equal. in GenerateHeapNumbers()
5555 __ sbbq(rax, rcx); // Subtract one if below (i.e., carry set). in GenerateHeapNumbers()
5556 __ ret(0); in GenerateHeapNumbers()
5558 __ bind(&unordered); in GenerateHeapNumbers()
5560 __ bind(&generic_stub); in GenerateHeapNumbers()
5561 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); in GenerateHeapNumbers()
5563 __ bind(&maybe_undefined1); in GenerateHeapNumbers()
5565 __ Cmp(rax, masm->isolate()->factory()->undefined_value()); in GenerateHeapNumbers()
5566 __ j(not_equal, &miss); in GenerateHeapNumbers()
5567 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx); in GenerateHeapNumbers()
5568 __ j(not_equal, &maybe_undefined2, Label::kNear); in GenerateHeapNumbers()
5569 __ jmp(&unordered); in GenerateHeapNumbers()
5572 __ bind(&maybe_undefined2); in GenerateHeapNumbers()
5574 __ Cmp(rdx, masm->isolate()->factory()->undefined_value()); in GenerateHeapNumbers()
5575 __ j(equal, &unordered); in GenerateHeapNumbers()
5578 __ bind(&miss); in GenerateHeapNumbers()
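The setcc/sbbq pair after ucomisd is a branchless way to fold the flags into an untagged -1/0/+1: the 'above' condition contributes +1 and the carry flag, which is set when below, contributes -1. NaN operands set the parity flag and escape through &unordered before this point. Equivalent C++:

    // Branchless -1/0/+1 matching GenerateHeapNumbers' flag arithmetic.
    // NaNs never reach here; they take the parity_even unordered exit.
    int CompareHeapNumbers(double left, double right) {
      int result = (left > right) ? 1 : 0;  // setcc(above, rax)
      result -= (left < right) ? 1 : 0;     // sbbq(rax, rcx): subtract carry
      return result;
    }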
5596 __ j(cond, &miss, Label::kNear); in GenerateSymbols()
5599 __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset)); in GenerateSymbols()
5600 __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset)); in GenerateSymbols()
5601 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateSymbols()
5602 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateSymbols()
5604 __ and_(tmp1, tmp2); in GenerateSymbols()
5605 __ testb(tmp1, Immediate(kIsSymbolMask)); in GenerateSymbols()
5606 __ j(zero, &miss, Label::kNear); in GenerateSymbols()
5610 __ cmpq(left, right); in GenerateSymbols()
5614 __ j(not_equal, &done, Label::kNear); in GenerateSymbols()
5617 __ Move(rax, Smi::FromInt(EQUAL)); in GenerateSymbols()
5618 __ bind(&done); in GenerateSymbols()
5619 __ ret(0); in GenerateSymbols()
5621 __ bind(&miss); in GenerateSymbols()
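Because symbols are interned, identity is equality: once both operands are known to carry the symbol bit, a single pointer compare settles the question with no character walk. In effect:

    // Mirrors GenerateSymbols: interned symbols are equal iff identical.
    inline bool SymbolsEqual(const void* left, const void* right) {
      return left == right;  // cmpq(left, right)
    }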
5641 __ j(cond, &miss); in GenerateStrings()
5645 __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset)); in GenerateStrings()
5646 __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset)); in GenerateStrings()
5647 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateStrings()
5648 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateStrings()
5649 __ movq(tmp3, tmp1); in GenerateStrings()
5651 __ or_(tmp3, tmp2); in GenerateStrings()
5652 __ testb(tmp3, Immediate(kIsNotStringMask)); in GenerateStrings()
5653 __ j(not_zero, &miss); in GenerateStrings()
5657 __ cmpq(left, right); in GenerateStrings()
5658 __ j(not_equal, &not_same, Label::kNear); in GenerateStrings()
5661 __ Move(rax, Smi::FromInt(EQUAL)); in GenerateStrings()
5662 __ ret(0); in GenerateStrings()
5665 __ bind(&not_same); in GenerateStrings()
5672 __ and_(tmp1, tmp2); in GenerateStrings()
5673 __ testb(tmp1, Immediate(kIsSymbolMask)); in GenerateStrings()
5674 __ j(zero, &do_compare, Label::kNear); in GenerateStrings()
5678 __ ret(0); in GenerateStrings()
5679 __ bind(&do_compare); in GenerateStrings()
5684 __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime); in GenerateStrings()
5696 __ bind(&runtime); in GenerateStrings()
5697 __ pop(tmp1); // Return address. in GenerateStrings()
5698 __ push(left); in GenerateStrings()
5699 __ push(right); in GenerateStrings()
5700 __ push(tmp1); in GenerateStrings()
5702 __ TailCallRuntime(Runtime::kStringEquals, 2, 1); in GenerateStrings()
5704 __ TailCallRuntime(Runtime::kStringCompare, 2, 1); in GenerateStrings()
5707 __ bind(&miss); in GenerateStrings()
5716 __ j(either_smi, &miss, Label::kNear); in GenerateObjects()
5718 __ CmpObjectType(rax, JS_OBJECT_TYPE, rcx); in GenerateObjects()
5719 __ j(not_equal, &miss, Label::kNear); in GenerateObjects()
5720 __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx); in GenerateObjects()
5721 __ j(not_equal, &miss, Label::kNear); in GenerateObjects()
5724 __ subq(rax, rdx); in GenerateObjects()
5725 __ ret(0); in GenerateObjects()
5727 __ bind(&miss); in GenerateObjects()
5735 __ j(either_smi, &miss, Label::kNear); in GenerateKnownObjects()
5737 __ movq(rcx, FieldOperand(rax, HeapObject::kMapOffset)); in GenerateKnownObjects()
5738 __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset)); in GenerateKnownObjects()
5739 __ Cmp(rcx, known_map_); in GenerateKnownObjects()
5740 __ j(not_equal, &miss, Label::kNear); in GenerateKnownObjects()
5741 __ Cmp(rbx, known_map_); in GenerateKnownObjects()
5742 __ j(not_equal, &miss, Label::kNear); in GenerateKnownObjects()
5744 __ subq(rax, rdx); in GenerateKnownObjects()
5745 __ ret(0); in GenerateKnownObjects()
5747 __ bind(&miss); in GenerateKnownObjects()
5759 __ push(rdx); in GenerateMiss()
5760 __ push(rax); in GenerateMiss()
5761 __ push(rdx); in GenerateMiss()
5762 __ push(rax); in GenerateMiss()
5763 __ Push(Smi::FromInt(op_)); in GenerateMiss()
5764 __ CallExternalReference(miss, 3); in GenerateMiss()
5767 __ lea(rdi, FieldOperand(rax, Code::kHeaderSize)); in GenerateMiss()
5768 __ pop(rax); in GenerateMiss()
5769 __ pop(rdx); in GenerateMiss()
5773 __ jmp(rdi); in GenerateMiss()
5793 __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset)); in GenerateNegativeLookup()
5794 __ decl(index); in GenerateNegativeLookup()
5795 __ and_(index, in GenerateNegativeLookup()
5800 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3. in GenerateNegativeLookup()
5805 __ movq(entity_name, Operand(properties, in GenerateNegativeLookup()
5809 __ Cmp(entity_name, masm->isolate()->factory()->undefined_value()); in GenerateNegativeLookup()
5810 __ j(equal, done); in GenerateNegativeLookup()
5813 __ Cmp(entity_name, Handle<String>(name)); in GenerateNegativeLookup()
5814 __ j(equal, miss); in GenerateNegativeLookup()
5818 __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex); in GenerateNegativeLookup()
5819 __ j(equal, &the_hole, Label::kNear); in GenerateNegativeLookup()
5822 __ movq(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); in GenerateNegativeLookup()
5823 __ testb(FieldOperand(entity_name, Map::kInstanceTypeOffset), in GenerateNegativeLookup()
5825 __ j(zero, miss); in GenerateNegativeLookup()
5827 __ bind(&the_hole); in GenerateNegativeLookup()
5834 __ Push(Handle<Object>(name)); in GenerateNegativeLookup()
5835 __ push(Immediate(name->Hash())); in GenerateNegativeLookup()
5836 __ CallStub(&stub); in GenerateNegativeLookup()
5837 __ testq(r0, r0); in GenerateNegativeLookup()
5838 __ j(not_zero, miss); in GenerateNegativeLookup()
5839 __ jmp(done); in GenerateNegativeLookup()
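The inline probes above are open addressing over a power-of-two table: capacity minus one serves as the mask, each entry spans three pointers (hence index *= 3), and every attempt adds a growing probe offset before the code gives up and calls the full lookup stub. A sketch of the index arithmetic; the triangular offset is an assumption about StringDictionary::GetProbeOffset:

    #include <cstdint>

    // Index arithmetic mirrored from GenerateNegativeLookup.
    uint32_t ProbeEntryIndex(uint32_t hash, uint32_t capacity, uint32_t attempt) {
      uint32_t mask = capacity - 1;                  // decl(index) after load
      uint32_t probe = attempt * (attempt + 1) / 2;  // assumed GetProbeOffset(i)
      return ((hash + probe) & mask) * 3;            // lea(index, index*3)
    }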
5860 if (FLAG_debug_code) __ AbortIfNotString(name); in GeneratePositiveLookup()
5862 __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset)); in GeneratePositiveLookup()
5863 __ decl(r0); in GeneratePositiveLookup()
5867 __ movl(r1, FieldOperand(name, String::kHashFieldOffset)); in GeneratePositiveLookup()
5868 __ shrl(r1, Immediate(String::kHashShift)); in GeneratePositiveLookup()
5870 __ addl(r1, Immediate(StringDictionary::GetProbeOffset(i))); in GeneratePositiveLookup()
5872 __ and_(r1, r0); in GeneratePositiveLookup()
5876 __ lea(r1, Operand(r1, r1, times_2, 0)); // r1 = r1 * 3. in GeneratePositiveLookup()
5879 __ cmpq(name, Operand(elements, r1, times_pointer_size, in GeneratePositiveLookup()
5881 __ j(equal, done); in GeneratePositiveLookup()
5888 __ push(name); in GeneratePositiveLookup()
5889 __ movl(r0, FieldOperand(name, String::kHashFieldOffset)); in GeneratePositiveLookup()
5890 __ shrl(r0, Immediate(String::kHashShift)); in GeneratePositiveLookup()
5891 __ push(r0); in GeneratePositiveLookup()
5892 __ CallStub(&stub); in GeneratePositiveLookup()
5894 __ testq(r0, r0); in GeneratePositiveLookup()
5895 __ j(zero, miss); in GeneratePositiveLookup()
5896 __ jmp(done); in GeneratePositiveLookup()
5919 __ SmiToInteger32(scratch, FieldOperand(dictionary_, kCapacityOffset)); in Generate()
5920 __ decl(scratch); in Generate()
5921 __ push(scratch); in Generate()
5930 __ movq(scratch, Operand(rsp, 2 * kPointerSize)); in Generate()
5932 __ addl(scratch, Immediate(StringDictionary::GetProbeOffset(i))); in Generate()
5934 __ and_(scratch, Operand(rsp, 0)); in Generate()
5938 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3. in Generate()
5941 __ movq(scratch, Operand(dictionary_, in Generate()
5946 __ Cmp(scratch, masm->isolate()->factory()->undefined_value()); in Generate()
5947 __ j(equal, &not_in_dictionary); in Generate()
5950 __ cmpq(scratch, Operand(rsp, 3 * kPointerSize)); in Generate()
5951 __ j(equal, &in_dictionary); in Generate()
5959 __ movq(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); in Generate()
5960 __ testb(FieldOperand(scratch, Map::kInstanceTypeOffset), in Generate()
5962 __ j(zero, &maybe_in_dictionary); in Generate()
5966 __ bind(&maybe_in_dictionary); in Generate()
5971 __ movq(scratch, Immediate(0)); in Generate()
5972 __ Drop(1); in Generate()
5973 __ ret(2 * kPointerSize); in Generate()
5976 __ bind(&in_dictionary); in Generate()
5977 __ movq(scratch, Immediate(1)); in Generate()
5978 __ Drop(1); in Generate()
5979 __ ret(2 * kPointerSize); in Generate()
5981 __ bind(&not_in_dictionary); in Generate()
5982 __ movq(scratch, Immediate(0)); in Generate()
5983 __ Drop(1); in Generate()
5984 __ ret(2 * kPointerSize); in Generate()
6085 __ jmp(&skip_to_incremental_noncompacting, Label::kNear); in Generate()
6086 __ jmp(&skip_to_incremental_compacting, Label::kFar); in Generate()
6089 __ RememberedSetHelper(object_, in Generate()
6095 __ ret(0); in Generate()
6098 __ bind(&skip_to_incremental_noncompacting); in Generate()
6101 __ bind(&skip_to_incremental_compacting); in Generate()
6117 __ movq(regs_.scratch0(), Operand(regs_.address(), 0)); in GenerateIncremental()
6118 __ JumpIfNotInNewSpace(regs_.scratch0(), in GenerateIncremental()
6122 __ CheckPageFlag(regs_.object(), in GenerateIncremental()
6134 __ RememberedSetHelper(object_, in GenerateIncremental()
6140 __ bind(&dont_need_remembered_set); in GenerateIncremental()
6147 __ ret(0); in GenerateIncremental()
6166 __ Move(address, regs_.address()); in InformIncrementalMarker()
6167 __ Move(arg1, regs_.object()); in InformIncrementalMarker()
6170 __ Move(arg2, address); in InformIncrementalMarker()
6173 __ movq(arg2, Operand(address, 0)); in InformIncrementalMarker()
6175 __ LoadAddress(arg3, ExternalReference::isolate_address()); in InformIncrementalMarker()
6179 __ PrepareCallCFunction(argument_count); in InformIncrementalMarker()
6181 __ CallCFunction( in InformIncrementalMarker()
6187 __ CallCFunction( in InformIncrementalMarker()
6206 __ JumpIfBlack(regs_.object(), in CheckNeedsToInformIncrementalMarker()
6214 __ RememberedSetHelper(object_, in CheckNeedsToInformIncrementalMarker()
6220 __ ret(0); in CheckNeedsToInformIncrementalMarker()
6223 __ bind(&on_black); in CheckNeedsToInformIncrementalMarker()
6226 __ movq(regs_.scratch0(), Operand(regs_.address(), 0)); in CheckNeedsToInformIncrementalMarker()
6231 __ CheckPageFlag(regs_.scratch0(), // Contains value. in CheckNeedsToInformIncrementalMarker()
6238 __ CheckPageFlag(regs_.object(), in CheckNeedsToInformIncrementalMarker()
6244 __ bind(&ensure_not_white); in CheckNeedsToInformIncrementalMarker()
6249 __ push(regs_.object()); in CheckNeedsToInformIncrementalMarker()
6250 __ EnsureNotWhite(regs_.scratch0(), // The value. in CheckNeedsToInformIncrementalMarker()
6255 __ pop(regs_.object()); in CheckNeedsToInformIncrementalMarker()
6259 __ RememberedSetHelper(object_, in CheckNeedsToInformIncrementalMarker()
6265 __ ret(0); in CheckNeedsToInformIncrementalMarker()
6268 __ bind(&need_incremental_pop_object); in CheckNeedsToInformIncrementalMarker()
6269 __ pop(regs_.object()); in CheckNeedsToInformIncrementalMarker()
6271 __ bind(&need_incremental); in CheckNeedsToInformIncrementalMarker()
6293 __ CheckFastElements(rdi, &double_elements); in Generate()
6296 __ JumpIfSmi(rax, &smi_element); in Generate()
6297 __ CheckFastSmiOnlyElements(rdi, &fast_elements); in Generate()
6302 __ bind(&slow_elements); in Generate()
6303 __ pop(rdi); // Pop return address and remember to put back later for tail call. in Generate()
6305 __ push(rbx); in Generate()
6306 __ push(rcx); in Generate()
6307 __ push(rax); in Generate()
6308 __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); in Generate()
6309 __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset)); in Generate()
6310 __ push(rdx); in Generate()
6311 __ push(rdi); // Push return address back so that tail call returns to right place. in Generate()
6313 __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1); in Generate()
6316 __ bind(&fast_elements); in Generate()
6317 __ SmiToInteger32(kScratchRegister, rcx); in Generate()
6318 __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset)); in Generate()
6319 __ lea(rcx, FieldOperand(rbx, kScratchRegister, times_pointer_size, in Generate()
6321 __ movq(Operand(rcx, 0), rax); in Generate()
6323 __ RecordWrite(rbx, rcx, rax, in Generate()
6327 __ ret(0); in Generate()
6331 __ bind(&smi_element); in Generate()
6332 __ SmiToInteger32(kScratchRegister, rcx); in Generate()
6333 __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset)); in Generate()
6334 __ movq(FieldOperand(rbx, kScratchRegister, times_pointer_size, in Generate()
6336 __ ret(0); in Generate()
6339 __ bind(&double_elements); in Generate()
6341 __ movq(r9, FieldOperand(rbx, JSObject::kElementsOffset)); in Generate()
6342 __ SmiToInteger32(r11, rcx); in Generate()
6343 __ StoreNumberToDoubleElements(rax, in Generate()
6348 __ ret(0); in Generate()
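The dispatch at the top of this stub sends each store down the cheapest safe path: double arrays take an unboxed numeric store, smi values never need a write barrier, fast object arrays store and then RecordWrite, and a non-smi value landing in a smi-only array implies an elements-kind transition, which is left to the runtime. Schematically, with illustrative names rather than V8's:

    // Dispatch mirrored from StoreArrayLiteralElementStub::Generate.
    enum class ElementsKind { kFastSmiOnly, kFast, kFastDouble };

    const char* StorePath(ElementsKind kind, bool value_is_smi) {
      if (kind == ElementsKind::kFastDouble)  // CheckFastElements -> double_elements
        return "StoreNumberToDoubleElements";
      if (value_is_smi)                       // JumpIfSmi -> smi_element
        return "plain store, no write barrier";
      if (kind == ElementsKind::kFast)        // fast_elements
        return "store + RecordWrite";
      return "Runtime::kStoreArrayLiteralElement";  // slow_elements
    }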
6351 #undef __