Lines Matching full:__

29 #define __ ACCESS_MASM(masm)  macro
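
The `__` shorthand defined above is V8's standard macro-assembler convention: ACCESS_MASM(masm) expands to masm->, so every `__ op(args)` entry in this listing emits one instruction through the current MacroAssembler. A minimal self-contained sketch of the expansion (the mock MacroAssembler below is illustrative, not V8's real class):

    #include <cstdio>

    // Mock assembler, just to show what the macro expansion does.
    struct MacroAssembler {
      void popq(const char* reg) { std::printf("popq %s\n", reg); }
      void pushq(const char* reg) { std::printf("pushq %s\n", reg); }
    };

    #define ACCESS_MASM(masm) masm->
    #define __ ACCESS_MASM(masm)

    void Generate(MacroAssembler* masm) {
      __ popq("rcx");   // expands to: masm->popq("rcx");
      __ pushq("rdi");  // expands to: masm->pushq("rdi");
    }

    #undef __

The real file likewise #undefs `__` once the stubs are done, so the shorthand does not leak past the listing's scope.
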
32 __ popq(rcx); in Generate()
33 __ movq(MemOperand(rsp, rax, times_8, 0), rdi); in Generate()
34 __ pushq(rdi); in Generate()
35 __ pushq(rbx); in Generate()
36 __ pushq(rcx); in Generate()
37 __ addq(rax, Immediate(3)); in Generate()
38 __ TailCallRuntime(Runtime::kNewArray); in Generate()
55 __ Push(descriptor.GetRegisterParameter(i)); in GenerateLightweightMiss()
57 __ CallExternalReference(miss, param_count); in GenerateLightweightMiss()
60 __ Ret(); in GenerateLightweightMiss()
65 __ PushCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs); in Generate()
67 __ PrepareCallCFunction(argument_count); in Generate()
68 __ LoadAddress(arg_reg_1, in Generate()
72 __ CallCFunction( in Generate()
75 __ PopCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs); in Generate()
76 __ ret(0); in Generate()
125 __ pushq(scratch1); in Generate()
126 __ pushq(save_reg); in Generate()
129 __ movl(scratch1, mantissa_operand); in Generate()
130 __ Movsd(kScratchDoubleReg, mantissa_operand); in Generate()
131 __ movl(rcx, exponent_operand); in Generate()
132 if (stash_exponent_copy) __ pushq(rcx); in Generate()
134 __ andl(rcx, Immediate(HeapNumber::kExponentMask)); in Generate()
135 __ shrl(rcx, Immediate(HeapNumber::kExponentShift)); in Generate()
136 __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias)); in Generate()
137 __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits)); in Generate()
138 __ j(below, &process_64_bits); in Generate()
142 __ subl(rcx, Immediate(delta)); in Generate()
143 __ xorl(result_reg, result_reg); in Generate()
144 __ cmpl(rcx, Immediate(31)); in Generate()
145 __ j(above, &done); in Generate()
146 __ shll_cl(scratch1); in Generate()
147 __ jmp(&check_negative); in Generate()
149 __ bind(&process_64_bits); in Generate()
150 __ Cvttsd2siq(result_reg, kScratchDoubleReg); in Generate()
151 __ jmp(&done, Label::kNear); in Generate()
154 __ bind(&check_negative); in Generate()
155 __ movl(result_reg, scratch1); in Generate()
156 __ negl(result_reg); in Generate()
158 __ cmpl(MemOperand(rsp, 0), Immediate(0)); in Generate()
160 __ cmpl(exponent_operand, Immediate(0)); in Generate()
162 __ cmovl(greater, result_reg, scratch1); in Generate()
165 __ bind(&done); in Generate()
167 __ addp(rsp, Immediate(kDoubleSize)); in Generate()
171 __ movl(final_result_reg, result_reg); in Generate()
173 __ popq(save_reg); in Generate()
174 __ popq(scratch1); in Generate()
175 __ ret(0); in Generate()
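
Lines 125-175 are the double-to-int32 conversion stub. When the unbiased exponent is below HeapNumber::kMantissaBits, a single Cvttsd2siq suffices (the j(below, &process_64_bits) at line 138); for larger exponents, where the value can exceed the int64 range, the stub decodes the IEEE-754 fields itself with the andl/shrl/leal sequence at lines 134-136. A hedged C++ sketch of that bit-level strategy with ToInt32-style modulo-2^32 truncation (an illustration of the idea, not the stub's exact control flow):

    #include <cstdint>
    #include <cstring>

    // Truncate a double to its low 32 integer bits by decoding the
    // IEEE-754 fields directly.
    int32_t DoubleToInt32(double value) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof bits);
      int exponent = static_cast<int>((bits >> 52) & 0x7FF) - 1023;
      if (exponent < 0) return 0;   // |value| < 1 truncates to 0.
      if (exponent > 83) return 0;  // Low 32 bits all shifted out (also NaN/Inf).
      uint64_t mantissa = (bits & ((1ULL << 52) - 1)) | (1ULL << 52);
      int shift = exponent - 52;    // cf. leal(result_reg, [rcx - kExponentBias])
      uint64_t magnitude = shift >= 0 ? mantissa << shift : mantissa >> -shift;
      uint32_t low = static_cast<uint32_t>(magnitude);  // keep low 32 bits
      if (bits >> 63) low = 0u - low;                   // apply sign, mod 2^32
      return static_cast<int32_t>(low);
    }
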
183 __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex); in LoadSSE2UnknownOperands()
184 __ JumpIfSmi(rdx, &load_smi_rdx); in LoadSSE2UnknownOperands()
185 __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rcx); in LoadSSE2UnknownOperands()
186 __ j(not_equal, not_numbers); // Argument in rdx is not a number. in LoadSSE2UnknownOperands()
187 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); in LoadSSE2UnknownOperands()
189 __ JumpIfSmi(rax, &load_smi_rax); in LoadSSE2UnknownOperands()
191 __ bind(&load_nonsmi_rax); in LoadSSE2UnknownOperands()
192 __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rcx); in LoadSSE2UnknownOperands()
193 __ j(not_equal, not_numbers); in LoadSSE2UnknownOperands()
194 __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); in LoadSSE2UnknownOperands()
195 __ jmp(&done); in LoadSSE2UnknownOperands()
197 __ bind(&load_smi_rdx); in LoadSSE2UnknownOperands()
198 __ SmiToInteger32(kScratchRegister, rdx); in LoadSSE2UnknownOperands()
199 __ Cvtlsi2sd(xmm0, kScratchRegister); in LoadSSE2UnknownOperands()
200 __ JumpIfNotSmi(rax, &load_nonsmi_rax); in LoadSSE2UnknownOperands()
202 __ bind(&load_smi_rax); in LoadSSE2UnknownOperands()
203 __ SmiToInteger32(kScratchRegister, rax); in LoadSSE2UnknownOperands()
204 __ Cvtlsi2sd(xmm1, kScratchRegister); in LoadSSE2UnknownOperands()
205 __ bind(&done); in LoadSSE2UnknownOperands()
221 __ movp(scratch, Immediate(1)); in Generate()
222 __ Cvtlsi2sd(double_result, scratch); in Generate()
225 __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear); in Generate()
226 __ SmiToInteger32(exponent, exponent); in Generate()
227 __ jmp(&int_exponent); in Generate()
229 __ bind(&exponent_not_smi); in Generate()
230 __ Movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset)); in Generate()
236 __ DoubleToI(exponent, double_exponent, double_scratch, in Generate()
240 __ jmp(&int_exponent); in Generate()
242 __ bind(&try_arithmetic_simplification); in Generate()
243 __ Cvttsd2si(exponent, double_exponent); in Generate()
245 __ cmpl(exponent, Immediate(0x1)); in Generate()
246 __ j(overflow, &call_runtime); in Generate()
250 __ bind(&fast_power); in Generate()
251 __ fnclex(); // Clear flags to catch exceptions later. in Generate()
253 __ subp(rsp, Immediate(kDoubleSize)); in Generate()
254 __ Movsd(Operand(rsp, 0), double_exponent); in Generate()
255 __ fld_d(Operand(rsp, 0)); // E in Generate()
256 __ Movsd(Operand(rsp, 0), double_base); in Generate()
257 __ fld_d(Operand(rsp, 0)); // B, E in Generate()
262 __ fyl2x(); // X in Generate()
263 __ fld(0); // X, X in Generate()
264 __ frndint(); // rnd(X), X in Generate()
265 __ fsub(1); // rnd(X), X-rnd(X) in Generate()
266 __ fxch(1); // X - rnd(X), rnd(X) in Generate()
268 __ f2xm1(); // 2^(X-rnd(X)) - 1, rnd(X) in Generate()
269 __ fld1(); // 1, 2^(X-rnd(X)) - 1, rnd(X) in Generate()
270 __ faddp(1); // 2^(X-rnd(X)), rnd(X) in Generate()
272 __ fscale(); // 2^X, rnd(X) in Generate()
273 __ fstp(1); in Generate()
275 __ fnstsw_ax(); in Generate()
276 __ testb(rax, Immediate(0x5F)); // Check for all but precision exception. in Generate()
277 __ j(not_zero, &fast_power_failed, Label::kNear); in Generate()
278 __ fstp_d(Operand(rsp, 0)); in Generate()
279 __ Movsd(double_result, Operand(rsp, 0)); in Generate()
280 __ addp(rsp, Immediate(kDoubleSize)); in Generate()
281 __ jmp(&done); in Generate()
283 __ bind(&fast_power_failed); in Generate()
284 __ fninit(); in Generate()
285 __ addp(rsp, Immediate(kDoubleSize)); in Generate()
286 __ jmp(&call_runtime); in Generate()
290 __ bind(&int_exponent); in Generate()
293 __ movp(scratch, exponent); // Back up exponent. in Generate()
294 __ Movsd(double_scratch, double_base); // Back up base. in Generate()
295 __ Movsd(double_scratch2, double_result); // Load double_exponent with 1. in Generate()
299 __ testl(scratch, scratch); in Generate()
300 __ j(positive, &no_neg, Label::kNear); in Generate()
301 __ negl(scratch); in Generate()
302 __ bind(&no_neg); in Generate()
304 __ j(zero, &while_false, Label::kNear); in Generate()
305 __ shrl(scratch, Immediate(1)); in Generate()
308 __ j(above, &while_true, Label::kNear); in Generate()
309 __ Movsd(double_result, double_scratch); in Generate()
310 __ j(zero, &while_false, Label::kNear); in Generate()
312 __ bind(&while_true); in Generate()
313 __ shrl(scratch, Immediate(1)); in Generate()
314 __ Mulsd(double_scratch, double_scratch); in Generate()
315 __ j(above, &while_true, Label::kNear); in Generate()
316 __ Mulsd(double_result, double_scratch); in Generate()
317 __ j(not_zero, &while_true); in Generate()
319 __ bind(&while_false); in Generate()
321 __ testl(exponent, exponent); in Generate()
322 __ j(greater, &done); in Generate()
323 __ Divsd(double_scratch2, double_result); in Generate()
324 __ Movsd(double_result, double_scratch2); in Generate()
327 __ Xorpd(double_scratch2, double_scratch2); in Generate()
328 __ Ucomisd(double_scratch2, double_result); in Generate()
332 __ j(not_equal, &done); in Generate()
333 __ Cvtlsi2sd(double_exponent, exponent); in Generate()
336 __ bind(&call_runtime); in Generate()
338 __ Movsd(xmm0, double_base); in Generate()
342 __ PrepareCallCFunction(2); in Generate()
343 __ CallCFunction(ExternalReference::power_double_double_function(isolate()), in Generate()
347 __ Movsd(double_result, xmm0); in Generate()
349 __ bind(&done); in Generate()
350 __ ret(0); in Generate()
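
Lines 221-350 are the Math.pow stub. The &fast_power block computes B^E on the x87 stack as 2^(E*log2 B) via fyl2x/f2xm1/fscale, bailing to the C runtime when fnstsw reports anything other than a precision exception (the testb against 0x5F at line 276). The &int_exponent block (lines 290-319) is binary exponentiation on the absolute exponent, with a final reciprocal for negative exponents. A sketch of that integer path:

    // Square-and-multiply, mirroring the shrl/Mulsd loop and the Divsd
    // that follows &while_false for negative exponents.
    double PowIntExponent(double base, int exponent) {
      double result = 1.0;
      double running = base;
      unsigned e = exponent < 0 ? 0u - static_cast<unsigned>(exponent)
                                : static_cast<unsigned>(exponent);
      while (e != 0) {
        if (e & 1) result *= running;  // Mulsd(double_result, double_scratch)
        running *= running;            // Mulsd(double_scratch, double_scratch)
        e >>= 1;                       // shrl(scratch, Immediate(1))
      }
      if (exponent < 0) result = 1.0 / result;  // Divsd after &while_false
      return result;
    }
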
358 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
384 __ Load(kScratchRegister, address_of_regexp_stack_memory_size); in Generate()
385 __ testp(kScratchRegister, kScratchRegister); in Generate()
386 __ j(zero, &runtime); in Generate()
389 __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX)); in Generate()
390 __ JumpIfSmi(rax, &runtime); in Generate()
391 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister); in Generate()
392 __ j(not_equal, &runtime); in Generate()
395 __ movp(rax, FieldOperand(rax, JSRegExp::kDataOffset)); in Generate()
398 __ Check(NegateCondition(is_smi), in Generate()
400 __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister); in Generate()
401 __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected); in Generate()
406 __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset)); in Generate()
407 __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP)); in Generate()
408 __ j(not_equal, &runtime); in Generate()
412 __ SmiToInteger32(rdx, in Generate()
417 __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1)); in Generate()
418 __ j(above, &runtime); in Generate()
421 __ Set(r14, 0); in Generate()
422 __ movp(rdi, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX)); in Generate()
423 __ JumpIfSmi(rdi, &runtime); in Generate()
424 __ movp(r15, rdi); // Make a copy of the original subject string. in Generate()
450 __ bind(&check_underlying); in Generate()
451 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); in Generate()
452 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); in Generate()
455 __ andb(rbx, Immediate(kIsNotStringMask | in Generate()
460 __ j(zero, &seq_two_byte_string); // Go to (9). in Generate()
464 __ andb(rbx, Immediate(kIsNotStringMask | in Generate()
467 __ j(zero, &seq_one_byte_string, Label::kNear); // Go to (5). in Generate()
477 __ cmpp(rbx, Immediate(kExternalStringTag)); in Generate()
478 __ j(greater_equal, &not_seq_nor_cons); // Go to (6). in Generate()
482 __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset), in Generate()
484 __ j(not_equal, &runtime); in Generate()
485 __ movp(rdi, FieldOperand(rdi, ConsString::kFirstOffset)); in Generate()
486 __ jmp(&check_underlying); in Generate()
489 __ bind(&seq_one_byte_string); in Generate()
491 __ movp(r11, FieldOperand(rax, JSRegExp::kDataOneByteCodeOffset)); in Generate()
492 __ Set(rcx, 1); // Type is one byte. in Generate()
495 __ bind(&check_code); in Generate()
500 __ JumpIfSmi(r11, &runtime); in Generate()
511 __ movp(rbx, args.GetArgumentOperand(PREVIOUS_INDEX_ARGUMENT_INDEX)); in Generate()
512 __ JumpIfNotSmi(rbx, &runtime); in Generate()
513 __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset)); in Generate()
514 __ j(above_equal, &runtime); in Generate()
515 __ SmiToInteger64(rbx, rbx); in Generate()
523 __ IncrementCounter(counters->regexp_entry_native(), 1); in Generate()
529 __ EnterApiExitFrame(argument_slots_on_stack); in Generate()
532 __ LoadAddress(kScratchRegister, in Generate()
534 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize), in Generate()
538 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize), in Generate()
542 __ Move(kScratchRegister, address_of_regexp_stack_memory_address); in Generate()
543 __ movp(r9, Operand(kScratchRegister, 0)); in Generate()
544 __ Move(kScratchRegister, address_of_regexp_stack_memory_size); in Generate()
545 __ addp(r9, Operand(kScratchRegister, 0)); in Generate()
546 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9); in Generate()
552 __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize), in Generate()
555 __ Set(r9, 0); in Generate()
559 __ LoadAddress( in Generate()
563 __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8); in Generate()
574 __ movp(arg_reg_2, rbx); in Generate()
581 __ addp(rbx, r14); in Generate()
582 __ SmiToInteger32(arg_reg_3, FieldOperand(r15, String::kLengthOffset)); in Generate()
583 __ addp(r14, arg_reg_3); // Using arg3 as scratch. in Generate()
588 __ testb(rcx, rcx); // Last use of rcx as encoding of subject string. in Generate()
589 __ j(zero, &setup_two_byte, Label::kNear); in Generate()
590 __ leap(arg_reg_4, in Generate()
592 __ leap(arg_reg_3, in Generate()
594 __ jmp(&setup_rest, Label::kNear); in Generate()
595 __ bind(&setup_two_byte); in Generate()
596 __ leap(arg_reg_4, in Generate()
598 __ leap(arg_reg_3, in Generate()
600 __ bind(&setup_rest); in Generate()
607 __ movp(arg_reg_1, r15); in Generate()
610 __ addp(r11, Immediate(Code::kHeaderSize - kHeapObjectTag)); in Generate()
611 __ call(r11); in Generate()
613 __ LeaveApiExitFrame(true); in Generate()
618 __ cmpl(rax, Immediate(1)); in Generate()
621 __ j(equal, &success, Label::kNear); in Generate()
622 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION)); in Generate()
623 __ j(equal, &exception); in Generate()
624 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE)); in Generate()
627 __ j(not_equal, &runtime); in Generate()
630 __ LoadRoot(rax, Heap::kNullValueRootIndex); in Generate()
631 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize); in Generate()
634 __ bind(&success); in Generate()
635 __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX)); in Generate()
636 __ movp(rcx, FieldOperand(rax, JSRegExp::kDataOffset)); in Generate()
637 __ SmiToInteger32(rax, in Generate()
640 __ leal(rdx, Operand(rax, rax, times_1, 2)); in Generate()
644 __ movp(rbx, args.GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX)); in Generate()
645 __ JumpIfSmi(rbx, &runtime); in Generate()
647 __ movp(rax, FieldOperand(rbx, HeapObject::kMapOffset)); in Generate()
648 __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex); in Generate()
649 __ j(not_equal, &runtime); in Generate()
653 __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset)); in Generate()
654 __ subl(rax, Immediate(RegExpMatchInfo::kLastMatchOverhead)); in Generate()
655 __ cmpl(rdx, rax); in Generate()
656 __ j(greater, &runtime); in Generate()
661 __ Integer32ToSmi(kScratchRegister, rdx); in Generate()
662 __ movp(FieldOperand(rbx, RegExpMatchInfo::kNumberOfCapturesOffset), in Generate()
665 __ movp(rax, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX)); in Generate()
666 __ movp(FieldOperand(rbx, RegExpMatchInfo::kLastSubjectOffset), rax); in Generate()
667 __ movp(rcx, rax); in Generate()
668 __ RecordWriteField(rbx, RegExpMatchInfo::kLastSubjectOffset, rax, rdi, in Generate()
670 __ movp(rax, rcx); in Generate()
671 __ movp(FieldOperand(rbx, RegExpMatchInfo::kLastInputOffset), rax); in Generate()
672 __ RecordWriteField(rbx, RegExpMatchInfo::kLastInputOffset, rax, rdi, in Generate()
676 __ LoadAddress( in Generate()
685 __ bind(&next_capture); in Generate()
686 __ subp(rdx, Immediate(1)); in Generate()
687 __ j(negative, &done, Label::kNear); in Generate()
689 __ movl(rdi, Operand(rcx, rdx, times_int_size, 0)); in Generate()
690 __ Integer32ToSmi(rdi, rdi); in Generate()
692 __ movp(FieldOperand(rbx, rdx, times_pointer_size, in Generate()
695 __ jmp(&next_capture); in Generate()
696 __ bind(&done); in Generate()
699 __ movp(rax, rbx); in Generate()
700 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize); in Generate()
702 __ bind(&exception); in Generate()
711 __ movp(rax, pending_exception_operand); in Generate()
712 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex); in Generate()
713 __ cmpp(rax, rdx); in Generate()
714 __ j(equal, &runtime); in Generate()
717 __ TailCallRuntime(Runtime::kRegExpExecReThrow); in Generate()
720 __ bind(&runtime); in Generate()
721 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
725 __ bind(&not_seq_nor_cons); in Generate()
727 __ j(greater, &not_long_external, Label::kNear); // Go to (10). in Generate()
730 __ bind(&external_string); in Generate()
731 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); in Generate()
732 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); in Generate()
736 __ testb(rbx, Immediate(kIsIndirectStringMask)); in Generate()
737 __ Assert(zero, kExternalStringExpectedButNotFound); in Generate()
739 __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset)); in Generate()
742 __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); in Generate()
745 __ testb(rbx, Immediate(kStringEncodingMask)); in Generate()
746 __ j(not_zero, &seq_one_byte_string); // Go to (5). in Generate()
751 __ bind(&seq_two_byte_string); in Generate()
752 __ movp(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset)); in Generate()
753 __ Set(rcx, 0); // Type is two byte. in Generate()
754 __ jmp(&check_code); // Go to (E). in Generate()
757 __ bind(&not_long_external); in Generate()
760 __ testb(rbx, Immediate(kIsNotStringMask | kShortExternalStringMask)); in Generate()
761 __ j(not_zero, &runtime); in Generate()
765 __ cmpl(rbx, Immediate(kThinStringTag)); in Generate()
766 __ j(equal, &thin_string, Label::kNear); in Generate()
768 __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset)); in Generate()
769 __ movp(rdi, FieldOperand(rdi, SlicedString::kParentOffset)); in Generate()
770 __ jmp(&check_underlying); in Generate()
772 __ bind(&thin_string); in Generate()
773 __ movp(rdi, FieldOperand(rdi, ThinString::kActualOffset)); in Generate()
774 __ jmp(&check_underlying); in Generate()
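
Before handing the subject to the compiled regexp code, lines 450-774 flatten it: a cons string whose second half is empty is replaced by its first part, a sliced string contributes its offset (accumulated in r14) and continues with its parent, and a thin string is replaced by the string it points at, looping back to &check_underlying until a sequential or external string remains. A hypothetical mock of that unwrapping loop (Kind and Str are stand-ins, not V8's heap classes):

    #include <cstddef>

    // Stand-ins for V8's string representations.
    enum class Kind { kSequential, kExternal, kCons, kSliced, kThin };

    struct Str {
      Kind kind;
      Str* inner;     // cons first part / slice parent / thin target
      size_t offset;  // slice offset into the parent (0 otherwise)
    };

    // Peel wrapper representations until the string is flat, accumulating
    // slice offsets the way the stub accumulates them in r14.
    Str* Unwrap(Str* subject, size_t* offset) {
      *offset = 0;
      while (subject->kind != Kind::kSequential &&
             subject->kind != Kind::kExternal) {
        if (subject->kind == Kind::kSliced) *offset += subject->offset;
        subject = subject->inner;
      }
      return subject;
    }
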
791 __ JumpIfNotSmi(input, fail); in CheckInputType()
793 __ JumpIfSmi(input, &ok); in CheckInputType()
794 __ CompareMap(input, masm->isolate()->factory()->heap_number_map()); in CheckInputType()
795 __ j(not_equal, fail); in CheckInputType()
799 __ bind(&ok); in CheckInputType()
807 __ JumpIfSmi(object, label); in BranchIfNotInternalizedString()
808 __ movp(scratch, FieldOperand(object, HeapObject::kMapOffset)); in BranchIfNotInternalizedString()
809 __ movzxbp(scratch, in BranchIfNotInternalizedString()
812 __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); in BranchIfNotInternalizedString()
813 __ j(not_zero, label); in BranchIfNotInternalizedString()
828 __ JumpIfNotBothSmi(rax, rdx, &non_smi); in GenerateGeneric()
829 __ subp(rdx, rax); in GenerateGeneric()
830 __ j(no_overflow, &smi_done); in GenerateGeneric()
831 __ notp(rdx); // Correct sign in case of overflow. rdx cannot be 0 here. in GenerateGeneric()
832 __ bind(&smi_done); in GenerateGeneric()
833 __ movp(rax, rdx); in GenerateGeneric()
834 __ ret(0); in GenerateGeneric()
835 __ bind(&non_smi); in GenerateGeneric()
845 __ cmpp(rax, rdx); in GenerateGeneric()
846 __ j(not_equal, &not_identical, Label::kNear); in GenerateGeneric()
851 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex); in GenerateGeneric()
853 __ j(not_equal, &check_for_nan, Label::kNear); in GenerateGeneric()
854 __ Set(rax, NegativeComparisonResult(cc)); in GenerateGeneric()
855 __ ret(0); in GenerateGeneric()
856 __ bind(&check_for_nan); in GenerateGeneric()
863 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset), in GenerateGeneric()
865 __ j(equal, &heap_number, Label::kNear); in GenerateGeneric()
867 __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset)); in GenerateGeneric()
868 __ movzxbl(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset)); in GenerateGeneric()
870 __ cmpb(rcx, Immediate(static_cast<uint8_t>(FIRST_JS_RECEIVER_TYPE))); in GenerateGeneric()
871 __ j(above_equal, &runtime_call, Label::kFar); in GenerateGeneric()
873 __ cmpb(rcx, Immediate(static_cast<uint8_t>(SYMBOL_TYPE))); in GenerateGeneric()
874 __ j(equal, &runtime_call, Label::kFar); in GenerateGeneric()
876 __ Set(rax, EQUAL); in GenerateGeneric()
877 __ ret(0); in GenerateGeneric()
879 __ bind(&heap_number); in GenerateGeneric()
884 __ Set(rax, EQUAL); in GenerateGeneric()
885 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); in GenerateGeneric()
886 __ Ucomisd(xmm0, xmm0); in GenerateGeneric()
887 __ setcc(parity_even, rax); in GenerateGeneric()
890 __ negp(rax); in GenerateGeneric()
892 __ ret(0); in GenerateGeneric()
894 __ bind(&not_identical); in GenerateGeneric()
909 __ SelectNonSmi(rbx, rax, rdx, &not_smis); in GenerateGeneric()
912 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset), in GenerateGeneric()
915 __ j(equal, &slow); in GenerateGeneric()
917 __ movp(rax, rbx); in GenerateGeneric()
918 __ ret(0); in GenerateGeneric()
920 __ bind(&not_smis); in GenerateGeneric()
930 __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx); in GenerateGeneric()
931 __ j(below, &first_non_object, Label::kNear); in GenerateGeneric()
935 __ bind(&return_not_equal); in GenerateGeneric()
936 __ ret(0); in GenerateGeneric()
938 __ bind(&first_non_object); in GenerateGeneric()
940 __ CmpInstanceType(rcx, ODDBALL_TYPE); in GenerateGeneric()
941 __ j(equal, &return_not_equal); in GenerateGeneric()
943 __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx); in GenerateGeneric()
944 __ j(above_equal, &return_not_equal); in GenerateGeneric()
947 __ CmpInstanceType(rcx, ODDBALL_TYPE); in GenerateGeneric()
948 __ j(equal, &return_not_equal); in GenerateGeneric()
952 __ bind(&slow); in GenerateGeneric()
959 __ xorl(rax, rax); in GenerateGeneric()
960 __ xorl(rcx, rcx); in GenerateGeneric()
961 __ Ucomisd(xmm0, xmm1); in GenerateGeneric()
964 __ j(parity_even, &unordered, Label::kNear); in GenerateGeneric()
966 __ setcc(above, rax); in GenerateGeneric()
967 __ setcc(below, rcx); in GenerateGeneric()
968 __ subp(rax, rcx); in GenerateGeneric()
969 __ ret(0); in GenerateGeneric()
973 __ bind(&unordered); in GenerateGeneric()
976 __ Set(rax, 1); in GenerateGeneric()
978 __ Set(rax, -1); in GenerateGeneric()
980 __ ret(0); in GenerateGeneric()
983 __ bind(&non_number_comparison); in GenerateGeneric()
996 __ ret(0); in GenerateGeneric()
999 __ bind(&check_for_strings); in GenerateGeneric()
1001 __ JumpIfNotBothSequentialOneByteStrings(rdx, rax, rcx, rbx, in GenerateGeneric()
1013 __ Abort(kUnexpectedFallThroughFromStringComparison); in GenerateGeneric()
1016 __ bind(&check_unequal_objects); in GenerateGeneric()
1027 __ leap(rcx, Operand(rax, rdx, times_1, 0)); in GenerateGeneric()
1028 __ testb(rcx, Immediate(kSmiTagMask)); in GenerateGeneric()
1029 __ j(not_zero, &runtime_call, Label::kNear); in GenerateGeneric()
1031 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset)); in GenerateGeneric()
1032 __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset)); in GenerateGeneric()
1033 __ testb(FieldOperand(rbx, Map::kBitFieldOffset), in GenerateGeneric()
1035 __ j(not_zero, &undetectable, Label::kNear); in GenerateGeneric()
1036 __ testb(FieldOperand(rcx, Map::kBitFieldOffset), in GenerateGeneric()
1038 __ j(not_zero, &return_unequal, Label::kNear); in GenerateGeneric()
1040 __ CmpInstanceType(rbx, FIRST_JS_RECEIVER_TYPE); in GenerateGeneric()
1041 __ j(below, &runtime_call, Label::kNear); in GenerateGeneric()
1042 __ CmpInstanceType(rcx, FIRST_JS_RECEIVER_TYPE); in GenerateGeneric()
1043 __ j(below, &runtime_call, Label::kNear); in GenerateGeneric()
1045 __ bind(&return_unequal); in GenerateGeneric()
1047 __ ret(0); in GenerateGeneric()
1049 __ bind(&undetectable); in GenerateGeneric()
1050 __ testb(FieldOperand(rcx, Map::kBitFieldOffset), in GenerateGeneric()
1052 __ j(zero, &return_unequal, Label::kNear); in GenerateGeneric()
1057 __ CmpInstanceType(rbx, ODDBALL_TYPE); in GenerateGeneric()
1058 __ j(zero, &return_equal, Label::kNear); in GenerateGeneric()
1059 __ CmpInstanceType(rcx, ODDBALL_TYPE); in GenerateGeneric()
1060 __ j(not_zero, &return_unequal, Label::kNear); in GenerateGeneric()
1062 __ bind(&return_equal); in GenerateGeneric()
1063 __ Set(rax, EQUAL); in GenerateGeneric()
1064 __ ret(0); in GenerateGeneric()
1066 __ bind(&runtime_call); in GenerateGeneric()
1071 __ Push(rsi); in GenerateGeneric()
1072 __ Call(strict() ? isolate()->builtins()->StrictEqual() in GenerateGeneric()
1075 __ Pop(rsi); in GenerateGeneric()
1079 __ LoadRoot(rdx, Heap::kTrueValueRootIndex); in GenerateGeneric()
1080 __ subp(rax, rdx); in GenerateGeneric()
1081 __ Ret(); in GenerateGeneric()
1084 __ PopReturnAddressTo(rcx); in GenerateGeneric()
1085 __ Push(rdx); in GenerateGeneric()
1086 __ Push(rax); in GenerateGeneric()
1087 __ Push(Smi::FromInt(NegativeComparisonResult(cc))); in GenerateGeneric()
1088 __ PushReturnAddressFrom(rcx); in GenerateGeneric()
1089 __ TailCallRuntime(Runtime::kCompare); in GenerateGeneric()
1092 __ bind(&miss); in GenerateGeneric()
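
The smi fast path at the top of GenerateGeneric (lines 828-834) compares two tagged integers with one subtraction: if it overflows, the wrapped difference has the wrong sign, and a bitwise NOT repairs it; per the comment at line 831, the NOT cannot yield zero for smi inputs. A sketch using the GCC/Clang overflow builtin:

    #include <cstdint>

    // Compare by subtraction; fix the sign with NOT on overflow. ~diff
    // flips the (wrong) sign bit and is nonzero for smi-range inputs; only
    // the sign and zeroness of the result are consumed. (For arbitrary
    // int64 inputs the single case INT64_MAX - INT64_MIN would NOT to
    // zero, which is why the stub's comment notes rdx cannot be 0 here.)
    int64_t CompareSmis(int64_t lhs, int64_t rhs) {
      int64_t diff;
      if (!__builtin_sub_overflow(lhs, rhs, &diff)) return diff;
      return ~diff;
    }
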
1105 __ Integer32ToSmi(rax, rax); in CallStubInRecordCallTarget()
1106 __ Push(rax); in CallStubInRecordCallTarget()
1107 __ Push(rdi); in CallStubInRecordCallTarget()
1108 __ Integer32ToSmi(rdx, rdx); in CallStubInRecordCallTarget()
1109 __ Push(rdx); in CallStubInRecordCallTarget()
1110 __ Push(rbx); in CallStubInRecordCallTarget()
1111 __ Push(rsi); in CallStubInRecordCallTarget()
1113 __ CallStub(stub); in CallStubInRecordCallTarget()
1115 __ Pop(rsi); in CallStubInRecordCallTarget()
1116 __ Pop(rbx); in CallStubInRecordCallTarget()
1117 __ Pop(rdx); in CallStubInRecordCallTarget()
1118 __ Pop(rdi); in CallStubInRecordCallTarget()
1119 __ Pop(rax); in CallStubInRecordCallTarget()
1120 __ SmiToInteger32(rdx, rdx); in CallStubInRecordCallTarget()
1121 __ SmiToInteger32(rax, rax); in CallStubInRecordCallTarget()
1137 __ SmiToInteger32(rdx, rdx); in GenerateRecordCallTarget()
1138 __ movp(r11, in GenerateRecordCallTarget()
1146 __ cmpp(rdi, FieldOperand(r11, WeakCell::kValueOffset)); in GenerateRecordCallTarget()
1147 __ j(equal, &done, Label::kFar); in GenerateRecordCallTarget()
1148 __ CompareRoot(r11, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1149 __ j(equal, &done, Label::kFar); in GenerateRecordCallTarget()
1150 __ CompareRoot(FieldOperand(r11, HeapObject::kMapOffset), in GenerateRecordCallTarget()
1152 __ j(not_equal, &check_allocation_site); in GenerateRecordCallTarget()
1155 __ CheckSmi(FieldOperand(r11, WeakCell::kValueOffset)); in GenerateRecordCallTarget()
1156 __ j(equal, &initialize); in GenerateRecordCallTarget()
1157 __ jmp(&megamorphic); in GenerateRecordCallTarget()
1159 __ bind(&check_allocation_site); in GenerateRecordCallTarget()
1164 __ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex); in GenerateRecordCallTarget()
1165 __ j(not_equal, &miss); in GenerateRecordCallTarget()
1168 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11); in GenerateRecordCallTarget()
1169 __ cmpp(rdi, r11); in GenerateRecordCallTarget()
1170 __ j(not_equal, &megamorphic); in GenerateRecordCallTarget()
1171 __ jmp(&done); in GenerateRecordCallTarget()
1173 __ bind(&miss); in GenerateRecordCallTarget()
1177 __ CompareRoot(r11, Heap::kuninitialized_symbolRootIndex); in GenerateRecordCallTarget()
1178 __ j(equal, &initialize); in GenerateRecordCallTarget()
1181 __ bind(&megamorphic); in GenerateRecordCallTarget()
1182 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize), in GenerateRecordCallTarget()
1184 __ jmp(&done); in GenerateRecordCallTarget()
1188 __ bind(&initialize); in GenerateRecordCallTarget()
1191 __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, r11); in GenerateRecordCallTarget()
1192 __ cmpp(rdi, r11); in GenerateRecordCallTarget()
1193 __ j(not_equal, &not_array_function); in GenerateRecordCallTarget()
1197 __ jmp(&done); in GenerateRecordCallTarget()
1199 __ bind(&not_array_function); in GenerateRecordCallTarget()
1203 __ bind(&done); in GenerateRecordCallTarget()
1205 __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size, in GenerateRecordCallTarget()
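
GenerateRecordCallTarget (lines 1137-1205) maintains a call-feedback slot as a small state machine: an uninitialized slot becomes monomorphic (a WeakCell for one target, or an AllocationSite when the target is the Array function), a monomorphic slot whose WeakCell was cleared by GC re-initializes (the CheckSmi/j(equal, &initialize) at lines 1155-1156), a live slot that sees a different target degrades to the megamorphic sentinel, and megamorphic slots stay put. A hypothetical sketch of those transitions (names are illustrative, not V8's representation):

    enum class Feedback { kUninitialized, kMonomorphic, kMegamorphic };

    Feedback Record(Feedback state, bool same_target, bool target_collected) {
      switch (state) {
        case Feedback::kUninitialized:
          return Feedback::kMonomorphic;       // &initialize
        case Feedback::kMonomorphic:
          if (same_target) return state;       // j(equal, &done)
          if (target_collected)
            return Feedback::kMonomorphic;     // cleared cell: re-initialize
          return Feedback::kMegamorphic;       // &megamorphic sentinel
        case Feedback::kMegamorphic:
          return state;                        // stays generic
      }
      return state;
    }
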
1219 __ JumpIfSmi(rdi, &non_function); in Generate()
1221 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, r11); in Generate()
1222 __ j(not_equal, &non_function); in Generate()
1228 __ movp(rbx, in Generate()
1230 __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex); in Generate()
1231 __ j(equal, &feedback_register_initialized, Label::kNear); in Generate()
1232 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); in Generate()
1233 __ bind(&feedback_register_initialized); in Generate()
1235 __ AssertUndefinedOrAllocationSite(rbx); in Generate()
1238 __ movp(rdx, rdi); in Generate()
1242 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); in Generate()
1243 __ movp(rcx, FieldOperand(rcx, SharedFunctionInfo::kConstructStubOffset)); in Generate()
1244 __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize)); in Generate()
1245 __ jmp(rcx); in Generate()
1247 __ bind(&non_function); in Generate()
1248 __ movp(rdx, rdi); in Generate()
1249 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate()
1326 __ EnterApiExitFrame(arg_stack_space); in Generate()
1328 __ movp(r14, rax); in Generate()
1330 __ EnterExitFrame( in Generate()
1343 __ CheckStackAlignment(); in Generate()
1351 __ movp(kCCallArg0, r14); // argc. in Generate()
1352 __ movp(kCCallArg1, r15); // argv. in Generate()
1353 __ Move(kCCallArg2, ExternalReference::isolate_address(isolate())); in Generate()
1357 __ leap(kCCallArg0, StackSpaceOperand(kArgExtraStackSpace)); in Generate()
1359 __ movp(kCCallArg1, r14); // argc. in Generate()
1360 __ movp(kCCallArg2, r15); // argv. in Generate()
1361 __ Move(kCCallArg3, ExternalReference::isolate_address(isolate())); in Generate()
1363 __ call(rbx); in Generate()
1369 __ movq(kReturnRegister0, StackSpaceOperand(kArgExtraStackSpace + 0)); in Generate()
1370 __ movq(kReturnRegister1, StackSpaceOperand(kArgExtraStackSpace + 1)); in Generate()
1372 __ movq(kReturnRegister2, StackSpaceOperand(kArgExtraStackSpace + 2)); in Generate()
1379 __ CompareRoot(rax, Heap::kExceptionRootIndex); in Generate()
1380 __ j(equal, &exception_returned); in Generate()
1386 __ LoadRoot(r14, Heap::kTheHoleValueRootIndex); in Generate()
1391 __ cmpp(r14, pending_exception_operand); in Generate()
1392 __ j(equal, &okay, Label::kNear); in Generate()
1393 __ int3(); in Generate()
1394 __ bind(&okay); in Generate()
1398 __ LeaveExitFrame(save_doubles(), !argv_in_register()); in Generate()
1399 __ ret(0); in Generate()
1402 __ bind(&exception_returned); in Generate()
1421 __ movp(arg_reg_1, Immediate(0)); // argc. in Generate()
1422 __ movp(arg_reg_2, Immediate(0)); // argv. in Generate()
1423 __ Move(arg_reg_3, ExternalReference::isolate_address(isolate())); in Generate()
1424 __ PrepareCallCFunction(3); in Generate()
1425 __ CallCFunction(find_handler, 3); in Generate()
1429 __ movp(rsi, masm->ExternalOperand(pending_handler_context_address)); in Generate()
1430 __ movp(rsp, masm->ExternalOperand(pending_handler_sp_address)); in Generate()
1431 __ movp(rbp, masm->ExternalOperand(pending_handler_fp_address)); in Generate()
1436 __ testp(rsi, rsi); in Generate()
1437 __ j(zero, &skip, Label::kNear); in Generate()
1438 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi); in Generate()
1439 __ bind(&skip); in Generate()
1442 __ movp(rdi, masm->ExternalOperand(pending_handler_code_address)); in Generate()
1443 __ movp(rdx, masm->ExternalOperand(pending_handler_offset_address)); in Generate()
1444 __ leap(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize)); in Generate()
1445 __ jmp(rdi); in Generate()
1458 __ pushq(rbp); in Generate()
1459 __ movp(rbp, rsp); in Generate()
1462 __ Push(Immediate(StackFrame::TypeToMarker(type()))); // context slot in Generate()
1464 __ Load(kScratchRegister, context_address); in Generate()
1465 __ Push(kScratchRegister); // context in Generate()
1467 __ pushq(r12); in Generate()
1468 __ pushq(r13); in Generate()
1469 __ pushq(r14); in Generate()
1470 __ pushq(r15); in Generate()
1472 __ pushq(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI. in Generate()
1473 __ pushq(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI. in Generate()
1475 __ pushq(rbx); in Generate()
1479 __ subp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize)); in Generate()
1480 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6); in Generate()
1481 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7); in Generate()
1482 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8); in Generate()
1483 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9); in Generate()
1484 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10); in Generate()
1485 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11); in Generate()
1486 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12); in Generate()
1487 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13); in Generate()
1488 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14); in Generate()
1489 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15); in Generate()
1494 __ InitializeRootRegister(); in Generate()
1501 __ Push(c_entry_fp_operand); in Generate()
1506 __ Load(rax, js_entry_sp); in Generate()
1507 __ testp(rax, rax); in Generate()
1508 __ j(not_zero, &not_outermost_js); in Generate()
1509 __ Push(Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME)); in Generate()
1510 __ movp(rax, rbp); in Generate()
1511 __ Store(js_entry_sp, rax); in Generate()
1513 __ jmp(&cont); in Generate()
1514 __ bind(&not_outermost_js); in Generate()
1515 __ Push(Immediate(StackFrame::INNER_JSENTRY_FRAME)); in Generate()
1516 __ bind(&cont); in Generate()
1520 __ jmp(&invoke); in Generate()
1521 __ bind(&handler_entry); in Generate()
1527 __ Store(pending_exception, rax); in Generate()
1528 __ LoadRoot(rax, Heap::kExceptionRootIndex); in Generate()
1529 __ jmp(&exit); in Generate()
1532 __ bind(&invoke); in Generate()
1533 __ PushStackHandler(); in Generate()
1536 __ Push(Immediate(0)); // receiver in Generate()
1546 __ Load(rax, construct_entry); in Generate()
1549 __ Load(rax, entry); in Generate()
1551 __ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize)); in Generate()
1552 __ call(kScratchRegister); in Generate()
1555 __ PopStackHandler(); in Generate()
1557 __ bind(&exit); in Generate()
1559 __ Pop(rbx); in Generate()
1560 __ cmpp(rbx, Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME)); in Generate()
1561 __ j(not_equal, &not_outermost_js_2); in Generate()
1562 __ Move(kScratchRegister, js_entry_sp); in Generate()
1563 __ movp(Operand(kScratchRegister, 0), Immediate(0)); in Generate()
1564 __ bind(&not_outermost_js_2); in Generate()
1568 __ Pop(c_entry_fp_operand); in Generate()
1574 __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0)); in Generate()
1575 __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1)); in Generate()
1576 __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2)); in Generate()
1577 __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3)); in Generate()
1578 __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4)); in Generate()
1579 __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5)); in Generate()
1580 __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6)); in Generate()
1581 __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7)); in Generate()
1582 __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8)); in Generate()
1583 __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9)); in Generate()
1584 __ addp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize)); in Generate()
1587 __ popq(rbx); in Generate()
1590 __ popq(rsi); in Generate()
1591 __ popq(rdi); in Generate()
1593 __ popq(r15); in Generate()
1594 __ popq(r14); in Generate()
1595 __ popq(r13); in Generate()
1596 __ popq(r12); in Generate()
1597 __ addp(rsp, Immediate(2 * kPointerSize)); // remove markers in Generate()
1600 __ popq(rbp); in Generate()
1601 __ ret(0); in Generate()
1611 __ JumpIfSmi(object_, receiver_not_string_); in GenerateFast()
1614 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset)); in GenerateFast()
1615 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); in GenerateFast()
1617 __ testb(result_, Immediate(kIsNotStringMask)); in GenerateFast()
1618 __ j(not_zero, receiver_not_string_); in GenerateFast()
1622 __ JumpIfNotSmi(index_, &index_not_smi_); in GenerateFast()
1623 __ bind(&got_smi_index_); in GenerateFast()
1626 __ SmiCompare(index_, FieldOperand(object_, String::kLengthOffset)); in GenerateFast()
1627 __ j(above_equal, index_out_of_range_); in GenerateFast()
1629 __ SmiToInteger32(index_, index_); in GenerateFast()
1634 __ Integer32ToSmi(result_, result_); in GenerateFast()
1635 __ bind(&exit_); in GenerateFast()
1642 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); in GenerateSlow()
1646 __ bind(&index_not_smi_); in GenerateSlow()
1648 __ CheckMap(index_, in GenerateSlow()
1654 __ Push(LoadWithVectorDescriptor::VectorRegister()); in GenerateSlow()
1655 __ Push(LoadDescriptor::SlotRegister()); in GenerateSlow()
1657 __ Push(object_); in GenerateSlow()
1658 __ Push(index_); // Consumed by runtime conversion function. in GenerateSlow()
1659 __ CallRuntime(Runtime::kNumberToSmi); in GenerateSlow()
1663 __ movp(index_, rax); in GenerateSlow()
1665 __ Pop(object_); in GenerateSlow()
1667 __ Pop(LoadDescriptor::SlotRegister()); in GenerateSlow()
1668 __ Pop(LoadWithVectorDescriptor::VectorRegister()); in GenerateSlow()
1671 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset)); in GenerateSlow()
1672 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); in GenerateSlow()
1675 __ JumpIfNotSmi(index_, index_out_of_range_); in GenerateSlow()
1677 __ jmp(&got_smi_index_); in GenerateSlow()
1682 __ bind(&call_runtime_); in GenerateSlow()
1684 __ Push(object_); in GenerateSlow()
1685 __ Integer32ToSmi(index_, index_); in GenerateSlow()
1686 __ Push(index_); in GenerateSlow()
1687 __ CallRuntime(Runtime::kStringCharCodeAtRT); in GenerateSlow()
1689 __ movp(result_, rax); in GenerateSlow()
1692 __ jmp(&exit_); in GenerateSlow()
1694 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); in GenerateSlow()
1706 __ movp(length, FieldOperand(left, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
1707 __ SmiCompare(length, FieldOperand(right, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
1708 __ j(equal, &check_zero_length, Label::kNear); in GenerateFlatOneByteStringEquals()
1709 __ Move(rax, Smi::FromInt(NOT_EQUAL)); in GenerateFlatOneByteStringEquals()
1710 __ ret(0); in GenerateFlatOneByteStringEquals()
1714 __ bind(&check_zero_length); in GenerateFlatOneByteStringEquals()
1716 __ SmiTest(length); in GenerateFlatOneByteStringEquals()
1717 __ j(not_zero, &compare_chars, Label::kNear); in GenerateFlatOneByteStringEquals()
1718 __ Move(rax, Smi::FromInt(EQUAL)); in GenerateFlatOneByteStringEquals()
1719 __ ret(0); in GenerateFlatOneByteStringEquals()
1722 __ bind(&compare_chars); in GenerateFlatOneByteStringEquals()
1728 __ Move(rax, Smi::FromInt(EQUAL)); in GenerateFlatOneByteStringEquals()
1729 __ ret(0); in GenerateFlatOneByteStringEquals()
1732 __ bind(&strings_not_equal); in GenerateFlatOneByteStringEquals()
1733 __ Move(rax, Smi::FromInt(NOT_EQUAL)); in GenerateFlatOneByteStringEquals()
1734 __ ret(0); in GenerateFlatOneByteStringEquals()
1746 __ movp(scratch1, FieldOperand(left, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
1747 __ movp(scratch4, scratch1); in GenerateCompareFlatOneByteStrings()
1748 __ SmiSub(scratch4, in GenerateCompareFlatOneByteStrings()
1754 __ j(less, &left_shorter, Label::kNear); in GenerateCompareFlatOneByteStrings()
1758 __ SmiSub(scratch1, scratch1, length_difference); in GenerateCompareFlatOneByteStrings()
1759 __ bind(&left_shorter); in GenerateCompareFlatOneByteStrings()
1765 __ SmiTest(min_length); in GenerateCompareFlatOneByteStrings()
1766 __ j(zero, &compare_lengths, Label::kNear); in GenerateCompareFlatOneByteStrings()
1778 __ bind(&compare_lengths); in GenerateCompareFlatOneByteStrings()
1779 __ SmiTest(length_difference); in GenerateCompareFlatOneByteStrings()
1781 __ j(not_zero, &length_not_equal, Label::kNear); in GenerateCompareFlatOneByteStrings()
1784 __ Move(rax, Smi::FromInt(EQUAL)); in GenerateCompareFlatOneByteStrings()
1785 __ ret(0); in GenerateCompareFlatOneByteStrings()
1789 __ bind(&length_not_equal); in GenerateCompareFlatOneByteStrings()
1790 __ j(greater, &result_greater, Label::kNear); in GenerateCompareFlatOneByteStrings()
1791 __ jmp(&result_less, Label::kNear); in GenerateCompareFlatOneByteStrings()
1792 __ bind(&result_not_equal); in GenerateCompareFlatOneByteStrings()
1794 __ j(above, &result_greater, Label::kNear); in GenerateCompareFlatOneByteStrings()
1795 __ bind(&result_less); in GenerateCompareFlatOneByteStrings()
1798 __ Move(rax, Smi::FromInt(LESS)); in GenerateCompareFlatOneByteStrings()
1799 __ ret(0); in GenerateCompareFlatOneByteStrings()
1802 __ bind(&result_greater); in GenerateCompareFlatOneByteStrings()
1803 __ Move(rax, Smi::FromInt(GREATER)); in GenerateCompareFlatOneByteStrings()
1804 __ ret(0); in GenerateCompareFlatOneByteStrings()
1814 __ SmiToInteger32(length, length); in GenerateOneByteCharsCompareLoop()
1815 __ leap(left, in GenerateOneByteCharsCompareLoop()
1817 __ leap(right, in GenerateOneByteCharsCompareLoop()
1819 __ negq(length); in GenerateOneByteCharsCompareLoop()
1824 __ bind(&loop); in GenerateOneByteCharsCompareLoop()
1825 __ movb(scratch, Operand(left, index, times_1, 0)); in GenerateOneByteCharsCompareLoop()
1826 __ cmpb(scratch, Operand(right, index, times_1, 0)); in GenerateOneByteCharsCompareLoop()
1827 __ j(not_equal, chars_not_equal, near_jump); in GenerateOneByteCharsCompareLoop()
1828 __ incq(index); in GenerateOneByteCharsCompareLoop()
1829 __ j(not_zero, &loop); in GenerateOneByteCharsCompareLoop()
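
GenerateOneByteCharsCompareLoop (lines 1814-1829) uses the classic negative-index idiom: point both operands one past the region to compare, start the index at -length, and let the increment's zero flag terminate the loop. An equality-flavored sketch (the real helper jumps to chars_not_equal so the caller can produce LESS/GREATER):

    #include <cstddef>

    bool EqualOneByte(const char* left, const char* right, ptrdiff_t length) {
      left += length;             // leap(left, ...) to one past the region
      right += length;
      ptrdiff_t index = -length;  // negq(length)
      while (index != 0) {
        if (left[index] != right[index]) return false;  // cmpb + j(not_equal)
        ++index;                                        // incq + j(not_zero, &loop)
      }
      return true;
    }
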
1843 __ Move(rcx, isolate()->factory()->undefined_value()); in Generate()
1847 __ testb(rcx, Immediate(kSmiTagMask)); in Generate()
1848 __ Assert(not_equal, kExpectedAllocationSite); in Generate()
1849 __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), in Generate()
1851 __ Assert(equal, kExpectedAllocationSite); in Generate()
1857 __ TailCallStub(&stub); in Generate()
1867 __ JumpIfSmi(rdx, &miss, miss_distance); in GenerateBooleans()
1868 __ movp(rcx, FieldOperand(rdx, HeapObject::kMapOffset)); in GenerateBooleans()
1869 __ JumpIfSmi(rax, &miss, miss_distance); in GenerateBooleans()
1870 __ movp(rbx, FieldOperand(rax, HeapObject::kMapOffset)); in GenerateBooleans()
1871 __ JumpIfNotRoot(rcx, Heap::kBooleanMapRootIndex, &miss, miss_distance); in GenerateBooleans()
1872 __ JumpIfNotRoot(rbx, Heap::kBooleanMapRootIndex, &miss, miss_distance); in GenerateBooleans()
1874 __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset)); in GenerateBooleans()
1875 __ AssertSmi(rax); in GenerateBooleans()
1876 __ movp(rdx, FieldOperand(rdx, Oddball::kToNumberOffset)); in GenerateBooleans()
1877 __ AssertSmi(rdx); in GenerateBooleans()
1878 __ pushq(rax); in GenerateBooleans()
1879 __ movq(rax, rdx); in GenerateBooleans()
1880 __ popq(rdx); in GenerateBooleans()
1882 __ subp(rax, rdx); in GenerateBooleans()
1883 __ Ret(); in GenerateBooleans()
1885 __ bind(&miss); in GenerateBooleans()
1893 __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear); in GenerateSmis()
1897 __ subp(rax, rdx); in GenerateSmis()
1900 __ subp(rdx, rax); in GenerateSmis()
1901 __ j(no_overflow, &done, Label::kNear); in GenerateSmis()
1903 __ notp(rdx); in GenerateSmis()
1904 __ bind(&done); in GenerateSmis()
1905 __ movp(rax, rdx); in GenerateSmis()
1907 __ ret(0); in GenerateSmis()
1909 __ bind(&miss); in GenerateSmis()
1922 __ JumpIfNotSmi(rdx, &miss); in GenerateNumbers()
1925 __ JumpIfNotSmi(rax, &miss); in GenerateNumbers()
1930 __ JumpIfSmi(rax, &right_smi, Label::kNear); in GenerateNumbers()
1931 __ CompareMap(rax, isolate()->factory()->heap_number_map()); in GenerateNumbers()
1932 __ j(not_equal, &maybe_undefined1, Label::kNear); in GenerateNumbers()
1933 __ Movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); in GenerateNumbers()
1934 __ jmp(&left, Label::kNear); in GenerateNumbers()
1935 __ bind(&right_smi); in GenerateNumbers()
1936 __ SmiToInteger32(rcx, rax); // Can't clobber rax yet. in GenerateNumbers()
1937 __ Cvtlsi2sd(xmm1, rcx); in GenerateNumbers()
1939 __ bind(&left); in GenerateNumbers()
1940 __ JumpIfSmi(rdx, &left_smi, Label::kNear); in GenerateNumbers()
1941 __ CompareMap(rdx, isolate()->factory()->heap_number_map()); in GenerateNumbers()
1942 __ j(not_equal, &maybe_undefined2, Label::kNear); in GenerateNumbers()
1943 __ Movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); in GenerateNumbers()
1944 __ jmp(&done); in GenerateNumbers()
1945 __ bind(&left_smi); in GenerateNumbers()
1946 __ SmiToInteger32(rcx, rdx); // Can't clobber rdx yet. in GenerateNumbers()
1947 __ Cvtlsi2sd(xmm0, rcx); in GenerateNumbers()
1949 __ bind(&done); in GenerateNumbers()
1951 __ Ucomisd(xmm0, xmm1); in GenerateNumbers()
1954 __ j(parity_even, &unordered, Label::kNear); in GenerateNumbers()
1958 __ movl(rax, Immediate(0)); in GenerateNumbers()
1959 __ movl(rcx, Immediate(0)); in GenerateNumbers()
1960 __ setcc(above, rax); // Add one to zero if carry clear and not equal. in GenerateNumbers()
1961 __ sbbp(rax, rcx); // Subtract one if below (aka. carry set). in GenerateNumbers()
1962 __ ret(0); in GenerateNumbers()
1964 __ bind(&unordered); in GenerateNumbers()
1965 __ bind(&generic_stub); in GenerateNumbers()
1968 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); in GenerateNumbers()
1970 __ bind(&maybe_undefined1); in GenerateNumbers()
1972 __ Cmp(rax, isolate()->factory()->undefined_value()); in GenerateNumbers()
1973 __ j(not_equal, &miss); in GenerateNumbers()
1974 __ JumpIfSmi(rdx, &unordered); in GenerateNumbers()
1975 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx); in GenerateNumbers()
1976 __ j(not_equal, &maybe_undefined2, Label::kNear); in GenerateNumbers()
1977 __ jmp(&unordered); in GenerateNumbers()
1980 __ bind(&maybe_undefined2); in GenerateNumbers()
1982 __ Cmp(rdx, isolate()->factory()->undefined_value()); in GenerateNumbers()
1983 __ j(equal, &unordered); in GenerateNumbers()
1986 __ bind(&miss); in GenerateNumbers()
2004 __ j(cond, &miss, Label::kNear); in GenerateInternalizedStrings()
2007 __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2008 __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2009 __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2010 __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2012 __ orp(tmp1, tmp2); in GenerateInternalizedStrings()
2013 __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); in GenerateInternalizedStrings()
2014 __ j(not_zero, &miss, Label::kNear); in GenerateInternalizedStrings()
2018 __ cmpp(left, right); in GenerateInternalizedStrings()
2022 __ j(not_equal, &done, Label::kNear); in GenerateInternalizedStrings()
2025 __ Move(rax, Smi::FromInt(EQUAL)); in GenerateInternalizedStrings()
2026 __ bind(&done); in GenerateInternalizedStrings()
2027 __ ret(0); in GenerateInternalizedStrings()
2029 __ bind(&miss); in GenerateInternalizedStrings()
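
GenerateInternalizedStrings (lines 2004-2029) folds two type checks into one: OR the instance-type bytes of both operands, then test kIsNotStringMask | kIsNotInternalizedMask once, since any disqualifying bit in either operand survives the OR. A sketch with illustrative mask values (V8's actual bit assignments may differ):

    #include <cstdint>

    constexpr uint8_t kIsNotStringMask       = 0x80;  // illustrative
    constexpr uint8_t kIsNotInternalizedMask = 0x40;  // illustrative

    // One test answers "are both operands internalized strings?"
    bool BothInternalizedStrings(uint8_t left_type, uint8_t right_type) {
      return ((left_type | right_type) &
              (kIsNotStringMask | kIsNotInternalizedMask)) == 0;
    }
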
2047 __ j(cond, &miss, Label::kNear); in GenerateUniqueNames()
2051 __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset)); in GenerateUniqueNames()
2052 __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset)); in GenerateUniqueNames()
2053 __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2054 __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2056 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear); in GenerateUniqueNames()
2057 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear); in GenerateUniqueNames()
2061 __ cmpp(left, right); in GenerateUniqueNames()
2065 __ j(not_equal, &done, Label::kNear); in GenerateUniqueNames()
2068 __ Move(rax, Smi::FromInt(EQUAL)); in GenerateUniqueNames()
2069 __ bind(&done); in GenerateUniqueNames()
2070 __ ret(0); in GenerateUniqueNames()
2072 __ bind(&miss); in GenerateUniqueNames()
2092 __ j(cond, &miss); in GenerateStrings()
2096 __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset)); in GenerateStrings()
2097 __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset)); in GenerateStrings()
2098 __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateStrings()
2099 __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateStrings()
2100 __ movp(tmp3, tmp1); in GenerateStrings()
2102 __ orp(tmp3, tmp2); in GenerateStrings()
2103 __ testb(tmp3, Immediate(kIsNotStringMask)); in GenerateStrings()
2104 __ j(not_zero, &miss); in GenerateStrings()
2108 __ cmpp(left, right); in GenerateStrings()
2109 __ j(not_equal, &not_same, Label::kNear); in GenerateStrings()
2112 __ Move(rax, Smi::FromInt(EQUAL)); in GenerateStrings()
2113 __ ret(0); in GenerateStrings()
2116 __ bind(&not_same); in GenerateStrings()
2124 __ orp(tmp1, tmp2); in GenerateStrings()
2125 __ testb(tmp1, Immediate(kIsNotInternalizedMask)); in GenerateStrings()
2126 __ j(not_zero, &do_compare, Label::kNear); in GenerateStrings()
2130 __ ret(0); in GenerateStrings()
2131 __ bind(&do_compare); in GenerateStrings()
2136 __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime); in GenerateStrings()
2148 __ bind(&runtime); in GenerateStrings()
2152 __ Push(left); in GenerateStrings()
2153 __ Push(right); in GenerateStrings()
2154 __ CallRuntime(Runtime::kStringEqual); in GenerateStrings()
2156 __ LoadRoot(rdx, Heap::kTrueValueRootIndex); in GenerateStrings()
2157 __ subp(rax, rdx); in GenerateStrings()
2158 __ Ret(); in GenerateStrings()
2160 __ PopReturnAddressTo(tmp1); in GenerateStrings()
2161 __ Push(left); in GenerateStrings()
2162 __ Push(right); in GenerateStrings()
2163 __ PushReturnAddressFrom(tmp1); in GenerateStrings()
2164 __ TailCallRuntime(Runtime::kStringCompare); in GenerateStrings()
2167 __ bind(&miss); in GenerateStrings()
2176 __ j(either_smi, &miss, Label::kNear); in GenerateReceivers()
2179 __ CmpObjectType(rax, FIRST_JS_RECEIVER_TYPE, rcx); in GenerateReceivers()
2180 __ j(below, &miss, Label::kNear); in GenerateReceivers()
2181 __ CmpObjectType(rdx, FIRST_JS_RECEIVER_TYPE, rcx); in GenerateReceivers()
2182 __ j(below, &miss, Label::kNear); in GenerateReceivers()
2185 __ subp(rax, rdx); in GenerateReceivers()
2186 __ ret(0); in GenerateReceivers()
2188 __ bind(&miss); in GenerateReceivers()
2197 __ j(either_smi, &miss, Label::kNear); in GenerateKnownReceivers()
2199 __ GetWeakValue(rdi, cell); in GenerateKnownReceivers()
2200 __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rdi); in GenerateKnownReceivers()
2201 __ j(not_equal, &miss, Label::kNear); in GenerateKnownReceivers()
2202 __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rdi); in GenerateKnownReceivers()
2203 __ j(not_equal, &miss, Label::kNear); in GenerateKnownReceivers()
2206 __ subp(rax, rdx); in GenerateKnownReceivers()
2207 __ ret(0); in GenerateKnownReceivers()
2209 __ PopReturnAddressTo(rcx); in GenerateKnownReceivers()
2210 __ Push(rdx); in GenerateKnownReceivers()
2211 __ Push(rax); in GenerateKnownReceivers()
2212 __ Push(Smi::FromInt(NegativeComparisonResult(GetCondition()))); in GenerateKnownReceivers()
2213 __ PushReturnAddressFrom(rcx); in GenerateKnownReceivers()
2214 __ TailCallRuntime(Runtime::kCompare); in GenerateKnownReceivers()
2217 __ bind(&miss); in GenerateKnownReceivers()
2226 __ Push(rdx); in GenerateMiss()
2227 __ Push(rax); in GenerateMiss()
2228 __ Push(rdx); in GenerateMiss()
2229 __ Push(rax); in GenerateMiss()
2230 __ Push(Smi::FromInt(op())); in GenerateMiss()
2231 __ CallRuntime(Runtime::kCompareIC_Miss); in GenerateMiss()
2234 __ leap(rdi, FieldOperand(rax, Code::kHeaderSize)); in GenerateMiss()
2235 __ Pop(rax); in GenerateMiss()
2236 __ Pop(rdx); in GenerateMiss()
2240 __ jmp(rdi); in GenerateMiss()
2261 __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset)); in GenerateNegativeLookup()
2262 __ decl(index); in GenerateNegativeLookup()
2263 __ andp(index, in GenerateNegativeLookup()
2268 __ leap(index, Operand(index, index, times_2, 0)); // index *= 3. in GenerateNegativeLookup()
2273 __ movp(entity_name, Operand(properties, in GenerateNegativeLookup()
2277 __ Cmp(entity_name, masm->isolate()->factory()->undefined_value()); in GenerateNegativeLookup()
2278 __ j(equal, done); in GenerateNegativeLookup()
2281 __ Cmp(entity_name, Handle<Name>(name)); in GenerateNegativeLookup()
2282 __ j(equal, miss); in GenerateNegativeLookup()
2286 __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex); in GenerateNegativeLookup()
2287 __ j(equal, &good, Label::kNear); in GenerateNegativeLookup()
2290 __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); in GenerateNegativeLookup()
2291 __ JumpIfNotUniqueNameInstanceType( in GenerateNegativeLookup()
2293 __ bind(&good); in GenerateNegativeLookup()
2298 __ Push(Handle<Object>(name)); in GenerateNegativeLookup()
2299 __ Push(Immediate(name->Hash())); in GenerateNegativeLookup()
2300 __ CallStub(&stub); in GenerateNegativeLookup()
2301 __ testp(r0, r0); in GenerateNegativeLookup()
2302 __ j(not_zero, miss); in GenerateNegativeLookup()
2303 __ jmp(done); in GenerateNegativeLookup()
2325 __ SmiToInteger32(scratch, FieldOperand(dictionary(), kCapacityOffset)); in Generate()
2326 __ decl(scratch); in Generate()
2327 __ Push(scratch); in Generate()
2338 __ movp(scratch, args.GetArgumentOperand(1)); in Generate()
2340 __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i))); in Generate()
2342 __ andp(scratch, Operand(rsp, 0)); in Generate()
2346 __ leap(index(), Operand(scratch, scratch, times_2, 0)); // index *= 3. in Generate()
2349 __ movp(scratch, Operand(dictionary(), index(), times_pointer_size, in Generate()
2352 __ Cmp(scratch, isolate()->factory()->undefined_value()); in Generate()
2353 __ j(equal, &not_in_dictionary); in Generate()
2356 __ cmpp(scratch, args.GetArgumentOperand(0)); in Generate()
2357 __ j(equal, &in_dictionary); in Generate()
2365 __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); in Generate()
2366 __ JumpIfNotUniqueNameInstanceType( in Generate()
2372 __ bind(&maybe_in_dictionary); in Generate()
2377 __ movp(scratch, Immediate(0)); in Generate()
2378 __ Drop(1); in Generate()
2379 __ ret(2 * kPointerSize); in Generate()
2382 __ bind(&in_dictionary); in Generate()
2383 __ movp(scratch, Immediate(1)); in Generate()
2384 __ Drop(1); in Generate()
2385 __ ret(2 * kPointerSize); in Generate()
2387 __ bind(&not_in_dictionary); in Generate()
2388 __ movp(scratch, Immediate(0)); in Generate()
2389 __ Drop(1); in Generate()
2390 __ ret(2 * kPointerSize); in Generate()
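
Lines 2261-2390 are the name-dictionary lookup stub: the table is open-addressed with a power-of-two capacity, the probe index is (hash + probe offset) masked with capacity - 1, and each entry spans three slots (key, value, details), hence the "index *= 3" lea at lines 2268 and 2346. A hypothetical sketch of the probe loop (GetProbeOffset and the probe count here stand in for NameDictionary's):

    #include <cstdint>
    #include <vector>

    constexpr uint32_t kUndefined = 0xFFFFFFFF;  // empty-slot marker (mock)
    constexpr int kEntrySize = 3;                // key, value, details

    inline uint32_t GetProbeOffset(int round) { return round * (round + 1) / 2; }

    // Returns the key-slot index of `key`, or -1 on hitting an empty slot.
    int Lookup(const std::vector<uint32_t>& table, uint32_t hash, uint32_t key) {
      uint32_t capacity = table.size() / kEntrySize;  // power of two
      for (int round = 0; round < 20; ++round) {      // probe count illustrative
        uint32_t index = (hash + GetProbeOffset(round)) & (capacity - 1);
        uint32_t slot = table[index * kEntrySize];    // entry's key slot
        if (slot == kUndefined) return -1;            // j(equal, &not_in_dictionary)
        if (slot == key) return index * kEntrySize;   // j(equal, &in_dictionary)
      }
      return -1;  // probes exhausted: the stub's &maybe_in_dictionary case
    }
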
2416 __ jmp(&skip_to_incremental_noncompacting, Label::kNear); in Generate()
2417 __ jmp(&skip_to_incremental_compacting, Label::kFar); in Generate()
2420 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in Generate()
2423 __ ret(0); in Generate()
2426 __ bind(&skip_to_incremental_noncompacting); in Generate()
2429 __ bind(&skip_to_incremental_compacting); in Generate()
2445 __ movp(regs_.scratch0(), Operand(regs_.address(), 0)); in GenerateIncremental()
2446 __ JumpIfNotInNewSpace(regs_.scratch0(), in GenerateIncremental()
2450 __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(), in GenerateIncremental()
2459 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in GenerateIncremental()
2462 __ bind(&dont_need_remembered_set); in GenerateIncremental()
2469 __ ret(0); in GenerateIncremental()
2479 __ Move(address, regs_.address()); in InformIncrementalMarker()
2480 __ Move(arg_reg_1, regs_.object()); in InformIncrementalMarker()
2482 __ Move(arg_reg_2, address); in InformIncrementalMarker()
2483 __ LoadAddress(arg_reg_3, in InformIncrementalMarker()
2488 __ PrepareCallCFunction(argument_count); in InformIncrementalMarker()
2489 __ CallCFunction( in InformIncrementalMarker()
2509 __ JumpIfBlack(regs_.object(), in CheckNeedsToInformIncrementalMarker()
2517 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in CheckNeedsToInformIncrementalMarker()
2520 __ ret(0); in CheckNeedsToInformIncrementalMarker()
2523 __ bind(&on_black); in CheckNeedsToInformIncrementalMarker()
2526 __ movp(regs_.scratch0(), Operand(regs_.address(), 0)); in CheckNeedsToInformIncrementalMarker()
2531 __ CheckPageFlag(regs_.scratch0(), // Contains value. in CheckNeedsToInformIncrementalMarker()
2538 __ CheckPageFlag(regs_.object(), in CheckNeedsToInformIncrementalMarker()
2544 __ bind(&ensure_not_white); in CheckNeedsToInformIncrementalMarker()
2549 __ Push(regs_.object()); in CheckNeedsToInformIncrementalMarker()
2550 __ JumpIfWhite(regs_.scratch0(), // The value. in CheckNeedsToInformIncrementalMarker()
2554 __ Pop(regs_.object()); in CheckNeedsToInformIncrementalMarker()
2558 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in CheckNeedsToInformIncrementalMarker()
2561 __ ret(0); in CheckNeedsToInformIncrementalMarker()
2564 __ bind(&need_incremental_pop_object); in CheckNeedsToInformIncrementalMarker()
2565 __ Pop(regs_.object()); in CheckNeedsToInformIncrementalMarker()
2567 __ bind(&need_incremental); in CheckNeedsToInformIncrementalMarker()
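
The RecordWriteStub block above begins with two patchable jumps (one near, one far) so the garbage collector can retarget the barrier between its store-buffer-only mode and the two incremental modes by rewriting the stub's first instructions in place. The incremental body then makes two decisions: the marker only needs to be informed when a black (already scanned) object has been written with a white (not yet marked) value, which would otherwise stay hidden from the collector, and the remembered set only needs an entry when an old-space object now points at a new-space value. A toy model of that decision tree, with plain booleans standing in for V8's page-flag and mark-bit tests:

  #include <cstdio>

  // Simplified write barrier; the predicates are placeholders for the
  // CheckPageFlag / JumpIfBlack / JumpIfWhite tests in the stub.
  struct Obj { bool in_new_space; bool black; bool white; };

  void RecordWrite(Obj& host, Obj& value) {
    // A black host holding a white value would escape the incremental
    // marker, so grey the value (the ensure_not_white path).
    if (host.black && value.white) {
      value.white = false;
      std::printf("greyed white value stored into black object\n");
    }
    // Only old->new pointers need a remembered-set entry.
    if (!host.in_new_space && value.in_new_space) {
      std::printf("record slot in remembered set\n");
    }
  }

  int main() {
    Obj host{false, true, false};   // old space, already marked black
    Obj value{false, false, true};  // old space, still white
    RecordWrite(host, value);
  }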
2575 __ Call(ces.GetCode(), RelocInfo::CODE_TARGET); in Generate()
2578 __ movp(rbx, MemOperand(rbp, parameter_count_offset)); in Generate()
2580 __ PopReturnAddressTo(rcx); in Generate()
2583 __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset)); in Generate()
2584 __ jmp(rcx); // Return to IC Miss stub, continuation still on stack. in Generate()
2600 __ pushq(arg_reg_1); in Generate()
2601 __ pushq(arg_reg_2); in Generate()
2604 __ leap(arg_reg_2, in Generate()
2608 __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize)); in Generate()
2609 __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength)); in Generate()
2615 __ Move(rax, FUNCTION_ADDR(isolate()->function_entry_hook()), in Generate()
2621 __ PrepareCallCFunction(kArgumentCount); in Generate()
2622 __ CallCFunction(rax, kArgumentCount); in Generate()
2626 __ popq(arg_reg_2); in Generate()
2627 __ popq(arg_reg_1); in Generate()
2629 __ Ret(); in Generate()
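
The ProfileEntryHookStub block saves the two argument registers, reconstructs the address of the instrumented call site by subtracting the short-call instruction length from its own return address, passes that address plus the caller's stack position to the isolate's entry hook as a plain C call, and restores the registers before returning. A sketch of what the hook's C side might look like; the two-uintptr_t signature is an assumption based on the FunctionEntryHook typedef in v8.h of this era, not a verified header:

  #include <cstdint>
  #include <cstdio>

  // Assumed hook shape: (address of entered code, location of the return
  // address on the caller's stack).
  using FunctionEntryHook = void (*)(uintptr_t function,
                                     uintptr_t return_addr_location);

  static void MyEntryHook(uintptr_t function, uintptr_t return_addr_location) {
    std::printf("entered %#llx (return slot %#llx)\n",
                static_cast<unsigned long long>(function),
                static_cast<unsigned long long>(return_addr_location));
  }

  int main() {
    FunctionEntryHook hook = MyEntryHook;
    hook(0x1000, 0x2000);  // illustrative addresses only
  }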
2638 __ TailCallStub(&stub); in CreateArrayDispatch()
2645 __ cmpl(rdx, Immediate(kind)); in CreateArrayDispatch()
2646 __ j(not_equal, &next); in CreateArrayDispatch()
2648 __ TailCallStub(&stub); in CreateArrayDispatch()
2649 __ bind(&next); in CreateArrayDispatch()
2653 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatch()
2679 __ testb(rdx, Immediate(1)); in CreateArrayDispatchOneArgument()
2680 __ j(not_zero, &normal_sequence); in CreateArrayDispatchOneArgument()
2685 __ movp(rcx, args.GetArgumentOperand(0)); in CreateArrayDispatchOneArgument()
2686 __ testp(rcx, rcx); in CreateArrayDispatchOneArgument()
2687 __ j(zero, &normal_sequence); in CreateArrayDispatchOneArgument()
2696 __ TailCallStub(&stub_holey); in CreateArrayDispatchOneArgument()
2698 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
2702 __ TailCallStub(&stub); in CreateArrayDispatchOneArgument()
2706 __ incl(rdx); in CreateArrayDispatchOneArgument()
2711 __ Cmp(FieldOperand(rbx, 0), allocation_site_map); in CreateArrayDispatchOneArgument()
2712 __ Assert(equal, kExpectedAllocationSite); in CreateArrayDispatchOneArgument()
2719 __ SmiAddConstant(FieldOperand(rbx, AllocationSite::kTransitionInfoOffset), in CreateArrayDispatchOneArgument()
2722 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
2728 __ cmpl(rdx, Immediate(kind)); in CreateArrayDispatchOneArgument()
2729 __ j(not_equal, &next); in CreateArrayDispatchOneArgument()
2731 __ TailCallStub(&stub); in CreateArrayDispatchOneArgument()
2732 __ bind(&next); in CreateArrayDispatchOneArgument()
2736 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatchOneArgument()
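
CreateArrayDispatchOneArgument handles the `new Array(n)` special case: the low bit of the transition info says whether the recorded kind is already holey, and a single non-zero length argument forces a transition to the holey variant, dispatching to the holey stub and (when an AllocationSite is being tracked) bumping the site's recorded kind by one so later allocations start out holey. The increment works because holey kinds numerically follow their packed counterparts. A sketch of that encoding, with enum values assumed from the V8 sources of this period:

  #include <cassert>

  // Packed kinds are even; the holey variant is the next odd value, so
  // "make holey" is a single OR (or the stub's incl on a packed kind).
  enum ElementsKind {
    FAST_SMI_ELEMENTS = 0, FAST_HOLEY_SMI_ELEMENTS = 1,
    FAST_ELEMENTS = 2,     FAST_HOLEY_ELEMENTS = 3,
  };

  ElementsKind GetHoleyElementsKind(ElementsKind kind) {
    return static_cast<ElementsKind>(kind | 1);
  }

  int main() {
    assert(GetHoleyElementsKind(FAST_SMI_ELEMENTS) == FAST_HOLEY_SMI_ELEMENTS);
    assert(GetHoleyElementsKind(FAST_ELEMENTS) == FAST_HOLEY_ELEMENTS);
    assert(GetHoleyElementsKind(FAST_HOLEY_ELEMENTS) == FAST_HOLEY_ELEMENTS);  // idempotent
  }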
2779 __ testp(rax, rax); in GenerateDispatchToArrayStub()
2780 __ j(not_zero, &not_zero_case); in GenerateDispatchToArrayStub()
2783 __ bind(&not_zero_case); in GenerateDispatchToArrayStub()
2784 __ cmpl(rax, Immediate(1)); in GenerateDispatchToArrayStub()
2785 __ j(greater, &not_one_case); in GenerateDispatchToArrayStub()
2788 __ bind(&not_one_case); in GenerateDispatchToArrayStub()
2790 __ TailCallStub(&stub); in GenerateDispatchToArrayStub()
2807 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
2811 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); in Generate()
2812 __ CmpObjectType(rcx, MAP_TYPE, rcx); in Generate()
2813 __ Check(equal, kUnexpectedInitialMapForArrayFunction); in Generate()
2816 __ AssertUndefinedOrAllocationSite(rbx); in Generate()
2820 __ movp(rsi, FieldOperand(rdi, JSFunction::kContextOffset)); in Generate()
2823 __ cmpp(rdi, rdx); in Generate()
2824 __ j(not_equal, &subclassing); in Generate()
2829 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); in Generate()
2830 __ j(equal, &no_info); in Generate()
2833 __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset)); in Generate()
2834 __ SmiToInteger32(rdx, rdx); in Generate()
2836 __ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask)); in Generate()
2839 __ bind(&no_info); in Generate()
2843 __ bind(&subclassing); in Generate()
2845 __ movp(args.GetReceiverOperand(), rdi); in Generate()
2846 __ addp(rax, Immediate(3)); in Generate()
2847 __ PopReturnAddressTo(rcx); in Generate()
2848 __ Push(rdx); in Generate()
2849 __ Push(rbx); in Generate()
2850 __ PushReturnAddressFrom(rcx); in Generate()
2851 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate())); in Generate()
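
The subclassing exit above tail-calls Runtime::kNewArray with extra values: the constructor is stored into the receiver slot, the argument count is raised by three to cover the values passed along, and new.target plus the AllocationSite are slipped under the return address. Because this is a tail call, the return address must end up back on top so the runtime returns straight to the original caller, hence the PopReturnAddressTo / PushReturnAddressFrom pair around the pushes. A toy model of that stack shuffle, using a vector of labels as the stack:

  #include <cstdio>
  #include <vector>

  int main() {
    // Top of stack is the vector's back; values are labels, not tagged pointers.
    std::vector<const char*> stack = {"arg1", "arg0", "receiver", "ret_addr"};
    const char* ret = stack.back(); stack.pop_back();  // PopReturnAddressTo(rcx)
    stack.push_back("new.target");                     // Push(rdx)
    stack.push_back("allocation site");                // Push(rbx)
    stack.push_back(ret);                              // PushReturnAddressFrom(rcx)
    for (auto it = stack.rbegin(); it != stack.rend(); ++it)
      std::printf("%s\n", *it);  // print from top of stack down
  }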
2860 __ testp(rax, rax); in GenerateCase()
2861 __ j(not_zero, &not_zero_case); in GenerateCase()
2863 __ TailCallStub(&stub0); in GenerateCase()
2865 __ bind(&not_zero_case); in GenerateCase()
2866 __ cmpl(rax, Immediate(1)); in GenerateCase()
2867 __ j(greater, &not_one_case); in GenerateCase()
2873 __ movp(rcx, args.GetArgumentOperand(0)); in GenerateCase()
2874 __ testp(rcx, rcx); in GenerateCase()
2875 __ j(zero, &normal_sequence); in GenerateCase()
2879 __ TailCallStub(&stub1_holey); in GenerateCase()
2882 __ bind(&normal_sequence); in GenerateCase()
2884 __ TailCallStub(&stub1); in GenerateCase()
2886 __ bind(&not_one_case); in GenerateCase()
2888 __ TailCallStub(&stubN); in GenerateCase()
2905 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
2909 __ Check(not_smi, kUnexpectedInitialMapForArrayFunction); in Generate()
2910 __ CmpObjectType(rcx, MAP_TYPE, rcx); in Generate()
2911 __ Check(equal, kUnexpectedInitialMapForArrayFunction); in Generate()
2915 __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
2919 __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset)); in Generate()
2921 __ DecodeField<Map::ElementsKindBits>(rcx); in Generate()
2925 __ cmpl(rcx, Immediate(FAST_ELEMENTS)); in Generate()
2926 __ j(equal, &done); in Generate()
2927 __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS)); in Generate()
2928 __ Assert(equal, in Generate()
2930 __ bind(&done); in Generate()
2934 __ cmpl(rcx, Immediate(FAST_ELEMENTS)); in Generate()
2935 __ j(equal, &fast_elements_case); in Generate()
2938 __ bind(&fast_elements_case); in Generate()
2955 __ EnterApiExitFrame(arg_stack_space); in PrepareCallApiFunction()
2993 __ Move(base_reg, next_address); in CallApiFunctionAndReturn()
2994 __ movp(prev_next_address_reg, Operand(base_reg, kNextOffset)); in CallApiFunctionAndReturn()
2995 __ movp(prev_limit_reg, Operand(base_reg, kLimitOffset)); in CallApiFunctionAndReturn()
2996 __ addl(Operand(base_reg, kLevelOffset), Immediate(1)); in CallApiFunctionAndReturn()
3000 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
3001 __ PrepareCallCFunction(1); in CallApiFunctionAndReturn()
3002 __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate)); in CallApiFunctionAndReturn()
3003 __ CallCFunction(ExternalReference::log_enter_external_function(isolate), in CallApiFunctionAndReturn()
3005 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
3010 __ Move(rax, ExternalReference::is_profiling_address(isolate)); in CallApiFunctionAndReturn()
3011 __ cmpb(Operand(rax, 0), Immediate(0)); in CallApiFunctionAndReturn()
3012 __ j(zero, &profiler_disabled); in CallApiFunctionAndReturn()
3015 __ Move(thunk_last_arg, function_address); in CallApiFunctionAndReturn()
3016 __ Move(rax, thunk_ref); in CallApiFunctionAndReturn()
3017 __ jmp(&end_profiler_check); in CallApiFunctionAndReturn()
3019 __ bind(&profiler_disabled); in CallApiFunctionAndReturn()
3021 __ Move(rax, function_address); in CallApiFunctionAndReturn()
3023 __ bind(&end_profiler_check); in CallApiFunctionAndReturn()
3026 __ call(rax); in CallApiFunctionAndReturn()
3030 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
3031 __ PrepareCallCFunction(1); in CallApiFunctionAndReturn()
3032 __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate)); in CallApiFunctionAndReturn()
3033 __ CallCFunction(ExternalReference::log_leave_external_function(isolate), in CallApiFunctionAndReturn()
3035 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
3039 __ movp(rax, return_value_operand); in CallApiFunctionAndReturn()
3040 __ bind(&prologue); in CallApiFunctionAndReturn()
3044 __ subl(Operand(base_reg, kLevelOffset), Immediate(1)); in CallApiFunctionAndReturn()
3045 __ movp(Operand(base_reg, kNextOffset), prev_next_address_reg); in CallApiFunctionAndReturn()
3046 __ cmpp(prev_limit_reg, Operand(base_reg, kLimitOffset)); in CallApiFunctionAndReturn()
3047 __ j(not_equal, &delete_allocated_handles); in CallApiFunctionAndReturn()
3050 __ bind(&leave_exit_frame); in CallApiFunctionAndReturn()
3053 __ movp(rsi, *context_restore_operand); in CallApiFunctionAndReturn()
3056 __ movp(rbx, *stack_space_operand); in CallApiFunctionAndReturn()
3058 __ LeaveApiExitFrame(!restore_context); in CallApiFunctionAndReturn()
3061 __ Move(rdi, scheduled_exception_address); in CallApiFunctionAndReturn()
3062 __ Cmp(Operand(rdi, 0), factory->the_hole_value()); in CallApiFunctionAndReturn()
3063 __ j(not_equal, &promote_scheduled_exception); in CallApiFunctionAndReturn()
3071 __ JumpIfSmi(return_value, &ok, Label::kNear); in CallApiFunctionAndReturn()
3072 __ movp(map, FieldOperand(return_value, HeapObject::kMapOffset)); in CallApiFunctionAndReturn()
3074 __ CmpInstanceType(map, LAST_NAME_TYPE); in CallApiFunctionAndReturn()
3075 __ j(below_equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
3077 __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE); in CallApiFunctionAndReturn()
3078 __ j(above_equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
3080 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); in CallApiFunctionAndReturn()
3081 __ j(equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
3083 __ CompareRoot(return_value, Heap::kUndefinedValueRootIndex); in CallApiFunctionAndReturn()
3084 __ j(equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
3086 __ CompareRoot(return_value, Heap::kTrueValueRootIndex); in CallApiFunctionAndReturn()
3087 __ j(equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
3089 __ CompareRoot(return_value, Heap::kFalseValueRootIndex); in CallApiFunctionAndReturn()
3090 __ j(equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
3092 __ CompareRoot(return_value, Heap::kNullValueRootIndex); in CallApiFunctionAndReturn()
3093 __ j(equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
3095 __ Abort(kAPICallReturnedInvalidObject); in CallApiFunctionAndReturn()
3097 __ bind(&ok); in CallApiFunctionAndReturn()
3102 __ PopReturnAddressTo(rcx); in CallApiFunctionAndReturn()
3103 __ addq(rsp, rbx); in CallApiFunctionAndReturn()
3104 __ jmp(rcx); in CallApiFunctionAndReturn()
3106 __ ret(stack_space * kPointerSize); in CallApiFunctionAndReturn()
3110 __ bind(&promote_scheduled_exception); in CallApiFunctionAndReturn()
3111 __ TailCallRuntime(Runtime::kPromoteScheduledException); in CallApiFunctionAndReturn()
3114 __ bind(&delete_allocated_handles); in CallApiFunctionAndReturn()
3115 __ movp(Operand(base_reg, kLimitOffset), prev_limit_reg); in CallApiFunctionAndReturn()
3116 __ movp(prev_limit_reg, rax); in CallApiFunctionAndReturn()
3117 __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate)); in CallApiFunctionAndReturn()
3118 __ LoadAddress(rax, in CallApiFunctionAndReturn()
3120 __ call(rax); in CallApiFunctionAndReturn()
3121 __ movp(rax, prev_limit_reg); in CallApiFunctionAndReturn()
3122 __ jmp(&leave_exit_frame); in CallApiFunctionAndReturn()
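
CallApiFunctionAndReturn inlines HandleScope management around the API call: it saves the scope's next and limit pointers and bumps the nesting level, optionally routes the call through a profiler thunk when the is_profiling flag is set, then restores next, drops the level, and frees extension blocks if the callee grew limit (the delete_allocated_handles path, which preserves the result register across the call). It finally compares the scheduled-exception slot against the-hole before returning and, in debug builds, verifies the return value is a legal tagged value. A toy model of the scope bookkeeping; the struct is illustrative, not V8's actual HandleScopeData layout:

  #include <cstdio>

  struct HandleScopeData { void* next; void* limit; int level; };

  void CallApi(HandleScopeData& d, void (*api_fn)(HandleScopeData&)) {
    void* prev_next = d.next;    // movp(prev_next_address_reg, [kNextOffset])
    void* prev_limit = d.limit;  // movp(prev_limit_reg, [kLimitOffset])
    d.level++;                   // addl([kLevelOffset], 1)
    api_fn(d);                   // call rax (directly or via the profiler thunk)
    d.level--;                   // subl([kLevelOffset], 1)
    d.next = prev_next;          // movp([kNextOffset], prev_next_address_reg)
    if (d.limit != prev_limit) { // cmpp(prev_limit_reg, [kLimitOffset])
      d.limit = prev_limit;      // delete_allocated_handles: free extensions
      std::printf("freed handle-scope extensions\n");
    }
  }

  int main() {
    HandleScopeData data{nullptr, nullptr, 0};
    CallApi(data, [](HandleScopeData& d) { d.limit = &d; });  // callee grows limit
  }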
3159 __ PopReturnAddressTo(return_address); in Generate()
3162 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate()
3165 __ Push(context); in Generate()
3168 __ Push(callee); in Generate()
3171 __ Push(call_data); in Generate()
3174 __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); in Generate()
3177 __ Push(scratch); in Generate()
3179 __ Push(scratch); in Generate()
3181 __ Move(scratch, ExternalReference::isolate_address(masm->isolate())); in Generate()
3182 __ Push(scratch); in Generate()
3184 __ Push(holder); in Generate()
3186 __ movp(scratch, rsp); in Generate()
3188 __ PushReturnAddressFrom(return_address); in Generate()
3192 __ movp(context, FieldOperand(callee, JSFunction::kContextOffset)); in Generate()
3203 __ movp(StackSpaceOperand(0), scratch); in Generate()
3204 __ addp(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize)); in Generate()
3206 __ movp(StackSpaceOperand(1), scratch); in Generate()
3208 __ Set(StackSpaceOperand(2), argc); in Generate()
3223 __ leap(arguments_arg, StackSpaceOperand(0)); in Generate()
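
The push sequence above builds FunctionCallbackInfo's implicit-args block beneath the return address, and the three StackSpaceOperand slots then describe the explicit arguments for the C++ side: a pointer to the implicit block, the address of the first JS argument, and argc. The field order below is read directly off the listing; the labels are descriptive stand-ins, not V8's FunctionCallbackArguments index names:

  #include <cstdio>

  // Implicit-args block in push order (first pushed = highest address).
  const char* kImplicitArgs[] = {
      "new.target (undefined)",  // PushRoot(kUndefinedValueRootIndex)
      "context save",            // Push(context)
      "callee",                  // Push(callee)
      "call data",               // Push(call_data)
      "return value",            // Push(scratch), scratch = undefined
      "return value default",    // Push(scratch)
      "isolate",                 // Move + Push(scratch)
      "holder",                  // Push(holder)
  };

  int main() {
    // StackSpaceOperand(0): pointer to the block above (implicit_args)
    // StackSpaceOperand(1): address of the first argument
    // StackSpaceOperand(2): argc
    for (const char* field : kImplicitArgs) std::printf("%s\n", field);
  }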
3275 __ PopReturnAddressTo(scratch); in Generate()
3276 __ Push(receiver); in Generate()
3277 __ Push(FieldOperand(callback, AccessorInfo::kDataOffset)); in Generate()
3278 __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex); in Generate()
3279 __ Push(kScratchRegister); // return value in Generate()
3280 __ Push(kScratchRegister); // return value default in Generate()
3281 __ PushAddress(ExternalReference::isolate_address(isolate())); in Generate()
3282 __ Push(holder); in Generate()
3283 __ Push(Smi::kZero); // should_throw_on_error -> false in Generate()
3284 __ Push(FieldOperand(callback, AccessorInfo::kNameOffset)); in Generate()
3285 __ PushReturnAddressFrom(scratch); in Generate()
3294 __ leap(scratch, Operand(rsp, 2 * kPointerSize)); in Generate()
3300 __ movp(info_object, scratch); in Generate()
3302 __ leap(name_arg, Operand(scratch, -kPointerSize)); in Generate()
3305 __ leap(accessor_info_arg, info_object); in Generate()
3314 __ movp(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset)); in Generate()
3315 __ movp(api_function_address, in Generate()
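
The CallApiGetterStub sequence assembles a PropertyCallbackArguments block with eight pushes, after which name_arg is pointed at the pushed name, accessor_info_arg at the info block just above it, and the target address is loaded from the AccessorInfo's kJsGetterOffset field. The push order below is read directly off the listing; labels are descriptive, not V8's index constants:

  #include <cstdio>

  // PropertyCallbackArguments block in push order (first pushed = highest
  // address); the name is pushed last, directly below the block.
  const char* kPropertyCallbackFields[] = {
      "receiver",               // Push(receiver)
      "data",                   // Push(AccessorInfo::kDataOffset)
      "return value",           // Push(kScratchRegister), undefined
      "return value default",   // Push(kScratchRegister)
      "isolate",                // PushAddress(isolate)
      "holder",                 // Push(holder)
      "should_throw_on_error",  // Push(Smi::kZero), i.e. false
      "name",                   // Push(AccessorInfo::kNameOffset)
  };

  int main() {
    for (const char* field : kPropertyCallbackFields) std::printf("%s\n", field);
  }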
3326 #undef __