Lines Matching full:__

25 #define __ ACCESS_MASM(masm)  macro
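
The hits in this listing appear to come from one of V8's ia32 code-stub generators; the __ macro defined on line 25 is the standard MacroAssembler shorthand. ACCESS_MASM(masm) expands to masm->, so each "__ op(...)" line emits one instruction through the stub's MacroAssembler*. A minimal sketch of the convention follows; the stub name and instruction sequence are hypothetical and illustrative only, not taken from the matched file:

    #define __ ACCESS_MASM(masm)  // "__ mov(...)" expands to "masm->mov(...)"

    // Illustrative generator body (hypothetical stub, not from the file above).
    void ExampleStub::Generate(MacroAssembler* masm) {
      __ pop(ecx);    // masm->pop(ecx);  take the return address off the stack
      __ push(edi);   // masm->push(edi); push an extra argument
      __ push(ecx);   // masm->push(ecx); put the return address back
      __ ret(0);      // masm->ret(0);
    }

    #undef __         // conventionally undefined at the end of the file
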
28 __ pop(ecx); in Generate()
29 __ mov(MemOperand(esp, eax, times_4, 0), edi); in Generate()
30 __ push(edi); in Generate()
31 __ push(ebx); in Generate()
32 __ push(ecx); in Generate()
33 __ add(eax, Immediate(3)); in Generate()
34 __ TailCallRuntime(Runtime::kNewArray); in Generate()
51 __ push(descriptor.GetRegisterParameter(i)); in GenerateLightweightMiss()
53 __ CallExternalReference(miss, param_count); in GenerateLightweightMiss()
56 __ ret(0); in GenerateLightweightMiss()
64 __ pushad(); in Generate()
67 __ sub(esp, Immediate(108)); in Generate()
68 __ fnsave(Operand(esp, 0)); in Generate()
73 __ PrepareCallCFunction(argument_count, ecx); in Generate()
74 __ mov(Operand(esp, 0 * kPointerSize), in Generate()
76 __ CallCFunction( in Generate()
81 __ frstor(Operand(esp, 0)); in Generate()
82 __ add(esp, Immediate(108)); in Generate()
84 __ popad(); in Generate()
85 __ ret(0); in Generate()
142 __ push(scratch1); in Generate()
143 __ push(save_reg); in Generate()
146 __ mov(scratch1, mantissa_operand); in Generate()
147 __ mov(ecx, exponent_operand); in Generate()
148 if (stash_exponent_copy) __ push(ecx); in Generate()
150 __ and_(ecx, HeapNumber::kExponentMask); in Generate()
151 __ shr(ecx, HeapNumber::kExponentShift); in Generate()
152 __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias)); in Generate()
153 __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits)); in Generate()
154 __ j(below, &process_64_bits); in Generate()
158 __ sub(ecx, Immediate(delta)); in Generate()
159 __ xor_(result_reg, result_reg); in Generate()
160 __ cmp(ecx, Immediate(31)); in Generate()
161 __ j(above, &done); in Generate()
162 __ shl_cl(scratch1); in Generate()
163 __ jmp(&check_negative); in Generate()
165 __ bind(&process_64_bits); in Generate()
167 __ sub(ecx, Immediate(delta)); in Generate()
168 __ neg(ecx); in Generate()
170 __ mov(result_reg, MemOperand(esp, 0)); in Generate()
172 __ mov(result_reg, exponent_operand); in Generate()
174 __ and_(result_reg, in Generate()
176 __ add(result_reg, in Generate()
178 __ shrd_cl(scratch1, result_reg); in Generate()
179 __ shr_cl(result_reg); in Generate()
180 __ test(ecx, Immediate(32)); in Generate()
183 __ j(equal, &skip_mov, Label::kNear); in Generate()
184 __ mov(scratch1, result_reg); in Generate()
185 __ bind(&skip_mov); in Generate()
189 __ bind(&check_negative); in Generate()
190 __ mov(result_reg, scratch1); in Generate()
191 __ neg(result_reg); in Generate()
193 __ cmp(MemOperand(esp, 0), Immediate(0)); in Generate()
195 __ cmp(exponent_operand, Immediate(0)); in Generate()
199 __ j(less_equal, &skip_mov, Label::kNear); in Generate()
200 __ mov(result_reg, scratch1); in Generate()
201 __ bind(&skip_mov); in Generate()
205 __ bind(&done); in Generate()
207 __ add(esp, Immediate(kDoubleSize / 2)); in Generate()
209 __ bind(&done_no_stash); in Generate()
212 __ mov(final_result_reg, result_reg); in Generate()
214 __ pop(save_reg); in Generate()
215 __ pop(scratch1); in Generate()
216 __ ret(0); in Generate()
224 __ JumpIfSmi(number, &load_smi, Label::kNear); in LoadFloatOperand()
225 __ fld_d(FieldOperand(number, HeapNumber::kValueOffset)); in LoadFloatOperand()
226 __ jmp(&done, Label::kNear); in LoadFloatOperand()
228 __ bind(&load_smi); in LoadFloatOperand()
229 __ SmiUntag(number); in LoadFloatOperand()
230 __ push(number); in LoadFloatOperand()
231 __ fild_s(Operand(esp, 0)); in LoadFloatOperand()
232 __ pop(number); in LoadFloatOperand()
234 __ bind(&done); in LoadFloatOperand()
244 __ JumpIfSmi(edx, &test_other, Label::kNear); in CheckFloatOperands()
245 __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset)); in CheckFloatOperands()
247 __ cmp(scratch, factory->heap_number_map()); in CheckFloatOperands()
248 __ j(not_equal, non_float); // argument in edx is not a number -> NaN in CheckFloatOperands()
250 __ bind(&test_other); in CheckFloatOperands()
251 __ JumpIfSmi(eax, &done, Label::kNear); in CheckFloatOperands()
252 __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset)); in CheckFloatOperands()
253 __ cmp(scratch, factory->heap_number_map()); in CheckFloatOperands()
254 __ j(not_equal, non_float); // argument in eax is not a number -> NaN in CheckFloatOperands()
257 __ bind(&done); in CheckFloatOperands()
265 __ fld_d(Operand(esp, 0 * kDoubleSize + 4)); in Generate()
267 __ fld_d(Operand(esp, 1 * kDoubleSize + 4)); in Generate()
272 __ PrepareCallCFunction(4, scratch); in Generate()
274 __ fstp_d(Operand(esp, 0 * kDoubleSize)); in Generate()
276 __ fstp_d(Operand(esp, 1 * kDoubleSize)); in Generate()
277 __ CallCFunction(ExternalReference::power_double_double_function(isolate()), in Generate()
281 __ ret(0); in Generate()
289 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
312 __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size)); in Generate()
313 __ test(ebx, ebx); in Generate()
314 __ j(zero, &runtime); in Generate()
317 __ mov(eax, Operand(esp, kJSRegExpOffset)); in Generate()
319 __ JumpIfSmi(eax, &runtime); in Generate()
320 __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx); in Generate()
321 __ j(not_equal, &runtime); in Generate()
324 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset)); in Generate()
326 __ test(ecx, Immediate(kSmiTagMask)); in Generate()
327 __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected); in Generate()
328 __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx); in Generate()
329 __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected); in Generate()
334 __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset)); in Generate()
335 __ cmp(ebx, Immediate(Smi::FromInt(JSRegExp::IRREGEXP))); in Generate()
336 __ j(not_equal, &runtime); in Generate()
340 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset)); in Generate()
347 __ cmp(edx, Isolate::kJSRegexpStaticOffsetsVectorSize - 2); in Generate()
348 __ j(above, &runtime); in Generate()
351 __ Move(edi, Immediate(0)); in Generate()
352 __ mov(eax, Operand(esp, kSubjectOffset)); in Generate()
353 __ JumpIfSmi(eax, &runtime); in Generate()
354 __ mov(edx, eax); // Make a copy of the original subject string. in Generate()
381 __ bind(&check_underlying); in Generate()
383 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); in Generate()
384 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset)); in Generate()
386 __ and_(ebx, kIsNotStringMask | in Generate()
391 __ j(zero, &seq_two_byte_string); // Go to (9). in Generate()
395 __ and_(ebx, Immediate(kIsNotStringMask | in Generate()
398 __ j(zero, &seq_one_byte_string, Label::kNear); // Go to (5). in Generate()
408 __ cmp(ebx, Immediate(kExternalStringTag)); in Generate()
409 __ j(greater_equal, &not_seq_nor_cons); // Go to (6). in Generate()
413 __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string()); in Generate()
414 __ j(not_equal, &runtime); in Generate()
415 __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset)); in Generate()
416 __ jmp(&check_underlying); in Generate()
422 __ bind(&seq_one_byte_string); in Generate()
426 __ mov(ebx, Operand(esp, kPreviousIndexOffset)); in Generate()
427 __ JumpIfNotSmi(ebx, &runtime); in Generate()
428 __ cmp(ebx, FieldOperand(edx, String::kLengthOffset)); in Generate()
429 __ j(above_equal, &runtime); in Generate()
430 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataOneByteCodeOffset)); in Generate()
431 __ Move(ecx, Immediate(1)); // Type is one byte. in Generate()
434 __ bind(&check_code); in Generate()
439 __ JumpIfSmi(edx, &runtime); in Generate()
447 __ IncrementCounter(counters->regexp_entry_native(), 1); in Generate()
451 __ EnterApiExitFrame(kRegExpExecuteArguments); in Generate()
454 __ mov(Operand(esp, 8 * kPointerSize), in Generate()
458 __ mov(Operand(esp, 7 * kPointerSize), Immediate(1)); in Generate()
461 __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address)); in Generate()
462 __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size)); in Generate()
463 __ mov(Operand(esp, 6 * kPointerSize), esi); in Generate()
467 __ mov(Operand(esp, 5 * kPointerSize), Immediate(0)); in Generate()
470 __ mov(Operand(esp, 4 * kPointerSize), in Generate()
475 __ SmiUntag(ebx); in Generate()
476 __ mov(Operand(esp, 1 * kPointerSize), ebx); in Generate()
483 __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize)); in Generate()
484 __ mov(Operand(esp, 0 * kPointerSize), esi); in Generate()
495 __ mov(esi, FieldOperand(esi, String::kLengthOffset)); in Generate()
496 __ add(esi, edi); // Calculate input end wrt offset. in Generate()
497 __ SmiUntag(edi); in Generate()
498 __ add(ebx, edi); // Calculate input start wrt offset. in Generate()
503 __ test(ecx, ecx); in Generate()
504 __ j(zero, &setup_two_byte, Label::kNear); in Generate()
505 __ SmiUntag(esi); in Generate()
506 __ lea(ecx, FieldOperand(eax, esi, times_1, SeqOneByteString::kHeaderSize)); in Generate()
507 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4. in Generate()
508 __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqOneByteString::kHeaderSize)); in Generate()
509 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3. in Generate()
510 __ jmp(&setup_rest, Label::kNear); in Generate()
512 __ bind(&setup_two_byte); in Generate()
515 __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize)); in Generate()
516 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4. in Generate()
517 __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize)); in Generate()
518 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3. in Generate()
520 __ bind(&setup_rest); in Generate()
523 __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag)); in Generate()
524 __ call(edx); in Generate()
527 __ LeaveApiExitFrame(true); in Generate()
531 __ cmp(eax, 1); in Generate()
534 __ j(equal, &success); in Generate()
536 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE); in Generate()
537 __ j(equal, &failure); in Generate()
538 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION); in Generate()
540 __ j(not_equal, &runtime); in Generate()
547 __ mov(edx, Immediate(isolate()->factory()->the_hole_value())); in Generate()
548 __ mov(eax, Operand::StaticVariable(pending_exception)); in Generate()
549 __ cmp(edx, eax); in Generate()
550 __ j(equal, &runtime); in Generate()
553 __ TailCallRuntime(Runtime::kRegExpExecReThrow); in Generate()
555 __ bind(&failure); in Generate()
557 __ mov(eax, factory->null_value()); in Generate()
558 __ ret(4 * kPointerSize); in Generate()
561 __ bind(&success); in Generate()
562 __ mov(eax, Operand(esp, kJSRegExpOffset)); in Generate()
563 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset)); in Generate()
564 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset)); in Generate()
568 __ add(edx, Immediate(2)); // edx was a smi. in Generate()
572 __ mov(ebx, Operand(esp, kLastMatchInfoOffset)); in Generate()
573 __ JumpIfSmi(ebx, &runtime); in Generate()
575 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset)); in Generate()
576 __ cmp(eax, factory->fixed_array_map()); in Generate()
577 __ j(not_equal, &runtime); in Generate()
580 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset)); in Generate()
581 __ SmiUntag(eax); in Generate()
582 __ sub(eax, Immediate(RegExpMatchInfo::kLastMatchOverhead)); in Generate()
583 __ cmp(edx, eax); in Generate()
584 __ j(greater, &runtime); in Generate()
589 __ SmiTag(edx); // Number of capture registers to smi. in Generate()
590 __ mov(FieldOperand(ebx, RegExpMatchInfo::kNumberOfCapturesOffset), edx); in Generate()
591 __ SmiUntag(edx); // Number of capture registers back from smi. in Generate()
593 __ mov(eax, Operand(esp, kSubjectOffset)); in Generate()
594 __ mov(ecx, eax); in Generate()
595 __ mov(FieldOperand(ebx, RegExpMatchInfo::kLastSubjectOffset), eax); in Generate()
596 __ RecordWriteField(ebx, RegExpMatchInfo::kLastSubjectOffset, eax, edi, in Generate()
598 __ mov(eax, ecx); in Generate()
599 __ mov(FieldOperand(ebx, RegExpMatchInfo::kLastInputOffset), eax); in Generate()
600 __ RecordWriteField(ebx, RegExpMatchInfo::kLastInputOffset, eax, edi, in Generate()
606 __ mov(ecx, Immediate(address_of_static_offsets_vector)); in Generate()
614 __ bind(&next_capture); in Generate()
615 __ sub(edx, Immediate(1)); in Generate()
616 __ j(negative, &done, Label::kNear); in Generate()
618 __ mov(edi, Operand(ecx, edx, times_int_size, 0)); in Generate()
619 __ SmiTag(edi); in Generate()
621 __ mov(FieldOperand(ebx, edx, times_pointer_size, in Generate()
624 __ jmp(&next_capture); in Generate()
625 __ bind(&done); in Generate()
628 __ mov(eax, ebx); in Generate()
629 __ ret(4 * kPointerSize); in Generate()
632 __ bind(&runtime); in Generate()
633 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
637 __ bind(&not_seq_nor_cons); in Generate()
639 __ j(greater, &not_long_external, Label::kNear); // Go to (10). in Generate()
642 __ bind(&external_string); in Generate()
644 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); in Generate()
645 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset)); in Generate()
649 __ test_b(ebx, Immediate(kIsIndirectStringMask)); in Generate()
650 __ Assert(zero, kExternalStringExpectedButNotFound); in Generate()
652 __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset)); in Generate()
655 __ sub(eax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); in Generate()
658 __ test_b(ebx, Immediate(kStringEncodingMask)); in Generate()
659 __ j(not_zero, &seq_one_byte_string); // Go to (5). in Generate()
665 __ bind(&seq_two_byte_string); in Generate()
669 __ mov(ebx, Operand(esp, kPreviousIndexOffset)); in Generate()
670 __ JumpIfNotSmi(ebx, &runtime); in Generate()
671 __ cmp(ebx, FieldOperand(edx, String::kLengthOffset)); in Generate()
672 __ j(above_equal, &runtime); in Generate()
673 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset)); in Generate()
674 __ Move(ecx, Immediate(0)); // Type is two byte. in Generate()
675 __ jmp(&check_code); // Go to (E). in Generate()
678 __ bind(&not_long_external); in Generate()
681 __ test(ebx, Immediate(kIsNotStringMask | kShortExternalStringTag)); in Generate()
682 __ j(not_zero, &runtime); in Generate()
686 __ cmp(ebx, Immediate(kThinStringTag)); in Generate()
687 __ j(equal, &thin_string, Label::kNear); in Generate()
689 __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset)); in Generate()
690 __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset)); in Generate()
691 __ jmp(&check_underlying); // Go to (1). in Generate()
693 __ bind(&thin_string); in Generate()
694 __ mov(eax, FieldOperand(eax, ThinString::kActualOffset)); in Generate()
695 __ jmp(&check_underlying); // Go to (1). in Generate()
712 __ JumpIfNotSmi(input, fail); in CheckInputType()
714 __ JumpIfSmi(input, &ok); in CheckInputType()
715 __ cmp(FieldOperand(input, HeapObject::kMapOffset), in CheckInputType()
717 __ j(not_equal, fail); in CheckInputType()
721 __ bind(&ok); in CheckInputType()
729 __ JumpIfSmi(object, label); in BranchIfNotInternalizedString()
730 __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset)); in BranchIfNotInternalizedString()
731 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset)); in BranchIfNotInternalizedString()
733 __ test(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); in BranchIfNotInternalizedString()
734 __ j(not_zero, label); in BranchIfNotInternalizedString()
748 __ mov(ecx, edx); in GenerateGeneric()
749 __ or_(ecx, eax); in GenerateGeneric()
750 __ JumpIfNotSmi(ecx, &non_smi, Label::kNear); in GenerateGeneric()
751 __ sub(edx, eax); // Return on the result of the subtraction. in GenerateGeneric()
752 __ j(no_overflow, &smi_done, Label::kNear); in GenerateGeneric()
753 __ not_(edx); // Correct sign in case of overflow. edx is never 0 here. in GenerateGeneric()
754 __ bind(&smi_done); in GenerateGeneric()
755 __ mov(eax, edx); in GenerateGeneric()
756 __ ret(0); in GenerateGeneric()
757 __ bind(&non_smi); in GenerateGeneric()
767 __ cmp(eax, edx); in GenerateGeneric()
768 __ j(not_equal, &not_identical); in GenerateGeneric()
773 __ cmp(edx, isolate()->factory()->undefined_value()); in GenerateGeneric()
775 __ j(not_equal, &check_for_nan, Label::kNear); in GenerateGeneric()
776 __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc)))); in GenerateGeneric()
777 __ ret(0); in GenerateGeneric()
778 __ bind(&check_for_nan); in GenerateGeneric()
783 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), in GenerateGeneric()
785 __ j(equal, &generic_heap_number_comparison, Label::kNear); in GenerateGeneric()
787 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset)); in GenerateGeneric()
788 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset)); in GenerateGeneric()
790 __ cmpb(ecx, Immediate(FIRST_JS_RECEIVER_TYPE)); in GenerateGeneric()
791 __ j(above_equal, &runtime_call, Label::kFar); in GenerateGeneric()
793 __ cmpb(ecx, Immediate(SYMBOL_TYPE)); in GenerateGeneric()
794 __ j(equal, &runtime_call, Label::kFar); in GenerateGeneric()
796 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); in GenerateGeneric()
797 __ ret(0); in GenerateGeneric()
800 __ bind(&not_identical); in GenerateGeneric()
816 __ mov(ecx, Immediate(kSmiTagMask)); in GenerateGeneric()
817 __ and_(ecx, eax); in GenerateGeneric()
818 __ test(ecx, edx); in GenerateGeneric()
819 __ j(not_zero, &not_smis, Label::kNear); in GenerateGeneric()
825 __ sub(ecx, Immediate(0x01)); in GenerateGeneric()
826 __ mov(ebx, edx); in GenerateGeneric()
827 __ xor_(ebx, eax); in GenerateGeneric()
828 __ and_(ebx, ecx); // ebx holds either 0 or eax ^ edx. in GenerateGeneric()
829 __ xor_(ebx, eax); in GenerateGeneric()
833 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), in GenerateGeneric()
836 __ j(equal, &slow, Label::kNear); in GenerateGeneric()
838 __ mov(eax, ebx); in GenerateGeneric()
839 __ ret(0); in GenerateGeneric()
841 __ bind(&not_smis); in GenerateGeneric()
850 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx); in GenerateGeneric()
851 __ j(below, &first_non_object, Label::kNear); in GenerateGeneric()
856 __ bind(&return_not_equal); in GenerateGeneric()
857 __ ret(0); in GenerateGeneric()
859 __ bind(&first_non_object); in GenerateGeneric()
861 __ CmpInstanceType(ecx, ODDBALL_TYPE); in GenerateGeneric()
862 __ j(equal, &return_not_equal); in GenerateGeneric()
864 __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx); in GenerateGeneric()
865 __ j(above_equal, &return_not_equal); in GenerateGeneric()
868 __ CmpInstanceType(ecx, ODDBALL_TYPE); in GenerateGeneric()
869 __ j(equal, &return_not_equal); in GenerateGeneric()
872 __ bind(&slow); in GenerateGeneric()
878 __ bind(&generic_heap_number_comparison); in GenerateGeneric()
883 __ FCmp(); in GenerateGeneric()
886 __ j(parity_even, &unordered, Label::kNear); in GenerateGeneric()
890 __ j(below, &below_label, Label::kNear); in GenerateGeneric()
891 __ j(above, &above_label, Label::kNear); in GenerateGeneric()
893 __ Move(eax, Immediate(0)); in GenerateGeneric()
894 __ ret(0); in GenerateGeneric()
896 __ bind(&below_label); in GenerateGeneric()
897 __ mov(eax, Immediate(Smi::FromInt(-1))); in GenerateGeneric()
898 __ ret(0); in GenerateGeneric()
900 __ bind(&above_label); in GenerateGeneric()
901 __ mov(eax, Immediate(Smi::FromInt(1))); in GenerateGeneric()
902 __ ret(0); in GenerateGeneric()
906 __ bind(&unordered); in GenerateGeneric()
909 __ mov(eax, Immediate(Smi::FromInt(1))); in GenerateGeneric()
911 __ mov(eax, Immediate(Smi::FromInt(-1))); in GenerateGeneric()
913 __ ret(0); in GenerateGeneric()
916 __ bind(&non_number_comparison); in GenerateGeneric()
927 __ ret(0); in GenerateGeneric()
930 __ bind(&check_for_strings); in GenerateGeneric()
932 __ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx, in GenerateGeneric()
943 __ Abort(kUnexpectedFallThroughFromStringComparison); in GenerateGeneric()
946 __ bind(&check_unequal_objects); in GenerateGeneric()
957 __ lea(ecx, Operand(eax, edx, times_1, 0)); in GenerateGeneric()
958 __ test(ecx, Immediate(kSmiTagMask)); in GenerateGeneric()
959 __ j(not_zero, &runtime_call); in GenerateGeneric()
961 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset)); in GenerateGeneric()
962 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset)); in GenerateGeneric()
964 __ test_b(FieldOperand(ebx, Map::kBitFieldOffset), in GenerateGeneric()
966 __ j(not_zero, &undetectable, Label::kNear); in GenerateGeneric()
967 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset), in GenerateGeneric()
969 __ j(not_zero, &return_unequal, Label::kNear); in GenerateGeneric()
971 __ CmpInstanceType(ebx, FIRST_JS_RECEIVER_TYPE); in GenerateGeneric()
972 __ j(below, &runtime_call, Label::kNear); in GenerateGeneric()
973 __ CmpInstanceType(ecx, FIRST_JS_RECEIVER_TYPE); in GenerateGeneric()
974 __ j(below, &runtime_call, Label::kNear); in GenerateGeneric()
976 __ bind(&return_unequal); in GenerateGeneric()
978 __ ret(0); // eax, edx were pushed in GenerateGeneric()
980 __ bind(&undetectable); in GenerateGeneric()
981 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset), in GenerateGeneric()
983 __ j(zero, &return_unequal, Label::kNear); in GenerateGeneric()
988 __ CmpInstanceType(ebx, ODDBALL_TYPE); in GenerateGeneric()
989 __ j(zero, &return_equal, Label::kNear); in GenerateGeneric()
990 __ CmpInstanceType(ecx, ODDBALL_TYPE); in GenerateGeneric()
991 __ j(not_zero, &return_unequal, Label::kNear); in GenerateGeneric()
993 __ bind(&return_equal); in GenerateGeneric()
994 __ Move(eax, Immediate(EQUAL)); in GenerateGeneric()
995 __ ret(0); // eax, edx were pushed in GenerateGeneric()
997 __ bind(&runtime_call); in GenerateGeneric()
1002 __ Push(esi); in GenerateGeneric()
1003 __ Call(strict() ? isolate()->builtins()->StrictEqual() in GenerateGeneric()
1006 __ Pop(esi); in GenerateGeneric()
1010 __ sub(eax, Immediate(isolate()->factory()->true_value())); in GenerateGeneric()
1011 __ Ret(); in GenerateGeneric()
1014 __ pop(ecx); in GenerateGeneric()
1015 __ push(edx); in GenerateGeneric()
1016 __ push(eax); in GenerateGeneric()
1017 __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc)))); in GenerateGeneric()
1020 __ push(ecx); in GenerateGeneric()
1023 __ TailCallRuntime(Runtime::kCompare); in GenerateGeneric()
1026 __ bind(&miss); in GenerateGeneric()
1041 __ SmiTag(eax); in CallStubInRecordCallTarget()
1042 __ push(eax); in CallStubInRecordCallTarget()
1043 __ push(edi); in CallStubInRecordCallTarget()
1044 __ push(edx); in CallStubInRecordCallTarget()
1045 __ push(ebx); in CallStubInRecordCallTarget()
1046 __ push(esi); in CallStubInRecordCallTarget()
1048 __ CallStub(stub); in CallStubInRecordCallTarget()
1050 __ pop(esi); in CallStubInRecordCallTarget()
1051 __ pop(ebx); in CallStubInRecordCallTarget()
1052 __ pop(edx); in CallStubInRecordCallTarget()
1053 __ pop(edi); in CallStubInRecordCallTarget()
1054 __ pop(eax); in CallStubInRecordCallTarget()
1055 __ SmiUntag(eax); in CallStubInRecordCallTarget()
1072 __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size, in GenerateRecordCallTarget()
1080 __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset)); in GenerateRecordCallTarget()
1081 __ j(equal, &done, Label::kFar); in GenerateRecordCallTarget()
1082 __ CompareRoot(ecx, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1083 __ j(equal, &done, Label::kFar); in GenerateRecordCallTarget()
1084 __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset), in GenerateRecordCallTarget()
1086 __ j(not_equal, &check_allocation_site); in GenerateRecordCallTarget()
1089 __ JumpIfSmi(FieldOperand(ecx, WeakCell::kValueOffset), &initialize); in GenerateRecordCallTarget()
1090 __ jmp(&megamorphic); in GenerateRecordCallTarget()
1092 __ bind(&check_allocation_site); in GenerateRecordCallTarget()
1097 __ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex); in GenerateRecordCallTarget()
1098 __ j(not_equal, &miss); in GenerateRecordCallTarget()
1101 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx); in GenerateRecordCallTarget()
1102 __ cmp(edi, ecx); in GenerateRecordCallTarget()
1103 __ j(not_equal, &megamorphic); in GenerateRecordCallTarget()
1104 __ jmp(&done, Label::kFar); in GenerateRecordCallTarget()
1106 __ bind(&miss); in GenerateRecordCallTarget()
1110 __ CompareRoot(ecx, Heap::kuninitialized_symbolRootIndex); in GenerateRecordCallTarget()
1111 __ j(equal, &initialize); in GenerateRecordCallTarget()
1114 __ bind(&megamorphic); in GenerateRecordCallTarget()
1115 __ mov( in GenerateRecordCallTarget()
1118 __ jmp(&done, Label::kFar); in GenerateRecordCallTarget()
1122 __ bind(&initialize); in GenerateRecordCallTarget()
1124 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx); in GenerateRecordCallTarget()
1125 __ cmp(edi, ecx); in GenerateRecordCallTarget()
1126 __ j(not_equal, &not_array_function); in GenerateRecordCallTarget()
1133 __ jmp(&done); in GenerateRecordCallTarget()
1135 __ bind(&not_array_function); in GenerateRecordCallTarget()
1139 __ bind(&done); in GenerateRecordCallTarget()
1141 __ add(FieldOperand(ebx, edx, times_half_pointer_size, in GenerateRecordCallTarget()
1155 __ JumpIfSmi(edi, &non_function); in Generate()
1157 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); in Generate()
1158 __ j(not_equal, &non_function); in Generate()
1164 __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size, in Generate()
1167 __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map)); in Generate()
1168 __ j(equal, &feedback_register_initialized); in Generate()
1169 __ mov(ebx, isolate()->factory()->undefined_value()); in Generate()
1170 __ bind(&feedback_register_initialized); in Generate()
1172 __ AssertUndefinedOrAllocationSite(ebx); in Generate()
1175 __ mov(edx, edi); in Generate()
1179 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); in Generate()
1180 __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset)); in Generate()
1181 __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize)); in Generate()
1182 __ jmp(ecx); in Generate()
1184 __ bind(&non_function); in Generate()
1185 __ mov(edx, edi); in Generate()
1186 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate()
1191 __ add(FieldOperand(feedback_vector, slot, times_half_pointer_size, in IncrementCallCount()
1201 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx); in HandleArrayCase()
1202 __ cmp(edi, ecx); in HandleArrayCase()
1203 __ j(not_equal, miss); in HandleArrayCase()
1206 __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size, in HandleArrayCase()
1212 __ mov(ebx, ecx); in HandleArrayCase()
1213 __ mov(edx, edi); in HandleArrayCase()
1215 __ TailCallStub(&stub); in HandleArrayCase()
1230 __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size, in Generate()
1247 __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset)); in Generate()
1248 __ j(not_equal, &extra_checks_or_miss); in Generate()
1252 __ JumpIfSmi(edi, &extra_checks_or_miss); in Generate()
1254 __ bind(&call_function); in Generate()
1259 __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(), in Generate()
1263 __ bind(&extra_checks_or_miss); in Generate()
1266 __ cmp(ecx, Immediate(FeedbackVector::MegamorphicSentinel(isolate))); in Generate()
1267 __ j(equal, &call); in Generate()
1270 __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset), in Generate()
1272 __ j(not_equal, &not_allocation_site); in Generate()
1277 __ bind(&not_allocation_site); in Generate()
1282 __ jmp(&miss); in Generate()
1285 __ cmp(ecx, Immediate(FeedbackVector::UninitializedSentinel(isolate))); in Generate()
1286 __ j(equal, &uninitialized); in Generate()
1290 __ AssertNotSmi(ecx); in Generate()
1291 __ CmpObjectType(ecx, JS_FUNCTION_TYPE, ecx); in Generate()
1292 __ j(not_equal, &miss); in Generate()
1293 __ mov( in Generate()
1297 __ bind(&call); in Generate()
1302 __ bind(&call_count_incremented); in Generate()
1304 __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()), in Generate()
1307 __ bind(&uninitialized); in Generate()
1310 __ JumpIfSmi(edi, &miss); in Generate()
1313 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); in Generate()
1314 __ j(not_equal, &miss); in Generate()
1318 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx); in Generate()
1319 __ cmp(edi, ecx); in Generate()
1320 __ j(equal, &miss); in Generate()
1323 __ mov(ecx, FieldOperand(edi, JSFunction::kContextOffset)); in Generate()
1324 __ mov(ecx, ContextOperand(ecx, Context::NATIVE_CONTEXT_INDEX)); in Generate()
1325 __ cmp(ecx, NativeContextOperand()); in Generate()
1326 __ j(not_equal, &miss); in Generate()
1336 __ SmiTag(eax); in Generate()
1337 __ push(eax); in Generate()
1338 __ push(ebx); in Generate()
1339 __ push(edx); in Generate()
1340 __ push(edi); in Generate()
1341 __ push(esi); in Generate()
1342 __ CallStub(&create_stub); in Generate()
1343 __ pop(esi); in Generate()
1344 __ pop(edi); in Generate()
1345 __ pop(edx); in Generate()
1346 __ pop(ebx); in Generate()
1347 __ pop(eax); in Generate()
1348 __ SmiUntag(eax); in Generate()
1351 __ jmp(&call_function); in Generate()
1355 __ bind(&miss); in Generate()
1358 __ jmp(&call_count_incremented); in Generate()
1361 __ int3(); in Generate()
1369 __ SmiTag(eax); in GenerateMiss()
1370 __ push(eax); in GenerateMiss()
1373 __ push(edi); in GenerateMiss()
1374 __ push(ebx); in GenerateMiss()
1375 __ push(edx); in GenerateMiss()
1378 __ CallRuntime(Runtime::kCallIC_Miss); in GenerateMiss()
1381 __ mov(edi, eax); in GenerateMiss()
1384 __ pop(eax); in GenerateMiss()
1385 __ SmiUntag(eax); in GenerateMiss()
1448 __ EnterApiExitFrame(arg_stack_space); in Generate()
1451 __ mov(esi, ecx); in Generate()
1452 __ mov(edi, eax); in Generate()
1454 __ EnterExitFrame( in Generate()
1469 __ CheckStackAlignment(); in Generate()
1473 __ mov(Operand(esp, 0 * kPointerSize), edi); // argc. in Generate()
1474 __ mov(Operand(esp, 1 * kPointerSize), esi); // argv. in Generate()
1475 __ mov(Operand(esp, 2 * kPointerSize), in Generate()
1480 __ lea(eax, Operand(esp, 4 * kPointerSize)); in Generate()
1481 __ mov(Operand(esp, 0 * kPointerSize), eax); in Generate()
1482 __ mov(Operand(esp, 1 * kPointerSize), edi); // argc. in Generate()
1483 __ mov(Operand(esp, 2 * kPointerSize), esi); // argv. in Generate()
1484 __ mov(Operand(esp, 3 * kPointerSize), in Generate()
1487 __ call(ebx); in Generate()
1493 __ sub(esp, Immediate(kPointerSize)); in Generate()
1496 __ mov(kReturnRegister0, Operand(esp, 4 * kPointerSize)); in Generate()
1497 __ mov(kReturnRegister1, Operand(esp, 5 * kPointerSize)); in Generate()
1498 __ mov(kReturnRegister2, Operand(esp, 6 * kPointerSize)); in Generate()
1504 __ cmp(eax, isolate()->factory()->exception()); in Generate()
1505 __ j(equal, &exception_returned); in Generate()
1510 __ push(edx); in Generate()
1511 __ mov(edx, Immediate(isolate()->factory()->the_hole_value())); in Generate()
1515 __ cmp(edx, Operand::StaticVariable(pending_exception_address)); in Generate()
1517 __ j(equal, &okay, Label::kNear); in Generate()
1518 __ int3(); in Generate()
1519 __ bind(&okay); in Generate()
1520 __ pop(edx); in Generate()
1524 __ LeaveExitFrame(save_doubles(), !argv_in_register()); in Generate()
1525 __ ret(0); in Generate()
1528 __ bind(&exception_returned); in Generate()
1547 __ PrepareCallCFunction(3, eax); in Generate()
1548 __ mov(Operand(esp, 0 * kPointerSize), Immediate(0)); // argc. in Generate()
1549 __ mov(Operand(esp, 1 * kPointerSize), Immediate(0)); // argv. in Generate()
1550 __ mov(Operand(esp, 2 * kPointerSize), in Generate()
1552 __ CallCFunction(find_handler, 3); in Generate()
1556 __ mov(esi, Operand::StaticVariable(pending_handler_context_address)); in Generate()
1557 __ mov(esp, Operand::StaticVariable(pending_handler_sp_address)); in Generate()
1558 __ mov(ebp, Operand::StaticVariable(pending_handler_fp_address)); in Generate()
1563 __ test(esi, esi); in Generate()
1564 __ j(zero, &skip, Label::kNear); in Generate()
1565 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi); in Generate()
1566 __ bind(&skip); in Generate()
1569 __ mov(edi, Operand::StaticVariable(pending_handler_code_address)); in Generate()
1570 __ mov(edx, Operand::StaticVariable(pending_handler_offset_address)); in Generate()
1573 __ push(eax); in Generate()
1574 __ mov(eax, Operand(edi, Code::kKindSpecificFlags1Offset - kHeapObjectTag)); in Generate()
1575 __ and_(eax, Immediate(1 << Code::kIsTurbofannedBit)); in Generate()
1576 __ j(zero, &not_turbo); in Generate()
1577 __ fninit(); in Generate()
1578 __ fld1(); in Generate()
1579 __ bind(&not_turbo); in Generate()
1580 __ pop(eax); in Generate()
1581 __ lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize)); in Generate()
1582 __ jmp(edi); in Generate()
1593 __ push(ebp); in Generate()
1594 __ mov(ebp, esp); in Generate()
1598 __ push(Immediate(Smi::FromInt(marker))); // marker in Generate()
1600 __ push(Operand::StaticVariable(context_address)); // context in Generate()
1602 __ push(edi); in Generate()
1603 __ push(esi); in Generate()
1604 __ push(ebx); in Generate()
1608 __ push(Operand::StaticVariable(c_entry_fp)); in Generate()
1612 __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0)); in Generate()
1613 __ j(not_equal, &not_outermost_js, Label::kNear); in Generate()
1614 __ mov(Operand::StaticVariable(js_entry_sp), ebp); in Generate()
1615 __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); in Generate()
1616 __ jmp(&invoke, Label::kNear); in Generate()
1617 __ bind(&not_outermost_js); in Generate()
1618 __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME))); in Generate()
1622 __ jmp(&invoke); in Generate()
1623 __ bind(&handler_entry); in Generate()
1629 __ mov(Operand::StaticVariable(pending_exception), eax); in Generate()
1630 __ mov(eax, Immediate(isolate()->factory()->exception())); in Generate()
1631 __ jmp(&exit); in Generate()
1634 __ bind(&invoke); in Generate()
1635 __ PushStackHandler(); in Generate()
1638 __ push(Immediate(0)); // receiver in Generate()
1647 __ mov(edx, Immediate(construct_entry)); in Generate()
1650 __ mov(edx, Immediate(entry)); in Generate()
1652 __ mov(edx, Operand(edx, 0)); // deref address in Generate()
1653 __ lea(edx, FieldOperand(edx, Code::kHeaderSize)); in Generate()
1654 __ call(edx); in Generate()
1657 __ PopStackHandler(); in Generate()
1659 __ bind(&exit); in Generate()
1661 __ pop(ebx); in Generate()
1662 __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME))); in Generate()
1663 __ j(not_equal, &not_outermost_js_2); in Generate()
1664 __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0)); in Generate()
1665 __ bind(&not_outermost_js_2); in Generate()
1668 __ pop(Operand::StaticVariable(ExternalReference( in Generate()
1672 __ pop(ebx); in Generate()
1673 __ pop(esi); in Generate()
1674 __ pop(edi); in Generate()
1675 __ add(esp, Immediate(2 * kPointerSize)); // remove markers in Generate()
1678 __ pop(ebp); in Generate()
1679 __ ret(0); in Generate()
1689 __ JumpIfSmi(object_, receiver_not_string_); in GenerateFast()
1692 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset)); in GenerateFast()
1693 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); in GenerateFast()
1695 __ test(result_, Immediate(kIsNotStringMask)); in GenerateFast()
1696 __ j(not_zero, receiver_not_string_); in GenerateFast()
1700 __ JumpIfNotSmi(index_, &index_not_smi_); in GenerateFast()
1701 __ bind(&got_smi_index_); in GenerateFast()
1704 __ cmp(index_, FieldOperand(object_, String::kLengthOffset)); in GenerateFast()
1705 __ j(above_equal, index_out_of_range_); in GenerateFast()
1707 __ SmiUntag(index_); in GenerateFast()
1713 __ SmiTag(result_); in GenerateFast()
1714 __ bind(&exit_); in GenerateFast()
1721 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); in GenerateSlow()
1724 __ bind(&index_not_smi_); in GenerateSlow()
1726 __ CheckMap(index_, in GenerateSlow()
1732 __ push(LoadWithVectorDescriptor::VectorRegister()); in GenerateSlow()
1733 __ push(LoadDescriptor::SlotRegister()); in GenerateSlow()
1735 __ push(object_); in GenerateSlow()
1736 __ push(index_); // Consumed by runtime conversion function. in GenerateSlow()
1737 __ CallRuntime(Runtime::kNumberToSmi); in GenerateSlow()
1741 __ mov(index_, eax); in GenerateSlow()
1743 __ pop(object_); in GenerateSlow()
1745 __ pop(LoadDescriptor::SlotRegister()); in GenerateSlow()
1746 __ pop(LoadWithVectorDescriptor::VectorRegister()); in GenerateSlow()
1749 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset)); in GenerateSlow()
1750 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); in GenerateSlow()
1754 __ JumpIfNotSmi(index_, index_out_of_range_); in GenerateSlow()
1756 __ jmp(&got_smi_index_); in GenerateSlow()
1761 __ bind(&call_runtime_); in GenerateSlow()
1763 __ push(object_); in GenerateSlow()
1764 __ SmiTag(index_); in GenerateSlow()
1765 __ push(index_); in GenerateSlow()
1766 __ CallRuntime(Runtime::kStringCharCodeAtRT); in GenerateSlow()
1768 __ mov(result_, eax); in GenerateSlow()
1771 __ jmp(&exit_); in GenerateSlow()
1773 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); in GenerateSlow()
1785 __ mov(length, FieldOperand(left, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
1786 __ cmp(length, FieldOperand(right, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
1787 __ j(equal, &check_zero_length, Label::kNear); in GenerateFlatOneByteStringEquals()
1788 __ bind(&strings_not_equal); in GenerateFlatOneByteStringEquals()
1789 __ Move(eax, Immediate(Smi::FromInt(NOT_EQUAL))); in GenerateFlatOneByteStringEquals()
1790 __ ret(0); in GenerateFlatOneByteStringEquals()
1794 __ bind(&check_zero_length); in GenerateFlatOneByteStringEquals()
1796 __ test(length, length); in GenerateFlatOneByteStringEquals()
1797 __ j(not_zero, &compare_chars, Label::kNear); in GenerateFlatOneByteStringEquals()
1798 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); in GenerateFlatOneByteStringEquals()
1799 __ ret(0); in GenerateFlatOneByteStringEquals()
1802 __ bind(&compare_chars); in GenerateFlatOneByteStringEquals()
1807 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); in GenerateFlatOneByteStringEquals()
1808 __ ret(0); in GenerateFlatOneByteStringEquals()
1816 __ IncrementCounter(counters->string_compare_native(), 1); in GenerateCompareFlatOneByteStrings()
1820 __ mov(scratch1, FieldOperand(left, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
1821 __ mov(scratch3, scratch1); in GenerateCompareFlatOneByteStrings()
1822 __ sub(scratch3, FieldOperand(right, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
1826 __ j(less_equal, &left_shorter, Label::kNear); in GenerateCompareFlatOneByteStrings()
1828 __ sub(scratch1, length_delta); in GenerateCompareFlatOneByteStrings()
1829 __ bind(&left_shorter); in GenerateCompareFlatOneByteStrings()
1835 __ test(min_length, min_length); in GenerateCompareFlatOneByteStrings()
1836 __ j(zero, &compare_lengths, Label::kNear); in GenerateCompareFlatOneByteStrings()
1844 __ bind(&compare_lengths); in GenerateCompareFlatOneByteStrings()
1845 __ test(length_delta, length_delta); in GenerateCompareFlatOneByteStrings()
1847 __ j(not_zero, &length_not_equal, Label::kNear); in GenerateCompareFlatOneByteStrings()
1852 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); in GenerateCompareFlatOneByteStrings()
1853 __ ret(0); in GenerateCompareFlatOneByteStrings()
1857 __ bind(&length_not_equal); in GenerateCompareFlatOneByteStrings()
1858 __ j(greater, &result_greater, Label::kNear); in GenerateCompareFlatOneByteStrings()
1859 __ jmp(&result_less, Label::kNear); in GenerateCompareFlatOneByteStrings()
1860 __ bind(&result_not_equal); in GenerateCompareFlatOneByteStrings()
1861 __ j(above, &result_greater, Label::kNear); in GenerateCompareFlatOneByteStrings()
1862 __ bind(&result_less); in GenerateCompareFlatOneByteStrings()
1865 __ Move(eax, Immediate(Smi::FromInt(LESS))); in GenerateCompareFlatOneByteStrings()
1866 __ ret(0); in GenerateCompareFlatOneByteStrings()
1869 __ bind(&result_greater); in GenerateCompareFlatOneByteStrings()
1870 __ Move(eax, Immediate(Smi::FromInt(GREATER))); in GenerateCompareFlatOneByteStrings()
1871 __ ret(0); in GenerateCompareFlatOneByteStrings()
1882 __ SmiUntag(length); in GenerateOneByteCharsCompareLoop()
1883 __ lea(left, in GenerateOneByteCharsCompareLoop()
1885 __ lea(right, in GenerateOneByteCharsCompareLoop()
1887 __ neg(length); in GenerateOneByteCharsCompareLoop()
1892 __ bind(&loop); in GenerateOneByteCharsCompareLoop()
1893 __ mov_b(scratch, Operand(left, index, times_1, 0)); in GenerateOneByteCharsCompareLoop()
1894 __ cmpb(scratch, Operand(right, index, times_1, 0)); in GenerateOneByteCharsCompareLoop()
1895 __ j(not_equal, chars_not_equal, chars_not_equal_near); in GenerateOneByteCharsCompareLoop()
1896 __ inc(index); in GenerateOneByteCharsCompareLoop()
1897 __ j(not_zero, &loop); in GenerateOneByteCharsCompareLoop()
1911 __ mov(ecx, isolate()->factory()->undefined_value()); in Generate()
1915 __ test(ecx, Immediate(kSmiTagMask)); in Generate()
1916 __ Assert(not_equal, kExpectedAllocationSite); in Generate()
1917 __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), in Generate()
1919 __ Assert(equal, kExpectedAllocationSite); in Generate()
1925 __ TailCallStub(&stub); in Generate()
1935 __ JumpIfSmi(edx, &miss, miss_distance); in GenerateBooleans()
1936 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset)); in GenerateBooleans()
1937 __ JumpIfSmi(eax, &miss, miss_distance); in GenerateBooleans()
1938 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); in GenerateBooleans()
1939 __ JumpIfNotRoot(ecx, Heap::kBooleanMapRootIndex, &miss, miss_distance); in GenerateBooleans()
1940 __ JumpIfNotRoot(ebx, Heap::kBooleanMapRootIndex, &miss, miss_distance); in GenerateBooleans()
1942 __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset)); in GenerateBooleans()
1943 __ AssertSmi(eax); in GenerateBooleans()
1944 __ mov(edx, FieldOperand(edx, Oddball::kToNumberOffset)); in GenerateBooleans()
1945 __ AssertSmi(edx); in GenerateBooleans()
1946 __ xchg(eax, edx); in GenerateBooleans()
1948 __ sub(eax, edx); in GenerateBooleans()
1949 __ Ret(); in GenerateBooleans()
1951 __ bind(&miss); in GenerateBooleans()
1959 __ mov(ecx, edx); in GenerateSmis()
1960 __ or_(ecx, eax); in GenerateSmis()
1961 __ JumpIfNotSmi(ecx, &miss, Label::kNear); in GenerateSmis()
1965 __ sub(eax, edx); in GenerateSmis()
1968 __ sub(edx, eax); in GenerateSmis()
1969 __ j(no_overflow, &done, Label::kNear); in GenerateSmis()
1971 __ not_(edx); in GenerateSmis()
1972 __ bind(&done); in GenerateSmis()
1973 __ mov(eax, edx); in GenerateSmis()
1975 __ ret(0); in GenerateSmis()
1977 __ bind(&miss); in GenerateSmis()
1990 __ JumpIfNotSmi(edx, &miss); in GenerateNumbers()
1993 __ JumpIfNotSmi(eax, &miss); in GenerateNumbers()
1998 __ JumpIfSmi(eax, &check_left, Label::kNear); in GenerateNumbers()
1999 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), in GenerateNumbers()
2001 __ j(not_equal, &maybe_undefined1, Label::kNear); in GenerateNumbers()
2003 __ bind(&check_left); in GenerateNumbers()
2004 __ JumpIfSmi(edx, &generic_stub, Label::kNear); in GenerateNumbers()
2005 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), in GenerateNumbers()
2007 __ j(not_equal, &maybe_undefined2, Label::kNear); in GenerateNumbers()
2009 __ bind(&unordered); in GenerateNumbers()
2010 __ bind(&generic_stub); in GenerateNumbers()
2013 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); in GenerateNumbers()
2015 __ bind(&maybe_undefined1); in GenerateNumbers()
2017 __ cmp(eax, Immediate(isolate()->factory()->undefined_value())); in GenerateNumbers()
2018 __ j(not_equal, &miss); in GenerateNumbers()
2019 __ JumpIfSmi(edx, &unordered); in GenerateNumbers()
2020 __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx); in GenerateNumbers()
2021 __ j(not_equal, &maybe_undefined2, Label::kNear); in GenerateNumbers()
2022 __ jmp(&unordered); in GenerateNumbers()
2025 __ bind(&maybe_undefined2); in GenerateNumbers()
2027 __ cmp(edx, Immediate(isolate()->factory()->undefined_value())); in GenerateNumbers()
2028 __ j(equal, &unordered); in GenerateNumbers()
2031 __ bind(&miss); in GenerateNumbers()
2048 __ mov(tmp1, left); in GenerateInternalizedStrings()
2050 __ and_(tmp1, right); in GenerateInternalizedStrings()
2051 __ JumpIfSmi(tmp1, &miss, Label::kNear); in GenerateInternalizedStrings()
2054 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2055 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2056 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2057 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2059 __ or_(tmp1, tmp2); in GenerateInternalizedStrings()
2060 __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); in GenerateInternalizedStrings()
2061 __ j(not_zero, &miss, Label::kNear); in GenerateInternalizedStrings()
2065 __ cmp(left, right); in GenerateInternalizedStrings()
2069 __ j(not_equal, &done, Label::kNear); in GenerateInternalizedStrings()
2072 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); in GenerateInternalizedStrings()
2073 __ bind(&done); in GenerateInternalizedStrings()
2074 __ ret(0); in GenerateInternalizedStrings()
2076 __ bind(&miss); in GenerateInternalizedStrings()
2093 __ mov(tmp1, left); in GenerateUniqueNames()
2095 __ and_(tmp1, right); in GenerateUniqueNames()
2096 __ JumpIfSmi(tmp1, &miss, Label::kNear); in GenerateUniqueNames()
2100 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); in GenerateUniqueNames()
2101 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); in GenerateUniqueNames()
2102 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2103 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2105 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear); in GenerateUniqueNames()
2106 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear); in GenerateUniqueNames()
2110 __ cmp(left, right); in GenerateUniqueNames()
2114 __ j(not_equal, &done, Label::kNear); in GenerateUniqueNames()
2117 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); in GenerateUniqueNames()
2118 __ bind(&done); in GenerateUniqueNames()
2119 __ ret(0); in GenerateUniqueNames()
2121 __ bind(&miss); in GenerateUniqueNames()
2140 __ mov(tmp1, left); in GenerateStrings()
2142 __ and_(tmp1, right); in GenerateStrings()
2143 __ JumpIfSmi(tmp1, &miss); in GenerateStrings()
2147 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); in GenerateStrings()
2148 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); in GenerateStrings()
2149 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateStrings()
2150 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateStrings()
2151 __ mov(tmp3, tmp1); in GenerateStrings()
2153 __ or_(tmp3, tmp2); in GenerateStrings()
2154 __ test(tmp3, Immediate(kIsNotStringMask)); in GenerateStrings()
2155 __ j(not_zero, &miss); in GenerateStrings()
2159 __ cmp(left, right); in GenerateStrings()
2160 __ j(not_equal, &not_same, Label::kNear); in GenerateStrings()
2163 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); in GenerateStrings()
2164 __ ret(0); in GenerateStrings()
2167 __ bind(&not_same); in GenerateStrings()
2176 __ or_(tmp1, tmp2); in GenerateStrings()
2177 __ test(tmp1, Immediate(kIsNotInternalizedMask)); in GenerateStrings()
2178 __ j(not_zero, &do_compare, Label::kNear); in GenerateStrings()
2182 __ ret(0); in GenerateStrings()
2183 __ bind(&do_compare); in GenerateStrings()
2188 __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime); in GenerateStrings()
2200 __ bind(&runtime); in GenerateStrings()
2204 __ Push(left); in GenerateStrings()
2205 __ Push(right); in GenerateStrings()
2206 __ CallRuntime(Runtime::kStringEqual); in GenerateStrings()
2208 __ sub(eax, Immediate(masm->isolate()->factory()->true_value())); in GenerateStrings()
2209 __ Ret(); in GenerateStrings()
2211 __ pop(tmp1); // Return address. in GenerateStrings()
2212 __ push(left); in GenerateStrings()
2213 __ push(right); in GenerateStrings()
2214 __ push(tmp1); in GenerateStrings()
2215 __ TailCallRuntime(Runtime::kStringCompare); in GenerateStrings()
2218 __ bind(&miss); in GenerateStrings()
2226 __ mov(ecx, edx); in GenerateReceivers()
2227 __ and_(ecx, eax); in GenerateReceivers()
2228 __ JumpIfSmi(ecx, &miss, Label::kNear); in GenerateReceivers()
2231 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx); in GenerateReceivers()
2232 __ j(below, &miss, Label::kNear); in GenerateReceivers()
2233 __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx); in GenerateReceivers()
2234 __ j(below, &miss, Label::kNear); in GenerateReceivers()
2237 __ sub(eax, edx); in GenerateReceivers()
2238 __ ret(0); in GenerateReceivers()
2240 __ bind(&miss); in GenerateReceivers()
2248 __ mov(ecx, edx); in GenerateKnownReceivers()
2249 __ and_(ecx, eax); in GenerateKnownReceivers()
2250 __ JumpIfSmi(ecx, &miss, Label::kNear); in GenerateKnownReceivers()
2252 __ GetWeakValue(edi, cell); in GenerateKnownReceivers()
2253 __ cmp(edi, FieldOperand(eax, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2254 __ j(not_equal, &miss, Label::kNear); in GenerateKnownReceivers()
2255 __ cmp(edi, FieldOperand(edx, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2256 __ j(not_equal, &miss, Label::kNear); in GenerateKnownReceivers()
2259 __ sub(eax, edx); in GenerateKnownReceivers()
2260 __ ret(0); in GenerateKnownReceivers()
2262 __ PopReturnAddressTo(ecx); in GenerateKnownReceivers()
2263 __ Push(edx); in GenerateKnownReceivers()
2264 __ Push(eax); in GenerateKnownReceivers()
2265 __ Push(Immediate(Smi::FromInt(NegativeComparisonResult(GetCondition())))); in GenerateKnownReceivers()
2266 __ PushReturnAddressFrom(ecx); in GenerateKnownReceivers()
2267 __ TailCallRuntime(Runtime::kCompare); in GenerateKnownReceivers()
2270 __ bind(&miss); in GenerateKnownReceivers()
2279 __ push(edx); // Preserve edx and eax. in GenerateMiss()
2280 __ push(eax); in GenerateMiss()
2281 __ push(edx); // And also use them as the arguments. in GenerateMiss()
2282 __ push(eax); in GenerateMiss()
2283 __ push(Immediate(Smi::FromInt(op()))); in GenerateMiss()
2284 __ CallRuntime(Runtime::kCompareIC_Miss); in GenerateMiss()
2286 __ lea(edi, FieldOperand(eax, Code::kHeaderSize)); in GenerateMiss()
2287 __ pop(eax); in GenerateMiss()
2288 __ pop(edx); in GenerateMiss()
2292 __ jmp(edi); in GenerateMiss()
2318 __ mov(index, FieldOperand(properties, kCapacityOffset)); in GenerateNegativeLookup()
2319 __ dec(index); in GenerateNegativeLookup()
2320 __ and_(index, in GenerateNegativeLookup()
2326 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3. in GenerateNegativeLookup()
2330 __ mov(entity_name, Operand(properties, index, times_half_pointer_size, in GenerateNegativeLookup()
2332 __ cmp(entity_name, masm->isolate()->factory()->undefined_value()); in GenerateNegativeLookup()
2333 __ j(equal, done); in GenerateNegativeLookup()
2336 __ cmp(entity_name, Handle<Name>(name)); in GenerateNegativeLookup()
2337 __ j(equal, miss); in GenerateNegativeLookup()
2341 __ cmp(entity_name, masm->isolate()->factory()->the_hole_value()); in GenerateNegativeLookup()
2342 __ j(equal, &good, Label::kNear); in GenerateNegativeLookup()
2345 __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); in GenerateNegativeLookup()
2346 __ JumpIfNotUniqueNameInstanceType( in GenerateNegativeLookup()
2348 __ bind(&good); in GenerateNegativeLookup()
2353 __ push(Immediate(Handle<Object>(name))); in GenerateNegativeLookup()
2354 __ push(Immediate(name->Hash())); in GenerateNegativeLookup()
2355 __ CallStub(&stub); in GenerateNegativeLookup()
2356 __ test(r0, r0); in GenerateNegativeLookup()
2357 __ j(not_zero, miss); in GenerateNegativeLookup()
2358 __ jmp(done); in GenerateNegativeLookup()
2380 __ mov(scratch, FieldOperand(dictionary(), kCapacityOffset)); in Generate()
2381 __ dec(scratch); in Generate()
2382 __ SmiUntag(scratch); in Generate()
2383 __ push(scratch); in Generate()
2392 __ mov(scratch, Operand(esp, 2 * kPointerSize)); in Generate()
2394 __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i))); in Generate()
2396 __ and_(scratch, Operand(esp, 0)); in Generate()
2400 __ lea(index(), Operand(scratch, scratch, times_2, 0)); // index *= 3. in Generate()
2404 __ mov(scratch, Operand(dictionary(), index(), times_pointer_size, in Generate()
2406 __ cmp(scratch, isolate()->factory()->undefined_value()); in Generate()
2407 __ j(equal, &not_in_dictionary); in Generate()
2410 __ cmp(scratch, Operand(esp, 3 * kPointerSize)); in Generate()
2411 __ j(equal, &in_dictionary); in Generate()
2419 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); in Generate()
2420 __ JumpIfNotUniqueNameInstanceType( in Generate()
2426 __ bind(&maybe_in_dictionary); in Generate()
2431 __ mov(result(), Immediate(0)); in Generate()
2432 __ Drop(1); in Generate()
2433 __ ret(2 * kPointerSize); in Generate()
2436 __ bind(&in_dictionary); in Generate()
2437 __ mov(result(), Immediate(1)); in Generate()
2438 __ Drop(1); in Generate()
2439 __ ret(2 * kPointerSize); in Generate()
2441 __ bind(&not_in_dictionary); in Generate()
2442 __ mov(result(), Immediate(0)); in Generate()
2443 __ Drop(1); in Generate()
2444 __ ret(2 * kPointerSize); in Generate()
2469 __ jmp(&skip_to_incremental_noncompacting, Label::kNear); in Generate()
2470 __ jmp(&skip_to_incremental_compacting, Label::kFar); in Generate()
2473 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in Generate()
2476 __ ret(0); in Generate()
2479 __ bind(&skip_to_incremental_noncompacting); in Generate()
2482 __ bind(&skip_to_incremental_compacting); in Generate()
2498 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); in GenerateIncremental()
2499 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value. in GenerateIncremental()
2503 __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(), in GenerateIncremental()
2514 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in GenerateIncremental()
2517 __ bind(&dont_need_remembered_set); in GenerateIncremental()
2526 __ ret(0); in GenerateIncremental()
2533 __ PrepareCallCFunction(argument_count, regs_.scratch0()); in InformIncrementalMarker()
2534 __ mov(Operand(esp, 0 * kPointerSize), regs_.object()); in InformIncrementalMarker()
2535 __ mov(Operand(esp, 1 * kPointerSize), regs_.address()); // Slot. in InformIncrementalMarker()
2536 __ mov(Operand(esp, 2 * kPointerSize), in InformIncrementalMarker()
2540 __ CallCFunction( in InformIncrementalMarker()
2556 __ JumpIfBlack(regs_.object(), in CheckNeedsToInformIncrementalMarker()
2564 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in CheckNeedsToInformIncrementalMarker()
2567 __ ret(0); in CheckNeedsToInformIncrementalMarker()
2570 __ bind(&object_is_black); in CheckNeedsToInformIncrementalMarker()
2573 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); in CheckNeedsToInformIncrementalMarker()
2578 __ CheckPageFlag(regs_.scratch0(), // Contains value. in CheckNeedsToInformIncrementalMarker()
2585 __ CheckPageFlag(regs_.object(), in CheckNeedsToInformIncrementalMarker()
2592 __ jmp(&need_incremental); in CheckNeedsToInformIncrementalMarker()
2594 __ bind(&ensure_not_white); in CheckNeedsToInformIncrementalMarker()
2599 __ push(regs_.object()); in CheckNeedsToInformIncrementalMarker()
2600 __ JumpIfWhite(regs_.scratch0(), // The value. in CheckNeedsToInformIncrementalMarker()
2604 __ pop(regs_.object()); in CheckNeedsToInformIncrementalMarker()
2608 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in CheckNeedsToInformIncrementalMarker()
2611 __ ret(0); in CheckNeedsToInformIncrementalMarker()
2614 __ bind(&need_incremental_pop_object); in CheckNeedsToInformIncrementalMarker()
2615 __ pop(regs_.object()); in CheckNeedsToInformIncrementalMarker()
2617 __ bind(&need_incremental); in CheckNeedsToInformIncrementalMarker()
2625 __ call(ces.GetCode(), RelocInfo::CODE_TARGET); in Generate()
2628 __ mov(ebx, MemOperand(ebp, parameter_count_offset)); in Generate()
2630 __ pop(ecx); in Generate()
2633 __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset)); in Generate()
2634 __ jmp(ecx); // Return to IC Miss stub, continuation still on stack. in Generate()
2647 __ push(eax); in Generate()
2648 __ push(ecx); in Generate()
2649 __ push(edx); in Generate()
2652 __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize)); in Generate()
2653 __ push(eax); in Generate()
2657 __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize)); in Generate()
2658 __ sub(eax, Immediate(Assembler::kCallInstructionLength)); in Generate()
2659 __ push(eax); in Generate()
2663 __ call(FUNCTION_ADDR(isolate()->function_entry_hook()), in Generate()
2665 __ add(esp, Immediate(2 * kPointerSize)); in Generate()
2668 __ pop(edx); in Generate()
2669 __ pop(ecx); in Generate()
2670 __ pop(eax); in Generate()
2672 __ ret(0); in Generate()
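The matched lines above save the caller-saved registers, compute the address of the instrumented function (the return address minus the call instruction length) and the stack slot holding that return address, and pass both to the installed entry hook. A minimal sketch of the callback shape this targets, assuming V8's public FunctionEntryHook typedef; the body is purely illustrative:

    #include <cstdint>

    // Matches the v8.h FunctionEntryHook signature: the entered function's
    // address and the location of its return address on the stack.
    typedef void (*FunctionEntryHook)(uintptr_t function,
                                      uintptr_t return_addr_location);

    void ExampleEntryHook(uintptr_t function, uintptr_t return_addr_location) {
      (void)function;               // a profiler would record this address
      (void)return_addr_location;   // and key it by this stack slot
    }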
2680 __ TailCallStub(&stub); in CreateArrayDispatch()
2687 __ cmp(edx, kind); in CreateArrayDispatch()
2688 __ j(not_equal, &next); in CreateArrayDispatch()
2690 __ TailCallStub(&stub); in CreateArrayDispatch()
2691 __ bind(&next); in CreateArrayDispatch()
2695 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatch()
2719 __ test_b(edx, Immediate(1)); in CreateArrayDispatchOneArgument()
2720 __ j(not_zero, &normal_sequence); in CreateArrayDispatchOneArgument()
2724 __ mov(ecx, Operand(esp, kPointerSize)); in CreateArrayDispatchOneArgument()
2725 __ test(ecx, ecx); in CreateArrayDispatchOneArgument()
2726 __ j(zero, &normal_sequence); in CreateArrayDispatchOneArgument()
2734 __ TailCallStub(&stub_holey); in CreateArrayDispatchOneArgument()
2736 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
2739 __ TailCallStub(&stub); in CreateArrayDispatchOneArgument()
2743 __ inc(edx); in CreateArrayDispatchOneArgument()
2748 __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map)); in CreateArrayDispatchOneArgument()
2749 __ Assert(equal, kExpectedAllocationSite); in CreateArrayDispatchOneArgument()
2756 __ add(FieldOperand(ebx, AllocationSite::kTransitionInfoOffset), in CreateArrayDispatchOneArgument()
2759 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
2765 __ cmp(edx, kind); in CreateArrayDispatchOneArgument()
2766 __ j(not_equal, &next); in CreateArrayDispatchOneArgument()
2768 __ TailCallStub(&stub); in CreateArrayDispatchOneArgument()
2769 __ bind(&next); in CreateArrayDispatchOneArgument()
2773 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatchOneArgument()
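The cmp/j/TailCallStub chain above selects a stub per elements kind; when the single length argument is non-zero, the kind is first upgraded to its holey variant, since `new Array(n)` starts out containing only holes. A hedged, plain-C++ sketch of that decision (the kind names mirror the constants appearing in the listing; the mapping shown is illustrative):

    enum ElementsKind {
      FAST_SMI_ELEMENTS,
      FAST_HOLEY_SMI_ELEMENTS,
      FAST_ELEMENTS,
      FAST_HOLEY_ELEMENTS
    };

    // A zero-length request keeps the packed kind; otherwise the array will
    // contain holes, so dispatch should go to the holey stub instead.
    ElementsKind KindForSingleArgument(ElementsKind kind, int length) {
      if (length == 0) return kind;
      switch (kind) {
        case FAST_SMI_ELEMENTS: return FAST_HOLEY_SMI_ELEMENTS;
        case FAST_ELEMENTS:     return FAST_HOLEY_ELEMENTS;
        default:                return kind;  // already holey
      }
    }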
2815 __ test(eax, eax); in GenerateDispatchToArrayStub()
2816 __ j(not_zero, &not_zero_case); in GenerateDispatchToArrayStub()
2819 __ bind(&not_zero_case); in GenerateDispatchToArrayStub()
2820 __ cmp(eax, 1); in GenerateDispatchToArrayStub()
2821 __ j(greater, &not_one_case); in GenerateDispatchToArrayStub()
2824 __ bind(&not_one_case); in GenerateDispatchToArrayStub()
2826 __ TailCallStub(&stub); in GenerateDispatchToArrayStub()
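The test/cmp pair above splits on the argument count: zero arguments, exactly one, and the general case each tail-call a dedicated stub. A small sketch of the same three-way dispatch, with hypothetical names:

    enum class ArraySizeCase { kNone, kOne, kMoreThanOne };

    ArraySizeCase ClassifyArgumentCount(int argc) {
      if (argc == 0) return ArraySizeCase::kNone;   // test(eax, eax)
      if (argc == 1) return ArraySizeCase::kOne;    // cmp(eax, 1); j(greater, ...)
      return ArraySizeCase::kMoreThanOne;
    }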
2843 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
2845 __ test(ecx, Immediate(kSmiTagMask)); in Generate()
2846 __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction); in Generate()
2847 __ CmpObjectType(ecx, MAP_TYPE, ecx); in Generate()
2848 __ Assert(equal, kUnexpectedInitialMapForArrayFunction); in Generate()
2851 __ AssertUndefinedOrAllocationSite(ebx); in Generate()
2857 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); in Generate()
2859 __ cmp(edx, edi); in Generate()
2860 __ j(not_equal, &subclassing); in Generate()
2865 __ cmp(ebx, isolate()->factory()->undefined_value()); in Generate()
2866 __ j(equal, &no_info); in Generate()
2869 __ mov(edx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset)); in Generate()
2870 __ SmiUntag(edx); in Generate()
2872 __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask)); in Generate()
2875 __ bind(&no_info); in Generate()
2879 __ bind(&subclassing); in Generate()
2880 __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi); in Generate()
2881 __ add(eax, Immediate(3)); in Generate()
2882 __ PopReturnAddressTo(ecx); in Generate()
2883 __ Push(edx); in Generate()
2884 __ Push(ebx); in Generate()
2885 __ PushReturnAddressFrom(ecx); in Generate()
2886 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate())); in Generate()
2894 __ test(eax, eax); in GenerateCase()
2895 __ j(not_zero, &not_zero_case); in GenerateCase()
2897 __ TailCallStub(&stub0); in GenerateCase()
2899 __ bind(&not_zero_case); in GenerateCase()
2900 __ cmp(eax, 1); in GenerateCase()
2901 __ j(greater, &not_one_case); in GenerateCase()
2906 __ mov(ecx, Operand(esp, kPointerSize)); in GenerateCase()
2907 __ test(ecx, ecx); in GenerateCase()
2908 __ j(zero, &normal_sequence); in GenerateCase()
2912 __ TailCallStub(&stub1_holey); in GenerateCase()
2915 __ bind(&normal_sequence); in GenerateCase()
2917 __ TailCallStub(&stub1); in GenerateCase()
2919 __ bind(&not_one_case); in GenerateCase()
2921 __ TailCallStub(&stubN); in GenerateCase()
2937 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
2939 __ test(ecx, Immediate(kSmiTagMask)); in Generate()
2940 __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction); in Generate()
2941 __ CmpObjectType(ecx, MAP_TYPE, ecx); in Generate()
2942 __ Assert(equal, kUnexpectedInitialMapForArrayFunction); in Generate()
2946 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
2950 __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset)); in Generate()
2952 __ DecodeField<Map::ElementsKindBits>(ecx); in Generate()
2956 __ cmp(ecx, Immediate(FAST_ELEMENTS)); in Generate()
2957 __ j(equal, &done); in Generate()
2958 __ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS)); in Generate()
2959 __ Assert(equal, kInvalidElementsKindForInternalArrayOrInternalPackedArray); in Generate()
2960 __ bind(&done); in Generate()
2964 __ cmp(ecx, Immediate(FAST_ELEMENTS)); in Generate()
2965 __ j(equal, &fast_elements_case); in Generate()
2968 __ bind(&fast_elements_case); in Generate()
2979 __ AssertFunction(edi); in Generate()
2982 __ mov(edx, ebp); in Generate()
2986 __ mov(edx, Operand(edx, StandardFrameConstants::kCallerFPOffset)); in Generate()
2990 __ cmp(edi, Operand(edx, StandardFrameConstants::kFunctionOffset)); in Generate()
2991 __ j(equal, &ok); in Generate()
2992 __ Abort(kInvalidFrameForFastNewRestArgumentsStub); in Generate()
2993 __ bind(&ok); in Generate()
2999 __ mov(ebx, Operand(edx, StandardFrameConstants::kCallerFPOffset)); in Generate()
3000 __ cmp(Operand(ebx, CommonFrameConstants::kContextOrFrameTypeOffset), in Generate()
3002 __ j(not_equal, &no_rest_parameters, Label::kNear); in Generate()
3007 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); in Generate()
3008 __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset)); in Generate()
3009 __ sub(eax, in Generate()
3011 __ j(greater, &rest_parameters); in Generate()
3014 __ bind(&no_rest_parameters); in Generate()
3023 __ Allocate(JSArray::kSize, eax, edx, ecx, &allocate, NO_ALLOCATION_FLAGS); in Generate()
3024 __ bind(&done_allocate); in Generate()
3027 __ LoadGlobalFunction(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, ecx); in Generate()
3028 __ mov(FieldOperand(eax, JSArray::kMapOffset), ecx); in Generate()
3029 __ mov(ecx, isolate()->factory()->empty_fixed_array()); in Generate()
3030 __ mov(FieldOperand(eax, JSArray::kPropertiesOffset), ecx); in Generate()
3031 __ mov(FieldOperand(eax, JSArray::kElementsOffset), ecx); in Generate()
3032 __ mov(FieldOperand(eax, JSArray::kLengthOffset), Immediate(Smi::kZero)); in Generate()
3034 __ Ret(); in Generate()
3037 __ bind(&allocate); in Generate()
3040 __ Push(Smi::FromInt(JSArray::kSize)); in Generate()
3041 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
3043 __ jmp(&done_allocate); in Generate()
3046 __ bind(&rest_parameters); in Generate()
3049 __ lea(ebx, in Generate()
3062 __ lea(ecx, Operand(eax, times_half_pointer_size, in Generate()
3064 __ Allocate(ecx, edx, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS); in Generate()
3065 __ bind(&done_allocate); in Generate()
3068 __ mov(FieldOperand(edx, FixedArray::kMapOffset), in Generate()
3070 __ mov(FieldOperand(edx, FixedArray::kLengthOffset), eax); in Generate()
3073 __ Move(ecx, Smi::kZero); in Generate()
3074 __ bind(&loop); in Generate()
3075 __ cmp(ecx, eax); in Generate()
3076 __ j(equal, &done_loop, Label::kNear); in Generate()
3077 __ mov(edi, Operand(ebx, 0 * kPointerSize)); in Generate()
3078 __ mov(FieldOperand(edx, ecx, times_half_pointer_size, in Generate()
3081 __ sub(ebx, Immediate(1 * kPointerSize)); in Generate()
3082 __ add(ecx, Immediate(Smi::FromInt(1))); in Generate()
3083 __ jmp(&loop); in Generate()
3084 __ bind(&done_loop); in Generate()
3088 __ lea(edi, in Generate()
3090 __ LoadGlobalFunction(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, ecx); in Generate()
3091 __ mov(FieldOperand(edi, JSArray::kMapOffset), ecx); in Generate()
3092 __ mov(FieldOperand(edi, JSArray::kPropertiesOffset), in Generate()
3094 __ mov(FieldOperand(edi, JSArray::kElementsOffset), edx); in Generate()
3095 __ mov(FieldOperand(edi, JSArray::kLengthOffset), eax); in Generate()
3097 __ mov(eax, edi); in Generate()
3098 __ Ret(); in Generate()
3102 __ bind(&allocate); in Generate()
3103 __ cmp(ecx, Immediate(kMaxRegularHeapObjectSize)); in Generate()
3104 __ j(greater, &too_big_for_new_space); in Generate()
3107 __ SmiTag(ecx); in Generate()
3108 __ Push(eax); in Generate()
3109 __ Push(ebx); in Generate()
3110 __ Push(ecx); in Generate()
3111 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
3112 __ mov(edx, eax); in Generate()
3113 __ Pop(ebx); in Generate()
3114 __ Pop(eax); in Generate()
3116 __ jmp(&done_allocate); in Generate()
3119 __ bind(&too_big_for_new_space); in Generate()
3120 __ PopReturnAddressTo(ecx); in Generate()
3126 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); in Generate()
3127 __ Push(Operand(edx, StandardFrameConstants::kFunctionOffset)); in Generate()
3129 __ Push(Operand(ebp, StandardFrameConstants::kFunctionOffset)); in Generate()
3131 __ PushReturnAddressFrom(ecx); in Generate()
3132 __ TailCallRuntime(Runtime::kNewRestParameter); in Generate()
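The rest-parameter path above subtracts the formal parameter count from the adaptor frame's argument count and only materializes a JSArray of extras when the difference is positive; otherwise an empty array is returned. A hedged sketch of that count in plain C++, with illustrative names:

    #include <cstddef>

    // Number of arguments that end up in the rest array: the excess of
    // actual arguments over declared formals, clamped at zero.
    std::size_t RestParameterCount(std::size_t actual_argc,
                                   std::size_t formal_parameter_count) {
      return actual_argc > formal_parameter_count
                 ? actual_argc - formal_parameter_count
                 : 0;
    }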
3143 __ AssertFunction(edi); in Generate()
3146 __ mov(ecx, ebp); in Generate()
3150 __ mov(ecx, Operand(ecx, StandardFrameConstants::kCallerFPOffset)); in Generate()
3154 __ cmp(edi, Operand(ecx, StandardFrameConstants::kFunctionOffset)); in Generate()
3155 __ j(equal, &ok); in Generate()
3156 __ Abort(kInvalidFrameForFastNewSloppyArgumentsStub); in Generate()
3157 __ bind(&ok); in Generate()
3161 __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); in Generate()
3162 __ mov(ebx, in Generate()
3164 __ lea(edx, Operand(ecx, ebx, times_half_pointer_size, in Generate()
3175 __ mov(eax, Operand(ecx, StandardFrameConstants::kCallerFPOffset)); in Generate()
3176 __ mov(eax, Operand(eax, CommonFrameConstants::kContextOrFrameTypeOffset)); in Generate()
3177 __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); in Generate()
3178 __ j(equal, &adaptor_frame, Label::kNear); in Generate()
3181 __ mov(ecx, ebx); in Generate()
3182 __ push(ebx); in Generate()
3183 __ jmp(&try_allocate, Label::kNear); in Generate()
3186 __ bind(&adaptor_frame); in Generate()
3187 __ push(ebx); in Generate()
3188 __ mov(edx, Operand(ecx, StandardFrameConstants::kCallerFPOffset)); in Generate()
3189 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset)); in Generate()
3190 __ lea(edx, in Generate()
3196 __ cmp(ebx, ecx); in Generate()
3197 __ j(less_equal, &try_allocate, Label::kNear); in Generate()
3198 __ mov(ebx, ecx); in Generate()
3201 __ bind(&try_allocate); in Generate()
3202 __ push(edi); in Generate()
3203 __ push(ebx); in Generate()
3210 __ test(ebx, ebx); in Generate()
3211 __ j(zero, &no_parameter_map, Label::kNear); in Generate()
3212 __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize)); in Generate()
3213 __ bind(&no_parameter_map); in Generate()
3216 __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize)); in Generate()
3219 __ add(ebx, Immediate(JSSloppyArgumentsObject::kSize)); in Generate()
3222 __ Allocate(ebx, eax, edi, no_reg, &runtime, NO_ALLOCATION_FLAGS); in Generate()
3231 __ mov(edi, NativeContextOperand()); in Generate()
3232 __ mov(ebx, Operand(esp, 0 * kPointerSize)); in Generate()
3233 __ test(ebx, ebx); in Generate()
3234 __ j(not_zero, &has_mapped_parameters, Label::kNear); in Generate()
3235 __ mov( in Generate()
3238 __ jmp(&instantiate, Label::kNear); in Generate()
3240 __ bind(&has_mapped_parameters); in Generate()
3241 __ mov(edi, Operand(edi, Context::SlotOffset( in Generate()
3243 __ bind(&instantiate); in Generate()
3253 __ mov(FieldOperand(eax, JSObject::kMapOffset), edi); in Generate()
3254 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), in Generate()
3256 __ mov(FieldOperand(eax, JSObject::kElementsOffset), in Generate()
3261 __ mov(edi, Operand(esp, 1 * kPointerSize)); in Generate()
3262 __ AssertNotSmi(edi); in Generate()
3263 __ mov(FieldOperand(eax, JSSloppyArgumentsObject::kCalleeOffset), edi); in Generate()
3266 __ AssertSmi(ecx); in Generate()
3267 __ mov(FieldOperand(eax, JSSloppyArgumentsObject::kLengthOffset), ecx); in Generate()
3272 __ lea(edi, Operand(eax, JSSloppyArgumentsObject::kSize)); in Generate()
3273 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi); in Generate()
3284 __ push(edx); in Generate()
3285 __ push(eax); in Generate()
3289 __ test(ebx, ebx); in Generate()
3290 __ j(zero, &skip_parameter_map); in Generate()
3292 __ mov(FieldOperand(edi, FixedArray::kMapOffset), in Generate()
3294 __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2)))); in Generate()
3295 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax); in Generate()
3296 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi); in Generate()
3297 __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize)); in Generate()
3298 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax); in Generate()
3309 __ push(ecx); in Generate()
3310 __ mov(eax, Operand(esp, 3 * kPointerSize)); in Generate()
3311 __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS))); in Generate()
3312 __ add(ebx, Operand(esp, 5 * kPointerSize)); in Generate()
3313 __ sub(ebx, eax); in Generate()
3314 __ mov(ecx, isolate()->factory()->the_hole_value()); in Generate()
3315 __ mov(edx, edi); in Generate()
3316 __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize)); in Generate()
3328 __ jmp(&parameters_test, Label::kNear); in Generate()
3330 __ bind(&parameters_loop); in Generate()
3331 __ sub(eax, Immediate(Smi::FromInt(1))); in Generate()
3332 __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx); in Generate()
3333 __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx); in Generate()
3334 __ add(ebx, Immediate(Smi::FromInt(1))); in Generate()
3335 __ bind(&parameters_test); in Generate()
3336 __ test(eax, eax); in Generate()
3337 __ j(not_zero, &parameters_loop, Label::kNear); in Generate()
3338 __ pop(ecx); in Generate()
3340 __ bind(&skip_parameter_map); in Generate()
3350 __ mov(FieldOperand(edi, FixedArray::kMapOffset), in Generate()
3352 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx); in Generate()
3355 __ mov(ebx, Operand(esp, 2 * kPointerSize)); in Generate()
3356 __ mov(edx, Operand(esp, 1 * kPointerSize)); in Generate()
3357 __ sub(edx, ebx); // Is there a smarter way to do negative scaling? in Generate()
3358 __ sub(edx, ebx); in Generate()
3359 __ jmp(&arguments_test, Label::kNear); in Generate()
3361 __ bind(&arguments_loop); in Generate()
3362 __ sub(edx, Immediate(kPointerSize)); in Generate()
3363 __ mov(eax, Operand(edx, 0)); in Generate()
3364 __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax); in Generate()
3365 __ add(ebx, Immediate(Smi::FromInt(1))); in Generate()
3367 __ bind(&arguments_test); in Generate()
3368 __ cmp(ebx, ecx); in Generate()
3369 __ j(less, &arguments_loop, Label::kNear); in Generate()
3372 __ pop(eax); // Address of arguments object. in Generate()
3373 __ Drop(4); in Generate()
3376 __ ret(0); in Generate()
3379 __ bind(&runtime); in Generate()
3380 __ pop(eax); // Remove saved mapped parameter count. in Generate()
3381 __ pop(edi); // Pop saved function. in Generate()
3382 __ pop(eax); // Remove saved parameter count. in Generate()
3383 __ pop(eax); // Pop return address. in Generate()
3384 __ push(edi); // Push function. in Generate()
3385 __ push(edx); // Push parameters pointer. in Generate()
3386 __ push(ecx); // Push parameter count. in Generate()
3387 __ push(eax); // Push return address. in Generate()
3388 __ TailCallRuntime(Runtime::kNewSloppyArguments); in Generate()
3398 __ AssertFunction(edi); in Generate()
3401 __ mov(edx, ebp); in Generate()
3405 __ mov(edx, Operand(edx, StandardFrameConstants::kCallerFPOffset)); in Generate()
3409 __ cmp(edi, Operand(edx, StandardFrameConstants::kFunctionOffset)); in Generate()
3410 __ j(equal, &ok); in Generate()
3411 __ Abort(kInvalidFrameForFastNewStrictArgumentsStub); in Generate()
3412 __ bind(&ok); in Generate()
3417 __ mov(ebx, Operand(edx, StandardFrameConstants::kCallerFPOffset)); in Generate()
3418 __ cmp(Operand(ebx, CommonFrameConstants::kContextOrFrameTypeOffset), in Generate()
3420 __ j(equal, &arguments_adaptor, Label::kNear); in Generate()
3422 __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); in Generate()
3423 __ mov(eax, in Generate()
3425 __ lea(ebx, in Generate()
3429 __ jmp(&arguments_done, Label::kNear); in Generate()
3430 __ bind(&arguments_adaptor); in Generate()
3432 __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset)); in Generate()
3433 __ lea(ebx, in Generate()
3437 __ bind(&arguments_done); in Generate()
3448 __ lea(ecx, in Generate()
3451 __ Allocate(ecx, edx, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS); in Generate()
3452 __ bind(&done_allocate); in Generate()
3455 __ mov(FieldOperand(edx, FixedArray::kMapOffset), in Generate()
3457 __ mov(FieldOperand(edx, FixedArray::kLengthOffset), eax); in Generate()
3460 __ Move(ecx, Smi::kZero); in Generate()
3461 __ bind(&loop); in Generate()
3462 __ cmp(ecx, eax); in Generate()
3463 __ j(equal, &done_loop, Label::kNear); in Generate()
3464 __ mov(edi, Operand(ebx, 0 * kPointerSize)); in Generate()
3465 __ mov(FieldOperand(edx, ecx, times_half_pointer_size, in Generate()
3468 __ sub(ebx, Immediate(1 * kPointerSize)); in Generate()
3469 __ add(ecx, Immediate(Smi::FromInt(1))); in Generate()
3470 __ jmp(&loop); in Generate()
3471 __ bind(&done_loop); in Generate()
3475 __ lea(edi, in Generate()
3477 __ LoadGlobalFunction(Context::STRICT_ARGUMENTS_MAP_INDEX, ecx); in Generate()
3478 __ mov(FieldOperand(edi, JSStrictArgumentsObject::kMapOffset), ecx); in Generate()
3479 __ mov(FieldOperand(edi, JSStrictArgumentsObject::kPropertiesOffset), in Generate()
3481 __ mov(FieldOperand(edi, JSStrictArgumentsObject::kElementsOffset), edx); in Generate()
3482 __ mov(FieldOperand(edi, JSStrictArgumentsObject::kLengthOffset), eax); in Generate()
3484 __ mov(eax, edi); in Generate()
3485 __ Ret(); in Generate()
3489 __ bind(&allocate); in Generate()
3490 __ cmp(ecx, Immediate(kMaxRegularHeapObjectSize)); in Generate()
3491 __ j(greater, &too_big_for_new_space); in Generate()
3494 __ SmiTag(ecx); in Generate()
3495 __ Push(eax); in Generate()
3496 __ Push(ebx); in Generate()
3497 __ Push(ecx); in Generate()
3498 __ CallRuntime(Runtime::kAllocateInNewSpace); in Generate()
3499 __ mov(edx, eax); in Generate()
3500 __ Pop(ebx); in Generate()
3501 __ Pop(eax); in Generate()
3503 __ jmp(&done_allocate); in Generate()
3506 __ bind(&too_big_for_new_space); in Generate()
3507 __ PopReturnAddressTo(ecx); in Generate()
3513 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset)); in Generate()
3514 __ Push(Operand(edx, StandardFrameConstants::kFunctionOffset)); in Generate()
3516 __ Push(Operand(ebp, StandardFrameConstants::kFunctionOffset)); in Generate()
3518 __ PushReturnAddressFrom(ecx); in Generate()
3519 __ TailCallRuntime(Runtime::kNewStrictArguments); in Generate()
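As in the rest-parameter path, the loop above copies arguments out of the caller frame into a freshly allocated FixedArray, stepping the source pointer toward lower addresses while the destination index grows. A plain-C++ sketch of that copy direction (the names are illustrative, not V8's):

    #include <cstddef>

    // 'highest_arg' points at the highest-addressed argument slot in the
    // caller frame; elements are stored into 'dest' in order while the
    // source pointer steps down one slot per iteration.
    void CopyArgumentsDownward(void* const* highest_arg, void** dest,
                               std::size_t count) {
      for (std::size_t i = 0; i < count; ++i) {
        dest[i] = *highest_arg;
        --highest_arg;
      }
    }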
3534 __ EnterApiExitFrame(argc); in PrepareCallApiFunction()
3535 if (__ emit_debug_code()) { in PrepareCallApiFunction()
3536 __ mov(esi, Immediate(bit_cast<int32_t>(kZapValue))); in PrepareCallApiFunction()
3563 __ mov(ebx, Operand::StaticVariable(next_address)); in CallApiFunctionAndReturn()
3564 __ mov(edi, Operand::StaticVariable(limit_address)); in CallApiFunctionAndReturn()
3565 __ add(Operand::StaticVariable(level_address), Immediate(1)); in CallApiFunctionAndReturn()
3569 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
3570 __ PrepareCallCFunction(1, eax); in CallApiFunctionAndReturn()
3571 __ mov(Operand(esp, 0), in CallApiFunctionAndReturn()
3573 __ CallCFunction(ExternalReference::log_enter_external_function(isolate), in CallApiFunctionAndReturn()
3575 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
3581 __ mov(eax, Immediate(ExternalReference::is_profiling_address(isolate))); in CallApiFunctionAndReturn()
3582 __ cmpb(Operand(eax, 0), Immediate(0)); in CallApiFunctionAndReturn()
3583 __ j(zero, &profiler_disabled); in CallApiFunctionAndReturn()
3586 __ mov(thunk_last_arg, function_address); in CallApiFunctionAndReturn()
3588 __ mov(eax, Immediate(thunk_ref)); in CallApiFunctionAndReturn()
3589 __ call(eax); in CallApiFunctionAndReturn()
3590 __ jmp(&end_profiler_check); in CallApiFunctionAndReturn()
3592 __ bind(&profiler_disabled); in CallApiFunctionAndReturn()
3594 __ call(function_address); in CallApiFunctionAndReturn()
3595 __ bind(&end_profiler_check); in CallApiFunctionAndReturn()
3599 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
3600 __ PrepareCallCFunction(1, eax); in CallApiFunctionAndReturn()
3601 __ mov(Operand(esp, 0), in CallApiFunctionAndReturn()
3603 __ CallCFunction(ExternalReference::log_leave_external_function(isolate), in CallApiFunctionAndReturn()
3605 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
3610 __ mov(eax, return_value_operand); in CallApiFunctionAndReturn()
3616 __ bind(&prologue); in CallApiFunctionAndReturn()
3619 __ mov(Operand::StaticVariable(next_address), ebx); in CallApiFunctionAndReturn()
3620 __ sub(Operand::StaticVariable(level_address), Immediate(1)); in CallApiFunctionAndReturn()
3621 __ Assert(above_equal, kInvalidHandleScopeLevel); in CallApiFunctionAndReturn()
3622 __ cmp(edi, Operand::StaticVariable(limit_address)); in CallApiFunctionAndReturn()
3623 __ j(not_equal, &delete_allocated_handles); in CallApiFunctionAndReturn()
3626 __ bind(&leave_exit_frame); in CallApiFunctionAndReturn()
3629 __ mov(esi, *context_restore_operand); in CallApiFunctionAndReturn()
3632 __ mov(ebx, *stack_space_operand); in CallApiFunctionAndReturn()
3634 __ LeaveApiExitFrame(!restore_context); in CallApiFunctionAndReturn()
3639 __ cmp(Operand::StaticVariable(scheduled_exception_address), in CallApiFunctionAndReturn()
3641 __ j(not_equal, &promote_scheduled_exception); in CallApiFunctionAndReturn()
3649 __ JumpIfSmi(return_value, &ok, Label::kNear); in CallApiFunctionAndReturn()
3650 __ mov(map, FieldOperand(return_value, HeapObject::kMapOffset)); in CallApiFunctionAndReturn()
3652 __ CmpInstanceType(map, LAST_NAME_TYPE); in CallApiFunctionAndReturn()
3653 __ j(below_equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
3655 __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE); in CallApiFunctionAndReturn()
3656 __ j(above_equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
3658 __ cmp(map, isolate->factory()->heap_number_map()); in CallApiFunctionAndReturn()
3659 __ j(equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
3661 __ cmp(return_value, isolate->factory()->undefined_value()); in CallApiFunctionAndReturn()
3662 __ j(equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
3664 __ cmp(return_value, isolate->factory()->true_value()); in CallApiFunctionAndReturn()
3665 __ j(equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
3667 __ cmp(return_value, isolate->factory()->false_value()); in CallApiFunctionAndReturn()
3668 __ j(equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
3670 __ cmp(return_value, isolate->factory()->null_value()); in CallApiFunctionAndReturn()
3671 __ j(equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
3673 __ Abort(kAPICallReturnedInvalidObject); in CallApiFunctionAndReturn()
3675 __ bind(&ok); in CallApiFunctionAndReturn()
3680 __ pop(ecx); in CallApiFunctionAndReturn()
3681 __ add(esp, ebx); in CallApiFunctionAndReturn()
3682 __ jmp(ecx); in CallApiFunctionAndReturn()
3684 __ ret(stack_space * kPointerSize); in CallApiFunctionAndReturn()
3688 __ bind(&promote_scheduled_exception); in CallApiFunctionAndReturn()
3689 __ TailCallRuntime(Runtime::kPromoteScheduledException); in CallApiFunctionAndReturn()
3694 __ bind(&delete_allocated_handles); in CallApiFunctionAndReturn()
3695 __ mov(Operand::StaticVariable(limit_address), edi); in CallApiFunctionAndReturn()
3696 __ mov(edi, eax); in CallApiFunctionAndReturn()
3697 __ mov(Operand(esp, 0), in CallApiFunctionAndReturn()
3699 __ mov(eax, Immediate(delete_extensions)); in CallApiFunctionAndReturn()
3700 __ call(eax); in CallApiFunctionAndReturn()
3701 __ mov(eax, edi); in CallApiFunctionAndReturn()
3702 __ jmp(&leave_exit_frame); in CallApiFunctionAndReturn()
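The sequence above saves the HandleScope's next and limit pointers and bumps its level before the API call, then restores them afterwards; if the limit moved, handle-scope extensions allocated by the callee are released before leaving the exit frame. A hedged sketch of that bookkeeping in plain C++ (the struct and function names are illustrative stand-ins, not V8's):

    struct HandleScopeData {
      void** next;
      void** limit;
      int level;
    };

    // 'fn' stands in for the API function being called; extensions it
    // allocates are represented here only by a moved 'limit' pointer.
    void* CallWithinHandleScope(HandleScopeData* data, void* (*fn)()) {
      void** saved_next = data->next;
      void** saved_limit = data->limit;
      data->level++;                 // add(level_address, Immediate(1))
      void* result = fn();           // call(function_address)
      data->next = saved_next;       // mov(next_address, ebx)
      data->level--;                 // sub(level_address, Immediate(1))
      if (data->limit != saved_limit) {
        // delete_allocated_handles path: free extensions, restore the limit.
        data->limit = saved_limit;
      }
      return result;
    }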
3739 __ pop(return_address); in Generate()
3742 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate()
3745 __ push(context); in Generate()
3748 __ push(callee); in Generate()
3751 __ push(call_data); in Generate()
3756 __ push(Immediate(masm->isolate()->factory()->undefined_value())); in Generate()
3758 __ push(Immediate(masm->isolate()->factory()->undefined_value())); in Generate()
3761 __ push(scratch); in Generate()
3763 __ push(scratch); in Generate()
3766 __ push(Immediate(reinterpret_cast<int>(masm->isolate()))); in Generate()
3768 __ push(holder); in Generate()
3770 __ mov(scratch, esp); in Generate()
3773 __ push(return_address); in Generate()
3777 __ mov(context, FieldOperand(callee, JSFunction::kContextOffset)); in Generate()
3793 __ mov(ApiParameterOperand(2), scratch); in Generate()
3794 __ add(scratch, Immediate((argc() + FCA::kArgsLength - 1) * kPointerSize)); in Generate()
3796 __ mov(ApiParameterOperand(3), scratch); in Generate()
3798 __ Move(ApiParameterOperand(4), Immediate(argc())); in Generate()
3801 __ lea(scratch, ApiParameterOperand(2)); in Generate()
3802 __ mov(ApiParameterOperand(0), scratch); in Generate()
3847 __ pop(scratch); // Pop return address to extend the frame. in Generate()
3848 __ push(receiver); in Generate()
3849 __ push(FieldOperand(callback, AccessorInfo::kDataOffset)); in Generate()
3850 __ PushRoot(Heap::kUndefinedValueRootIndex); // ReturnValue in Generate()
3852 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate()
3853 __ push(Immediate(ExternalReference::isolate_address(isolate()))); in Generate()
3854 __ push(holder); in Generate()
3855 __ push(Immediate(Smi::kZero)); // should_throw_on_error -> false in Generate()
3856 __ push(FieldOperand(callback, AccessorInfo::kNameOffset)); in Generate()
3857 __ push(scratch); // Restore return address. in Generate()
3868 __ lea(scratch, Operand(esp, 2 * kPointerSize)); in Generate()
3874 __ mov(info_object, scratch); in Generate()
3877 __ sub(scratch, Immediate(kPointerSize)); in Generate()
3878 __ mov(ApiParameterOperand(0), scratch); in Generate()
3880 __ lea(scratch, info_object); in Generate()
3881 __ mov(ApiParameterOperand(1), scratch); in Generate()
3888 __ mov(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset)); in Generate()
3890 __ mov(function_address, in Generate()
3900 #undef __