
Lines Matching full:__

25 #define __ ACCESS_MASM(masm)  macro
28 __ pop(ecx); in Generate()
29 __ mov(MemOperand(esp, eax, times_4, 0), edi); in Generate()
30 __ push(edi); in Generate()
31 __ push(ebx); in Generate()
32 __ push(ecx); in Generate()
33 __ add(eax, Immediate(3)); in Generate()
34 __ TailCallRuntime(Runtime::kNewArray); in Generate()
51 __ push(descriptor.GetRegisterParameter(i)); in GenerateLightweightMiss()
53 __ CallExternalReference(miss, param_count); in GenerateLightweightMiss()
56 __ ret(0); in GenerateLightweightMiss()
64 __ pushad(); in Generate()
66 __ sub(esp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters)); in Generate()
69 __ movsd(Operand(esp, i * kDoubleSize), reg); in Generate()
75 __ PrepareCallCFunction(argument_count, ecx); in Generate()
76 __ mov(Operand(esp, 0 * kPointerSize), in Generate()
78 __ CallCFunction( in Generate()
84 __ movsd(reg, Operand(esp, i * kDoubleSize)); in Generate()
86 __ add(esp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters)); in Generate()
88 __ popad(); in Generate()
89 __ ret(0); in Generate()
152 __ push(scratch1); in Generate()
153 __ push(save_reg); in Generate()
156 __ mov(scratch1, mantissa_operand); in Generate()
160 __ fld_d(mantissa_operand); in Generate()
162 __ mov(ecx, exponent_operand); in Generate()
163 if (stash_exponent_copy) __ push(ecx); in Generate()
165 __ and_(ecx, HeapNumber::kExponentMask); in Generate()
166 __ shr(ecx, HeapNumber::kExponentShift); in Generate()
167 __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias)); in Generate()
168 __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits)); in Generate()
169 __ j(below, &process_64_bits); in Generate()
174 __ fstp(0); in Generate()
176 __ sub(ecx, Immediate(delta)); in Generate()
177 __ xor_(result_reg, result_reg); in Generate()
178 __ cmp(ecx, Immediate(31)); in Generate()
179 __ j(above, &done); in Generate()
180 __ shl_cl(scratch1); in Generate()
181 __ jmp(&check_negative); in Generate()
183 __ bind(&process_64_bits); in Generate()
189 __ sub(esp, Immediate(kDoubleSize / 2)); in Generate()
192 __ sub(esp, Immediate(kDoubleSize)); // Nolint. in Generate()
195 __ fisttp_d(Operand(esp, 0)); in Generate()
196 __ mov(result_reg, Operand(esp, 0)); // Load low word of answer as result in Generate()
197 __ add(esp, Immediate(kDoubleSize)); in Generate()
198 __ jmp(&done_no_stash); in Generate()
201 __ sub(ecx, Immediate(delta)); in Generate()
202 __ neg(ecx); in Generate()
204 __ mov(result_reg, MemOperand(esp, 0)); in Generate()
206 __ mov(result_reg, exponent_operand); in Generate()
208 __ and_(result_reg, in Generate()
210 __ add(result_reg, in Generate()
212 __ shrd_cl(scratch1, result_reg); in Generate()
213 __ shr_cl(result_reg); in Generate()
214 __ test(ecx, Immediate(32)); in Generate()
215 __ cmov(not_equal, scratch1, result_reg); in Generate()
219 __ bind(&check_negative); in Generate()
220 __ mov(result_reg, scratch1); in Generate()
221 __ neg(result_reg); in Generate()
223 __ cmp(MemOperand(esp, 0), Immediate(0)); in Generate()
225 __ cmp(exponent_operand, Immediate(0)); in Generate()
227 __ cmov(greater, result_reg, scratch1); in Generate()
230 __ bind(&done); in Generate()
232 __ add(esp, Immediate(kDoubleSize / 2)); in Generate()
234 __ bind(&done_no_stash); in Generate()
237 __ mov(final_result_reg, result_reg); in Generate()
239 __ pop(save_reg); in Generate()
240 __ pop(scratch1); in Generate()
241 __ ret(0); in Generate()
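
The sequence above truncates a heap number to a 32-bit integer by working directly on the IEEE-754 bits: it isolates the biased exponent, shifts the mantissa into place, and negates the result when the sign word says the value is negative. A minimal C++ sketch of that idea follows; it is illustrative only (the function name is made up), and it ignores the stub's SSE3 fisttp fast path and the stashed-exponent bookkeeping.

    #include <cstdint>
    #include <cstring>

    // Sketch (not V8 code): truncate a double to an int32 via its bit pattern,
    // mirroring the exponent/mantissa handling above. Out-of-range, NaN and
    // Infinity inputs collapse to 0 modulo 2^32, i.e. ToInt32-style truncation.
    int32_t TruncateDoubleToInt32(double value) {
      uint64_t bits;
      std::memcpy(&bits, &value, sizeof bits);
      int32_t exponent = static_cast<int32_t>((bits >> 52) & 0x7FF) - 1023;
      if (exponent < 0) return 0;    // |value| < 1
      if (exponent > 83) return 0;   // all integer bits above bit 31, or NaN/Inf
      uint64_t mantissa = (bits & ((uint64_t{1} << 52) - 1)) | (uint64_t{1} << 52);
      int shift = exponent - 52;     // how far the mantissa has to move
      uint32_t magnitude = shift >= 0 ? static_cast<uint32_t>(mantissa << shift)
                                      : static_cast<uint32_t>(mantissa >> -shift);
      uint32_t result = (bits >> 63) ? 0u - magnitude : magnitude;  // apply sign
      return static_cast<int32_t>(result);
    }
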
249 __ JumpIfSmi(number, &load_smi, Label::kNear); in LoadFloatOperand()
250 __ fld_d(FieldOperand(number, HeapNumber::kValueOffset)); in LoadFloatOperand()
251 __ jmp(&done, Label::kNear); in LoadFloatOperand()
253 __ bind(&load_smi); in LoadFloatOperand()
254 __ SmiUntag(number); in LoadFloatOperand()
255 __ push(number); in LoadFloatOperand()
256 __ fild_s(Operand(esp, 0)); in LoadFloatOperand()
257 __ pop(number); in LoadFloatOperand()
259 __ bind(&done); in LoadFloatOperand()
267 __ JumpIfSmi(edx, &load_smi_edx, Label::kNear); in LoadSSE2Operands()
269 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map()); in LoadSSE2Operands()
270 __ j(not_equal, not_numbers); // Argument in edx is not a number. in LoadSSE2Operands()
271 __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); in LoadSSE2Operands()
272 __ bind(&load_eax); in LoadSSE2Operands()
274 __ JumpIfSmi(eax, &load_smi_eax, Label::kNear); in LoadSSE2Operands()
275 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map()); in LoadSSE2Operands()
276 __ j(equal, &load_float_eax, Label::kNear); in LoadSSE2Operands()
277 __ jmp(not_numbers); // Argument in eax is not a number. in LoadSSE2Operands()
278 __ bind(&load_smi_edx); in LoadSSE2Operands()
279 __ SmiUntag(edx); // Untag smi before converting to float. in LoadSSE2Operands()
280 __ Cvtsi2sd(xmm0, edx); in LoadSSE2Operands()
281 __ SmiTag(edx); // Retag smi for heap number overwriting test. in LoadSSE2Operands()
282 __ jmp(&load_eax); in LoadSSE2Operands()
283 __ bind(&load_smi_eax); in LoadSSE2Operands()
284 __ SmiUntag(eax); // Untag smi before converting to float. in LoadSSE2Operands()
285 __ Cvtsi2sd(xmm1, eax); in LoadSSE2Operands()
286 __ SmiTag(eax); // Retag smi for heap number overwriting test. in LoadSSE2Operands()
287 __ jmp(&done, Label::kNear); in LoadSSE2Operands()
288 __ bind(&load_float_eax); in LoadSSE2Operands()
289 __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); in LoadSSE2Operands()
290 __ bind(&done); in LoadSSE2Operands()
300 __ JumpIfSmi(edx, &test_other, Label::kNear); in CheckFloatOperands()
301 __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset)); in CheckFloatOperands()
303 __ cmp(scratch, factory->heap_number_map()); in CheckFloatOperands()
304 __ j(not_equal, non_float); // argument in edx is not a number -> NaN in CheckFloatOperands()
306 __ bind(&test_other); in CheckFloatOperands()
307 __ JumpIfSmi(eax, &done, Label::kNear); in CheckFloatOperands()
308 __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset)); in CheckFloatOperands()
309 __ cmp(scratch, factory->heap_number_map()); in CheckFloatOperands()
310 __ j(not_equal, non_float); // argument in eax is not a number -> NaN in CheckFloatOperands()
313 __ bind(&done); in CheckFloatOperands()
329 __ mov(scratch, Immediate(1)); in Generate()
330 __ Cvtsi2sd(double_result, scratch); in Generate()
333 __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear); in Generate()
334 __ SmiUntag(exponent); in Generate()
335 __ jmp(&int_exponent); in Generate()
337 __ bind(&exponent_not_smi); in Generate()
338 __ movsd(double_exponent, in Generate()
344 __ DoubleToI(exponent, double_exponent, double_scratch, in Generate()
348 __ jmp(&int_exponent); in Generate()
350 __ bind(&try_arithmetic_simplification); in Generate()
352 __ cvttsd2si(exponent, Operand(double_exponent)); in Generate()
353 __ cmp(exponent, Immediate(0x1)); in Generate()
354 __ j(overflow, &call_runtime); in Generate()
358 __ bind(&fast_power); in Generate()
359 __ fnclex(); // Clear flags to catch exceptions later. in Generate()
361 __ sub(esp, Immediate(kDoubleSize)); in Generate()
362 __ movsd(Operand(esp, 0), double_exponent); in Generate()
363 __ fld_d(Operand(esp, 0)); // E in Generate()
364 __ movsd(Operand(esp, 0), double_base); in Generate()
365 __ fld_d(Operand(esp, 0)); // B, E in Generate()
370 __ fyl2x(); // X in Generate()
371 __ fld(0); // X, X in Generate()
372 __ frndint(); // rnd(X), X in Generate()
373 __ fsub(1); // rnd(X), X-rnd(X) in Generate()
374 __ fxch(1); // X - rnd(X), rnd(X) in Generate()
376 __ f2xm1(); // 2^(X-rnd(X)) - 1, rnd(X) in Generate()
377 __ fld1(); // 1, 2^(X-rnd(X)) - 1, rnd(X) in Generate()
378 __ faddp(1); // 2^(X-rnd(X)), rnd(X) in Generate()
380 __ fscale(); // 2^X, rnd(X) in Generate()
381 __ fstp(1); // 2^X in Generate()
383 __ fnstsw_ax(); in Generate()
384 __ test_b(eax, in Generate()
386 __ j(not_zero, &fast_power_failed, Label::kNear); in Generate()
387 __ fstp_d(Operand(esp, 0)); in Generate()
388 __ movsd(double_result, Operand(esp, 0)); in Generate()
389 __ add(esp, Immediate(kDoubleSize)); in Generate()
390 __ jmp(&done); in Generate()
392 __ bind(&fast_power_failed); in Generate()
393 __ fninit(); in Generate()
394 __ add(esp, Immediate(kDoubleSize)); in Generate()
395 __ jmp(&call_runtime); in Generate()
399 __ bind(&int_exponent); in Generate()
401 __ mov(scratch, exponent); // Back up exponent. in Generate()
402 __ movsd(double_scratch, double_base); // Back up base. in Generate()
403 __ movsd(double_scratch2, double_result); // Load double_exponent with 1. in Generate()
407 __ test(scratch, scratch); in Generate()
408 __ j(positive, &no_neg, Label::kNear); in Generate()
409 __ neg(scratch); in Generate()
410 __ bind(&no_neg); in Generate()
412 __ j(zero, &while_false, Label::kNear); in Generate()
413 __ shr(scratch, 1); in Generate()
416 __ j(above, &while_true, Label::kNear); in Generate()
417 __ movsd(double_result, double_scratch); in Generate()
418 __ j(zero, &while_false, Label::kNear); in Generate()
420 __ bind(&while_true); in Generate()
421 __ shr(scratch, 1); in Generate()
422 __ mulsd(double_scratch, double_scratch); in Generate()
423 __ j(above, &while_true, Label::kNear); in Generate()
424 __ mulsd(double_result, double_scratch); in Generate()
425 __ j(not_zero, &while_true); in Generate()
427 __ bind(&while_false); in Generate()
430 __ test(exponent, exponent); in Generate()
431 __ j(positive, &done); in Generate()
432 __ divsd(double_scratch2, double_result); in Generate()
433 __ movsd(double_result, double_scratch2); in Generate()
436 __ xorps(double_scratch2, double_scratch2); in Generate()
437 __ ucomisd(double_scratch2, double_result); // Result cannot be NaN. in Generate()
441 __ j(not_equal, &done); in Generate()
442 __ Cvtsi2sd(double_exponent, exponent); in Generate()
445 __ bind(&call_runtime); in Generate()
448 __ PrepareCallCFunction(4, scratch); in Generate()
449 __ movsd(Operand(esp, 0 * kDoubleSize), double_base); in Generate()
450 __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent); in Generate()
451 __ CallCFunction(ExternalReference::power_double_double_function(isolate()), in Generate()
456 __ sub(esp, Immediate(kDoubleSize)); in Generate()
457 __ fstp_d(Operand(esp, 0)); in Generate()
458 __ movsd(double_result, Operand(esp, 0)); in Generate()
459 __ add(esp, Immediate(kDoubleSize)); in Generate()
461 __ bind(&done); in Generate()
462 __ ret(0); in Generate()
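
The fast path above uses the x87 identity base^exp = 2^(exp * log2(base)) (fyl2x, f2xm1, fscale), while integer exponents are handled by exponentiation by squaring with a final reciprocal for negative exponents. A rough C++ sketch of that integer path is shown below; it is illustrative only, with an invented function name, and it omits the corner cases for which the real stub falls back to the runtime after the loop.

    // Sketch of the integer-exponent path above: exponentiation by squaring,
    // with a reciprocal at the end when the exponent was negative.
    double PowIntExponent(double base, int exponent) {
      double result = 1.0;
      double running = base;
      unsigned n = exponent < 0 ? 0u - static_cast<unsigned>(exponent)
                                : static_cast<unsigned>(exponent);
      while (n != 0) {
        if (n & 1u) result *= running;  // odd bit: fold into the result
        running *= running;             // square for the next bit
        n >>= 1;
      }
      if (exponent < 0) result = 1.0 / result;
      return result;
    }
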
470 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
493 __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size)); in Generate()
494 __ test(ebx, ebx); in Generate()
495 __ j(zero, &runtime); in Generate()
498 __ mov(eax, Operand(esp, kJSRegExpOffset)); in Generate()
500 __ JumpIfSmi(eax, &runtime); in Generate()
501 __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx); in Generate()
502 __ j(not_equal, &runtime); in Generate()
505 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset)); in Generate()
507 __ test(ecx, Immediate(kSmiTagMask)); in Generate()
508 __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected); in Generate()
509 __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx); in Generate()
510 __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected); in Generate()
515 __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset)); in Generate()
516 __ cmp(ebx, Immediate(Smi::FromInt(JSRegExp::IRREGEXP))); in Generate()
517 __ j(not_equal, &runtime); in Generate()
521 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset)); in Generate()
528 __ cmp(edx, Isolate::kJSRegexpStaticOffsetsVectorSize - 2); in Generate()
529 __ j(above, &runtime); in Generate()
532 __ Move(edi, Immediate(0)); in Generate()
533 __ mov(eax, Operand(esp, kSubjectOffset)); in Generate()
534 __ JumpIfSmi(eax, &runtime); in Generate()
535 __ mov(edx, eax); // Make a copy of the original subject string. in Generate()
562 __ bind(&check_underlying); in Generate()
564 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); in Generate()
565 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset)); in Generate()
567 __ and_(ebx, kIsNotStringMask | in Generate()
572 __ j(zero, &seq_two_byte_string); // Go to (9). in Generate()
576 __ and_(ebx, Immediate(kIsNotStringMask | in Generate()
579 __ j(zero, &seq_one_byte_string, Label::kNear); // Go to (5). in Generate()
589 __ cmp(ebx, Immediate(kExternalStringTag)); in Generate()
590 __ j(greater_equal, &not_seq_nor_cons); // Go to (6). in Generate()
594 __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string()); in Generate()
595 __ j(not_equal, &runtime); in Generate()
596 __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset)); in Generate()
597 __ jmp(&check_underlying); in Generate()
603 __ bind(&seq_one_byte_string); in Generate()
607 __ mov(ebx, Operand(esp, kPreviousIndexOffset)); in Generate()
608 __ JumpIfNotSmi(ebx, &runtime); in Generate()
609 __ cmp(ebx, FieldOperand(edx, String::kLengthOffset)); in Generate()
610 __ j(above_equal, &runtime); in Generate()
611 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataOneByteCodeOffset)); in Generate()
612 __ Move(ecx, Immediate(1)); // Type is one byte. in Generate()
615 __ bind(&check_code); in Generate()
620 __ JumpIfSmi(edx, &runtime); in Generate()
628 __ IncrementCounter(counters->regexp_entry_native(), 1); in Generate()
632 __ EnterApiExitFrame(kRegExpExecuteArguments); in Generate()
635 __ mov(Operand(esp, 8 * kPointerSize), in Generate()
639 __ mov(Operand(esp, 7 * kPointerSize), Immediate(1)); in Generate()
642 __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address)); in Generate()
643 __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size)); in Generate()
644 __ mov(Operand(esp, 6 * kPointerSize), esi); in Generate()
648 __ mov(Operand(esp, 5 * kPointerSize), Immediate(0)); in Generate()
651 __ mov(Operand(esp, 4 * kPointerSize), in Generate()
656 __ SmiUntag(ebx); in Generate()
657 __ mov(Operand(esp, 1 * kPointerSize), ebx); in Generate()
664 __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize)); in Generate()
665 __ mov(Operand(esp, 0 * kPointerSize), esi); in Generate()
676 __ mov(esi, FieldOperand(esi, String::kLengthOffset)); in Generate()
677 __ add(esi, edi); // Calculate input end wrt offset. in Generate()
678 __ SmiUntag(edi); in Generate()
679 __ add(ebx, edi); // Calculate input start wrt offset. in Generate()
684 __ test(ecx, ecx); in Generate()
685 __ j(zero, &setup_two_byte, Label::kNear); in Generate()
686 __ SmiUntag(esi); in Generate()
687 __ lea(ecx, FieldOperand(eax, esi, times_1, SeqOneByteString::kHeaderSize)); in Generate()
688 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4. in Generate()
689 __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqOneByteString::kHeaderSize)); in Generate()
690 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3. in Generate()
691 __ jmp(&setup_rest, Label::kNear); in Generate()
693 __ bind(&setup_two_byte); in Generate()
696 __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize)); in Generate()
697 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4. in Generate()
698 __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize)); in Generate()
699 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3. in Generate()
701 __ bind(&setup_rest); in Generate()
704 __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag)); in Generate()
705 __ call(edx); in Generate()
708 __ LeaveApiExitFrame(true); in Generate()
712 __ cmp(eax, 1); in Generate()
715 __ j(equal, &success); in Generate()
717 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE); in Generate()
718 __ j(equal, &failure); in Generate()
719 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION); in Generate()
721 __ j(not_equal, &runtime); in Generate()
728 __ mov(edx, Immediate(isolate()->factory()->the_hole_value())); in Generate()
729 __ mov(eax, Operand::StaticVariable(pending_exception)); in Generate()
730 __ cmp(edx, eax); in Generate()
731 __ j(equal, &runtime); in Generate()
734 __ TailCallRuntime(Runtime::kRegExpExecReThrow); in Generate()
736 __ bind(&failure); in Generate()
738 __ mov(eax, factory->null_value()); in Generate()
739 __ ret(4 * kPointerSize); in Generate()
742 __ bind(&success); in Generate()
743 __ mov(eax, Operand(esp, kJSRegExpOffset)); in Generate()
744 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset)); in Generate()
745 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset)); in Generate()
749 __ add(edx, Immediate(2)); // edx was a smi. in Generate()
753 __ mov(ebx, Operand(esp, kLastMatchInfoOffset)); in Generate()
754 __ JumpIfSmi(ebx, &runtime); in Generate()
756 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset)); in Generate()
757 __ cmp(eax, factory->fixed_array_map()); in Generate()
758 __ j(not_equal, &runtime); in Generate()
761 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset)); in Generate()
762 __ SmiUntag(eax); in Generate()
763 __ sub(eax, Immediate(RegExpMatchInfo::kLastMatchOverhead)); in Generate()
764 __ cmp(edx, eax); in Generate()
765 __ j(greater, &runtime); in Generate()
770 __ SmiTag(edx); // Number of capture registers to smi. in Generate()
771 __ mov(FieldOperand(ebx, RegExpMatchInfo::kNumberOfCapturesOffset), edx); in Generate()
772 __ SmiUntag(edx); // Number of capture registers back from smi. in Generate()
774 __ mov(eax, Operand(esp, kSubjectOffset)); in Generate()
775 __ mov(ecx, eax); in Generate()
776 __ mov(FieldOperand(ebx, RegExpMatchInfo::kLastSubjectOffset), eax); in Generate()
777 __ RecordWriteField(ebx, RegExpMatchInfo::kLastSubjectOffset, eax, edi, in Generate()
779 __ mov(eax, ecx); in Generate()
780 __ mov(FieldOperand(ebx, RegExpMatchInfo::kLastInputOffset), eax); in Generate()
781 __ RecordWriteField(ebx, RegExpMatchInfo::kLastInputOffset, eax, edi, in Generate()
787 __ mov(ecx, Immediate(address_of_static_offsets_vector)); in Generate()
795 __ bind(&next_capture); in Generate()
796 __ sub(edx, Immediate(1)); in Generate()
797 __ j(negative, &done, Label::kNear); in Generate()
799 __ mov(edi, Operand(ecx, edx, times_int_size, 0)); in Generate()
800 __ SmiTag(edi); in Generate()
802 __ mov(FieldOperand(ebx, edx, times_pointer_size, in Generate()
805 __ jmp(&next_capture); in Generate()
806 __ bind(&done); in Generate()
809 __ mov(eax, ebx); in Generate()
810 __ ret(4 * kPointerSize); in Generate()
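
On the success path the capture offsets are copied from the static offsets vector into the last-match-info array, counting the register index down until it goes negative and smi-tagging each value on the way. Roughly, as a sketch with made-up parameter names (SmiTag on ia32 is a left shift by one bit):

    #include <cstdint>

    // Sketch only: copy raw int32 capture offsets into the match info as smis.
    void CopyCaptures(const int32_t* offsets_vector, int register_count,
                      int32_t* match_info_captures) {
      for (int i = register_count - 1; i >= 0; --i) {
        // SmiTag: multiply by two; the low tag bit stays 0 for small integers.
        match_info_captures[i] = offsets_vector[i] * 2;
      }
    }
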
813 __ bind(&runtime); in Generate()
814 __ TailCallRuntime(Runtime::kRegExpExec); in Generate()
818 __ bind(&not_seq_nor_cons); in Generate()
820 __ j(greater, &not_long_external, Label::kNear); // Go to (10). in Generate()
823 __ bind(&external_string); in Generate()
825 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); in Generate()
826 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset)); in Generate()
830 __ test_b(ebx, Immediate(kIsIndirectStringMask)); in Generate()
831 __ Assert(zero, kExternalStringExpectedButNotFound); in Generate()
833 __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset)); in Generate()
836 __ sub(eax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); in Generate()
839 __ test_b(ebx, Immediate(kStringEncodingMask)); in Generate()
840 __ j(not_zero, &seq_one_byte_string); // Go to (5). in Generate()
846 __ bind(&seq_two_byte_string); in Generate()
850 __ mov(ebx, Operand(esp, kPreviousIndexOffset)); in Generate()
851 __ JumpIfNotSmi(ebx, &runtime); in Generate()
852 __ cmp(ebx, FieldOperand(edx, String::kLengthOffset)); in Generate()
853 __ j(above_equal, &runtime); in Generate()
854 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset)); in Generate()
855 __ Move(ecx, Immediate(0)); // Type is two byte. in Generate()
856 __ jmp(&check_code); // Go to (E). in Generate()
859 __ bind(&not_long_external); in Generate()
862 __ test(ebx, Immediate(kIsNotStringMask | kShortExternalStringTag)); in Generate()
863 __ j(not_zero, &runtime); in Generate()
867 __ cmp(ebx, Immediate(kThinStringTag)); in Generate()
868 __ j(equal, &thin_string, Label::kNear); in Generate()
870 __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset)); in Generate()
871 __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset)); in Generate()
872 __ jmp(&check_underlying); // Go to (1). in Generate()
874 __ bind(&thin_string); in Generate()
875 __ mov(eax, FieldOperand(eax, ThinString::kActualOffset)); in Generate()
876 __ jmp(&check_underlying); // Go to (1). in Generate()
893 __ JumpIfNotSmi(input, fail); in CheckInputType()
895 __ JumpIfSmi(input, &ok); in CheckInputType()
896 __ cmp(FieldOperand(input, HeapObject::kMapOffset), in CheckInputType()
898 __ j(not_equal, fail); in CheckInputType()
902 __ bind(&ok); in CheckInputType()
910 __ JumpIfSmi(object, label); in BranchIfNotInternalizedString()
911 __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset)); in BranchIfNotInternalizedString()
912 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset)); in BranchIfNotInternalizedString()
914 __ test(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); in BranchIfNotInternalizedString()
915 __ j(not_zero, label); in BranchIfNotInternalizedString()
929 __ mov(ecx, edx); in GenerateGeneric()
930 __ or_(ecx, eax); in GenerateGeneric()
931 __ JumpIfNotSmi(ecx, &non_smi, Label::kNear); in GenerateGeneric()
932 __ sub(edx, eax); // Return on the result of the subtraction. in GenerateGeneric()
933 __ j(no_overflow, &smi_done, Label::kNear); in GenerateGeneric()
934 __ not_(edx); // Correct sign in case of overflow. edx is never 0 here. in GenerateGeneric()
935 __ bind(&smi_done); in GenerateGeneric()
936 __ mov(eax, edx); in GenerateGeneric()
937 __ ret(0); in GenerateGeneric()
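
For two smis the comparison above is just a subtraction whose sign is the answer; when the subtraction overflows, the wrong-signed difference is bitwise-negated, which restores the sign and, because of the smi value range, can never produce zero. As a C++ sketch (the GCC/Clang overflow builtin stands in for the flags check; the name is illustrative):

    // Sketch of the smi fast path above: compare two tagged small integers by
    // subtracting them and fixing the sign if the subtraction overflowed.
    int32_t CompareSmis(int32_t lhs, int32_t rhs) {
      int32_t diff;
      if (__builtin_sub_overflow(lhs, rhs, &diff)) {
        diff = ~diff;  // overflow: flip the sign; smi range keeps this non-zero
      }
      return diff;     // negative, zero, or positive, like a comparator
    }
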
938 __ bind(&non_smi); in GenerateGeneric()
948 __ cmp(eax, edx); in GenerateGeneric()
949 __ j(not_equal, &not_identical); in GenerateGeneric()
954 __ cmp(edx, isolate()->factory()->undefined_value()); in GenerateGeneric()
956 __ j(not_equal, &check_for_nan, Label::kNear); in GenerateGeneric()
957 __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc)))); in GenerateGeneric()
958 __ ret(0); in GenerateGeneric()
959 __ bind(&check_for_nan); in GenerateGeneric()
964 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), in GenerateGeneric()
966 __ j(equal, &generic_heap_number_comparison, Label::kNear); in GenerateGeneric()
968 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset)); in GenerateGeneric()
969 __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset)); in GenerateGeneric()
971 __ cmpb(ecx, Immediate(FIRST_JS_RECEIVER_TYPE)); in GenerateGeneric()
972 __ j(above_equal, &runtime_call, Label::kFar); in GenerateGeneric()
974 __ cmpb(ecx, Immediate(SYMBOL_TYPE)); in GenerateGeneric()
975 __ j(equal, &runtime_call, Label::kFar); in GenerateGeneric()
977 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); in GenerateGeneric()
978 __ ret(0); in GenerateGeneric()
981 __ bind(&not_identical); in GenerateGeneric()
997 __ mov(ecx, Immediate(kSmiTagMask)); in GenerateGeneric()
998 __ and_(ecx, eax); in GenerateGeneric()
999 __ test(ecx, edx); in GenerateGeneric()
1000 __ j(not_zero, &not_smis, Label::kNear); in GenerateGeneric()
1006 __ sub(ecx, Immediate(0x01)); in GenerateGeneric()
1007 __ mov(ebx, edx); in GenerateGeneric()
1008 __ xor_(ebx, eax); in GenerateGeneric()
1009 __ and_(ebx, ecx); // ebx holds either 0 or eax ^ edx. in GenerateGeneric()
1010 __ xor_(ebx, eax); in GenerateGeneric()
1014 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset), in GenerateGeneric()
1017 __ j(equal, &slow, Label::kNear); in GenerateGeneric()
1019 __ mov(eax, ebx); in GenerateGeneric()
1020 __ ret(0); in GenerateGeneric()
1022 __ bind(&not_smis); in GenerateGeneric()
1031 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx); in GenerateGeneric()
1032 __ j(below, &first_non_object, Label::kNear); in GenerateGeneric()
1037 __ bind(&return_not_equal); in GenerateGeneric()
1038 __ ret(0); in GenerateGeneric()
1040 __ bind(&first_non_object); in GenerateGeneric()
1042 __ CmpInstanceType(ecx, ODDBALL_TYPE); in GenerateGeneric()
1043 __ j(equal, &return_not_equal); in GenerateGeneric()
1045 __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx); in GenerateGeneric()
1046 __ j(above_equal, &return_not_equal); in GenerateGeneric()
1049 __ CmpInstanceType(ecx, ODDBALL_TYPE); in GenerateGeneric()
1050 __ j(equal, &return_not_equal); in GenerateGeneric()
1053 __ bind(&slow); in GenerateGeneric()
1059 __ bind(&generic_heap_number_comparison); in GenerateGeneric()
1062 __ ucomisd(xmm0, xmm1); in GenerateGeneric()
1064 __ j(parity_even, &unordered, Label::kNear); in GenerateGeneric()
1066 __ mov(eax, 0); // equal in GenerateGeneric()
1067 __ mov(ecx, Immediate(Smi::FromInt(1))); in GenerateGeneric()
1068 __ cmov(above, eax, ecx); in GenerateGeneric()
1069 __ mov(ecx, Immediate(Smi::FromInt(-1))); in GenerateGeneric()
1070 __ cmov(below, eax, ecx); in GenerateGeneric()
1071 __ ret(0); in GenerateGeneric()
1075 __ bind(&unordered); in GenerateGeneric()
1078 __ mov(eax, Immediate(Smi::FromInt(1))); in GenerateGeneric()
1080 __ mov(eax, Immediate(Smi::FromInt(-1))); in GenerateGeneric()
1082 __ ret(0); in GenerateGeneric()
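
The ucomisd/cmov sequence above turns a floating-point comparison into a three-way result, with the parity flag catching NaN operands before the cmovs run. A small sketch of the same shape (the NaN outcome is whatever the unordered handler chooses, passed in here as a parameter):

    // Sketch of the ucomisd/cmov pattern above: -1 / 0 / +1, with NaN handled
    // separately (ucomisd sets the parity flag for unordered operands).
    int CompareDoubles(double lhs, double rhs, int nan_result) {
      if (lhs != lhs || rhs != rhs) return nan_result;  // unordered
      int result = 0;               // start with "equal"
      if (lhs > rhs) result = 1;    // cmov(above, ...)
      if (lhs < rhs) result = -1;   // cmov(below, ...)
      return result;
    }
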
1085 __ bind(&non_number_comparison); in GenerateGeneric()
1096 __ ret(0); in GenerateGeneric()
1099 __ bind(&check_for_strings); in GenerateGeneric()
1101 __ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx, in GenerateGeneric()
1112 __ Abort(kUnexpectedFallThroughFromStringComparison); in GenerateGeneric()
1115 __ bind(&check_unequal_objects); in GenerateGeneric()
1126 __ lea(ecx, Operand(eax, edx, times_1, 0)); in GenerateGeneric()
1127 __ test(ecx, Immediate(kSmiTagMask)); in GenerateGeneric()
1128 __ j(not_zero, &runtime_call); in GenerateGeneric()
1130 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset)); in GenerateGeneric()
1131 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset)); in GenerateGeneric()
1133 __ test_b(FieldOperand(ebx, Map::kBitFieldOffset), in GenerateGeneric()
1135 __ j(not_zero, &undetectable, Label::kNear); in GenerateGeneric()
1136 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset), in GenerateGeneric()
1138 __ j(not_zero, &return_unequal, Label::kNear); in GenerateGeneric()
1140 __ CmpInstanceType(ebx, FIRST_JS_RECEIVER_TYPE); in GenerateGeneric()
1141 __ j(below, &runtime_call, Label::kNear); in GenerateGeneric()
1142 __ CmpInstanceType(ecx, FIRST_JS_RECEIVER_TYPE); in GenerateGeneric()
1143 __ j(below, &runtime_call, Label::kNear); in GenerateGeneric()
1145 __ bind(&return_unequal); in GenerateGeneric()
1147 __ ret(0); // eax, edx were pushed in GenerateGeneric()
1149 __ bind(&undetectable); in GenerateGeneric()
1150 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset), in GenerateGeneric()
1152 __ j(zero, &return_unequal, Label::kNear); in GenerateGeneric()
1157 __ CmpInstanceType(ebx, ODDBALL_TYPE); in GenerateGeneric()
1158 __ j(zero, &return_equal, Label::kNear); in GenerateGeneric()
1159 __ CmpInstanceType(ecx, ODDBALL_TYPE); in GenerateGeneric()
1160 __ j(not_zero, &return_unequal, Label::kNear); in GenerateGeneric()
1162 __ bind(&return_equal); in GenerateGeneric()
1163 __ Move(eax, Immediate(EQUAL)); in GenerateGeneric()
1164 __ ret(0); // eax, edx were pushed in GenerateGeneric()
1166 __ bind(&runtime_call); in GenerateGeneric()
1171 __ Push(esi); in GenerateGeneric()
1172 __ Call(strict() ? isolate()->builtins()->StrictEqual() in GenerateGeneric()
1175 __ Pop(esi); in GenerateGeneric()
1179 __ sub(eax, Immediate(isolate()->factory()->true_value())); in GenerateGeneric()
1180 __ Ret(); in GenerateGeneric()
1183 __ pop(ecx); in GenerateGeneric()
1184 __ push(edx); in GenerateGeneric()
1185 __ push(eax); in GenerateGeneric()
1186 __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc)))); in GenerateGeneric()
1187 __ push(ecx); in GenerateGeneric()
1190 __ TailCallRuntime(Runtime::kCompare); in GenerateGeneric()
1193 __ bind(&miss); in GenerateGeneric()
1208 __ SmiTag(eax); in CallStubInRecordCallTarget()
1209 __ push(eax); in CallStubInRecordCallTarget()
1210 __ push(edi); in CallStubInRecordCallTarget()
1211 __ push(edx); in CallStubInRecordCallTarget()
1212 __ push(ebx); in CallStubInRecordCallTarget()
1213 __ push(esi); in CallStubInRecordCallTarget()
1215 __ CallStub(stub); in CallStubInRecordCallTarget()
1217 __ pop(esi); in CallStubInRecordCallTarget()
1218 __ pop(ebx); in CallStubInRecordCallTarget()
1219 __ pop(edx); in CallStubInRecordCallTarget()
1220 __ pop(edi); in CallStubInRecordCallTarget()
1221 __ pop(eax); in CallStubInRecordCallTarget()
1222 __ SmiUntag(eax); in CallStubInRecordCallTarget()
1239 __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size, in GenerateRecordCallTarget()
1247 __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset)); in GenerateRecordCallTarget()
1248 __ j(equal, &done, Label::kFar); in GenerateRecordCallTarget()
1249 __ CompareRoot(ecx, Heap::kmegamorphic_symbolRootIndex); in GenerateRecordCallTarget()
1250 __ j(equal, &done, Label::kFar); in GenerateRecordCallTarget()
1251 __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset), in GenerateRecordCallTarget()
1253 __ j(not_equal, &check_allocation_site); in GenerateRecordCallTarget()
1256 __ JumpIfSmi(FieldOperand(ecx, WeakCell::kValueOffset), &initialize); in GenerateRecordCallTarget()
1257 __ jmp(&megamorphic); in GenerateRecordCallTarget()
1259 __ bind(&check_allocation_site); in GenerateRecordCallTarget()
1264 __ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex); in GenerateRecordCallTarget()
1265 __ j(not_equal, &miss); in GenerateRecordCallTarget()
1268 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx); in GenerateRecordCallTarget()
1269 __ cmp(edi, ecx); in GenerateRecordCallTarget()
1270 __ j(not_equal, &megamorphic); in GenerateRecordCallTarget()
1271 __ jmp(&done, Label::kFar); in GenerateRecordCallTarget()
1273 __ bind(&miss); in GenerateRecordCallTarget()
1277 __ CompareRoot(ecx, Heap::kuninitialized_symbolRootIndex); in GenerateRecordCallTarget()
1278 __ j(equal, &initialize); in GenerateRecordCallTarget()
1281 __ bind(&megamorphic); in GenerateRecordCallTarget()
1282 __ mov( in GenerateRecordCallTarget()
1285 __ jmp(&done, Label::kFar); in GenerateRecordCallTarget()
1289 __ bind(&initialize); in GenerateRecordCallTarget()
1291 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx); in GenerateRecordCallTarget()
1292 __ cmp(edi, ecx); in GenerateRecordCallTarget()
1293 __ j(not_equal, &not_array_function); in GenerateRecordCallTarget()
1300 __ jmp(&done); in GenerateRecordCallTarget()
1302 __ bind(&not_array_function); in GenerateRecordCallTarget()
1306 __ bind(&done); in GenerateRecordCallTarget()
1308 __ add(FieldOperand(ebx, edx, times_half_pointer_size, in GenerateRecordCallTarget()
1322 __ JumpIfSmi(edi, &non_function); in Generate()
1324 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx); in Generate()
1325 __ j(not_equal, &non_function); in Generate()
1331 __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size, in Generate()
1334 __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map)); in Generate()
1335 __ j(equal, &feedback_register_initialized); in Generate()
1336 __ mov(ebx, isolate()->factory()->undefined_value()); in Generate()
1337 __ bind(&feedback_register_initialized); in Generate()
1339 __ AssertUndefinedOrAllocationSite(ebx); in Generate()
1342 __ mov(edx, edi); in Generate()
1346 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset)); in Generate()
1347 __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset)); in Generate()
1348 __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize)); in Generate()
1349 __ jmp(ecx); in Generate()
1351 __ bind(&non_function); in Generate()
1352 __ mov(edx, edi); in Generate()
1353 __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET); in Generate()
1410 __ EnterApiExitFrame(arg_stack_space); in Generate()
1413 __ mov(esi, ecx); in Generate()
1414 __ mov(edi, eax); in Generate()
1416 __ EnterExitFrame( in Generate()
1431 __ CheckStackAlignment(); in Generate()
1435 __ mov(Operand(esp, 0 * kPointerSize), edi); // argc. in Generate()
1436 __ mov(Operand(esp, 1 * kPointerSize), esi); // argv. in Generate()
1437 __ mov(Operand(esp, 2 * kPointerSize), in Generate()
1442 __ lea(eax, Operand(esp, 4 * kPointerSize)); in Generate()
1443 __ mov(Operand(esp, 0 * kPointerSize), eax); in Generate()
1444 __ mov(Operand(esp, 1 * kPointerSize), edi); // argc. in Generate()
1445 __ mov(Operand(esp, 2 * kPointerSize), esi); // argv. in Generate()
1446 __ mov(Operand(esp, 3 * kPointerSize), in Generate()
1449 __ call(ebx); in Generate()
1455 __ sub(esp, Immediate(kPointerSize)); in Generate()
1458 __ mov(kReturnRegister0, Operand(esp, 4 * kPointerSize)); in Generate()
1459 __ mov(kReturnRegister1, Operand(esp, 5 * kPointerSize)); in Generate()
1460 __ mov(kReturnRegister2, Operand(esp, 6 * kPointerSize)); in Generate()
1466 __ cmp(eax, isolate()->factory()->exception()); in Generate()
1467 __ j(equal, &exception_returned); in Generate()
1472 __ push(edx); in Generate()
1473 __ mov(edx, Immediate(isolate()->factory()->the_hole_value())); in Generate()
1477 __ cmp(edx, Operand::StaticVariable(pending_exception_address)); in Generate()
1479 __ j(equal, &okay, Label::kNear); in Generate()
1480 __ int3(); in Generate()
1481 __ bind(&okay); in Generate()
1482 __ pop(edx); in Generate()
1486 __ LeaveExitFrame(save_doubles(), !argv_in_register()); in Generate()
1487 __ ret(0); in Generate()
1490 __ bind(&exception_returned); in Generate()
1509 __ PrepareCallCFunction(3, eax); in Generate()
1510 __ mov(Operand(esp, 0 * kPointerSize), Immediate(0)); // argc. in Generate()
1511 __ mov(Operand(esp, 1 * kPointerSize), Immediate(0)); // argv. in Generate()
1512 __ mov(Operand(esp, 2 * kPointerSize), in Generate()
1514 __ CallCFunction(find_handler, 3); in Generate()
1518 __ mov(esi, Operand::StaticVariable(pending_handler_context_address)); in Generate()
1519 __ mov(esp, Operand::StaticVariable(pending_handler_sp_address)); in Generate()
1520 __ mov(ebp, Operand::StaticVariable(pending_handler_fp_address)); in Generate()
1525 __ test(esi, esi); in Generate()
1526 __ j(zero, &skip, Label::kNear); in Generate()
1527 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi); in Generate()
1528 __ bind(&skip); in Generate()
1531 __ mov(edi, Operand::StaticVariable(pending_handler_code_address)); in Generate()
1532 __ mov(edx, Operand::StaticVariable(pending_handler_offset_address)); in Generate()
1533 __ lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize)); in Generate()
1534 __ jmp(edi); in Generate()
1545 __ push(ebp); in Generate()
1546 __ mov(ebp, esp); in Generate()
1550 __ push(Immediate(StackFrame::TypeToMarker(marker))); // marker in Generate()
1552 __ push(Operand::StaticVariable(context_address)); // context in Generate()
1554 __ push(edi); in Generate()
1555 __ push(esi); in Generate()
1556 __ push(ebx); in Generate()
1560 __ push(Operand::StaticVariable(c_entry_fp)); in Generate()
1564 __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0)); in Generate()
1565 __ j(not_equal, &not_outermost_js, Label::kNear); in Generate()
1566 __ mov(Operand::StaticVariable(js_entry_sp), ebp); in Generate()
1567 __ push(Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME)); in Generate()
1568 __ jmp(&invoke, Label::kNear); in Generate()
1569 __ bind(&not_outermost_js); in Generate()
1570 __ push(Immediate(StackFrame::INNER_JSENTRY_FRAME)); in Generate()
1574 __ jmp(&invoke); in Generate()
1575 __ bind(&handler_entry); in Generate()
1581 __ mov(Operand::StaticVariable(pending_exception), eax); in Generate()
1582 __ mov(eax, Immediate(isolate()->factory()->exception())); in Generate()
1583 __ jmp(&exit); in Generate()
1586 __ bind(&invoke); in Generate()
1587 __ PushStackHandler(); in Generate()
1590 __ push(Immediate(0)); // receiver in Generate()
1599 __ mov(edx, Immediate(construct_entry)); in Generate()
1602 __ mov(edx, Immediate(entry)); in Generate()
1604 __ mov(edx, Operand(edx, 0)); // deref address in Generate()
1605 __ lea(edx, FieldOperand(edx, Code::kHeaderSize)); in Generate()
1606 __ call(edx); in Generate()
1609 __ PopStackHandler(); in Generate()
1611 __ bind(&exit); in Generate()
1613 __ pop(ebx); in Generate()
1614 __ cmp(ebx, Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME)); in Generate()
1615 __ j(not_equal, &not_outermost_js_2); in Generate()
1616 __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0)); in Generate()
1617 __ bind(&not_outermost_js_2); in Generate()
1620 __ pop(Operand::StaticVariable(ExternalReference( in Generate()
1624 __ pop(ebx); in Generate()
1625 __ pop(esi); in Generate()
1626 __ pop(edi); in Generate()
1627 __ add(esp, Immediate(2 * kPointerSize)); // remove markers in Generate()
1630 __ pop(ebp); in Generate()
1631 __ ret(0); in Generate()
1642 __ JumpIfSmi(object_, receiver_not_string_); in GenerateFast()
1645 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset)); in GenerateFast()
1646 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); in GenerateFast()
1648 __ test(result_, Immediate(kIsNotStringMask)); in GenerateFast()
1649 __ j(not_zero, receiver_not_string_); in GenerateFast()
1654 __ JumpIfNotSmi(index_, &index_not_smi_); in GenerateFast()
1655 __ bind(&got_smi_index_); in GenerateFast()
1658 __ cmp(index_, FieldOperand(object_, String::kLengthOffset)); in GenerateFast()
1659 __ j(above_equal, index_out_of_range_); in GenerateFast()
1661 __ SmiUntag(index_); in GenerateFast()
1667 __ SmiTag(result_); in GenerateFast()
1668 __ bind(&exit_); in GenerateFast()
1675 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase); in GenerateSlow()
1678 __ bind(&index_not_smi_); in GenerateSlow()
1680 __ CheckMap(index_, in GenerateSlow()
1686 __ push(LoadWithVectorDescriptor::VectorRegister()); in GenerateSlow()
1687 __ push(LoadDescriptor::SlotRegister()); in GenerateSlow()
1689 __ push(object_); in GenerateSlow()
1690 __ push(index_); // Consumed by runtime conversion function. in GenerateSlow()
1691 __ CallRuntime(Runtime::kNumberToSmi); in GenerateSlow()
1695 __ mov(index_, eax); in GenerateSlow()
1697 __ pop(object_); in GenerateSlow()
1699 __ pop(LoadDescriptor::SlotRegister()); in GenerateSlow()
1700 __ pop(LoadWithVectorDescriptor::VectorRegister()); in GenerateSlow()
1703 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset)); in GenerateSlow()
1704 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset)); in GenerateSlow()
1708 __ JumpIfNotSmi(index_, index_out_of_range_); in GenerateSlow()
1710 __ jmp(&got_smi_index_); in GenerateSlow()
1715 __ bind(&call_runtime_); in GenerateSlow()
1717 __ push(object_); in GenerateSlow()
1718 __ SmiTag(index_); in GenerateSlow()
1719 __ push(index_); in GenerateSlow()
1720 __ CallRuntime(Runtime::kStringCharCodeAtRT); in GenerateSlow()
1722 __ mov(result_, eax); in GenerateSlow()
1725 __ jmp(&exit_); in GenerateSlow()
1727 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase); in GenerateSlow()
1739 __ mov(length, FieldOperand(left, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
1740 __ cmp(length, FieldOperand(right, String::kLengthOffset)); in GenerateFlatOneByteStringEquals()
1741 __ j(equal, &check_zero_length, Label::kNear); in GenerateFlatOneByteStringEquals()
1742 __ bind(&strings_not_equal); in GenerateFlatOneByteStringEquals()
1743 __ Move(eax, Immediate(Smi::FromInt(NOT_EQUAL))); in GenerateFlatOneByteStringEquals()
1744 __ ret(0); in GenerateFlatOneByteStringEquals()
1748 __ bind(&check_zero_length); in GenerateFlatOneByteStringEquals()
1750 __ test(length, length); in GenerateFlatOneByteStringEquals()
1751 __ j(not_zero, &compare_chars, Label::kNear); in GenerateFlatOneByteStringEquals()
1752 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); in GenerateFlatOneByteStringEquals()
1753 __ ret(0); in GenerateFlatOneByteStringEquals()
1756 __ bind(&compare_chars); in GenerateFlatOneByteStringEquals()
1761 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); in GenerateFlatOneByteStringEquals()
1762 __ ret(0); in GenerateFlatOneByteStringEquals()
1770 __ IncrementCounter(counters->string_compare_native(), 1); in GenerateCompareFlatOneByteStrings()
1774 __ mov(scratch1, FieldOperand(left, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
1775 __ mov(scratch3, scratch1); in GenerateCompareFlatOneByteStrings()
1776 __ sub(scratch3, FieldOperand(right, String::kLengthOffset)); in GenerateCompareFlatOneByteStrings()
1780 __ j(less_equal, &left_shorter, Label::kNear); in GenerateCompareFlatOneByteStrings()
1782 __ sub(scratch1, length_delta); in GenerateCompareFlatOneByteStrings()
1783 __ bind(&left_shorter); in GenerateCompareFlatOneByteStrings()
1789 __ test(min_length, min_length); in GenerateCompareFlatOneByteStrings()
1790 __ j(zero, &compare_lengths, Label::kNear); in GenerateCompareFlatOneByteStrings()
1798 __ bind(&compare_lengths); in GenerateCompareFlatOneByteStrings()
1799 __ test(length_delta, length_delta); in GenerateCompareFlatOneByteStrings()
1801 __ j(not_zero, &length_not_equal, Label::kNear); in GenerateCompareFlatOneByteStrings()
1806 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); in GenerateCompareFlatOneByteStrings()
1807 __ ret(0); in GenerateCompareFlatOneByteStrings()
1811 __ bind(&length_not_equal); in GenerateCompareFlatOneByteStrings()
1812 __ j(greater, &result_greater, Label::kNear); in GenerateCompareFlatOneByteStrings()
1813 __ jmp(&result_less, Label::kNear); in GenerateCompareFlatOneByteStrings()
1814 __ bind(&result_not_equal); in GenerateCompareFlatOneByteStrings()
1815 __ j(above, &result_greater, Label::kNear); in GenerateCompareFlatOneByteStrings()
1816 __ bind(&result_less); in GenerateCompareFlatOneByteStrings()
1819 __ Move(eax, Immediate(Smi::FromInt(LESS))); in GenerateCompareFlatOneByteStrings()
1820 __ ret(0); in GenerateCompareFlatOneByteStrings()
1823 __ bind(&result_greater); in GenerateCompareFlatOneByteStrings()
1824 __ Move(eax, Immediate(Smi::FromInt(GREATER))); in GenerateCompareFlatOneByteStrings()
1825 __ ret(0); in GenerateCompareFlatOneByteStrings()
1836 __ SmiUntag(length); in GenerateOneByteCharsCompareLoop()
1837 __ lea(left, in GenerateOneByteCharsCompareLoop()
1839 __ lea(right, in GenerateOneByteCharsCompareLoop()
1841 __ neg(length); in GenerateOneByteCharsCompareLoop()
1846 __ bind(&loop); in GenerateOneByteCharsCompareLoop()
1847 __ mov_b(scratch, Operand(left, index, times_1, 0)); in GenerateOneByteCharsCompareLoop()
1848 __ cmpb(scratch, Operand(right, index, times_1, 0)); in GenerateOneByteCharsCompareLoop()
1849 __ j(not_equal, chars_not_equal, chars_not_equal_near); in GenerateOneByteCharsCompareLoop()
1850 __ inc(index); in GenerateOneByteCharsCompareLoop()
1851 __ j(not_zero, &loop); in GenerateOneByteCharsCompareLoop()
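
The loop above indexes both sequential one-byte strings from their ends with a negated length, so the increment reaching zero ends the scan and any byte mismatch exits to the not-equal label. Schematically (an equality-only sketch with invented names; the callers have already excluded zero-length inputs):

    #include <cstdint>

    // Sketch of the compare loop above: walk both one-byte strings with a
    // negative index counting up to zero, bailing out on the first mismatch.
    bool OneByteCharsEqual(const uint8_t* left, const uint8_t* right, int length) {
      const uint8_t* left_end = left + length;
      const uint8_t* right_end = right + length;
      for (int index = -length; index != 0; ++index) {
        if (left_end[index] != right_end[index]) return false;
      }
      return true;
    }
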
1865 __ mov(ecx, isolate()->factory()->undefined_value()); in Generate()
1869 __ test(ecx, Immediate(kSmiTagMask)); in Generate()
1870 __ Assert(not_equal, kExpectedAllocationSite); in Generate()
1871 __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), in Generate()
1873 __ Assert(equal, kExpectedAllocationSite); in Generate()
1879 __ TailCallStub(&stub); in Generate()
1889 __ JumpIfSmi(edx, &miss, miss_distance); in GenerateBooleans()
1890 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset)); in GenerateBooleans()
1891 __ JumpIfSmi(eax, &miss, miss_distance); in GenerateBooleans()
1892 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); in GenerateBooleans()
1893 __ JumpIfNotRoot(ecx, Heap::kBooleanMapRootIndex, &miss, miss_distance); in GenerateBooleans()
1894 __ JumpIfNotRoot(ebx, Heap::kBooleanMapRootIndex, &miss, miss_distance); in GenerateBooleans()
1896 __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset)); in GenerateBooleans()
1897 __ AssertSmi(eax); in GenerateBooleans()
1898 __ mov(edx, FieldOperand(edx, Oddball::kToNumberOffset)); in GenerateBooleans()
1899 __ AssertSmi(edx); in GenerateBooleans()
1900 __ push(eax); in GenerateBooleans()
1901 __ mov(eax, edx); in GenerateBooleans()
1902 __ pop(edx); in GenerateBooleans()
1904 __ sub(eax, edx); in GenerateBooleans()
1905 __ Ret(); in GenerateBooleans()
1907 __ bind(&miss); in GenerateBooleans()
1915 __ mov(ecx, edx); in GenerateSmis()
1916 __ or_(ecx, eax); in GenerateSmis()
1917 __ JumpIfNotSmi(ecx, &miss, Label::kNear); in GenerateSmis()
1921 __ sub(eax, edx); in GenerateSmis()
1924 __ sub(edx, eax); in GenerateSmis()
1925 __ j(no_overflow, &done, Label::kNear); in GenerateSmis()
1927 __ not_(edx); in GenerateSmis()
1928 __ bind(&done); in GenerateSmis()
1929 __ mov(eax, edx); in GenerateSmis()
1931 __ ret(0); in GenerateSmis()
1933 __ bind(&miss); in GenerateSmis()
1946 __ JumpIfNotSmi(edx, &miss); in GenerateNumbers()
1949 __ JumpIfNotSmi(eax, &miss); in GenerateNumbers()
1954 __ JumpIfSmi(eax, &right_smi, Label::kNear); in GenerateNumbers()
1955 __ cmp(FieldOperand(eax, HeapObject::kMapOffset), in GenerateNumbers()
1957 __ j(not_equal, &maybe_undefined1, Label::kNear); in GenerateNumbers()
1958 __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset)); in GenerateNumbers()
1959 __ jmp(&left, Label::kNear); in GenerateNumbers()
1960 __ bind(&right_smi); in GenerateNumbers()
1961 __ mov(ecx, eax); // Can't clobber eax because we can still jump away. in GenerateNumbers()
1962 __ SmiUntag(ecx); in GenerateNumbers()
1963 __ Cvtsi2sd(xmm1, ecx); in GenerateNumbers()
1965 __ bind(&left); in GenerateNumbers()
1966 __ JumpIfSmi(edx, &left_smi, Label::kNear); in GenerateNumbers()
1967 __ cmp(FieldOperand(edx, HeapObject::kMapOffset), in GenerateNumbers()
1969 __ j(not_equal, &maybe_undefined2, Label::kNear); in GenerateNumbers()
1970 __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset)); in GenerateNumbers()
1971 __ jmp(&done); in GenerateNumbers()
1972 __ bind(&left_smi); in GenerateNumbers()
1973 __ mov(ecx, edx); // Can't clobber edx because we can still jump away. in GenerateNumbers()
1974 __ SmiUntag(ecx); in GenerateNumbers()
1975 __ Cvtsi2sd(xmm0, ecx); in GenerateNumbers()
1977 __ bind(&done); in GenerateNumbers()
1979 __ ucomisd(xmm0, xmm1); in GenerateNumbers()
1982 __ j(parity_even, &unordered, Label::kNear); in GenerateNumbers()
1986 __ mov(eax, 0); // equal in GenerateNumbers()
1987 __ mov(ecx, Immediate(Smi::FromInt(1))); in GenerateNumbers()
1988 __ cmov(above, eax, ecx); in GenerateNumbers()
1989 __ mov(ecx, Immediate(Smi::FromInt(-1))); in GenerateNumbers()
1990 __ cmov(below, eax, ecx); in GenerateNumbers()
1991 __ ret(0); in GenerateNumbers()
1993 __ bind(&unordered); in GenerateNumbers()
1994 __ bind(&generic_stub); in GenerateNumbers()
1997 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); in GenerateNumbers()
1999 __ bind(&maybe_undefined1); in GenerateNumbers()
2001 __ cmp(eax, Immediate(isolate()->factory()->undefined_value())); in GenerateNumbers()
2002 __ j(not_equal, &miss); in GenerateNumbers()
2003 __ JumpIfSmi(edx, &unordered); in GenerateNumbers()
2004 __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx); in GenerateNumbers()
2005 __ j(not_equal, &maybe_undefined2, Label::kNear); in GenerateNumbers()
2006 __ jmp(&unordered); in GenerateNumbers()
2009 __ bind(&maybe_undefined2); in GenerateNumbers()
2011 __ cmp(edx, Immediate(isolate()->factory()->undefined_value())); in GenerateNumbers()
2012 __ j(equal, &unordered); in GenerateNumbers()
2015 __ bind(&miss); in GenerateNumbers()
2032 __ mov(tmp1, left); in GenerateInternalizedStrings()
2034 __ and_(tmp1, right); in GenerateInternalizedStrings()
2035 __ JumpIfSmi(tmp1, &miss, Label::kNear); in GenerateInternalizedStrings()
2038 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2039 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); in GenerateInternalizedStrings()
2040 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2041 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateInternalizedStrings()
2043 __ or_(tmp1, tmp2); in GenerateInternalizedStrings()
2044 __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask)); in GenerateInternalizedStrings()
2045 __ j(not_zero, &miss, Label::kNear); in GenerateInternalizedStrings()
2049 __ cmp(left, right); in GenerateInternalizedStrings()
2053 __ j(not_equal, &done, Label::kNear); in GenerateInternalizedStrings()
2056 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); in GenerateInternalizedStrings()
2057 __ bind(&done); in GenerateInternalizedStrings()
2058 __ ret(0); in GenerateInternalizedStrings()
2060 __ bind(&miss); in GenerateInternalizedStrings()
2077 __ mov(tmp1, left); in GenerateUniqueNames()
2079 __ and_(tmp1, right); in GenerateUniqueNames()
2080 __ JumpIfSmi(tmp1, &miss, Label::kNear); in GenerateUniqueNames()
2084 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); in GenerateUniqueNames()
2085 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); in GenerateUniqueNames()
2086 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2087 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateUniqueNames()
2089 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear); in GenerateUniqueNames()
2090 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear); in GenerateUniqueNames()
2094 __ cmp(left, right); in GenerateUniqueNames()
2098 __ j(not_equal, &done, Label::kNear); in GenerateUniqueNames()
2101 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); in GenerateUniqueNames()
2102 __ bind(&done); in GenerateUniqueNames()
2103 __ ret(0); in GenerateUniqueNames()
2105 __ bind(&miss); in GenerateUniqueNames()
2124 __ mov(tmp1, left); in GenerateStrings()
2126 __ and_(tmp1, right); in GenerateStrings()
2127 __ JumpIfSmi(tmp1, &miss); in GenerateStrings()
2131 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset)); in GenerateStrings()
2132 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset)); in GenerateStrings()
2133 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset)); in GenerateStrings()
2134 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset)); in GenerateStrings()
2135 __ mov(tmp3, tmp1); in GenerateStrings()
2137 __ or_(tmp3, tmp2); in GenerateStrings()
2138 __ test(tmp3, Immediate(kIsNotStringMask)); in GenerateStrings()
2139 __ j(not_zero, &miss); in GenerateStrings()
2143 __ cmp(left, right); in GenerateStrings()
2144 __ j(not_equal, &not_same, Label::kNear); in GenerateStrings()
2147 __ Move(eax, Immediate(Smi::FromInt(EQUAL))); in GenerateStrings()
2148 __ ret(0); in GenerateStrings()
2151 __ bind(&not_same); in GenerateStrings()
2160 __ or_(tmp1, tmp2); in GenerateStrings()
2161 __ test(tmp1, Immediate(kIsNotInternalizedMask)); in GenerateStrings()
2162 __ j(not_zero, &do_compare, Label::kNear); in GenerateStrings()
2166 __ ret(0); in GenerateStrings()
2167 __ bind(&do_compare); in GenerateStrings()
2172 __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime); in GenerateStrings()
2184 __ bind(&runtime); in GenerateStrings()
2188 __ Push(left); in GenerateStrings()
2189 __ Push(right); in GenerateStrings()
2190 __ CallRuntime(Runtime::kStringEqual); in GenerateStrings()
2192 __ sub(eax, Immediate(masm->isolate()->factory()->true_value())); in GenerateStrings()
2193 __ Ret(); in GenerateStrings()
2195 __ pop(tmp1); // Return address. in GenerateStrings()
2196 __ push(left); in GenerateStrings()
2197 __ push(right); in GenerateStrings()
2198 __ push(tmp1); in GenerateStrings()
2199 __ TailCallRuntime(Runtime::kStringCompare); in GenerateStrings()
2202 __ bind(&miss); in GenerateStrings()
2210 __ mov(ecx, edx); in GenerateReceivers()
2211 __ and_(ecx, eax); in GenerateReceivers()
2212 __ JumpIfSmi(ecx, &miss, Label::kNear); in GenerateReceivers()
2215 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx); in GenerateReceivers()
2216 __ j(below, &miss, Label::kNear); in GenerateReceivers()
2217 __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx); in GenerateReceivers()
2218 __ j(below, &miss, Label::kNear); in GenerateReceivers()
2221 __ sub(eax, edx); in GenerateReceivers()
2222 __ ret(0); in GenerateReceivers()
2224 __ bind(&miss); in GenerateReceivers()
2232 __ mov(ecx, edx); in GenerateKnownReceivers()
2233 __ and_(ecx, eax); in GenerateKnownReceivers()
2234 __ JumpIfSmi(ecx, &miss, Label::kNear); in GenerateKnownReceivers()
2236 __ GetWeakValue(edi, cell); in GenerateKnownReceivers()
2237 __ cmp(edi, FieldOperand(eax, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2238 __ j(not_equal, &miss, Label::kNear); in GenerateKnownReceivers()
2239 __ cmp(edi, FieldOperand(edx, HeapObject::kMapOffset)); in GenerateKnownReceivers()
2240 __ j(not_equal, &miss, Label::kNear); in GenerateKnownReceivers()
2243 __ sub(eax, edx); in GenerateKnownReceivers()
2244 __ ret(0); in GenerateKnownReceivers()
2246 __ PopReturnAddressTo(ecx); in GenerateKnownReceivers()
2247 __ Push(edx); in GenerateKnownReceivers()
2248 __ Push(eax); in GenerateKnownReceivers()
2249 __ Push(Immediate(Smi::FromInt(NegativeComparisonResult(GetCondition())))); in GenerateKnownReceivers()
2250 __ PushReturnAddressFrom(ecx); in GenerateKnownReceivers()
2251 __ TailCallRuntime(Runtime::kCompare); in GenerateKnownReceivers()
2254 __ bind(&miss); in GenerateKnownReceivers()
2263 __ push(edx); // Preserve edx and eax. in GenerateMiss()
2264 __ push(eax); in GenerateMiss()
2265 __ push(edx); // And also use them as the arguments. in GenerateMiss()
2266 __ push(eax); in GenerateMiss()
2267 __ push(Immediate(Smi::FromInt(op()))); in GenerateMiss()
2268 __ CallRuntime(Runtime::kCompareIC_Miss); in GenerateMiss()
2270 __ lea(edi, FieldOperand(eax, Code::kHeaderSize)); in GenerateMiss()
2271 __ pop(eax); in GenerateMiss()
2272 __ pop(edx); in GenerateMiss()
2276 __ jmp(edi); in GenerateMiss()
2302 __ mov(index, FieldOperand(properties, kCapacityOffset)); in GenerateNegativeLookup()
2303 __ dec(index); in GenerateNegativeLookup()
2304 __ and_(index, in GenerateNegativeLookup()
2310 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3. in GenerateNegativeLookup()
2314 __ mov(entity_name, Operand(properties, index, times_half_pointer_size, in GenerateNegativeLookup()
2316 __ cmp(entity_name, masm->isolate()->factory()->undefined_value()); in GenerateNegativeLookup()
2317 __ j(equal, done); in GenerateNegativeLookup()
2320 __ cmp(entity_name, Handle<Name>(name)); in GenerateNegativeLookup()
2321 __ j(equal, miss); in GenerateNegativeLookup()
2325 __ cmp(entity_name, masm->isolate()->factory()->the_hole_value()); in GenerateNegativeLookup()
2326 __ j(equal, &good, Label::kNear); in GenerateNegativeLookup()
2329 __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset)); in GenerateNegativeLookup()
2330 __ JumpIfNotUniqueNameInstanceType( in GenerateNegativeLookup()
2332 __ bind(&good); in GenerateNegativeLookup()
2337 __ push(Immediate(Handle<Object>(name))); in GenerateNegativeLookup()
2338 __ push(Immediate(name->Hash())); in GenerateNegativeLookup()
2339 __ CallStub(&stub); in GenerateNegativeLookup()
2340 __ test(r0, r0); in GenerateNegativeLookup()
2341 __ j(not_zero, miss); in GenerateNegativeLookup()
2342 __ jmp(done); in GenerateNegativeLookup()
2364 __ mov(scratch, FieldOperand(dictionary(), kCapacityOffset)); in Generate()
2365 __ dec(scratch); in Generate()
2366 __ SmiUntag(scratch); in Generate()
2367 __ push(scratch); in Generate()
2376 __ mov(scratch, Operand(esp, 2 * kPointerSize)); in Generate()
2378 __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i))); in Generate()
2380 __ and_(scratch, Operand(esp, 0)); in Generate()
2384 __ lea(index(), Operand(scratch, scratch, times_2, 0)); // index *= 3. in Generate()
2388 __ mov(scratch, Operand(dictionary(), index(), times_pointer_size, in Generate()
2390 __ cmp(scratch, isolate()->factory()->undefined_value()); in Generate()
2391 __ j(equal, &not_in_dictionary); in Generate()
2394 __ cmp(scratch, Operand(esp, 3 * kPointerSize)); in Generate()
2395 __ j(equal, &in_dictionary); in Generate()
2403 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset)); in Generate()
2404 __ JumpIfNotUniqueNameInstanceType( in Generate()
2410 __ bind(&maybe_in_dictionary); in Generate()
2415 __ mov(result(), Immediate(0)); in Generate()
2416 __ Drop(1); in Generate()
2417 __ ret(2 * kPointerSize); in Generate()
2420 __ bind(&in_dictionary); in Generate()
2421 __ mov(result(), Immediate(1)); in Generate()
2422 __ Drop(1); in Generate()
2423 __ ret(2 * kPointerSize); in Generate()
2425 __ bind(&not_in_dictionary); in Generate()
2426 __ mov(result(), Immediate(0)); in Generate()
2427 __ Drop(1); in Generate()
2428 __ ret(2 * kPointerSize); in Generate()
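
The lookup above probes a power-of-two-capacity table whose entries are three slots wide, adding a per-round probe offset to the hash and masking with capacity - 1; after a fixed number of inline probes it gives up and reports a tentative result. A simplified sketch follows; the entry width, probe count, and offset formula are assumptions for illustration, not the real dictionary layout.

    #include <cstdint>

    // Simplified sketch of the probing above; constants and the offset formula
    // are illustrative assumptions.
    enum ProbeResult { kNotFound, kFound, kMaybeInDictionary };

    ProbeResult Probe(void* const* elements, uint32_t capacity, uint32_t hash,
                      const void* key, const void* undefined_sentinel) {
      const uint32_t kEntrySize = 3;   // key, value, details per entry
      const int kInlinedProbes = 4;    // only a few probes are done inline
      for (int i = 0; i < kInlinedProbes; ++i) {
        uint32_t offset = static_cast<uint32_t>(i) * (i + 1) / 2;  // per-round offset
        uint32_t index = (hash + offset) & (capacity - 1);
        const void* candidate = elements[index * kEntrySize];
        if (candidate == undefined_sentinel) return kNotFound;  // empty slot
        if (candidate == key) return kFound;
      }
      return kMaybeInDictionary;  // fall back to the slow path / runtime
    }
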
2453 __ jmp(&skip_to_incremental_noncompacting, Label::kNear); in Generate()
2454 __ jmp(&skip_to_incremental_compacting, Label::kFar); in Generate()
2457 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in Generate()
2460 __ ret(0); in Generate()
2463 __ bind(&skip_to_incremental_noncompacting); in Generate()
2466 __ bind(&skip_to_incremental_compacting); in Generate()
2482 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); in GenerateIncremental()
2483 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value. in GenerateIncremental()
2487 __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(), in GenerateIncremental()
2498 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in GenerateIncremental()
2501 __ bind(&dont_need_remembered_set); in GenerateIncremental()
2510 __ ret(0); in GenerateIncremental()
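The GenerateIncremental lines encode the usual write-barrier filter: only a new-space value stored into an object that is not itself in new space needs a remembered-set entry. A toy C++ model of that decision, with HeapObject and the global set being assumptions rather than V8's real layout:

#include <unordered_set>

// Hypothetical stand-in; new-space membership is faked with a flag.
struct HeapObject { bool in_new_space; };

static std::unordered_set<HeapObject**> g_remembered_set;  // old-to-new slots

// Mirrors the checks above: skip if the value is not in new space, skip if the
// host already is, otherwise record the slot (RememberedSetHelper).
void RecordWrite(HeapObject* host, HeapObject** slot, HeapObject* value) {
  *slot = value;
  if (!value->in_new_space) return;   // JumpIfNotInNewSpace(value)
  if (host->in_new_space) return;     // JumpIfInNewSpace(object)
  g_remembered_set.insert(slot);
}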
2517 __ PrepareCallCFunction(argument_count, regs_.scratch0()); in InformIncrementalMarker()
2518 __ mov(Operand(esp, 0 * kPointerSize), regs_.object()); in InformIncrementalMarker()
2519 __ mov(Operand(esp, 1 * kPointerSize), regs_.address()); // Slot. in InformIncrementalMarker()
2520 __ mov(Operand(esp, 2 * kPointerSize), in InformIncrementalMarker()
2524 __ CallCFunction( in InformIncrementalMarker()
2540 __ JumpIfBlack(regs_.object(), in CheckNeedsToInformIncrementalMarker()
2548 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in CheckNeedsToInformIncrementalMarker()
2551 __ ret(0); in CheckNeedsToInformIncrementalMarker()
2554 __ bind(&object_is_black); in CheckNeedsToInformIncrementalMarker()
2557 __ mov(regs_.scratch0(), Operand(regs_.address(), 0)); in CheckNeedsToInformIncrementalMarker()
2562 __ CheckPageFlag(regs_.scratch0(), // Contains value. in CheckNeedsToInformIncrementalMarker()
2569 __ CheckPageFlag(regs_.object(), in CheckNeedsToInformIncrementalMarker()
2576 __ jmp(&need_incremental); in CheckNeedsToInformIncrementalMarker()
2578 __ bind(&ensure_not_white); in CheckNeedsToInformIncrementalMarker()
2583 __ push(regs_.object()); in CheckNeedsToInformIncrementalMarker()
2584 __ JumpIfWhite(regs_.scratch0(), // The value. in CheckNeedsToInformIncrementalMarker()
2588 __ pop(regs_.object()); in CheckNeedsToInformIncrementalMarker()
2592 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(), in CheckNeedsToInformIncrementalMarker()
2595 __ ret(0); in CheckNeedsToInformIncrementalMarker()
2598 __ bind(&need_incremental_pop_object); in CheckNeedsToInformIncrementalMarker()
2599 __ pop(regs_.object()); in CheckNeedsToInformIncrementalMarker()
2601 __ bind(&need_incremental); in CheckNeedsToInformIncrementalMarker()
2609 __ call(ces.GetCode(), RelocInfo::CODE_TARGET); in Generate()
2612 __ mov(ebx, MemOperand(ebp, parameter_count_offset)); in Generate()
2614 __ pop(ecx); in Generate()
2617 __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset)); in Generate()
2618 __ jmp(ecx); // Return to IC Miss stub, continuation still on stack. in Generate()
2632 __ push(eax); in Generate()
2633 __ push(ecx); in Generate()
2634 __ push(edx); in Generate()
2637 __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize)); in Generate()
2638 __ push(eax); in Generate()
2642 __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize)); in Generate()
2643 __ sub(eax, Immediate(Assembler::kCallInstructionLength)); in Generate()
2644 __ push(eax); in Generate()
2648 __ call(FUNCTION_ADDR(isolate()->function_entry_hook()), in Generate()
2650 __ add(esp, Immediate(2 * kPointerSize)); in Generate()
2653 __ pop(edx); in Generate()
2654 __ pop(ecx); in Generate()
2655 __ pop(eax); in Generate()
2657 __ ret(0); in Generate()
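The profile-entry-hook lines recover the address of the call instruction by subtracting the call instruction's length from the pushed return address. A tiny sketch of that arithmetic; the 5-byte constant is an assumption standing in for Assembler::kCallInstructionLength (a near call with a 32-bit displacement on ia32):

#include <cstdint>
#include <cstdio>

constexpr uintptr_t kCallInstructionLength = 5;  // assumed ia32 near call size

uintptr_t CallSiteFromReturnAddress(uintptr_t return_address) {
  return return_address - kCallInstructionLength;  // sub(eax, Immediate(...))
}

int main() {
  std::printf("%zx\n", static_cast<size_t>(CallSiteFromReturnAddress(0x1005)));
}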
2668 __ TailCallStub(&stub); in CreateArrayDispatch()
2675 __ cmp(edx, kind); in CreateArrayDispatch()
2676 __ j(not_equal, &next); in CreateArrayDispatch()
2678 __ TailCallStub(&stub); in CreateArrayDispatch()
2679 __ bind(&next); in CreateArrayDispatch()
2683 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatch()
2708 __ test_b(edx, Immediate(1)); in CreateArrayDispatchOneArgument()
2709 __ j(not_zero, &normal_sequence); in CreateArrayDispatchOneArgument()
2713 __ mov(ecx, Operand(esp, kPointerSize)); in CreateArrayDispatchOneArgument()
2714 __ test(ecx, ecx); in CreateArrayDispatchOneArgument()
2715 __ j(zero, &normal_sequence); in CreateArrayDispatchOneArgument()
2724 __ TailCallStub(&stub_holey); in CreateArrayDispatchOneArgument()
2726 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
2730 __ TailCallStub(&stub); in CreateArrayDispatchOneArgument()
2734 __ inc(edx); in CreateArrayDispatchOneArgument()
2739 __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map)); in CreateArrayDispatchOneArgument()
2740 __ Assert(equal, kExpectedAllocationSite); in CreateArrayDispatchOneArgument()
2747 __ add(FieldOperand(ebx, AllocationSite::kTransitionInfoOffset), in CreateArrayDispatchOneArgument()
2750 __ bind(&normal_sequence); in CreateArrayDispatchOneArgument()
2756 __ cmp(edx, kind); in CreateArrayDispatchOneArgument()
2757 __ j(not_equal, &next); in CreateArrayDispatchOneArgument()
2759 __ TailCallStub(&stub); in CreateArrayDispatchOneArgument()
2760 __ bind(&next); in CreateArrayDispatchOneArgument()
2764 __ Abort(kUnexpectedElementsKindInArrayConstructor); in CreateArrayDispatchOneArgument()
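The single-argument dispatch above keeps the packed elements kind when the requested length is zero and otherwise switches to the holey variant, which the stub reaches with a single inc(edx) because each holey kind sits one past its packed counterpart. A hedged sketch of that choice; the enum names are illustrative, not the V8 ElementsKind enum:

#include <cassert>

enum ElementsKind {
  PACKED_SMI_ELEMENTS, HOLEY_SMI_ELEMENTS,  // holey == packed + 1
  PACKED_ELEMENTS,     HOLEY_ELEMENTS,
};

// Assumes |initial| is a packed kind, as the stub's fast path does.
ElementsKind KindForSingleArgument(ElementsKind initial, int length_argument) {
  // new Array(0) keeps the packed kind; new Array(n), n != 0, produces a
  // backing store full of holes, so transition to the holey variant.
  if (length_argument == 0) return initial;
  return static_cast<ElementsKind>(initial + 1);  // mirrors inc(edx)
}

int main() {
  assert(KindForSingleArgument(PACKED_SMI_ELEMENTS, 0) == PACKED_SMI_ELEMENTS);
  assert(KindForSingleArgument(PACKED_SMI_ELEMENTS, 8) == HOLEY_SMI_ELEMENTS);
}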
2807 __ test(eax, eax); in GenerateDispatchToArrayStub()
2808 __ j(not_zero, &not_zero_case); in GenerateDispatchToArrayStub()
2811 __ bind(&not_zero_case); in GenerateDispatchToArrayStub()
2812 __ cmp(eax, 1); in GenerateDispatchToArrayStub()
2813 __ j(greater, &not_one_case); in GenerateDispatchToArrayStub()
2816 __ bind(&not_one_case); in GenerateDispatchToArrayStub()
2818 __ TailCallStub(&stub); in GenerateDispatchToArrayStub()
2835 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
2837 __ test(ecx, Immediate(kSmiTagMask)); in Generate()
2838 __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction); in Generate()
2839 __ CmpObjectType(ecx, MAP_TYPE, ecx); in Generate()
2840 __ Assert(equal, kUnexpectedInitialMapForArrayFunction); in Generate()
2843 __ AssertUndefinedOrAllocationSite(ebx); in Generate()
2849 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); in Generate()
2851 __ cmp(edx, edi); in Generate()
2852 __ j(not_equal, &subclassing); in Generate()
2857 __ cmp(ebx, isolate()->factory()->undefined_value()); in Generate()
2858 __ j(equal, &no_info); in Generate()
2861 __ mov(edx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset)); in Generate()
2862 __ SmiUntag(edx); in Generate()
2864 __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask)); in Generate()
2867 __ bind(&no_info); in Generate()
2871 __ bind(&subclassing); in Generate()
2872 __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi); in Generate()
2873 __ add(eax, Immediate(3)); in Generate()
2874 __ PopReturnAddressTo(ecx); in Generate()
2875 __ Push(edx); in Generate()
2876 __ Push(ebx); in Generate()
2877 __ PushReturnAddressFrom(ecx); in Generate()
2878 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate())); in Generate()
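The fast path above reads the allocation site's transition info, untags the Smi, and masks out the elements-kind bits before dispatching. A small sketch of that decode; the one-bit Smi tag matches ia32, but the 5-bit mask is only an assumption standing in for AllocationSite::ElementsKindBits::kMask:

#include <cassert>
#include <cstdint>

constexpr int32_t kSmiTagSize = 1;          // ia32: value in the upper 31 bits
constexpr int32_t kElementsKindMask = 0x1F; // assumed width of the bitfield

int32_t SmiUntag(int32_t tagged) { return tagged >> kSmiTagSize; }

int32_t ElementsKindFromTransitionInfo(int32_t tagged_transition_info) {
  return SmiUntag(tagged_transition_info) & kElementsKindMask;  // and_(edx, ...)
}

int main() {
  int32_t tagged = 3 << kSmiTagSize;  // transition info Smi holding kind 3
  assert(ElementsKindFromTransitionInfo(tagged) == 3);
}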
2887 __ test(eax, eax); in GenerateCase()
2888 __ j(not_zero, &not_zero_case); in GenerateCase()
2890 __ TailCallStub(&stub0); in GenerateCase()
2892 __ bind(&not_zero_case); in GenerateCase()
2893 __ cmp(eax, 1); in GenerateCase()
2894 __ j(greater, &not_one_case); in GenerateCase()
2899 __ mov(ecx, Operand(esp, kPointerSize)); in GenerateCase()
2900 __ test(ecx, ecx); in GenerateCase()
2901 __ j(zero, &normal_sequence); in GenerateCase()
2905 __ TailCallStub(&stub1_holey); in GenerateCase()
2908 __ bind(&normal_sequence); in GenerateCase()
2910 __ TailCallStub(&stub1); in GenerateCase()
2912 __ bind(&not_one_case); in GenerateCase()
2914 __ TailCallStub(&stubN); in GenerateCase()
2931 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
2933 __ test(ecx, Immediate(kSmiTagMask)); in Generate()
2934 __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction); in Generate()
2935 __ CmpObjectType(ecx, MAP_TYPE, ecx); in Generate()
2936 __ Assert(equal, kUnexpectedInitialMapForArrayFunction); in Generate()
2940 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset)); in Generate()
2944 __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset)); in Generate()
2946 __ DecodeField<Map::ElementsKindBits>(ecx); in Generate()
2950 __ cmp(ecx, Immediate(FAST_ELEMENTS)); in Generate()
2951 __ j(equal, &done); in Generate()
2952 __ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS)); in Generate()
2953 __ Assert(equal, in Generate()
2955 __ bind(&done); in Generate()
2959 __ cmp(ecx, Immediate(FAST_ELEMENTS)); in Generate()
2960 __ j(equal, &fast_elements_case); in Generate()
2963 __ bind(&fast_elements_case); in Generate()
2979 __ EnterApiExitFrame(argc); in PrepareCallApiFunction()
2980 if (__ emit_debug_code()) { in PrepareCallApiFunction()
2981 __ mov(esi, Immediate(bit_cast<int32_t>(kZapValue))); in PrepareCallApiFunction()
3008 __ mov(ebx, Operand::StaticVariable(next_address)); in CallApiFunctionAndReturn()
3009 __ mov(edi, Operand::StaticVariable(limit_address)); in CallApiFunctionAndReturn()
3010 __ add(Operand::StaticVariable(level_address), Immediate(1)); in CallApiFunctionAndReturn()
3014 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
3015 __ PrepareCallCFunction(1, eax); in CallApiFunctionAndReturn()
3016 __ mov(Operand(esp, 0), in CallApiFunctionAndReturn()
3018 __ CallCFunction(ExternalReference::log_enter_external_function(isolate), in CallApiFunctionAndReturn()
3020 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
3026 __ mov(eax, Immediate(ExternalReference::is_profiling_address(isolate))); in CallApiFunctionAndReturn()
3027 __ cmpb(Operand(eax, 0), Immediate(0)); in CallApiFunctionAndReturn()
3028 __ j(zero, &profiler_disabled); in CallApiFunctionAndReturn()
3031 __ mov(thunk_last_arg, function_address); in CallApiFunctionAndReturn()
3033 __ mov(eax, Immediate(thunk_ref)); in CallApiFunctionAndReturn()
3034 __ call(eax); in CallApiFunctionAndReturn()
3035 __ jmp(&end_profiler_check); in CallApiFunctionAndReturn()
3037 __ bind(&profiler_disabled); in CallApiFunctionAndReturn()
3039 __ call(function_address); in CallApiFunctionAndReturn()
3040 __ bind(&end_profiler_check); in CallApiFunctionAndReturn()
3044 __ PushSafepointRegisters(); in CallApiFunctionAndReturn()
3045 __ PrepareCallCFunction(1, eax); in CallApiFunctionAndReturn()
3046 __ mov(Operand(esp, 0), in CallApiFunctionAndReturn()
3048 __ CallCFunction(ExternalReference::log_leave_external_function(isolate), in CallApiFunctionAndReturn()
3050 __ PopSafepointRegisters(); in CallApiFunctionAndReturn()
3055 __ mov(eax, return_value_operand); in CallApiFunctionAndReturn()
3061 __ bind(&prologue); in CallApiFunctionAndReturn()
3064 __ mov(Operand::StaticVariable(next_address), ebx); in CallApiFunctionAndReturn()
3065 __ sub(Operand::StaticVariable(level_address), Immediate(1)); in CallApiFunctionAndReturn()
3066 __ Assert(above_equal, kInvalidHandleScopeLevel); in CallApiFunctionAndReturn()
3067 __ cmp(edi, Operand::StaticVariable(limit_address)); in CallApiFunctionAndReturn()
3068 __ j(not_equal, &delete_allocated_handles); in CallApiFunctionAndReturn()
3071 __ bind(&leave_exit_frame); in CallApiFunctionAndReturn()
3074 __ mov(esi, *context_restore_operand); in CallApiFunctionAndReturn()
3077 __ mov(ebx, *stack_space_operand); in CallApiFunctionAndReturn()
3079 __ LeaveApiExitFrame(!restore_context); in CallApiFunctionAndReturn()
3084 __ cmp(Operand::StaticVariable(scheduled_exception_address), in CallApiFunctionAndReturn()
3086 __ j(not_equal, &promote_scheduled_exception); in CallApiFunctionAndReturn()
3094 __ JumpIfSmi(return_value, &ok, Label::kNear); in CallApiFunctionAndReturn()
3095 __ mov(map, FieldOperand(return_value, HeapObject::kMapOffset)); in CallApiFunctionAndReturn()
3097 __ CmpInstanceType(map, LAST_NAME_TYPE); in CallApiFunctionAndReturn()
3098 __ j(below_equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
3100 __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE); in CallApiFunctionAndReturn()
3101 __ j(above_equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
3103 __ cmp(map, isolate->factory()->heap_number_map()); in CallApiFunctionAndReturn()
3104 __ j(equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
3106 __ cmp(return_value, isolate->factory()->undefined_value()); in CallApiFunctionAndReturn()
3107 __ j(equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
3109 __ cmp(return_value, isolate->factory()->true_value()); in CallApiFunctionAndReturn()
3110 __ j(equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
3112 __ cmp(return_value, isolate->factory()->false_value()); in CallApiFunctionAndReturn()
3113 __ j(equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
3115 __ cmp(return_value, isolate->factory()->null_value()); in CallApiFunctionAndReturn()
3116 __ j(equal, &ok, Label::kNear); in CallApiFunctionAndReturn()
3118 __ Abort(kAPICallReturnedInvalidObject); in CallApiFunctionAndReturn()
3120 __ bind(&ok); in CallApiFunctionAndReturn()
3125 __ pop(ecx); in CallApiFunctionAndReturn()
3126 __ add(esp, ebx); in CallApiFunctionAndReturn()
3127 __ jmp(ecx); in CallApiFunctionAndReturn()
3129 __ ret(stack_space * kPointerSize); in CallApiFunctionAndReturn()
3133 __ bind(&promote_scheduled_exception); in CallApiFunctionAndReturn()
3134 __ TailCallRuntime(Runtime::kPromoteScheduledException); in CallApiFunctionAndReturn()
3139 __ bind(&delete_allocated_handles); in CallApiFunctionAndReturn()
3140 __ mov(Operand::StaticVariable(limit_address), edi); in CallApiFunctionAndReturn()
3141 __ mov(edi, eax); in CallApiFunctionAndReturn()
3142 __ mov(Operand(esp, 0), in CallApiFunctionAndReturn()
3144 __ mov(eax, Immediate(delete_extensions)); in CallApiFunctionAndReturn()
3145 __ call(eax); in CallApiFunctionAndReturn()
3146 __ mov(eax, edi); in CallApiFunctionAndReturn()
3147 __ jmp(&leave_exit_frame); in CallApiFunctionAndReturn()
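CallApiFunctionAndReturn brackets the C++ callback with handle-scope bookkeeping: save next/limit, bump the level, call (optionally through the profiler thunk), restore next, drop the level, and only take the slow DeleteExtensions path when the callback grew the limit. A toy model of that sequence; HandleScopeData and its members are stand-ins, not the real V8 layout:

#include <functional>

struct HandleScopeData {
  void** next = nullptr;
  void** limit = nullptr;
  int level = 0;
  void DeleteExtensions() { /* free blocks allocated past the saved limit */ }
};

void* CallApiFunction(HandleScopeData* scope, const std::function<void*()>& fn) {
  void** saved_next = scope->next;
  void** saved_limit = scope->limit;
  scope->level++;                       // add(level_address, Immediate(1))

  void* result = fn();                  // call function_address (or the thunk)

  scope->next = saved_next;             // mov(next_address, ebx)
  scope->level--;                       // sub(level_address, Immediate(1))
  if (scope->limit != saved_limit) {    // cmp(edi, limit_address)
    scope->DeleteExtensions();          // &delete_allocated_handles path
    scope->limit = saved_limit;
  }
  return result;
}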
3184 __ pop(return_address); in Generate()
3187 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate()
3190 __ push(context); in Generate()
3193 __ push(callee); in Generate()
3196 __ push(call_data); in Generate()
3201 __ push(Immediate(masm->isolate()->factory()->undefined_value())); in Generate()
3203 __ push(Immediate(masm->isolate()->factory()->undefined_value())); in Generate()
3206 __ push(scratch); in Generate()
3208 __ push(scratch); in Generate()
3211 __ push(Immediate(reinterpret_cast<int>(masm->isolate()))); in Generate()
3213 __ push(holder); in Generate()
3215 __ mov(scratch, esp); in Generate()
3218 __ push(return_address); in Generate()
3222 __ mov(context, FieldOperand(callee, JSFunction::kContextOffset)); in Generate()
3238 __ mov(ApiParameterOperand(2), scratch); in Generate()
3239 __ add(scratch, Immediate((argc() + FCA::kArgsLength - 1) * kPointerSize)); in Generate()
3241 __ mov(ApiParameterOperand(3), scratch); in Generate()
3243 __ Move(ApiParameterOperand(4), Immediate(argc())); in Generate()
3246 __ lea(scratch, ApiParameterOperand(2)); in Generate()
3247 __ mov(ApiParameterOperand(0), scratch); in Generate()
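The callback stub builds a three-word descriptor on the stack -- the implicit-args block, a pointer into the explicit arguments, and the argument count -- and passes the descriptor's address as the first C parameter. The struct below is only an illustrative stand-in for the FunctionCallbackInfo layout, and the offset arithmetic simply mirrors the add(scratch, (argc + FCA::kArgsLength - 1) * kPointerSize) above:

struct CallbackArguments {
  void** implicit_args;  // the FCA block: holder, isolate, return value, data, ...
  void** values;         // points into the explicit JS arguments
  int length;            // argc()
};

CallbackArguments MakeCallbackArguments(void** fca_block, int fca_length,
                                        int argc) {
  CallbackArguments info;
  info.implicit_args = fca_block;
  info.values = fca_block + fca_length + argc - 1;  // mirrors the add() above
  info.length = argc;
  return info;
}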
3292 __ pop(scratch); // Pop return address to extend the frame. in Generate()
3293 __ push(receiver); in Generate()
3294 __ push(FieldOperand(callback, AccessorInfo::kDataOffset)); in Generate()
3295 __ PushRoot(Heap::kUndefinedValueRootIndex); // ReturnValue in Generate()
3297 __ PushRoot(Heap::kUndefinedValueRootIndex); in Generate()
3298 __ push(Immediate(ExternalReference::isolate_address(isolate()))); in Generate()
3299 __ push(holder); in Generate()
3300 __ push(Immediate(Smi::kZero)); // should_throw_on_error -> false in Generate()
3301 __ push(FieldOperand(callback, AccessorInfo::kNameOffset)); in Generate()
3302 __ push(scratch); // Restore return address. in Generate()
3313 __ lea(scratch, Operand(esp, 2 * kPointerSize)); in Generate()
3319 __ mov(info_object, scratch); in Generate()
3322 __ sub(scratch, Immediate(kPointerSize)); in Generate()
3323 __ mov(ApiParameterOperand(0), scratch); in Generate()
3325 __ lea(scratch, info_object); in Generate()
3326 __ mov(ApiParameterOperand(1), scratch); in Generate()
3333 __ mov(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset)); in Generate()
3335 __ mov(function_address, in Generate()
3345 #undef __