// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_IA32

#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/ia32/code-stubs-ia32.h"
#include "src/ia32/frames-ia32.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {


static void InitializeArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // register state
  // eax -- number of arguments
  // edi -- function
  // ebx -- allocation site with elements kind
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(eax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}


static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // register state
  // eax -- number of arguments
  // edi -- constructor function
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kInternalArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(eax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}


void ArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void ArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
}


void InternalArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
}


#define __ ACCESS_MASM(masm)


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    DCHECK(param_count == 0 ||
           eax.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ push(descriptor.GetRegisterParameter(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ ret(0);
}


void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  __ pushad();
  if (save_doubles()) {
    __ sub(esp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(Operand(esp, i * kDoubleSize), reg);
    }
  }
  const int argument_count = 1;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, ecx);
  __ mov(Operand(esp, 0 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  if (save_doubles()) {
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(reg, Operand(esp, i * kDoubleSize));
    }
    __ add(esp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
  }
  __ popad();
  __ ret(0);
}


class FloatingPointHelper : public AllStatic {
 public:
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. The input must be
  // either a smi or a heap number object (fp value). Requirements: the
  // operand is in the register |number|. Returns the operand as a floating
  // point number on the FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);

  // Test if operands are numbers (smi or HeapNumber objects), and load
  // them into xmm0 and xmm1 if they are.  Jump to label not_numbers if
  // either operand is not a number.  Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);
};


void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  DCHECK(is_truncating());

  Label check_negative, process_64_bits, done, done_no_stash;

  int double_offset = offset();

  // Account for return address and saved regs if input is esp.
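  // (One word for the return address plus two for the scratch1/save_reg
  //  pushes below; the operands are only read after those pushes.)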
  if (input_reg.is(esp)) double_offset += 3 * kPointerSize;

  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

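  // Pick a scratch register that is neither the input register nor the
  // final result register.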
  Register scratch1;
  {
    Register scratch_candidates[3] = { ebx, edx, edi };
    for (int i = 0; i < 3; i++) {
      scratch1 = scratch_candidates[i];
      if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
    }
  }
  // Since we must use ecx for shifts below, use some other register (eax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg.is(ecx) ? eax : final_result_reg;
  // Save ecx if it isn't the return register (and therefore volatile), or,
  // if it is the return register, save the temp register (eax) we use in
  // its stead for the result.
  Register save_reg = final_result_reg.is(ecx) ? eax : ecx;
  __ push(scratch1);
  __ push(save_reg);

  bool stash_exponent_copy = !input_reg.is(esp);
  __ mov(scratch1, mantissa_operand);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    // Load x87 register with heap number.
    __ fld_d(mantissa_operand);
  }
  __ mov(ecx, exponent_operand);
  if (stash_exponent_copy) __ push(ecx);

  __ and_(ecx, HeapNumber::kExponentMask);
  __ shr(ecx, HeapNumber::kExponentShift);
  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  if (CpuFeatures::IsSupported(SSE3)) {
    __ fstp(0);
  }
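  // ecx still holds the biased exponent; subtracting delta (the bias plus
  // the 52 significand bits) leaves the left-shift count for the low
  // mantissa word. Counts above 31 mean the low 32 bits of the result are
  // zero.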
  __ sub(ecx, Immediate(delta));
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ j(above, &done);
  __ shl_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    if (stash_exponent_copy) {
      // Already a copy of the exponent on the stack, overwrite it.
      STATIC_ASSERT(kDoubleSize == 2 * kPointerSize);
      __ sub(esp, Immediate(kDoubleSize / 2));
    } else {
      // Reserve space for 64 bit answer.
      __ sub(esp, Immediate(kDoubleSize));  // Nolint.
    }
    // Do conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(result_reg, Operand(esp, 0));  // Load low word of answer as result
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done_no_stash);
  } else {
    // Result must be extracted from shifted 32-bit mantissa
    __ sub(ecx, Immediate(delta));
    __ neg(ecx);
    if (stash_exponent_copy) {
      __ mov(result_reg, MemOperand(esp, 0));
    } else {
      __ mov(result_reg, exponent_operand);
    }
    __ and_(result_reg,
            Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
    __ add(result_reg,
           Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
    __ shrd(result_reg, scratch1);
    __ shr_cl(result_reg);
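    // The 64-bit significand (hidden bit restored) is shifted right across
    // the result_reg:scratch1 pair. ia32 shifts take the count mod 32, so
    // for counts of 32 or more the wanted bits end up in result_reg; the
    // test on bit 5 of ecx below selects the correct word via cmov.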
    __ test(ecx, Immediate(32));
    __ cmov(not_equal, scratch1, result_reg);
  }

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  __ neg(result_reg);
  if (stash_exponent_copy) {
    __ cmp(MemOperand(esp, 0), Immediate(0));
  } else {
    __ cmp(exponent_operand, Immediate(0));
  }
  __ cmov(greater, result_reg, scratch1);

  // Restore registers
  __ bind(&done);
  if (stash_exponent_copy) {
    __ add(esp, Immediate(kDoubleSize / 2));
  }
  __ bind(&done_no_stash);
  if (!final_result_reg.is(result_reg)) {
    DCHECK(final_result_reg.is(ecx));
    __ mov(final_result_reg, result_reg);
  }
  __ pop(save_reg);
  __ pop(scratch1);
  __ ret(0);
}


void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ JumpIfSmi(number, &load_smi, Label::kNear);
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi);
  __ SmiUntag(number);
  __ push(number);
  __ fild_s(Operand(esp, 0));
  __ pop(number);

  __ bind(&done);
}


void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
                                           Label* not_numbers) {
  Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
  // Load operand in edx into xmm0, or branch to not_numbers.
  __ JumpIfSmi(edx, &load_smi_edx, Label::kNear);
  Factory* factory = masm->isolate()->factory();
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(not_equal, not_numbers);  // Argument in edx is not a number.
  __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
  __ bind(&load_eax);
  // Load operand in eax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(eax, &load_smi_eax, Label::kNear);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(equal, &load_float_eax, Label::kNear);
  __ jmp(not_numbers);  // Argument in eax is not a number.
  __ bind(&load_smi_edx);
  __ SmiUntag(edx);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm0, edx);
  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
  __ jmp(&load_eax);
  __ bind(&load_smi_eax);
  __ SmiUntag(eax);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm1, eax);
  __ SmiTag(eax);  // Retag smi for heap number overwriting test.
  __ jmp(&done, Label::kNear);
  __ bind(&load_float_eax);
  __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ bind(&done);
}


void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
                                             Label* non_float,
                                             Register scratch) {
  Label test_other, done;
  // Test if both operands are floats or smi -> scratch=k_is_float;
  // Otherwise scratch = k_not_float.
  __ JumpIfSmi(edx, &test_other, Label::kNear);
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in edx is not a number -> NaN

  __ bind(&test_other);
  __ JumpIfSmi(eax, &done, Label::kNear);
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in eax is not a number -> NaN

  // Fall-through: Both operands are numbers.
  __ bind(&done);
}


void MathPowStub::Generate(MacroAssembler* masm) {
  Factory* factory = isolate()->factory();
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(eax));
  const Register base = edx;
  const Register scratch = ecx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ mov(scratch, Immediate(1));
  __ Cvtsi2sd(double_result, scratch);

  if (exponent_type() == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack.
    __ mov(base, Operand(esp, 2 * kPointerSize));
    __ mov(exponent, Operand(esp, 1 * kPointerSize));

    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    __ cmp(FieldOperand(base, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(not_equal, &call_runtime);

    __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiUntag(base);
    __ Cvtsi2sd(double_base, base);

    __ bind(&unpack_exponent);
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ cmp(FieldOperand(exponent, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(not_equal, &call_runtime);
    __ movsd(double_exponent,
              FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type() == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ movsd(double_exponent,
              FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification,
                 &try_arithmetic_simplification,
                 &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cvttsd2si(exponent, Operand(double_exponent));
    __ cmp(exponent, Immediate(0x1));
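    // cvttsd2si yields 0x80000000 (the "indefinite integer") for NaN and
    // out-of-range inputs; subtracting 1 overflows only for exactly that
    // value, so j(overflow) catches it.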
    __ j(overflow, &call_runtime);

    if (exponent_type() == ON_STACK) {
      // Detect square root case.  Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead.  We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label continue_sqrt, continue_rsqrt, not_plus_half;
      // Test for 0.5.
      // Load double_scratch with 0.5.
      __ mov(scratch, Immediate(0x3F000000u));
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculates square root of base.  Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // According to IEEE-754, single-precision -Infinity has the highest
      // 9 bits set and the lowest 23 bits cleared.
      __ mov(scratch, 0xFF800000u);
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      __ ucomisd(double_base, double_scratch);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal.  However, it also sets the carry flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case.
      __ xorps(double_result, double_result);
      __ subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
      __ xorps(double_scratch, double_scratch);
      __ addsd(double_scratch, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      // Test for -0.5.
      __ bind(&not_plus_half);
      // Load double_scratch with -0.5 by subtracting 1.
      __ subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculates reciprocal of square root of base.  Check for the special
      // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      // According to IEEE-754, single-precision -Infinity has the highest
      // 9 bits set and the lowest 23 bits cleared.
      __ mov(scratch, 0xFF800000u);
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      __ ucomisd(double_base, double_scratch);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal.  However, it also sets the carry flag.
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ xorps(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
      __ xorps(double_exponent, double_exponent);
      __ addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_exponent, double_exponent);
      __ divsd(double_result, double_exponent);
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ sub(esp, Immediate(kDoubleSize));
    __ movsd(Operand(esp, 0), double_exponent);
    __ fld_d(Operand(esp, 0));  // E
    __ movsd(Operand(esp, 0), double_base);
    __ fld_d(Operand(esp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();   // 2^X, rnd(X)
    __ fstp(1);    // 2^X
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ test_b(eax, 0x5F);  // We check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  __ mov(scratch, exponent);  // Back up exponent.
  __ movsd(double_scratch, double_base);  // Back up base.
  __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ test(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ neg(scratch);
  __ bind(&no_neg);

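  // Exponentiation by squaring: each loop iteration shifts the exponent
  // right, squares double_scratch, and multiplies it into double_result
  // whenever the bit shifted out was set.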
  __ j(zero, &while_false, Label::kNear);
  __ shr(scratch, 1);
  // Above condition means CF==0 && ZF==0.  This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shr(scratch, 1);
  __ mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // scratch has the original value of the exponent - if the exponent is
  // negative, return 1/result.
  __ test(exponent, exponent);
  __ j(positive, &done);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  // Test whether result is zero.  Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);  // Result cannot be NaN.
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // exponent is a smi.  We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  Counters* counters = isolate()->counters();
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in exponent.
    __ bind(&done);
    __ AllocateHeapNumber(eax, scratch, base, &call_runtime);
    __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), double_result);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(2 * kPointerSize);
  } else {
    __ bind(&call_runtime);
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(4, scratch);
      __ movsd(Operand(esp, 0 * kDoubleSize), double_base);
      __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()), 4);
    }
    // Return value is in st(0) on ia32.
    // Store it into the (fixed) result register.
    __ sub(esp, Immediate(kDoubleSize));
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));

    __ bind(&done);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(0);
  }
}


void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver = LoadDescriptor::ReceiverRegister();
  // With careful management, we won't have to save slot and vector on
  // the stack. Simply handle the possibly missing case first.
  // TODO(mvstanton): this code can be more efficient.
  __ cmp(FieldOperand(receiver, JSFunction::kPrototypeOrInitialMapOffset),
         Immediate(isolate()->factory()->the_hole_value()));
  __ j(equal, &miss);
  __ TryGetFunctionPrototype(receiver, eax, ebx, &miss);
  __ ret(0);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
}


void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label slow;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  Register scratch = eax;
  DCHECK(!scratch.is(receiver) && !scratch.is(key));

  // Check that the key is an array index, that is, a Uint32.
  __ test(key, Immediate(kSmiTagMask | kSmiSignMask));
  __ j(not_zero, &slow);

  // Everything is fine, call runtime.
  __ pop(scratch);
  __ push(receiver);  // receiver
  __ push(key);       // key
  __ push(scratch);   // return address

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kLoadElementWithInterceptor);

  __ bind(&slow);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}


void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label miss;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register index = LoadDescriptor::NameRegister();
  Register scratch = edi;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));
  Register result = eax;
  DCHECK(!result.is(scratch));
  DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
         result.is(LoadDescriptor::SlotRegister()));

  // StringCharAtGenerator doesn't use the result register until it's passed
  // the different miss possibilities. If it did, we would have a conflict
  // when FLAG_vector_ics is true.
  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX,
                                          RECEIVER_IS_STRING);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}


void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in edx and the parameter count is in eax.
  DCHECK(edx.is(ArgumentsAccessReadDescriptor::index()));
  DCHECK(eax.is(ArgumentsAccessReadDescriptor::parameter_count()));

  // The displacement is used for skipping the frame pointer on the
  // stack. It is the offset of the last parameter (if any) relative
  // to the frame pointer.
  static const int kDisplacement = 1 * kPointerSize;

  // Check that the key is a smi.
  Label slow;
  __ JumpIfNotSmi(edx, &slow, Label::kNear);

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor;
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor, Label::kNear);

  // Check index against formal parameters count limit passed in
  // through register eax. Use unsigned comparison to get negative
  // check for free.
  __ cmp(edx, eax);
  __ j(above_equal, &slow, Label::kNear);

  // Read the argument from the stack and return it.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
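  // A smi is the value shifted left by one, so scaling a smi with times_2
  // multiplies the untagged value by kPointerSize (4 on ia32).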
  __ lea(ebx, Operand(ebp, eax, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmp(edx, ecx);
  __ j(above_equal, &slow, Label::kNear);

  // Read the argument from the stack and return it.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
  __ lea(ebx, Operand(ebx, ecx, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ pop(ebx);  // Return address.
  __ push(edx);
  __ push(ebx);
  __ TailCallRuntime(Runtime::kArguments);
}


void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
  // ecx : number of parameters (tagged)
  // edx : parameters pointer
  // edi : function
  // esp[0] : return address

  DCHECK(edi.is(ArgumentsAccessNewDescriptor::function()));
  DCHECK(ecx.is(ArgumentsAccessNewDescriptor::parameter_count()));
  DCHECK(edx.is(ArgumentsAccessNewDescriptor::parameter_pointer()));

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(eax, Operand(ebx, StandardFrameConstants::kContextOffset));
  __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &runtime, Label::kNear);

  // Patch the arguments.length and the parameters pointer.
  __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ lea(edx,
         Operand(ebx, ecx, times_2, StandardFrameConstants::kCallerSPOffset));

  __ bind(&runtime);
  __ pop(eax);   // Pop return address.
  __ push(edi);  // Push function.
  __ push(edx);  // Push parameters pointer.
  __ push(ecx);  // Push parameter count.
  __ push(eax);  // Push return address.
  __ TailCallRuntime(Runtime::kNewSloppyArguments);
}


void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
  // ecx : number of parameters (tagged)
  // edx : parameters pointer
  // edi : function
  // esp[0] : return address

  DCHECK(edi.is(ArgumentsAccessNewDescriptor::function()));
  DCHECK(ecx.is(ArgumentsAccessNewDescriptor::parameter_count()));
  DCHECK(edx.is(ArgumentsAccessNewDescriptor::parameter_pointer()));

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(eax, Operand(ebx, StandardFrameConstants::kContextOffset));
  __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor_frame, Label::kNear);

  // No adaptor, parameter count = argument count.
  __ mov(ebx, ecx);
  __ push(ecx);
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame. Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ mov(ebx, ecx);
  __ push(ecx);
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));

  // ebx = parameter count (tagged)
  // ecx = argument count (smi-tagged)
  // Compute the mapped parameter count = min(ebx, ecx) in ebx.
  __ cmp(ebx, ecx);
  __ j(less_equal, &try_allocate, Label::kNear);
  __ mov(ebx, ecx);

  // Save mapped parameter count and function.
  __ bind(&try_allocate);
  __ push(edi);
  __ push(ebx);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  Label no_parameter_map;
  __ test(ebx, ebx);
  __ j(zero, &no_parameter_map, Label::kNear);
  __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize));
  __ bind(&no_parameter_map);

  // 2. Backing store.
  __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ add(ebx, Immediate(Heap::kSloppyArgumentsObjectSize));

  // Do the allocation of all three objects in one go.
  __ Allocate(ebx, eax, edi, no_reg, &runtime, TAG_OBJECT);

  // eax = address of new object(s) (tagged)
  // ecx = argument count (smi-tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[4] = function
  // esp[8] = parameter count (tagged)
  // Get the arguments map from the current native context into edi.
  Label has_mapped_parameters, instantiate;
  __ mov(edi, NativeContextOperand());
  __ mov(ebx, Operand(esp, 0 * kPointerSize));
  __ test(ebx, ebx);
  __ j(not_zero, &has_mapped_parameters, Label::kNear);
  __ mov(
      edi,
      Operand(edi, Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX)));
  __ jmp(&instantiate, Label::kNear);

  __ bind(&has_mapped_parameters);
  __ mov(edi, Operand(edi, Context::SlotOffset(
                               Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX)));
  __ bind(&instantiate);

  // eax = address of new object (tagged)
  // ebx = mapped parameter count (tagged)
  // ecx = argument count (smi-tagged)
  // edi = address of arguments map (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[4] = function
  // esp[8] = parameter count (tagged)
  // Copy the JS object part.
  __ mov(FieldOperand(eax, JSObject::kMapOffset), edi);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         masm->isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         masm->isolate()->factory()->empty_fixed_array());

  // Set up the callee in-object property.
  STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
  __ mov(edi, Operand(esp, 1 * kPointerSize));
  __ AssertNotSmi(edi);
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                               Heap::kArgumentsCalleeIndex * kPointerSize),
         edi);

  // Use the length (smi tagged) and set that as an in-object property too.
  __ AssertSmi(ecx);
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                      Heap::kArgumentsLengthIndex * kPointerSize),
         ecx);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, edi will point there, otherwise to the
  // backing store.
  __ lea(edi, Operand(eax, Heap::kSloppyArgumentsObjectSize));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);

  // eax = address of new object (tagged)
  // ebx = mapped parameter count (tagged)
  // ecx = argument count (tagged)
  // edx = address of receiver argument
  // edi = address of parameter map or backing store (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[4] = function
  // esp[8] = parameter count (tagged)
  // Free two registers.
  __ push(edx);
  __ push(eax);

  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  __ test(ebx, ebx);
  __ j(zero, &skip_parameter_map);

  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate()->factory()->sloppy_arguments_elements_map()));
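  // The parameter map's length is the mapped parameter count plus two header
  // slots (context and backing store); the lea below adds Smi::FromInt(2)
  // directly in smi form.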
  __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
  __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize));
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameters thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
  Label parameters_loop, parameters_test;
  __ push(ecx);
  __ mov(eax, Operand(esp, 3 * kPointerSize));
  __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
  __ add(ebx, Operand(esp, 5 * kPointerSize));
  __ sub(ebx, eax);
  __ mov(ecx, isolate()->factory()->the_hole_value());
  __ mov(edx, edi);
  __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
  // eax = loop variable (tagged)
  // ebx = mapping index (tagged)
  // ecx = the hole value
  // edx = address of parameter map (tagged)
  // edi = address of backing store (tagged)
  // esp[0] = argument count (tagged)
  // esp[4] = address of new object (tagged)
  // esp[8] = address of receiver argument
  // esp[12] = mapped parameter count (tagged)
  // esp[16] = function
  // esp[20] = parameter count (tagged)
  __ jmp(&parameters_test, Label::kNear);

  __ bind(&parameters_loop);
  __ sub(eax, Immediate(Smi::FromInt(1)));
  __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx);
  __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx);
  __ add(ebx, Immediate(Smi::FromInt(1)));
  __ bind(&parameters_test);
  __ test(eax, eax);
  __ j(not_zero, &parameters_loop, Label::kNear);
  __ pop(ecx);

  __ bind(&skip_parameter_map);

  // ecx = argument count (tagged)
  // edi = address of backing store (tagged)
  // esp[0] = address of new object (tagged)
  // esp[4] = address of receiver argument
  // esp[8] = mapped parameter count (tagged)
  // esp[12] = function
  // esp[16] = parameter count (tagged)
  // Copy arguments header and remaining slots (if there are any).
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate()->factory()->fixed_array_map()));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);

  Label arguments_loop, arguments_test;
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(edx, Operand(esp, 1 * kPointerSize));
  __ sub(edx, ebx);  // Is there a smarter way to do negative scaling?
  __ sub(edx, ebx);
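  // ebx holds a smi (2 * count), so subtracting it twice subtracts
  // count * kPointerSize, stepping edx back over the mapped parameters.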
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ sub(edx, Immediate(kPointerSize));
  __ mov(eax, Operand(edx, 0));
  __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax);
  __ add(ebx, Immediate(Smi::FromInt(1)));

  __ bind(&arguments_test);
  __ cmp(ebx, ecx);
  __ j(less, &arguments_loop, Label::kNear);

  // Restore.
  __ pop(eax);  // Address of arguments object.
  __ Drop(4);

  // Return.
  __ ret(0);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ pop(eax);   // Remove saved mapped parameter count.
  __ pop(edi);   // Pop saved function.
  __ pop(eax);   // Remove saved parameter count.
  __ pop(eax);   // Pop return address.
  __ push(edi);  // Push function.
  __ push(edx);  // Push parameters pointer.
  __ push(ecx);  // Push parameter count.
  __ push(eax);  // Push return address.
  __ TailCallRuntime(Runtime::kNewSloppyArguments);
}


void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  // ecx : number of parameters (tagged)
  // edx : parameters pointer
  // edi : function
  // esp[0] : return address

  DCHECK(edi.is(ArgumentsAccessNewDescriptor::function()));
  DCHECK(ecx.is(ArgumentsAccessNewDescriptor::parameter_count()));
  DCHECK(edx.is(ArgumentsAccessNewDescriptor::parameter_pointer()));

  // Check if the calling frame is an arguments adaptor frame.
  Label try_allocate, runtime;
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(eax, Operand(ebx, StandardFrameConstants::kContextOffset));
  __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &try_allocate, Label::kNear);

  // Patch the arguments.length and the parameters pointer.
  __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ lea(edx,
         Operand(ebx, ecx, times_2, StandardFrameConstants::kCallerSPOffset));

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ mov(eax, ecx);
  __ test(eax, eax);
  __ j(zero, &add_arguments_object, Label::kNear);
  __ lea(eax, Operand(eax, times_2, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ add(eax, Immediate(Heap::kStrictArgumentsObjectSize));

  // Do the allocation of both objects in one go.
  __ Allocate(eax, eax, ebx, no_reg, &runtime, TAG_OBJECT);

  // Get the arguments map from the current native context.
  __ mov(edi, NativeContextOperand());
  __ mov(edi, ContextOperand(edi, Context::STRICT_ARGUMENTS_MAP_INDEX));

  __ mov(FieldOperand(eax, JSObject::kMapOffset), edi);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
         masm->isolate()->factory()->empty_fixed_array());
  __ mov(FieldOperand(eax, JSObject::kElementsOffset),
         masm->isolate()->factory()->empty_fixed_array());

  // Get the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ AssertSmi(ecx);
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                      Heap::kArgumentsLengthIndex * kPointerSize),
         ecx);

  // If there are no actual arguments, we're done.
  Label done;
  __ test(ecx, ecx);
  __ j(zero, &done, Label::kNear);

  // Set up the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ lea(edi, Operand(eax, Heap::kStrictArgumentsObjectSize));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate()->factory()->fixed_array_map()));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);

  // Untag the length for the loop below.
  __ SmiUntag(ecx);

  // Copy the fixed array slots.
  Label loop;
  __ bind(&loop);
  __ mov(ebx, Operand(edx, -1 * kPointerSize));  // Skip receiver.
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
  __ add(edi, Immediate(kPointerSize));
  __ sub(edx, Immediate(kPointerSize));
  __ dec(ecx);
  __ j(not_zero, &loop);

  // Return.
  __ bind(&done);
  __ ret(0);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ pop(eax);   // Pop return address.
  __ push(edi);  // Push function.
  __ push(edx);  // Push parameters pointer.
  __ push(ecx);  // Push parameter count.
  __ push(eax);  // Push return address.
  __ TailCallRuntime(Runtime::kNewStrictArguments);
}


void RestParamAccessStub::GenerateNew(MacroAssembler* masm) {
  // ecx : number of parameters (tagged)
  // edx : parameters pointer
  // ebx : rest parameter index (tagged)
  // esp[0] : return address

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  __ mov(edi, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(eax, Operand(edi, StandardFrameConstants::kContextOffset));
  __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &runtime);

  // Patch the arguments.length and the parameters pointer.
  __ mov(ecx, Operand(edi, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ lea(edx,
         Operand(edi, ecx, times_2, StandardFrameConstants::kCallerSPOffset));

  __ bind(&runtime);
  __ pop(eax);   // Save return address.
  __ push(ecx);  // Push number of parameters.
  __ push(edx);  // Push parameters pointer.
  __ push(ebx);  // Push rest parameter index.
  __ push(eax);  // Push return address.
  __ TailCallRuntime(Runtime::kNewRestParam);
}


void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Jump straight to the runtime if native RegExp is not selected at compile
  // time, or if the regexp entry in generated code has been disabled by a
  // runtime switch or at compilation.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: last_match_info (expected JSArray)
  //  esp[8]: previous index
  //  esp[12]: subject string
  //  esp[16]: JSRegExp object

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;
  static const int kSubjectOffset = 3 * kPointerSize;
  static const int kJSRegExpOffset = 4 * kPointerSize;

  Label runtime;
  Factory* factory = isolate()->factory();

  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate());
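  // A size of zero means no RegExp stack has been allocated yet; in that
  // case bail out to the runtime, which takes care of the allocation.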
  __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ test(ebx, ebx);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
  __ j(not_equal, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    __ test(ecx, Immediate(kSmiTagMask));
    __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  // ecx: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
  __ cmp(ebx, Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
  __ j(not_equal, &runtime);

  // ecx: RegExp data (FixedArray)
  // Check that the number of captures fits in the static offsets vector
  // buffer.
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Check (number_of_captures + 1) * 2 <= offsets vector size
  // Or          number_of_captures * 2 <= offsets vector size - 2
  // Multiplying by 2 comes for free since edx is smi-tagged.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
  __ cmp(edx, Isolate::kJSRegexpStaticOffsetsVectorSize - 2);
  __ j(above, &runtime);

  // Reset offset for possibly sliced string.
  __ Move(edi, Immediate(0));
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ JumpIfSmi(eax, &runtime);
  __ mov(edx, eax);  // Make a copy of the original subject string.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));

  // eax: subject string
  // edx: subject string
  // ebx: subject string instance type
  // ecx: RegExp data (FixedArray)
  // Handle subject string according to its encoding and representation:
  // (1) Sequential two byte?  If yes, go to (9).
  // (2) Sequential one byte?  If yes, go to (6).
  // (3) Anything but sequential or cons?  If yes, go to (7).
  // (4) Cons string.  If the string is flat, replace subject with first string.
  //     Otherwise bailout.
  // (5a) Is subject sequential two byte?  If yes, go to (9).
  // (5b) Is subject external?  If yes, go to (8).
  // (6) One byte sequential.  Load regexp code for one byte.
  // (E) Carry on.
  /// [...]

  // Deferred code at the end of the stub:
  // (7) Not a long external string?  If yes, go to (10).
  // (8) External string.  Make it, offset-wise, look like a sequential string.
  // (8a) Is the external string one byte?  If yes, go to (6).
  // (9) Two byte sequential.  Load regexp code for two byte. Go to (E).
1278   // (10) Short external string or not a string?  If yes, bail out to runtime.
1279   // (11) Sliced string.  Replace subject with parent. Go to (5a).
1280 
1281   Label seq_one_byte_string /* 6 */, seq_two_byte_string /* 9 */,
1282         external_string /* 8 */, check_underlying /* 5a */,
1283         not_seq_nor_cons /* 7 */, check_code /* E */,
1284         not_long_external /* 10 */;
1285 
1286   // (1) Sequential two byte?  If yes, go to (9).
1287   __ and_(ebx, kIsNotStringMask |
1288                kStringRepresentationMask |
1289                kStringEncodingMask |
1290                kShortExternalStringMask);
1291   STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
1292   __ j(zero, &seq_two_byte_string);  // Go to (9).
1293 
1294   // (2) Sequential one byte?  If yes, go to (6).
1295   // Any other sequential string must be one byte.
1296   __ and_(ebx, Immediate(kIsNotStringMask |
1297                          kStringRepresentationMask |
1298                          kShortExternalStringMask));
1299   __ j(zero, &seq_one_byte_string, Label::kNear);  // Go to (6).
1300 
1301   // (3) Anything but sequential or cons?  If yes, go to (7).
1302   // We check whether the subject string is a cons, since sequential strings
1303   // have already been covered.
1304   STATIC_ASSERT(kConsStringTag < kExternalStringTag);
1305   STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
1306   STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
1307   STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
1308   __ cmp(ebx, Immediate(kExternalStringTag));
1309   __ j(greater_equal, &not_seq_nor_cons);  // Go to (7).
1310 
1311   // (4) Cons string.  Check that it's flat.
1312   // Replace subject with first string and reload instance type.
1313   __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string());
1314   __ j(not_equal, &runtime);
1315   __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
1316   __ bind(&check_underlying);
1317   __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
1318   __ mov(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
1319 
1320   // (5a) Is subject sequential two byte?  If yes, go to (9).
1321   __ test_b(ebx, kStringRepresentationMask | kStringEncodingMask);
1322   STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
1323   __ j(zero, &seq_two_byte_string);  // Go to (9).
1324   // (5b) Is subject external?  If yes, go to (8).
1325   __ test_b(ebx, kStringRepresentationMask);
1326   // The underlying external string is never a short external string.
1327   STATIC_ASSERT(ExternalString::kMaxShortLength < ConsString::kMinLength);
1328   STATIC_ASSERT(ExternalString::kMaxShortLength < SlicedString::kMinLength);
1329   __ j(not_zero, &external_string);  // Go to (8).
1330 
1331   // eax: sequential subject string (or look-alike, external string)
1332   // edx: original subject string
1333   // ecx: RegExp data (FixedArray)
1334   // (6) One byte sequential.  Load regexp code for one byte.
1335   __ bind(&seq_one_byte_string);
1336   // Load previous index and check range before edx is overwritten.  We have
1337   // to use edx instead of eax here because eax may have been changed to
1338   // merely look like a sequential string when it is actually external.
1339   __ mov(ebx, Operand(esp, kPreviousIndexOffset));
1340   __ JumpIfNotSmi(ebx, &runtime);
1341   __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
1342   __ j(above_equal, &runtime);
1343   __ mov(edx, FieldOperand(ecx, JSRegExp::kDataOneByteCodeOffset));
1344   __ Move(ecx, Immediate(1));  // Type is one byte.
1345 
1346   // (E) Carry on.  String handling is done.
1347   __ bind(&check_code);
1348   // edx: irregexp code
1349   // Check that the irregexp code has been generated for the actual string
1350   // encoding. If it has, the field contains a code object otherwise it contains
1351   // a smi (code flushing support).
1352   __ JumpIfSmi(edx, &runtime);
1353 
1354   // eax: subject string
1355   // ebx: previous index (smi)
1356   // edx: code
1357   // ecx: encoding of subject string (1 if one_byte, 0 if two_byte);
1358   // All checks done. Now push arguments for native regexp code.
1359   Counters* counters = isolate()->counters();
1360   __ IncrementCounter(counters->regexp_entry_native(), 1);
1361 
1362   // Isolates: note we add an additional parameter here (isolate pointer).
1363   static const int kRegExpExecuteArguments = 9;
1364   __ EnterApiExitFrame(kRegExpExecuteArguments);
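  // For reference, the nine arguments stored below, in argument order
  // (reconstructed from the stores that follow; the native entry point's
  // exact C signature is not spelled out here): subject string, previous
  // index, start of string data, end of string data, static offsets
  // vector, number of capture registers, top of the backtrack stack,
  // direct-call flag, and the isolate address.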
1365 
1366   // Argument 9: Pass current isolate address.
1367   __ mov(Operand(esp, 8 * kPointerSize),
1368       Immediate(ExternalReference::isolate_address(isolate())));
1369 
1370   // Argument 8: Indicate that this is a direct call from JavaScript.
1371   __ mov(Operand(esp, 7 * kPointerSize), Immediate(1));
1372 
1373   // Argument 7: Start (high end) of backtracking stack memory area.
1374   __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
1375   __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
1376   __ mov(Operand(esp, 6 * kPointerSize), esi);
1377 
1378   // Argument 6: Set the number of capture registers to zero to force global
1379   // regexps to behave as non-global.  This does not affect non-global regexps.
1380   __ mov(Operand(esp, 5 * kPointerSize), Immediate(0));
1381 
1382   // Argument 5: static offsets vector buffer.
1383   __ mov(Operand(esp, 4 * kPointerSize),
1384          Immediate(ExternalReference::address_of_static_offsets_vector(
1385              isolate())));
1386 
1387   // Argument 2: Previous index.
1388   __ SmiUntag(ebx);
1389   __ mov(Operand(esp, 1 * kPointerSize), ebx);
1390 
1391   // Argument 1: Original subject string.
1392   // The original subject is in the previous stack frame. Therefore we have to
1393   // use ebp, which points exactly to one pointer size below the previous esp.
1394   // (Because creating a new stack frame pushes the previous ebp onto the stack
1395   // and thereby moves up esp by one kPointerSize.)
1396   __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
1397   __ mov(Operand(esp, 0 * kPointerSize), esi);
1398 
1399   // esi: original subject string
1400   // eax: underlying subject string
1401   // ebx: previous index
1402   // ecx: encoding of subject string (1 if one_byte, 0 if two_byte);
1403   // edx: code
1404   // Argument 4: End of string data
1405   // Argument 3: Start of string data
1406   // Prepare start and end index of the input.
1407   // Load the length from the original sliced string if that is the case.
1408   __ mov(esi, FieldOperand(esi, String::kLengthOffset));
1409   __ add(esi, edi);  // Calculate input end wrt offset.
1410   __ SmiUntag(edi);
1411   __ add(ebx, edi);  // Calculate input start wrt offset.
1412 
1413   // ebx: start index of the input string
1414   // esi: end index of the input string
1415   Label setup_two_byte, setup_rest;
1416   __ test(ecx, ecx);
1417   __ j(zero, &setup_two_byte, Label::kNear);
1418   __ SmiUntag(esi);
1419   __ lea(ecx, FieldOperand(eax, esi, times_1, SeqOneByteString::kHeaderSize));
1420   __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
1421   __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqOneByteString::kHeaderSize));
1422   __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.
1423   __ jmp(&setup_rest, Label::kNear);
1424 
1425   __ bind(&setup_two_byte);
1426   STATIC_ASSERT(kSmiTag == 0);
1427   STATIC_ASSERT(kSmiTagSize == 1);  // esi is a smi, i.e. already doubled.
1428   __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize));
1429   __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
1430   __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
1431   __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.
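  // Note on the scaling above: ebx was untagged earlier, so it needs an
  // explicit times_2 for two-byte characters, while esi is still a smi.
  // Since a smi is the value shifted left by one (kSmiTagSize == 1), the
  // tag itself supplies the factor of two and times_1 suffices for esi.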
1432 
1433   __ bind(&setup_rest);
1434 
1435   // Locate the code entry and call it.
1436   __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1437   __ call(edx);
1438 
1439   // Drop arguments and come back to JS mode.
1440   __ LeaveApiExitFrame(true);
1441 
1442   // Check the result.
1443   Label success;
1444   __ cmp(eax, 1);
1445   // We expect exactly one result since we force the called regexp to behave
1446   // as non-global.
1447   __ j(equal, &success);
1448   Label failure;
1449   __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
1450   __ j(equal, &failure);
1451   __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
1452   // If not exception it can only be retry. Handle that in the runtime system.
1453   __ j(not_equal, &runtime);
1454   // Result must now be exception. If there is no pending exception already, a
1455   // stack overflow (on the backtrack stack) was detected in RegExp code, but
1456   // the exception was not created yet. Handle that in the runtime system.
1457   // TODO(592): Rerun the RegExp to get the stack overflow exception.
1458   ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
1459                                       isolate());
1460   __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
1461   __ mov(eax, Operand::StaticVariable(pending_exception));
1462   __ cmp(edx, eax);
1463   __ j(equal, &runtime);
1464 
1465   // For exception, throw the exception again.
1466   __ TailCallRuntime(Runtime::kRegExpExecReThrow);
1467 
1468   __ bind(&failure);
1469   // For failure to match, return null.
1470   __ mov(eax, factory->null_value());
1471   __ ret(4 * kPointerSize);
1472 
1473   // Load RegExp data.
1474   __ bind(&success);
1475   __ mov(eax, Operand(esp, kJSRegExpOffset));
1476   __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
1477   __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
1478   // Calculate number of capture registers (number_of_captures + 1) * 2.
1479   STATIC_ASSERT(kSmiTag == 0);
1480   STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
1481   __ add(edx, Immediate(2));  // edx was a smi.
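  // Worked arithmetic for the add above: edx holds a smi, i.e. the capture
  // count n encoded as 2 * n.  Adding 2 therefore produces 2 * (n + 1),
  // which is exactly the untagged value of (number_of_captures + 1) * 2
  // registers, computed in a single instruction.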
1482 
1483   // edx: Number of capture registers
1484   // Load last_match_info which is still known to be a fast case JSArray.
1485   // Check that the fourth argument is a JSArray object.
1486   __ mov(eax, Operand(esp, kLastMatchInfoOffset));
1487   __ JumpIfSmi(eax, &runtime);
1488   __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
1489   __ j(not_equal, &runtime);
1490   // Check that the JSArray is in fast case.
1491   __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
1492   __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
1493   __ cmp(eax, factory->fixed_array_map());
1494   __ j(not_equal, &runtime);
1495   // Check that the last match info has space for the capture registers and the
1496   // additional information.
1497   __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
1498   __ SmiUntag(eax);
1499   __ sub(eax, Immediate(RegExpImpl::kLastMatchOverhead));
1500   __ cmp(edx, eax);
1501   __ j(greater, &runtime);
1502 
1503   // ebx: last_match_info backing store (FixedArray)
1504   // edx: number of capture registers
1505   // Store the capture count.
1506   __ SmiTag(edx);  // Number of capture registers to smi.
1507   __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
1508   __ SmiUntag(edx);  // Number of capture registers back from smi.
1509   // Store last subject and last input.
1510   __ mov(eax, Operand(esp, kSubjectOffset));
1511   __ mov(ecx, eax);
1512   __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
1513   __ RecordWriteField(ebx,
1514                       RegExpImpl::kLastSubjectOffset,
1515                       eax,
1516                       edi,
1517                       kDontSaveFPRegs);
1518   __ mov(eax, ecx);
1519   __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
1520   __ RecordWriteField(ebx,
1521                       RegExpImpl::kLastInputOffset,
1522                       eax,
1523                       edi,
1524                       kDontSaveFPRegs);
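  // The two RecordWriteField calls above are the write barriers for the
  // pointer stores into the last-match-info FixedArray; they may clobber
  // their value and scratch registers (eax and edi here), which is why the
  // subject was copied to ecx and is restored from it in between.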
1525 
1526   // Get the static offsets vector filled by the native regexp code.
1527   ExternalReference address_of_static_offsets_vector =
1528       ExternalReference::address_of_static_offsets_vector(isolate());
1529   __ mov(ecx, Immediate(address_of_static_offsets_vector));
1530 
1531   // ebx: last_match_info backing store (FixedArray)
1532   // ecx: offsets vector
1533   // edx: number of capture registers
1534   Label next_capture, done;
1535   // Capture register counter starts from number of capture registers and
1536   // counts down until wrapping after zero.
1537   __ bind(&next_capture);
1538   __ sub(edx, Immediate(1));
1539   __ j(negative, &done, Label::kNear);
1540   // Read the value from the static offsets vector buffer.
1541   __ mov(edi, Operand(ecx, edx, times_int_size, 0));
1542   __ SmiTag(edi);
1543   // Store the smi value in the last match info.
1544   __ mov(FieldOperand(ebx,
1545                       edx,
1546                       times_pointer_size,
1547                       RegExpImpl::kFirstCaptureOffset),
1548                       edi);
1549   __ jmp(&next_capture);
1550   __ bind(&done);
1551 
1552   // Return last match info.
1553   __ mov(eax, Operand(esp, kLastMatchInfoOffset));
1554   __ ret(4 * kPointerSize);
1555 
1556   // Do the runtime call to execute the regexp.
1557   __ bind(&runtime);
1558   __ TailCallRuntime(Runtime::kRegExpExec);
1559 
1560   // Deferred code for string handling.
1561   // (7) Not a long external string?  If yes, go to (10).
1562   __ bind(&not_seq_nor_cons);
1563   // Compare flags are still set from (3).
1564   __ j(greater, &not_long_external, Label::kNear);  // Go to (10).
1565 
1566   // (8) External string.  Short external strings have been ruled out.
1567   __ bind(&external_string);
1568   // Reload instance type.
1569   __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
1570   __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
1571   if (FLAG_debug_code) {
1572     // Assert that we do not have a cons or slice (indirect strings) here.
1573     // Sequential strings have already been ruled out.
1574     __ test_b(ebx, kIsIndirectStringMask);
1575     __ Assert(zero, kExternalStringExpectedButNotFound);
1576   }
1577   __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
1578   // Move the pointer so that offset-wise, it looks like a sequential string.
1579   STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
1580   __ sub(eax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
1581   STATIC_ASSERT(kTwoByteStringTag == 0);
1582   // (8a) Is the external string one byte?  If yes, go to (6).
1583   __ test_b(ebx, kStringEncodingMask);
1584   __ j(not_zero, &seq_one_byte_string);  // Go to (6).
1585 
1586   // eax: sequential subject string (or look-alike, external string)
1587   // edx: original subject string
1588   // ecx: RegExp data (FixedArray)
1589   // (9) Two byte sequential.  Load regexp code for two byte. Go to (E).
1590   __ bind(&seq_two_byte_string);
1591   // Load previous index and check range before edx is overwritten.  We have
1592   // to use edx instead of eax here because eax may have been changed to
1593   // merely look like a sequential string when it is actually external.
1594   __ mov(ebx, Operand(esp, kPreviousIndexOffset));
1595   __ JumpIfNotSmi(ebx, &runtime);
1596   __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
1597   __ j(above_equal, &runtime);
1598   __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
1599   __ Move(ecx, Immediate(0));  // Type is two byte.
1600   __ jmp(&check_code);  // Go to (E).
1601 
1602   // (10) Not a string or a short external string?  If yes, bail out to runtime.
1603   __ bind(&not_long_external);
1604   // Catch non-string subject or short external string.
1605   STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
1606   __ test(ebx, Immediate(kIsNotStringMask | kShortExternalStringTag));
1607   __ j(not_zero, &runtime);
1608 
1609   // (11) Sliced string.  Replace subject with parent.  Go to (5a).
1610   // Load offset into edi and replace subject string with parent.
1611   __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
1612   __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
1613   __ jmp(&check_underlying);  // Go to (5a).
1614 #endif  // V8_INTERPRETED_REGEXP
1615 }
1616 
1617 
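// A short note on the helper below (behaviour read off its body): it maps
// a comparison condition to the answer returned when the comparison must
// come out "negative", e.g. because an operand is undefined or NaN.  For
// cc == less or less_equal it returns GREATER, otherwise LESS, so the
// original ordering test evaluates to false either way.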
1618 static int NegativeComparisonResult(Condition cc) {
1619   DCHECK(cc != equal);
1620   DCHECK((cc == less) || (cc == less_equal)
1621       || (cc == greater) || (cc == greater_equal));
1622   return (cc == greater || cc == greater_equal) ? LESS : GREATER;
1623 }
1624 
1625 
1626 static void CheckInputType(MacroAssembler* masm, Register input,
1627                            CompareICState::State expected, Label* fail) {
1628   Label ok;
1629   if (expected == CompareICState::SMI) {
1630     __ JumpIfNotSmi(input, fail);
1631   } else if (expected == CompareICState::NUMBER) {
1632     __ JumpIfSmi(input, &ok);
1633     __ cmp(FieldOperand(input, HeapObject::kMapOffset),
1634            Immediate(masm->isolate()->factory()->heap_number_map()));
1635     __ j(not_equal, fail);
1636   }
1637   // We could be strict about internalized/non-internalized here, but as long as
1638   // hydrogen doesn't care, the stub doesn't have to care either.
1639   __ bind(&ok);
1640 }
1641 
1642 
1643 static void BranchIfNotInternalizedString(MacroAssembler* masm,
1644                                           Label* label,
1645                                           Register object,
1646                                           Register scratch) {
1647   __ JumpIfSmi(object, label);
1648   __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
1649   __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
1650   STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
1651   __ test(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
1652   __ j(not_zero, label);
1653 }
1654 
1655 
1656 void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
1657   Label runtime_call, check_unequal_objects;
1658   Condition cc = GetCondition();
1659 
1660   Label miss;
1661   CheckInputType(masm, edx, left(), &miss);
1662   CheckInputType(masm, eax, right(), &miss);
1663 
1664   // Compare two smis.
1665   Label non_smi, smi_done;
1666   __ mov(ecx, edx);
1667   __ or_(ecx, eax);
1668   __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
1669   __ sub(edx, eax);  // Return on the result of the subtraction.
1670   __ j(no_overflow, &smi_done, Label::kNear);
1671   __ not_(edx);  // Correct sign in case of overflow. edx is never 0 here.
1672   __ bind(&smi_done);
1673   __ mov(eax, edx);
1674   __ ret(0);
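  // The smi fast path above works because both operands are tagged
  // integers: edx - eax has the sign of the untagged difference, which is
  // all a comparison needs.  On overflow the sign bit is wrong, and not_
  // flips it (the result cannot be zero when the subtraction overflows).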
1675   __ bind(&non_smi);
1676 
1677   // NOTICE! This code is only reached after a smi-fast-case check, so
1678   // it is certain that at least one operand isn't a smi.
1679 
1680   // Identical objects can be compared fast, but there are some tricky cases
1681   // for NaN and undefined.
1682   Label generic_heap_number_comparison;
1683   {
1684     Label not_identical;
1685     __ cmp(eax, edx);
1686     __ j(not_equal, &not_identical);
1687 
1688     if (cc != equal) {
1689       // Check for undefined.  undefined OP undefined is false even though
1690       // undefined == undefined.
1691       __ cmp(edx, isolate()->factory()->undefined_value());
1692       if (is_strong(strength())) {
1693         // In strong mode, this comparison must throw, so call the runtime.
1694         __ j(equal, &runtime_call, Label::kFar);
1695       } else {
1696         Label check_for_nan;
1697         __ j(not_equal, &check_for_nan, Label::kNear);
1698         __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
1699         __ ret(0);
1700         __ bind(&check_for_nan);
1701       }
1702     }
1703 
1704     // Test for NaN. Compare heap numbers in a general way,
1705     // to handle NaNs correctly.
1706     __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
1707            Immediate(isolate()->factory()->heap_number_map()));
1708     __ j(equal, &generic_heap_number_comparison, Label::kNear);
1709     if (cc != equal) {
1710       __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
1711       __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
1712       // Call runtime on identical JSObjects.  Otherwise return equal.
1713       __ cmpb(ecx, static_cast<uint8_t>(FIRST_JS_RECEIVER_TYPE));
1714       __ j(above_equal, &runtime_call, Label::kFar);
1715       // Call runtime on identical symbols since we need to throw a TypeError.
1716       __ cmpb(ecx, static_cast<uint8_t>(SYMBOL_TYPE));
1717       __ j(equal, &runtime_call, Label::kFar);
1718       // Call runtime on identical SIMD values since we must throw a TypeError.
1719       __ cmpb(ecx, static_cast<uint8_t>(SIMD128_VALUE_TYPE));
1720       __ j(equal, &runtime_call, Label::kFar);
1721       if (is_strong(strength())) {
1722         // We have already tested for smis and heap numbers, so if both
1723         // arguments are not strings we must proceed to the slow case.
1724         __ test(ecx, Immediate(kIsNotStringMask));
1725         __ j(not_zero, &runtime_call, Label::kFar);
1726       }
1727     }
1728     __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
1729     __ ret(0);
1730 
1731 
1732     __ bind(&not_identical);
1733   }
1734 
1735   // Strict equality can quickly decide whether objects are equal.
1736   // Non-strict object equality is slower, so it is handled later in the stub.
1737   if (cc == equal && strict()) {
1738     Label slow;  // Fallthrough label.
1739     Label not_smis;
1740     // If we're doing a strict equality comparison, we don't have to do
1741     // type conversion, so we generate code to do fast comparison for objects
1742     // and oddballs. Non-smi numbers and strings still go through the usual
1743     // slow-case code.
1744     // If either is a Smi (we know that not both are), then they can only
1745     // be equal if the other is a HeapNumber. If so, use the slow case.
1746     STATIC_ASSERT(kSmiTag == 0);
1747     DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0));
1748     __ mov(ecx, Immediate(kSmiTagMask));
1749     __ and_(ecx, eax);
1750     __ test(ecx, edx);
1751     __ j(not_zero, &not_smis, Label::kNear);
1752     // One operand is a smi.
1753 
1754     // Check whether the non-smi is a heap number.
1755     STATIC_ASSERT(kSmiTagMask == 1);
1756     // ecx still holds eax & kSmiTagMask, which is either zero or one.
1757     __ sub(ecx, Immediate(0x01));
1758     __ mov(ebx, edx);
1759     __ xor_(ebx, eax);
1760     __ and_(ebx, ecx);  // ebx holds either 0 or eax ^ edx.
1761     __ xor_(ebx, eax);
1762     // if eax was smi, ebx is now edx, else eax.
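    // Branchless select, step by step: ecx = (eax & kSmiTagMask) - 1 is
    // 0xffffffff when eax is a smi and 0 when it is a heap object, so
    // ebx = ((edx ^ eax) & ecx) ^ eax yields edx in the first case and
    // eax in the second.  Either way ebx now holds the non-smi operand.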
1763 
1764     // Check if the non-smi operand is a heap number.
1765     __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
1766            Immediate(isolate()->factory()->heap_number_map()));
1767     // If heap number, handle it in the slow case.
1768     __ j(equal, &slow, Label::kNear);
1769     // Return non-equal (ebx is not zero)
1770     __ mov(eax, ebx);
1771     __ ret(0);
1772 
1773     __ bind(&not_smis);
1774     // If either operand is a JSObject or an oddball value, then they are not
1775     // equal since their pointers are different.
1776     // There is no test for undetectability in strict equality.
1777 
1778     // Get the type of the first operand.
1779     // If the first object is a JS object, we have done pointer comparison.
1780     Label first_non_object;
1781     STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
1782     __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
1783     __ j(below, &first_non_object, Label::kNear);
1784 
1785     // Return non-zero (eax is not zero)
1786     Label return_not_equal;
1787     STATIC_ASSERT(kHeapObjectTag != 0);
1788     __ bind(&return_not_equal);
1789     __ ret(0);
1790 
1791     __ bind(&first_non_object);
1792     // Check for oddballs: true, false, null, undefined.
1793     __ CmpInstanceType(ecx, ODDBALL_TYPE);
1794     __ j(equal, &return_not_equal);
1795 
1796     __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
1797     __ j(above_equal, &return_not_equal);
1798 
1799     // Check for oddballs: true, false, null, undefined.
1800     __ CmpInstanceType(ecx, ODDBALL_TYPE);
1801     __ j(equal, &return_not_equal);
1802 
1803     // Fall through to the general case.
1804     __ bind(&slow);
1805   }
1806 
1807   // Generate the number comparison code.
1808   Label non_number_comparison;
1809   Label unordered;
1810   __ bind(&generic_heap_number_comparison);
1811 
1812   FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
1813   __ ucomisd(xmm0, xmm1);
1814   // Don't base result on EFLAGS when a NaN is involved.
1815   __ j(parity_even, &unordered, Label::kNear);
1816 
1817   __ mov(eax, 0);  // equal
1818   __ mov(ecx, Immediate(Smi::FromInt(1)));
1819   __ cmov(above, eax, ecx);
1820   __ mov(ecx, Immediate(Smi::FromInt(-1)));
1821   __ cmov(below, eax, ecx);
1822   __ ret(0);
1823 
1824   // If one of the numbers was NaN, then the result is always false.
1825   // The cc is never not-equal.
1826   __ bind(&unordered);
1827   DCHECK(cc != not_equal);
1828   if (cc == less || cc == less_equal) {
1829     __ mov(eax, Immediate(Smi::FromInt(1)));
1830   } else {
1831     __ mov(eax, Immediate(Smi::FromInt(-1)));
1832   }
1833   __ ret(0);
1834 
1835   // The number comparison code did not provide a valid result.
1836   __ bind(&non_number_comparison);
1837 
1838   // Fast negative check for internalized-to-internalized equality.
1839   Label check_for_strings;
1840   if (cc == equal) {
1841     BranchIfNotInternalizedString(masm, &check_for_strings, eax, ecx);
1842     BranchIfNotInternalizedString(masm, &check_for_strings, edx, ecx);
1843 
1844     // We've already checked for object identity, so if both operands
1845     // are internalized they aren't equal. Register eax already holds a
1846     // non-zero value, which indicates not equal, so just return.
1847     __ ret(0);
1848   }
1849 
1850   __ bind(&check_for_strings);
1851 
1852   __ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx,
1853                                            &check_unequal_objects);
1854 
1855   // Inline comparison of one-byte strings.
1856   if (cc == equal) {
1857     StringHelper::GenerateFlatOneByteStringEquals(masm, edx, eax, ecx, ebx);
1858   } else {
1859     StringHelper::GenerateCompareFlatOneByteStrings(masm, edx, eax, ecx, ebx,
1860                                                     edi);
1861   }
1862 #ifdef DEBUG
1863   __ Abort(kUnexpectedFallThroughFromStringComparison);
1864 #endif
1865 
1866   __ bind(&check_unequal_objects);
1867   if (cc == equal && !strict()) {
1868     // Non-strict equality.  Objects are unequal if
1869     // they are both JSObjects and not undetectable,
1870     // and their pointers are different.
1871     Label return_unequal;
1872     // At most one is a smi, so we can test for smi by adding the two.
1873     // A smi plus a heap object has the low bit set, a heap object plus
1874     // a heap object has the low bit clear.
1875     STATIC_ASSERT(kSmiTag == 0);
1876     STATIC_ASSERT(kSmiTagMask == 1);
1877     __ lea(ecx, Operand(eax, edx, times_1, 0));
1878     __ test(ecx, Immediate(kSmiTagMask));
1879     __ j(not_zero, &runtime_call, Label::kNear);
1880     __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
1881     __ j(below, &runtime_call, Label::kNear);
1882     __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ebx);
1883     __ j(below, &runtime_call, Label::kNear);
1884     // We do not bail out after this point.  Both are JSObjects, and
1885     // they are equal if and only if both are undetectable.
1886     // The and of the undetectable flags is 1 if and only if they are equal.
1887     __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
1888               1 << Map::kIsUndetectable);
1889     __ j(zero, &return_unequal, Label::kNear);
1890     __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
1891               1 << Map::kIsUndetectable);
1892     __ j(zero, &return_unequal, Label::kNear);
1893     // The objects are both undetectable, so they both compare as the value
1894     // undefined, and are equal.
1895     __ Move(eax, Immediate(EQUAL));
1896     __ bind(&return_unequal);
1897     // Return non-equal by returning the non-zero object pointer in eax,
1898     // or return equal if we fell through to here.
1899     __ ret(0);
1900   }
1901   __ bind(&runtime_call);
1902 
1903   // Push arguments below the return address.
1904   __ pop(ecx);
1905   __ push(edx);
1906   __ push(eax);
1907 
1908   // Figure out which native to call and setup the arguments.
1909   if (cc == equal) {
1910     __ push(ecx);
1911     __ TailCallRuntime(strict() ? Runtime::kStrictEquals : Runtime::kEquals);
1912   } else {
1913     __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
1914 
1915     // Restore return address on the stack.
1916     __ push(ecx);
1917 
1918     // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
1919     // tagged as a small integer.
1920     __ TailCallRuntime(is_strong(strength()) ? Runtime::kCompare_Strong
1921                                              : Runtime::kCompare);
1922   }
1923 
1924   __ bind(&miss);
1925   GenerateMiss(masm);
1926 }
1927 
1928 
1929 static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
1930   // eax : number of arguments to the construct function
1931   // ebx : feedback vector
1932   // edx : slot in feedback vector (Smi)
1933   // edi : the function to call
1934 
1935   {
1936     FrameScope scope(masm, StackFrame::INTERNAL);
1937 
1938     // Number-of-arguments register must be smi-tagged to call out.
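    // (Presumed rationale: while this internal frame is live the GC scans
    // these slots as tagged values, and a raw argument count could be
    // mistaken for a heap pointer; as a smi it is safely ignored.)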
1939     __ SmiTag(eax);
1940     __ push(eax);
1941     __ push(edi);
1942     __ push(edx);
1943     __ push(ebx);
1944 
1945     __ CallStub(stub);
1946 
1947     __ pop(ebx);
1948     __ pop(edx);
1949     __ pop(edi);
1950     __ pop(eax);
1951     __ SmiUntag(eax);
1952   }
1953 }
1954 
1955 
1956 static void GenerateRecordCallTarget(MacroAssembler* masm) {
1957   // Cache the called function in a feedback vector slot.  Cache states
1958   // are uninitialized, monomorphic (indicated by a JSFunction), and
1959   // megamorphic.
1960   // eax : number of arguments to the construct function
1961   // ebx : feedback vector
1962   // edx : slot in feedback vector (Smi)
1963   // edi : the function to call
1964   Isolate* isolate = masm->isolate();
1965   Label initialize, done, miss, megamorphic, not_array_function;
1966 
1967   // Load the cache state into ecx.
1968   __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
1969                            FixedArray::kHeaderSize));
1970 
1971   // A monomorphic cache hit or an already megamorphic state: invoke the
1972   // function without changing the state.
1973   // We don't know if ecx is a WeakCell or a Symbol, but it's harmless to read
1974   // at this position in a symbol (see static asserts in
1975   // type-feedback-vector.h).
1976   Label check_allocation_site;
1977   __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
1978   __ j(equal, &done, Label::kFar);
1979   __ CompareRoot(ecx, Heap::kmegamorphic_symbolRootIndex);
1980   __ j(equal, &done, Label::kFar);
1981   __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
1982                  Heap::kWeakCellMapRootIndex);
1983   __ j(not_equal, &check_allocation_site);
1984 
1985   // If the weak cell is cleared, we have a new chance to become monomorphic.
1986   __ JumpIfSmi(FieldOperand(ecx, WeakCell::kValueOffset), &initialize);
1987   __ jmp(&megamorphic);
1988 
1989   __ bind(&check_allocation_site);
1990   // If we came here, we need to see if we are the array function.
1991   // If we didn't have a matching function, and we didn't find the
1992   // megamorphic sentinel, then the slot holds either some other function or
1993   // an AllocationSite.
1994   __ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex);
1995   __ j(not_equal, &miss);
1996 
1997   // Make sure the function is the Array() function
1998   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
1999   __ cmp(edi, ecx);
2000   __ j(not_equal, &megamorphic);
2001   __ jmp(&done, Label::kFar);
2002 
2003   __ bind(&miss);
2004 
2005   // A monomorphic miss (i.e., here the cache is not uninitialized) goes
2006   // megamorphic.
2007   __ CompareRoot(ecx, Heap::kuninitialized_symbolRootIndex);
2008   __ j(equal, &initialize);
2009   // MegamorphicSentinel is an immortal immovable object (undefined) so no
2010   // write-barrier is needed.
2011   __ bind(&megamorphic);
2012   __ mov(
2013       FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
2014       Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
2015   __ jmp(&done, Label::kFar);
2016 
2017   // An uninitialized cache is patched with the function, or with an
2018   // AllocationSite (recording the ElementsKind) if it is the Array constructor.
2019   __ bind(&initialize);
2020   // Make sure the function is the Array() function
2021   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
2022   __ cmp(edi, ecx);
2023   __ j(not_equal, &not_array_function);
2024 
2025   // The target function is the Array constructor.
2026   // Create an AllocationSite if we don't already have one, and store it in
2027   // the slot.
2028   CreateAllocationSiteStub create_stub(isolate);
2029   CallStubInRecordCallTarget(masm, &create_stub);
2030   __ jmp(&done);
2031 
2032   __ bind(&not_array_function);
2033   CreateWeakCellStub weak_cell_stub(isolate);
2034   CallStubInRecordCallTarget(masm, &weak_cell_stub);
2035   __ bind(&done);
2036 }
2037 
2038 
2039 void CallConstructStub::Generate(MacroAssembler* masm) {
2040   // eax : number of arguments
2041   // ebx : feedback vector
2042   // edx : slot in feedback vector (Smi, for RecordCallTarget)
2043   // edi : constructor function
2044 
2045   Label non_function;
2046   // Check that function is not a smi.
2047   __ JumpIfSmi(edi, &non_function);
2048   // Check that function is a JSFunction.
2049   __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2050   __ j(not_equal, &non_function);
2051 
2052   GenerateRecordCallTarget(masm);
2053 
2054   Label feedback_register_initialized;
2055   // Put the AllocationSite from the feedback vector into ebx, or undefined.
2056   __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
2057                            FixedArray::kHeaderSize));
2058   Handle<Map> allocation_site_map = isolate()->factory()->allocation_site_map();
2059   __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
2060   __ j(equal, &feedback_register_initialized);
2061   __ mov(ebx, isolate()->factory()->undefined_value());
2062   __ bind(&feedback_register_initialized);
2063 
2064   __ AssertUndefinedOrAllocationSite(ebx);
2065 
2066   // Pass new target to construct stub.
2067   __ mov(edx, edi);
2068 
2069   // Tail call to the function-specific construct stub (still in the caller
2070   // context at this point).
2071   __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2072   __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
2073   __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
2074   __ jmp(ecx);
2075 
2076   __ bind(&non_function);
2077   __ mov(edx, edi);
2078   __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
2079 }
2080 
2081 
2082 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
2083   // edi - function
2084   // edx - slot id
2085   // ebx - vector
2086   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
2087   __ cmp(edi, ecx);
2088   __ j(not_equal, miss);
2089 
2090   __ mov(eax, arg_count());
2091   // Reload ecx.
2092   __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
2093                            FixedArray::kHeaderSize));
2094 
2095   // Increment the call count for monomorphic function calls.
2096   __ add(FieldOperand(ebx, edx, times_half_pointer_size,
2097                       FixedArray::kHeaderSize + kPointerSize),
2098          Immediate(Smi::FromInt(CallICNexus::kCallCountIncrement)));
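  // The increment above works directly on the tagged value: smis are the
  // integer shifted left by one, so adding Smi::FromInt(k) to a smi slot
  // adds k to the untagged count without any untag/retag dance.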
2099 
2100   __ mov(ebx, ecx);
2101   __ mov(edx, edi);
2102   ArrayConstructorStub stub(masm->isolate(), arg_count());
2103   __ TailCallStub(&stub);
2104 
2105   // Unreachable.
2106 }
2107 
2108 
2109 void CallICStub::Generate(MacroAssembler* masm) {
2110   // edi - function
2111   // edx - slot id
2112   // ebx - vector
2113   Isolate* isolate = masm->isolate();
2114   Label extra_checks_or_miss, call, call_function;
2115   int argc = arg_count();
2116   ParameterCount actual(argc);
2117 
2118   // The checks. First, does edi match the recorded monomorphic target?
2119   __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
2120                            FixedArray::kHeaderSize));
2121 
2122   // We don't know that we have a weak cell. We might have a private symbol
2123   // or an AllocationSite, but the memory is safe to examine.
2124   // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
2125   // FixedArray.
2126   // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
2127   // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
2128   // computed, meaning that it can't appear to be a pointer. If the low bit is
2129   // 0, then hash is computed, but the 0 bit prevents the field from appearing
2130   // to be a pointer.
2131   STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
2132   STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
2133                     WeakCell::kValueOffset &&
2134                 WeakCell::kValueOffset == Symbol::kHashFieldSlot);
2135 
2136   __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
2137   __ j(not_equal, &extra_checks_or_miss);
2138 
2139   // The compare above could have been a SMI/SMI comparison. Guard against this
2140   // convincing us that we have a monomorphic JSFunction.
2141   __ JumpIfSmi(edi, &extra_checks_or_miss);
2142 
2143   // Increment the call count for monomorphic function calls.
2144   __ add(FieldOperand(ebx, edx, times_half_pointer_size,
2145                       FixedArray::kHeaderSize + kPointerSize),
2146          Immediate(Smi::FromInt(CallICNexus::kCallCountIncrement)));
2147 
2148   __ bind(&call_function);
2149   __ Set(eax, argc);
2150   __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode()),
2151           RelocInfo::CODE_TARGET);
2152 
2153   __ bind(&extra_checks_or_miss);
2154   Label uninitialized, miss, not_allocation_site;
2155 
2156   __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
2157   __ j(equal, &call);
2158 
2159   // Check if we have an allocation site.
2160   __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
2161                  Heap::kAllocationSiteMapRootIndex);
2162   __ j(not_equal, &not_allocation_site);
2163 
2164   // We have an allocation site.
2165   HandleArrayCase(masm, &miss);
2166 
2167   __ bind(&not_allocation_site);
2168 
2169   // The following cases attempt to handle MISS cases without going to the
2170   // runtime.
2171   if (FLAG_trace_ic) {
2172     __ jmp(&miss);
2173   }
2174 
2175   __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate)));
2176   __ j(equal, &uninitialized);
2177 
2178   // We are going megamorphic. If the feedback is a JSFunction, it is fine
2179   // to handle it here. More complex cases are dealt with in the runtime.
2180   __ AssertNotSmi(ecx);
2181   __ CmpObjectType(ecx, JS_FUNCTION_TYPE, ecx);
2182   __ j(not_equal, &miss);
2183   __ mov(
2184       FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
2185       Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
2186 
2187   __ bind(&call);
2188   __ Set(eax, argc);
2189   __ Jump(masm->isolate()->builtins()->Call(convert_mode()),
2190           RelocInfo::CODE_TARGET);
2191 
2192   __ bind(&uninitialized);
2193 
2194   // We are going monomorphic, provided we actually have a JSFunction.
2195   __ JumpIfSmi(edi, &miss);
2196 
2197   // Goto miss case if we do not have a function.
2198   __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2199   __ j(not_equal, &miss);
2200 
2201   // Make sure the function is not the Array() function, which requires special
2202   // behavior on MISS.
2203   __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
2204   __ cmp(edi, ecx);
2205   __ j(equal, &miss);
2206 
2207   // Make sure the function belongs to the same native context.
2208   __ mov(ecx, FieldOperand(edi, JSFunction::kContextOffset));
2209   __ mov(ecx, ContextOperand(ecx, Context::NATIVE_CONTEXT_INDEX));
2210   __ cmp(ecx, NativeContextOperand());
2211   __ j(not_equal, &miss);
2212 
2213   // Initialize the call counter.
2214   __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
2215                       FixedArray::kHeaderSize + kPointerSize),
2216          Immediate(Smi::FromInt(CallICNexus::kCallCountIncrement)));
2217 
2218   // Store the function. Use a stub since we need a frame for allocation.
2219   // ebx - vector
2220   // edx - slot
2221   // edi - function
2222   {
2223     FrameScope scope(masm, StackFrame::INTERNAL);
2224     CreateWeakCellStub create_stub(isolate);
2225     __ push(edi);
2226     __ CallStub(&create_stub);
2227     __ pop(edi);
2228   }
2229 
2230   __ jmp(&call_function);
2231 
2232   // We are here because tracing is on or we encountered a MISS case we can't
2233   // handle here.
2234   __ bind(&miss);
2235   GenerateMiss(masm);
2236 
2237   __ jmp(&call);
2238 
2239   // Unreachable
2240   __ int3();
2241 }
2242 
2243 
2244 void CallICStub::GenerateMiss(MacroAssembler* masm) {
2245   FrameScope scope(masm, StackFrame::INTERNAL);
2246 
2247   // Push the function and feedback info.
2248   __ push(edi);
2249   __ push(ebx);
2250   __ push(edx);
2251 
2252   // Call the entry.
2253   __ CallRuntime(Runtime::kCallIC_Miss);
2254 
2255   // Move result to edi and exit the internal frame.
2256   __ mov(edi, eax);
2257 }
2258 
2259 
2260 bool CEntryStub::NeedsImmovableCode() {
2261   return false;
2262 }
2263 
2264 
2265 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
2266   CEntryStub::GenerateAheadOfTime(isolate);
2267   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
2268   StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
2269   // It is important that the store buffer overflow stubs are generated first.
2270   ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
2271   CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
2272   CreateWeakCellStub::GenerateAheadOfTime(isolate);
2273   BinaryOpICStub::GenerateAheadOfTime(isolate);
2274   BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
2275   StoreFastElementStub::GenerateAheadOfTime(isolate);
2276   TypeofStub::GenerateAheadOfTime(isolate);
2277 }
2278 
2279 
2280 void CodeStub::GenerateFPStubs(Isolate* isolate) {
2281   // Generate if not already in cache.
2282   CEntryStub(isolate, 1, kSaveFPRegs).GetCode();
2283   isolate->set_fp_stubs_generated(true);
2284 }
2285 
2286 
2287 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
2288   CEntryStub stub(isolate, 1, kDontSaveFPRegs);
2289   stub.GetCode();
2290 }
2291 
2292 
2293 void CEntryStub::Generate(MacroAssembler* masm) {
2294   // eax: number of arguments including receiver
2295   // ebx: pointer to C function  (C callee-saved)
2296   // ebp: frame pointer  (restored after C call)
2297   // esp: stack pointer  (restored after C call)
2298   // esi: current context (C callee-saved)
2299   // edi: JS function of the caller (C callee-saved)
2300   //
2301   // If argv_in_register():
2302   // ecx: pointer to the first argument
2303 
2304   ProfileEntryHookStub::MaybeCallEntryHook(masm);
2305 
2306   // Enter the exit frame that transitions from JavaScript to C++.
2307   if (argv_in_register()) {
2308     DCHECK(!save_doubles());
2309     __ EnterApiExitFrame(3);
2310 
2311     // Move argc and argv into the correct registers.
2312     __ mov(esi, ecx);
2313     __ mov(edi, eax);
2314   } else {
2315     __ EnterExitFrame(save_doubles());
2316   }
2317 
2318   // ebx: pointer to C function  (C callee-saved)
2319   // ebp: frame pointer  (restored after C call)
2320   // esp: stack pointer  (restored after C call)
2321   // edi: number of arguments including receiver  (C callee-saved)
2322   // esi: pointer to the first argument (C callee-saved)
2323 
2324   // Result returned in eax, or eax+edx if result size is 2.
2325 
2326   // Check stack alignment.
2327   if (FLAG_debug_code) {
2328     __ CheckStackAlignment();
2329   }
2330 
2331   // Call C function.
2332   __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
2333   __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
2334   __ mov(Operand(esp, 2 * kPointerSize),
2335          Immediate(ExternalReference::isolate_address(isolate())));
2336   __ call(ebx);
2337   // Result is in eax or edx:eax - do not destroy these registers!
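  // The three stack slots above follow the ia32 cdecl convention; judging
  // by the argument layout here (not restated from a header), the callee
  // takes the argument count, the argument vector and the isolate address,
  // and returns the result object in eax (edx:eax for two-word results).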
2338 
2339   // Check result for exception sentinel.
2340   Label exception_returned;
2341   __ cmp(eax, isolate()->factory()->exception());
2342   __ j(equal, &exception_returned);
2343 
2344   // Check that there is no pending exception, otherwise we
2345   // should have returned the exception sentinel.
2346   if (FLAG_debug_code) {
2347     __ push(edx);
2348     __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
2349     Label okay;
2350     ExternalReference pending_exception_address(
2351         Isolate::kPendingExceptionAddress, isolate());
2352     __ cmp(edx, Operand::StaticVariable(pending_exception_address));
2353     // Cannot use Check() here, as it would generate a call into the runtime.
2354     __ j(equal, &okay, Label::kNear);
2355     __ int3();
2356     __ bind(&okay);
2357     __ pop(edx);
2358   }
2359 
2360   // Exit the JavaScript to C++ exit frame.
2361   __ LeaveExitFrame(save_doubles(), !argv_in_register());
2362   __ ret(0);
2363 
2364   // Handling of exception.
2365   __ bind(&exception_returned);
2366 
2367   ExternalReference pending_handler_context_address(
2368       Isolate::kPendingHandlerContextAddress, isolate());
2369   ExternalReference pending_handler_code_address(
2370       Isolate::kPendingHandlerCodeAddress, isolate());
2371   ExternalReference pending_handler_offset_address(
2372       Isolate::kPendingHandlerOffsetAddress, isolate());
2373   ExternalReference pending_handler_fp_address(
2374       Isolate::kPendingHandlerFPAddress, isolate());
2375   ExternalReference pending_handler_sp_address(
2376       Isolate::kPendingHandlerSPAddress, isolate());
2377 
2378   // Ask the runtime for help to determine the handler. This will set eax to
2379   // contain the current pending exception; don't clobber it.
2380   ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
2381                                  isolate());
2382   {
2383     FrameScope scope(masm, StackFrame::MANUAL);
2384     __ PrepareCallCFunction(3, eax);
2385     __ mov(Operand(esp, 0 * kPointerSize), Immediate(0));  // argc.
2386     __ mov(Operand(esp, 1 * kPointerSize), Immediate(0));  // argv.
2387     __ mov(Operand(esp, 2 * kPointerSize),
2388            Immediate(ExternalReference::isolate_address(isolate())));
2389     __ CallCFunction(find_handler, 3);
2390   }
2391 
2392   // Retrieve the handler context, SP and FP.
2393   __ mov(esi, Operand::StaticVariable(pending_handler_context_address));
2394   __ mov(esp, Operand::StaticVariable(pending_handler_sp_address));
2395   __ mov(ebp, Operand::StaticVariable(pending_handler_fp_address));
2396 
2397   // If the handler is a JS frame, restore the context to the frame. Note that
2398   // the context will be zero (esi == 0) for non-JS frames.
2399   Label skip;
2400   __ test(esi, esi);
2401   __ j(zero, &skip, Label::kNear);
2402   __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
2403   __ bind(&skip);
2404 
2405   // Compute the handler entry address and jump to it.
2406   __ mov(edi, Operand::StaticVariable(pending_handler_code_address));
2407   __ mov(edx, Operand::StaticVariable(pending_handler_offset_address));
2408   __ lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
2409   __ jmp(edi);
2410 }
2411 
2412 
2413 void JSEntryStub::Generate(MacroAssembler* masm) {
2414   Label invoke, handler_entry, exit;
2415   Label not_outermost_js, not_outermost_js_2;
2416 
2417   ProfileEntryHookStub::MaybeCallEntryHook(masm);
2418 
2419   // Set up frame.
2420   __ push(ebp);
2421   __ mov(ebp, esp);
2422 
2423   // Push marker in two places.
2424   int marker = type();
2425   __ push(Immediate(Smi::FromInt(marker)));  // context slot
2426   __ push(Immediate(Smi::FromInt(marker)));  // function slot
2427   // Save callee-saved registers (C calling conventions).
2428   __ push(edi);
2429   __ push(esi);
2430   __ push(ebx);
2431 
2432   // Save copies of the top frame descriptor on the stack.
2433   ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
2434   __ push(Operand::StaticVariable(c_entry_fp));
2435 
2436   // If this is the outermost JS call, set js_entry_sp value.
2437   ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
2438   __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
2439   __ j(not_equal, &not_outermost_js, Label::kNear);
2440   __ mov(Operand::StaticVariable(js_entry_sp), ebp);
2441   __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
2442   __ jmp(&invoke, Label::kNear);
2443   __ bind(&not_outermost_js);
2444   __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
2445 
2446   // Jump to a faked try block that does the invoke, with a faked catch
2447   // block that sets the pending exception.
2448   __ jmp(&invoke);
2449   __ bind(&handler_entry);
2450   handler_offset_ = handler_entry.pos();
2451   // Caught exception: Store result (exception) in the pending exception
2452   // field in the JSEnv and return a failure sentinel.
2453   ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
2454                                       isolate());
2455   __ mov(Operand::StaticVariable(pending_exception), eax);
2456   __ mov(eax, Immediate(isolate()->factory()->exception()));
2457   __ jmp(&exit);
2458 
2459   // Invoke: Link this frame into the handler chain.
2460   __ bind(&invoke);
2461   __ PushStackHandler();
2462 
2463   // Clear any pending exceptions.
2464   __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
2465   __ mov(Operand::StaticVariable(pending_exception), edx);
2466 
2467   // Fake a receiver (NULL).
2468   __ push(Immediate(0));  // receiver
2469 
2470   // Invoke the function by calling through JS entry trampoline builtin and
2471   // pop the faked function when we return. Notice that we cannot store a
2472   // reference to the trampoline code directly in this stub, because the
2473   // builtin stubs may not have been generated yet.
2474   if (type() == StackFrame::ENTRY_CONSTRUCT) {
2475     ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
2476                                       isolate());
2477     __ mov(edx, Immediate(construct_entry));
2478   } else {
2479     ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
2480     __ mov(edx, Immediate(entry));
2481   }
2482   __ mov(edx, Operand(edx, 0));  // deref address
2483   __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
2484   __ call(edx);
2485 
2486   // Unlink this frame from the handler chain.
2487   __ PopStackHandler();
2488 
2489   __ bind(&exit);
2490   // Check if the current stack frame is marked as the outermost JS frame.
2491   __ pop(ebx);
2492   __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
2493   __ j(not_equal, &not_outermost_js_2);
2494   __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
2495   __ bind(&not_outermost_js_2);
2496 
2497   // Restore the top frame descriptor from the stack.
2498   __ pop(Operand::StaticVariable(ExternalReference(
2499       Isolate::kCEntryFPAddress, isolate())));
2500 
2501   // Restore callee-saved registers (C calling conventions).
2502   __ pop(ebx);
2503   __ pop(esi);
2504   __ pop(edi);
2505   __ add(esp, Immediate(2 * kPointerSize));  // remove markers
2506 
2507   // Restore frame pointer and return.
2508   __ pop(ebp);
2509   __ ret(0);
2510 }
2511 
2512 
2513 void InstanceOfStub::Generate(MacroAssembler* masm) {
2514   Register const object = edx;                       // Object (lhs).
2515   Register const function = eax;                     // Function (rhs).
2516   Register const object_map = ecx;                   // Map of {object}.
2517   Register const function_map = ebx;                 // Map of {function}.
2518   Register const function_prototype = function_map;  // Prototype of {function}.
2519   Register const scratch = edi;
2520 
2521   DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
2522   DCHECK(function.is(InstanceOfDescriptor::RightRegister()));
2523 
2524   // Check if {object} is a smi.
2525   Label object_is_smi;
2526   __ JumpIfSmi(object, &object_is_smi, Label::kNear);
2527 
2528   // Lookup the {function} and the {object} map in the global instanceof cache.
2529   // Note: This is safe because we clear the global instanceof cache whenever
2530   // we change the prototype of any object.
2531   Label fast_case, slow_case;
2532   __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset));
2533   __ CompareRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
2534   __ j(not_equal, &fast_case, Label::kNear);
2535   __ CompareRoot(object_map, scratch, Heap::kInstanceofCacheMapRootIndex);
2536   __ j(not_equal, &fast_case, Label::kNear);
2537   __ LoadRoot(eax, Heap::kInstanceofCacheAnswerRootIndex);
2538   __ ret(0);
2539 
2540   // If {object} is a smi we can safely return false if {function} is a JS
2541   // function, otherwise we have to miss to the runtime and throw an exception.
2542   __ bind(&object_is_smi);
2543   __ JumpIfSmi(function, &slow_case);
2544   __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
2545   __ j(not_equal, &slow_case);
2546   __ LoadRoot(eax, Heap::kFalseValueRootIndex);
2547   __ ret(0);
2548 
2549   // Fast-case: The {function} must be a valid JSFunction.
2550   __ bind(&fast_case);
2551   __ JumpIfSmi(function, &slow_case);
2552   __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
2553   __ j(not_equal, &slow_case);
2554 
2555   // Ensure that {function} has an instance prototype.
2556   __ test_b(FieldOperand(function_map, Map::kBitFieldOffset),
2557             static_cast<uint8_t>(1 << Map::kHasNonInstancePrototype));
2558   __ j(not_zero, &slow_case);
2559 
2560   // Get the "prototype" (or initial map) of the {function}.
2561   __ mov(function_prototype,
2562          FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2563   __ AssertNotSmi(function_prototype);
2564 
2565   // Resolve the prototype if the {function} has an initial map.  Afterwards the
2566   // {function_prototype} will be either the JSReceiver prototype object or the
2567   // hole value, which means that no instances of the {function} were created so
2568   // far and hence we should return false.
2569   Label function_prototype_valid;
2570   Register const function_prototype_map = scratch;
2571   __ CmpObjectType(function_prototype, MAP_TYPE, function_prototype_map);
2572   __ j(not_equal, &function_prototype_valid, Label::kNear);
2573   __ mov(function_prototype,
2574          FieldOperand(function_prototype, Map::kPrototypeOffset));
2575   __ bind(&function_prototype_valid);
2576   __ AssertNotSmi(function_prototype);
2577 
2578   // Update the global instanceof cache with the current {object} map and
2579   // {function}.  The cached answer will be set when it is known below.
2580   __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
2581   __ StoreRoot(object_map, scratch, Heap::kInstanceofCacheMapRootIndex);
2582 
2583   // Loop through the prototype chain looking for the {function} prototype.
2584   // Assume true, and change to false if not found.
2585   Label done, loop, fast_runtime_fallback;
2586   __ mov(eax, isolate()->factory()->true_value());
2587   __ bind(&loop);
2588 
2589   // Check if the object needs to be access checked.
2590   __ test_b(FieldOperand(object_map, Map::kBitFieldOffset),
2591             1 << Map::kIsAccessCheckNeeded);
2592   __ j(not_zero, &fast_runtime_fallback, Label::kNear);
2593   // Check if the current object is a Proxy.
2594   __ CmpInstanceType(object_map, JS_PROXY_TYPE);
2595   __ j(equal, &fast_runtime_fallback, Label::kNear);
2596 
2597   __ mov(object, FieldOperand(object_map, Map::kPrototypeOffset));
2598   __ cmp(object, function_prototype);
2599   __ j(equal, &done, Label::kNear);
2600   __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset));
2601   __ cmp(object, isolate()->factory()->null_value());
2602   __ j(not_equal, &loop);
2603   __ mov(eax, isolate()->factory()->false_value());
2604 
2605   __ bind(&done);
2606   __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
2607   __ ret(0);
2608 
2609   // Found Proxy or access check needed: Call the runtime.
2610   __ bind(&fast_runtime_fallback);
2611   __ PopReturnAddressTo(scratch);
2612   __ Push(object);
2613   __ Push(function_prototype);
2614   __ PushReturnAddressFrom(scratch);
2615   // Invalidate the instanceof cache.
2616   __ Move(eax, Immediate(Smi::FromInt(0)));
2617   __ StoreRoot(eax, scratch, Heap::kInstanceofCacheFunctionRootIndex);
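  // Storing the smi zero in the function-cache root above is what
  // invalidates the cache: a smi can never equal a JSFunction pointer, so
  // the next lookup misses and recomputes the answer.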
2618   __ TailCallRuntime(Runtime::kHasInPrototypeChain);
2619 
2620   // Slow-case: Call the %InstanceOf runtime function.
2621   __ bind(&slow_case);
2622   __ PopReturnAddressTo(scratch);
2623   __ Push(object);
2624   __ Push(function);
2625   __ PushReturnAddressFrom(scratch);
2626   __ TailCallRuntime(Runtime::kInstanceOf);
2627 }
2628 
2629 
2630 // -------------------------------------------------------------------------
2631 // StringCharCodeAtGenerator
2632 
2633 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
2634   // If the receiver is a smi, trigger the non-string case.
2635   STATIC_ASSERT(kSmiTag == 0);
2636   if (check_mode_ == RECEIVER_IS_UNKNOWN) {
2637     __ JumpIfSmi(object_, receiver_not_string_);
2638 
2639     // Fetch the instance type of the receiver into result register.
2640     __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
2641     __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
2642   // If the receiver is not a string, trigger the non-string case.
2643     __ test(result_, Immediate(kIsNotStringMask));
2644     __ j(not_zero, receiver_not_string_);
2645   }
2646 
2647   // If the index is not a smi, trigger the non-smi case.
2648   STATIC_ASSERT(kSmiTag == 0);
2649   __ JumpIfNotSmi(index_, &index_not_smi_);
2650   __ bind(&got_smi_index_);
2651 
2652   // Check for index out of range.
2653   __ cmp(index_, FieldOperand(object_, String::kLengthOffset));
2654   __ j(above_equal, index_out_of_range_);
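  // Note that both the index and the length are smis here, so the unsigned
  // above_equal check also rejects negative indices, which look like very
  // large unsigned values.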
2655 
2656   __ SmiUntag(index_);
2657 
2658   Factory* factory = masm->isolate()->factory();
2659   StringCharLoadGenerator::Generate(
2660       masm, factory, object_, index_, result_, &call_runtime_);
2661 
2662   __ SmiTag(result_);
2663   __ bind(&exit_);
2664 }
2665 
2666 
2667 void StringCharCodeAtGenerator::GenerateSlow(
2668     MacroAssembler* masm, EmbedMode embed_mode,
2669     const RuntimeCallHelper& call_helper) {
2670   __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
2671 
2672   // Index is not a smi.
2673   __ bind(&index_not_smi_);
2674   // If index is a heap number, try converting it to an integer.
2675   __ CheckMap(index_,
2676               masm->isolate()->factory()->heap_number_map(),
2677               index_not_number_,
2678               DONT_DO_SMI_CHECK);
2679   call_helper.BeforeCall(masm);
2680   if (embed_mode == PART_OF_IC_HANDLER) {
2681     __ push(LoadWithVectorDescriptor::VectorRegister());
2682     __ push(LoadDescriptor::SlotRegister());
2683   }
2684   __ push(object_);
2685   __ push(index_);  // Consumed by runtime conversion function.
2686   if (index_flags_ == STRING_INDEX_IS_NUMBER) {
2687     __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero);
2688   } else {
2689     DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
2690     // NumberToSmi discards numbers that are not exact integers.
2691     __ CallRuntime(Runtime::kNumberToSmi);
2692   }
2693   if (!index_.is(eax)) {
2694     // Save the conversion result before the pop instructions below
2695     // have a chance to overwrite it.
2696     __ mov(index_, eax);
2697   }
2698   __ pop(object_);
2699   if (embed_mode == PART_OF_IC_HANDLER) {
2700     __ pop(LoadDescriptor::SlotRegister());
2701     __ pop(LoadWithVectorDescriptor::VectorRegister());
2702   }
2703   // Reload the instance type.
2704   __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
2705   __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
2706   call_helper.AfterCall(masm);
2707   // If index is still not a smi, it must be out of range.
2708   STATIC_ASSERT(kSmiTag == 0);
2709   __ JumpIfNotSmi(index_, index_out_of_range_);
2710   // Otherwise, return to the fast path.
2711   __ jmp(&got_smi_index_);
2712 
2713   // Call runtime. We get here when the receiver is a string and the
2714   // index is a number, but the code for getting the actual character
2715   // is too complex (e.g., when the string needs to be flattened).
2716   __ bind(&call_runtime_);
2717   call_helper.BeforeCall(masm);
2718   __ push(object_);
2719   __ SmiTag(index_);
2720   __ push(index_);
2721   __ CallRuntime(Runtime::kStringCharCodeAtRT);
2722   if (!result_.is(eax)) {
2723     __ mov(result_, eax);
2724   }
2725   call_helper.AfterCall(masm);
2726   __ jmp(&exit_);
2727 
2728   __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
2729 }
2730 
2731 
2732 // -------------------------------------------------------------------------
2733 // StringCharFromCodeGenerator
2734 
2735 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
2736   // Fast case of Heap::LookupSingleCharacterStringFromCode.
2737   STATIC_ASSERT(kSmiTag == 0);
2738   STATIC_ASSERT(kSmiShiftSize == 0);
2739   DCHECK(base::bits::IsPowerOfTwo32(String::kMaxOneByteCharCodeU + 1));
2740   __ test(code_, Immediate(kSmiTagMask |
2741                            ((~String::kMaxOneByteCharCodeU) << kSmiTagSize)));
2742   __ j(not_zero, &slow_case_);
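  // The test above checks two conditions at once: the smi tag bit must be
  // clear and no bits above the one-byte char-code range may be set. With
  // kMaxOneByteCharCode == 0xFF, the immediate works out to ~0x1FE, so
  // exactly the smi encodings of 0x00..0xFF pass.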
2743 
2744   Factory* factory = masm->isolate()->factory();
2745   __ Move(result_, Immediate(factory->single_character_string_cache()));
2746   STATIC_ASSERT(kSmiTag == 0);
2747   STATIC_ASSERT(kSmiTagSize == 1);
2748   STATIC_ASSERT(kSmiShiftSize == 0);
2749   // At this point the code register contains a smi-tagged one-byte char code.
2750   __ mov(result_, FieldOperand(result_,
2751                                code_, times_half_pointer_size,
2752                                FixedArray::kHeaderSize));
2753   __ cmp(result_, factory->undefined_value());
2754   __ j(equal, &slow_case_);
2755   __ bind(&exit_);
2756 }
2757 
2758 
2759 void StringCharFromCodeGenerator::GenerateSlow(
2760     MacroAssembler* masm,
2761     const RuntimeCallHelper& call_helper) {
2762   __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
2763 
2764   __ bind(&slow_case_);
2765   call_helper.BeforeCall(masm);
2766   __ push(code_);
2767   __ CallRuntime(Runtime::kStringCharFromCode);
2768   if (!result_.is(eax)) {
2769     __ mov(result_, eax);
2770   }
2771   call_helper.AfterCall(masm);
2772   __ jmp(&exit_);
2773 
2774   __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
2775 }
2776 
2777 
2778 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
2779                                           Register dest,
2780                                           Register src,
2781                                           Register count,
2782                                           Register scratch,
2783                                           String::Encoding encoding) {
2784   DCHECK(!scratch.is(dest));
2785   DCHECK(!scratch.is(src));
2786   DCHECK(!scratch.is(count));
2787 
2788   // Nothing to do for zero characters.
2789   Label done;
2790   __ test(count, count);
2791   __ j(zero, &done);
2792 
2793   // Make count the number of bytes to copy.
2794   if (encoding == String::TWO_BYTE_ENCODING) {
2795     __ shl(count, 1);
2796   }
2797 
2798   Label loop;
2799   __ bind(&loop);
2800   __ mov_b(scratch, Operand(src, 0));
2801   __ mov_b(Operand(dest, 0), scratch);
2802   __ inc(src);
2803   __ inc(dest);
2804   __ dec(count);
2805   __ j(not_zero, &loop);
2806 
2807   __ bind(&done);
2808 }
2809 
2810 
2811 void SubStringStub::Generate(MacroAssembler* masm) {
2812   Label runtime;
2813 
2814   // Stack frame on entry.
2815   //  esp[0]: return address
2816   //  esp[4]: to
2817   //  esp[8]: from
2818   //  esp[12]: string
2819 
2820   // Make sure first argument is a string.
2821   __ mov(eax, Operand(esp, 3 * kPointerSize));
2822   STATIC_ASSERT(kSmiTag == 0);
2823   __ JumpIfSmi(eax, &runtime);
2824   Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
2825   __ j(NegateCondition(is_string), &runtime);
2826 
2827   // eax: string
2828   // ebx: instance type
2829 
2830   // Calculate length of sub string using the smi values.
2831   __ mov(ecx, Operand(esp, 1 * kPointerSize));  // To index.
2832   __ JumpIfNotSmi(ecx, &runtime);
2833   __ mov(edx, Operand(esp, 2 * kPointerSize));  // From index.
2834   __ JumpIfNotSmi(edx, &runtime);
2835   __ sub(ecx, edx);
2836   __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
2837   Label not_original_string;
2838   // Shorter than original string's length: an actual substring.
2839   __ j(below, &not_original_string, Label::kNear);
2840   // Longer than original string's length or negative: unsafe arguments.
2841   __ j(above, &runtime);
2842   // Return original string.
2843   Counters* counters = isolate()->counters();
2844   __ IncrementCounter(counters->sub_string_native(), 1);
2845   __ ret(3 * kPointerSize);
2846   __ bind(&not_original_string);
2847 
2848   Label single_char;
2849   __ cmp(ecx, Immediate(Smi::FromInt(1)));
2850   __ j(equal, &single_char);
2851 
2852   // eax: string
2853   // ebx: instance type
2854   // ecx: sub string length (smi)
2855   // edx: from index (smi)
2856   // Deal with different string types: update the index if necessary
2857   // and put the underlying string into edi.
2858   Label underlying_unpacked, sliced_string, seq_or_external_string;
2859   // If the string is not indirect, it can only be sequential or external.
2860   STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
2861   STATIC_ASSERT(kIsIndirectStringMask != 0);
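  // The dispatch below keys off the representation bits in the instance
  // type: kIsIndirectStringMask separates cons/sliced strings from
  // sequential/external ones, kSlicedNotConsMask tells the two indirect
  // kinds apart, and kStringEncodingMask (used further down) selects
  // between one-byte and two-byte characters.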
2862   __ test(ebx, Immediate(kIsIndirectStringMask));
2863   __ j(zero, &seq_or_external_string, Label::kNear);
2864 
2865   Factory* factory = isolate()->factory();
2866   __ test(ebx, Immediate(kSlicedNotConsMask));
2867   __ j(not_zero, &sliced_string, Label::kNear);
2868   // Cons string.  Check whether it is flat, then fetch first part.
2869   // Flat cons strings have an empty second part.
2870   __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
2871          factory->empty_string());
2872   __ j(not_equal, &runtime);
2873   __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
2874   // Update instance type.
2875   __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
2876   __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
2877   __ jmp(&underlying_unpacked, Label::kNear);
2878 
2879   __ bind(&sliced_string);
2880   // Sliced string.  Fetch parent and adjust start index by offset.
2881   __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
2882   __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
2883   // Update instance type.
2884   __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
2885   __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
2886   __ jmp(&underlying_unpacked, Label::kNear);
2887 
2888   __ bind(&seq_or_external_string);
2889   // Sequential or external string.  Just move string to the expected register.
2890   __ mov(edi, eax);
2891 
2892   __ bind(&underlying_unpacked);
2893 
2894   if (FLAG_string_slices) {
2895     Label copy_routine;
2896     // edi: underlying subject string
2897     // ebx: instance type of underlying subject string
2898     // edx: adjusted start index (smi)
2899     // ecx: length (smi)
2900     __ cmp(ecx, Immediate(Smi::FromInt(SlicedString::kMinLength)));
2901     // Short slice.  Copy instead of slicing.
2902     __ j(less, &copy_routine);
2903     // Allocate new sliced string.  At this point we do not reload the instance
2904     // type including the string encoding because we simply rely on the info
2905     // provided by the original string.  It does not matter if the original
2906     // string's encoding is wrong because we always have to recheck encoding of
2907     // the newly created string's parent anyways due to externalized strings.
2908     Label two_byte_slice, set_slice_header;
2909     STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
2910     STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
2911     __ test(ebx, Immediate(kStringEncodingMask));
2912     __ j(zero, &two_byte_slice, Label::kNear);
2913     __ AllocateOneByteSlicedString(eax, ebx, no_reg, &runtime);
2914     __ jmp(&set_slice_header, Label::kNear);
2915     __ bind(&two_byte_slice);
2916     __ AllocateTwoByteSlicedString(eax, ebx, no_reg, &runtime);
2917     __ bind(&set_slice_header);
2918     __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
2919     __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
2920            Immediate(String::kEmptyHashField));
2921     __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
2922     __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
2923     __ IncrementCounter(counters->sub_string_native(), 1);
2924     __ ret(3 * kPointerSize);
2925 
2926     __ bind(&copy_routine);
2927   }
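  // Substrings shorter than SlicedString::kMinLength are copied rather than
  // sliced; presumably below that threshold a sliced-string header plus a
  // kept-alive parent would cost more than copying the characters.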
2928 
2929   // edi: underlying subject string
2930   // ebx: instance type of underlying subject string
2931   // edx: adjusted start index (smi)
2932   // ecx: length (smi)
2933   // The subject string can only be external or sequential string of either
2934   // encoding at this point.
2935   Label two_byte_sequential, runtime_drop_two, sequential_string;
2936   STATIC_ASSERT(kExternalStringTag != 0);
2937   STATIC_ASSERT(kSeqStringTag == 0);
2938   __ test_b(ebx, kExternalStringTag);
2939   __ j(zero, &sequential_string);
2940 
2941   // Handle external string.
2942   // Rule out short external strings.
2943   STATIC_ASSERT(kShortExternalStringTag != 0);
2944   __ test_b(ebx, kShortExternalStringMask);
2945   __ j(not_zero, &runtime);
2946   __ mov(edi, FieldOperand(edi, ExternalString::kResourceDataOffset));
2947   // Move the pointer so that offset-wise, it looks like a sequential string.
2948   STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
2949   __ sub(edi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
2950 
2951   __ bind(&sequential_string);
2952   // Stash away (adjusted) index and (underlying) string.
2953   __ push(edx);
2954   __ push(edi);
2955   __ SmiUntag(ecx);
2956   STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
2957   __ test_b(ebx, kStringEncodingMask);
2958   __ j(zero, &two_byte_sequential);
2959 
2960   // Sequential one byte string.  Allocate the result.
2961   __ AllocateOneByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
2962 
2963   // eax: result string
2964   // ecx: result string length
2965   // Locate first character of result.
2966   __ mov(edi, eax);
2967   __ add(edi, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
2968   // Load string argument and locate character of sub string start.
2969   __ pop(edx);
2970   __ pop(ebx);
2971   __ SmiUntag(ebx);
2972   __ lea(edx, FieldOperand(edx, ebx, times_1, SeqOneByteString::kHeaderSize));
2973 
2974   // eax: result string
2975   // ecx: result length
2976   // edi: first character of result
2977   // edx: character of sub string start
2978   StringHelper::GenerateCopyCharacters(
2979       masm, edi, edx, ecx, ebx, String::ONE_BYTE_ENCODING);
2980   __ IncrementCounter(counters->sub_string_native(), 1);
2981   __ ret(3 * kPointerSize);
2982 
2983   __ bind(&two_byte_sequential);
2984   // Sequential two-byte string.  Allocate the result.
2985   __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
2986 
2987   // eax: result string
2988   // ecx: result string length
2989   // Locate first character of result.
2990   __ mov(edi, eax);
2991   __ add(edi,
2992          Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
2993   // Load string argument and locate character of sub string start.
2994   __ pop(edx);
2995   __ pop(ebx);
2996   // Since {from} is a smi it is already multiplied by two, which matches
2997   // the byte size of a two-byte character.
2998   STATIC_ASSERT(kSmiTag == 0);
2999   STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
3000   __ lea(edx, FieldOperand(edx, ebx, times_1, SeqTwoByteString::kHeaderSize));
3001 
3002   // eax: result string
3003   // ecx: result length
3004   // edi: first character of result
3005   // edx: character of sub string start
3006   StringHelper::GenerateCopyCharacters(
3007       masm, edi, edx, ecx, ebx, String::TWO_BYTE_ENCODING);
3008   __ IncrementCounter(counters->sub_string_native(), 1);
3009   __ ret(3 * kPointerSize);
3010 
3011   // Drop pushed values on the stack before tail call.
3012   __ bind(&runtime_drop_two);
3013   __ Drop(2);
3014 
3015   // Just jump to runtime to create the sub string.
3016   __ bind(&runtime);
3017   __ TailCallRuntime(Runtime::kSubString);
3018 
3019   __ bind(&single_char);
3020   // eax: string
3021   // ebx: instance type
3022   // ecx: sub string length (smi)
3023   // edx: from index (smi)
3024   StringCharAtGenerator generator(eax, edx, ecx, eax, &runtime, &runtime,
3025                                   &runtime, STRING_INDEX_IS_NUMBER,
3026                                   RECEIVER_IS_STRING);
3027   generator.GenerateFast(masm);
3028   __ ret(3 * kPointerSize);
3029   generator.SkipSlow(masm, &runtime);
3030 }
3031 
3032 
3033 void ToNumberStub::Generate(MacroAssembler* masm) {
3034   // The ToNumber stub takes one argument in eax.
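  // The checks below form a ladder: smi, heap number, string with a cached
  // array index (other strings tail-call Runtime::kStringToNumber), then
  // oddball; anything else falls through to Runtime::kToNumber at the end.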
3035   Label not_smi;
3036   __ JumpIfNotSmi(eax, &not_smi, Label::kNear);
3037   __ Ret();
3038   __ bind(&not_smi);
3039 
3040   Label not_heap_number;
3041   __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
3042   __ j(not_equal, &not_heap_number, Label::kNear);
3043   __ Ret();
3044   __ bind(&not_heap_number);
3045 
3046   Label not_string, slow_string;
3047   __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edi);
3048   // eax: object
3049   // edi: object map
3050   __ j(above_equal, &not_string, Label::kNear);
3051   // Check if string has a cached array index.
3052   __ test(FieldOperand(eax, String::kHashFieldOffset),
3053           Immediate(String::kContainsCachedArrayIndexMask));
3054   __ j(not_zero, &slow_string, Label::kNear);
3055   __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
3056   __ IndexFromHash(eax, eax);
3057   __ Ret();
3058   __ bind(&slow_string);
3059   __ pop(ecx);   // Pop return address.
3060   __ push(eax);  // Push argument.
3061   __ push(ecx);  // Push return address.
3062   __ TailCallRuntime(Runtime::kStringToNumber);
3063   __ bind(&not_string);
3064 
3065   Label not_oddball;
3066   __ CmpInstanceType(edi, ODDBALL_TYPE);
3067   __ j(not_equal, &not_oddball, Label::kNear);
3068   __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
3069   __ Ret();
3070   __ bind(&not_oddball);
3071 
3072   __ pop(ecx);   // Pop return address.
3073   __ push(eax);  // Push argument.
3074   __ push(ecx);  // Push return address.
3075   __ TailCallRuntime(Runtime::kToNumber);
3076 }
3077 
3078 
3079 void ToLengthStub::Generate(MacroAssembler* masm) {
3080   // The ToLength stub takes one argument in eax.
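  // Fast path: for smi inputs, clamp negatives to zero, per the spec's
  // ToLength(x) = min(max(ToInteger(x), 0), 2^53 - 1); all non-smi inputs
  // are handed to Runtime::kToLength below.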
3081   Label not_smi, positive_smi;
3082   __ JumpIfNotSmi(eax, &not_smi, Label::kNear);
3083   STATIC_ASSERT(kSmiTag == 0);
3084   __ test(eax, eax);
3085   __ j(greater_equal, &positive_smi, Label::kNear);
3086   __ xor_(eax, eax);
3087   __ bind(&positive_smi);
3088   __ Ret();
3089   __ bind(&not_smi);
3090 
3091   __ pop(ecx);   // Pop return address.
3092   __ push(eax);  // Push argument.
3093   __ push(ecx);  // Push return address.
3094   __ TailCallRuntime(Runtime::kToLength);
3095 }
3096 
3097 
3098 void ToStringStub::Generate(MacroAssembler* masm) {
3099   // The ToString stub takes one argument in eax.
3100   Label is_number;
3101   __ JumpIfSmi(eax, &is_number, Label::kNear);
3102 
3103   Label not_string;
3104   __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edi);
3105   // eax: receiver
3106   // edi: receiver map
3107   __ j(above_equal, &not_string, Label::kNear);
3108   __ Ret();
3109   __ bind(&not_string);
3110 
3111   Label not_heap_number;
3112   __ CompareMap(eax, masm->isolate()->factory()->heap_number_map());
3113   __ j(not_equal, &not_heap_number, Label::kNear);
3114   __ bind(&is_number);
3115   NumberToStringStub stub(isolate());
3116   __ TailCallStub(&stub);
3117   __ bind(&not_heap_number);
3118 
3119   Label not_oddball;
3120   __ CmpInstanceType(edi, ODDBALL_TYPE);
3121   __ j(not_equal, &not_oddball, Label::kNear);
3122   __ mov(eax, FieldOperand(eax, Oddball::kToStringOffset));
3123   __ Ret();
3124   __ bind(&not_oddball);
3125 
3126   __ pop(ecx);   // Pop return address.
3127   __ push(eax);  // Push argument.
3128   __ push(ecx);  // Push return address.
3129   __ TailCallRuntime(Runtime::kToString);
3130 }
3131 
3132 
3133 void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
3134                                                    Register left,
3135                                                    Register right,
3136                                                    Register scratch1,
3137                                                    Register scratch2) {
3138   Register length = scratch1;
3139 
3140   // Compare lengths.
3141   Label strings_not_equal, check_zero_length;
3142   __ mov(length, FieldOperand(left, String::kLengthOffset));
3143   __ cmp(length, FieldOperand(right, String::kLengthOffset));
3144   __ j(equal, &check_zero_length, Label::kNear);
3145   __ bind(&strings_not_equal);
3146   __ Move(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
3147   __ ret(0);
3148 
3149   // Check if the length is zero.
3150   Label compare_chars;
3151   __ bind(&check_zero_length);
3152   STATIC_ASSERT(kSmiTag == 0);
3153   __ test(length, length);
3154   __ j(not_zero, &compare_chars, Label::kNear);
3155   __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3156   __ ret(0);
3157 
3158   // Compare characters.
3159   __ bind(&compare_chars);
3160   GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
3161                                   &strings_not_equal, Label::kNear);
3162 
3163   // Characters are equal.
3164   __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3165   __ ret(0);
3166 }
3167 
3168 
3169 void StringHelper::GenerateCompareFlatOneByteStrings(
3170     MacroAssembler* masm, Register left, Register right, Register scratch1,
3171     Register scratch2, Register scratch3) {
3172   Counters* counters = masm->isolate()->counters();
3173   __ IncrementCounter(counters->string_compare_native(), 1);
3174 
3175   // Find minimum length.
3176   Label left_shorter;
3177   __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
3178   __ mov(scratch3, scratch1);
3179   __ sub(scratch3, FieldOperand(right, String::kLengthOffset));
3180 
3181   Register length_delta = scratch3;
3182 
3183   __ j(less_equal, &left_shorter, Label::kNear);
3184   // Right string is shorter. Change scratch1 to be length of right string.
3185   __ sub(scratch1, length_delta);
3186   __ bind(&left_shorter);
3187 
3188   Register min_length = scratch1;
3189 
3190   // If either length is zero, just compare lengths.
3191   Label compare_lengths;
3192   __ test(min_length, min_length);
3193   __ j(zero, &compare_lengths, Label::kNear);
3194 
3195   // Compare characters.
3196   Label result_not_equal;
3197   GenerateOneByteCharsCompareLoop(masm, left, right, min_length, scratch2,
3198                                   &result_not_equal, Label::kNear);
3199 
3200   // Compare lengths - strings up to min-length are equal.
3201   __ bind(&compare_lengths);
3202   __ test(length_delta, length_delta);
3203   Label length_not_equal;
3204   __ j(not_zero, &length_not_equal, Label::kNear);
3205 
3206   // Result is EQUAL.
3207   STATIC_ASSERT(EQUAL == 0);
3208   STATIC_ASSERT(kSmiTag == 0);
3209   __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3210   __ ret(0);
3211 
3212   Label result_greater;
3213   Label result_less;
3214   __ bind(&length_not_equal);
3215   __ j(greater, &result_greater, Label::kNear);
3216   __ jmp(&result_less, Label::kNear);
3217   __ bind(&result_not_equal);
3218   __ j(above, &result_greater, Label::kNear);
3219   __ bind(&result_less);
3220 
3221   // Result is LESS.
3222   __ Move(eax, Immediate(Smi::FromInt(LESS)));
3223   __ ret(0);
3224 
3225   // Result is GREATER.
3226   __ bind(&result_greater);
3227   __ Move(eax, Immediate(Smi::FromInt(GREATER)));
3228   __ ret(0);
3229 }
3230 
3231 
3232 void StringHelper::GenerateOneByteCharsCompareLoop(
3233     MacroAssembler* masm, Register left, Register right, Register length,
3234     Register scratch, Label* chars_not_equal,
3235     Label::Distance chars_not_equal_near) {
3236   // Change index to run from -length to -1 by adding length to string
3237   // start. This means that loop ends when index reaches zero, which
3238   // doesn't need an additional compare.
3239   __ SmiUntag(length);
3240   __ lea(left,
3241          FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
3242   __ lea(right,
3243          FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
3244   __ neg(length);
3245   Register index = length;  // index = -length;
3246 
3247   // Compare loop.
3248   Label loop;
3249   __ bind(&loop);
3250   __ mov_b(scratch, Operand(left, index, times_1, 0));
3251   __ cmpb(scratch, Operand(right, index, times_1, 0));
3252   __ j(not_equal, chars_not_equal, chars_not_equal_near);
3253   __ inc(index);
3254   __ j(not_zero, &loop);
3255 }
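// A roughly equivalent C sketch of the loop above (illustrative only):
//
//   const uint8_t* l = left_chars + length;  // one past the last character
//   const uint8_t* r = right_chars + length;
//   for (intptr_t i = -length; i != 0; i++) {
//     if (l[i] != r[i]) goto chars_not_equal;
//   }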
3256 
3257 
3258 void StringCompareStub::Generate(MacroAssembler* masm) {
3259   // ----------- S t a t e -------------
3260   //  -- edx    : left string
3261   //  -- eax    : right string
3262   //  -- esp[0] : return address
3263   // -----------------------------------
3264   __ AssertString(edx);
3265   __ AssertString(eax);
3266 
3267   Label not_same;
3268   __ cmp(edx, eax);
3269   __ j(not_equal, &not_same, Label::kNear);
3270   __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3271   __ IncrementCounter(isolate()->counters()->string_compare_native(), 1);
3272   __ Ret();
3273 
3274   __ bind(&not_same);
3275 
3276   // Check that both objects are sequential one-byte strings.
3277   Label runtime;
3278   __ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx, &runtime);
3279 
3280   // Compare flat one-byte strings.
3281   __ IncrementCounter(isolate()->counters()->string_compare_native(), 1);
3282   StringHelper::GenerateCompareFlatOneByteStrings(masm, edx, eax, ecx, ebx,
3283                                                   edi);
3284 
3285   // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
3286   // tagged as a small integer.
3287   __ bind(&runtime);
3288   __ PopReturnAddressTo(ecx);
3289   __ Push(edx);
3290   __ Push(eax);
3291   __ PushReturnAddressFrom(ecx);
3292   __ TailCallRuntime(Runtime::kStringCompare);
3293 }
3294 
3295 
3296 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
3297   // ----------- S t a t e -------------
3298   //  -- edx    : left
3299   //  -- eax    : right
3300   //  -- esp[0] : return address
3301   // -----------------------------------
3302 
3303   // Load ecx with the allocation site.  We stick an undefined dummy value here
3304   // and replace it with the real allocation site later when we instantiate this
3305   // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
3306   __ mov(ecx, handle(isolate()->heap()->undefined_value()));
3307 
3308   // Make sure that we actually patched the allocation site.
3309   if (FLAG_debug_code) {
3310     __ test(ecx, Immediate(kSmiTagMask));
3311     __ Assert(not_equal, kExpectedAllocationSite);
3312     __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
3313            isolate()->factory()->allocation_site_map());
3314     __ Assert(equal, kExpectedAllocationSite);
3315   }
3316 
3317   // Tail call into the stub that handles binary operations with allocation
3318   // sites.
3319   BinaryOpWithAllocationSiteStub stub(isolate(), state());
3320   __ TailCallStub(&stub);
3321 }
3322 
3323 
3324 void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
3325   DCHECK_EQ(CompareICState::BOOLEAN, state());
3326   Label miss;
3327   Label::Distance const miss_distance =
3328       masm->emit_debug_code() ? Label::kFar : Label::kNear;
3329 
3330   __ JumpIfSmi(edx, &miss, miss_distance);
3331   __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
3332   __ JumpIfSmi(eax, &miss, miss_distance);
3333   __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3334   __ JumpIfNotRoot(ecx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
3335   __ JumpIfNotRoot(ebx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
3336   if (op() != Token::EQ_STRICT && is_strong(strength())) {
3337     __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
3338   } else {
3339     if (!Token::IsEqualityOp(op())) {
3340       __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
3341       __ AssertSmi(eax);
3342       __ mov(edx, FieldOperand(edx, Oddball::kToNumberOffset));
3343       __ AssertSmi(edx);
3344       __ push(eax);
3345       __ mov(eax, edx);
3346       __ pop(edx);
3347     }
3348     __ sub(eax, edx);
3349     __ Ret();
3350   }
3351 
3352   __ bind(&miss);
3353   GenerateMiss(masm);
3354 }
3355 
3356 
3357 void CompareICStub::GenerateSmis(MacroAssembler* masm) {
3358   DCHECK(state() == CompareICState::SMI);
3359   Label miss;
3360   __ mov(ecx, edx);
3361   __ or_(ecx, eax);
3362   __ JumpIfNotSmi(ecx, &miss, Label::kNear);
3363 
3364   if (GetCondition() == equal) {
3365     // For equality we do not care about the sign of the result.
3366     __ sub(eax, edx);
3367   } else {
3368     Label done;
3369     __ sub(edx, eax);
3370     __ j(no_overflow, &done, Label::kNear);
3371     // Correct sign of result in case of overflow.
3372     __ not_(edx);
3373     __ bind(&done);
3374     __ mov(eax, edx);
3375   }
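  // Note on the fix-up above: if the subtraction overflowed, the sign of
  // {edx} is the opposite of the true ordering, and not_ flips the sign bit
  // back; the exact magnitude is irrelevant because callers only inspect
  // the sign of a non-zero result.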
3376   __ ret(0);
3377 
3378   __ bind(&miss);
3379   GenerateMiss(masm);
3380 }
3381 
3382 
3383 void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
3384   DCHECK(state() == CompareICState::NUMBER);
3385 
3386   Label generic_stub;
3387   Label unordered, maybe_undefined1, maybe_undefined2;
3388   Label miss;
3389 
3390   if (left() == CompareICState::SMI) {
3391     __ JumpIfNotSmi(edx, &miss);
3392   }
3393   if (right() == CompareICState::SMI) {
3394     __ JumpIfNotSmi(eax, &miss);
3395   }
3396 
3397   // Load left and right operand.
3398   Label done, left, left_smi, right_smi;
3399   __ JumpIfSmi(eax, &right_smi, Label::kNear);
3400   __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
3401          isolate()->factory()->heap_number_map());
3402   __ j(not_equal, &maybe_undefined1, Label::kNear);
3403   __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
3404   __ jmp(&left, Label::kNear);
3405   __ bind(&right_smi);
3406   __ mov(ecx, eax);  // Can't clobber eax because we can still jump away.
3407   __ SmiUntag(ecx);
3408   __ Cvtsi2sd(xmm1, ecx);
3409 
3410   __ bind(&left);
3411   __ JumpIfSmi(edx, &left_smi, Label::kNear);
3412   __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
3413          isolate()->factory()->heap_number_map());
3414   __ j(not_equal, &maybe_undefined2, Label::kNear);
3415   __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
3416   __ jmp(&done);
3417   __ bind(&left_smi);
3418   __ mov(ecx, edx);  // Can't clobber edx because we can still jump away.
3419   __ SmiUntag(ecx);
3420   __ Cvtsi2sd(xmm0, ecx);
3421 
3422   __ bind(&done);
3423   // Compare operands.
3424   __ ucomisd(xmm0, xmm1);
3425 
3426   // Don't base result on EFLAGS when a NaN is involved.
3427   __ j(parity_even, &unordered, Label::kNear);
3428 
3429   // Return a result of -1, 0, or 1, based on EFLAGS.
3430   // Use mov rather than xor to clear eax, because xor would clobber EFLAGS.
3431   __ mov(eax, 0);  // equal
3432   __ mov(ecx, Immediate(Smi::FromInt(1)));
3433   __ cmov(above, eax, ecx);
3434   __ mov(ecx, Immediate(Smi::FromInt(-1)));
3435   __ cmov(below, eax, ecx);
3436   __ ret(0);
3437 
3438   __ bind(&unordered);
3439   __ bind(&generic_stub);
3440   CompareICStub stub(isolate(), op(), strength(), CompareICState::GENERIC,
3441                      CompareICState::GENERIC, CompareICState::GENERIC);
3442   __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
3443 
3444   __ bind(&maybe_undefined1);
3445   if (Token::IsOrderedRelationalCompareOp(op())) {
3446     __ cmp(eax, Immediate(isolate()->factory()->undefined_value()));
3447     __ j(not_equal, &miss);
3448     __ JumpIfSmi(edx, &unordered);
3449     __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
3450     __ j(not_equal, &maybe_undefined2, Label::kNear);
3451     __ jmp(&unordered);
3452   }
3453 
3454   __ bind(&maybe_undefined2);
3455   if (Token::IsOrderedRelationalCompareOp(op())) {
3456     __ cmp(edx, Immediate(isolate()->factory()->undefined_value()));
3457     __ j(equal, &unordered);
3458   }
3459 
3460   __ bind(&miss);
3461   GenerateMiss(masm);
3462 }
3463 
3464 
3465 void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
3466   DCHECK(state() == CompareICState::INTERNALIZED_STRING);
3467   DCHECK(GetCondition() == equal);
3468 
3469   // Registers containing left and right operands respectively.
3470   Register left = edx;
3471   Register right = eax;
3472   Register tmp1 = ecx;
3473   Register tmp2 = ebx;
3474 
3475   // Check that both operands are heap objects.
3476   Label miss;
3477   __ mov(tmp1, left);
3478   STATIC_ASSERT(kSmiTag == 0);
3479   __ and_(tmp1, right);
3480   __ JumpIfSmi(tmp1, &miss, Label::kNear);
3481 
3482   // Check that both operands are internalized strings.
3483   __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
3484   __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
3485   __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
3486   __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
3487   STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
3488   __ or_(tmp1, tmp2);
3489   __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
3490   __ j(not_zero, &miss, Label::kNear);
3491 
3492   // Internalized strings are compared by identity.
3493   Label done;
3494   __ cmp(left, right);
3495   // Make sure eax is non-zero. At this point input operands are
3496   // guaranteed to be non-zero.
3497   DCHECK(right.is(eax));
3498   __ j(not_equal, &done, Label::kNear);
3499   STATIC_ASSERT(EQUAL == 0);
3500   STATIC_ASSERT(kSmiTag == 0);
3501   __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3502   __ bind(&done);
3503   __ ret(0);
3504 
3505   __ bind(&miss);
3506   GenerateMiss(masm);
3507 }
3508 
3509 
3510 void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
3511   DCHECK(state() == CompareICState::UNIQUE_NAME);
3512   DCHECK(GetCondition() == equal);
3513 
3514   // Registers containing left and right operands respectively.
3515   Register left = edx;
3516   Register right = eax;
3517   Register tmp1 = ecx;
3518   Register tmp2 = ebx;
3519 
3520   // Check that both operands are heap objects.
3521   Label miss;
3522   __ mov(tmp1, left);
3523   STATIC_ASSERT(kSmiTag == 0);
3524   __ and_(tmp1, right);
3525   __ JumpIfSmi(tmp1, &miss, Label::kNear);
3526 
3527   // Check that both operands are unique names. This leaves the instance
3528   // types loaded in tmp1 and tmp2.
3529   __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
3530   __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
3531   __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
3532   __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
3533 
3534   __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
3535   __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);
3536 
3537   // Unique names are compared by identity.
3538   Label done;
3539   __ cmp(left, right);
3540   // Make sure eax is non-zero. At this point input operands are
3541   // guaranteed to be non-zero.
3542   DCHECK(right.is(eax));
3543   __ j(not_equal, &done, Label::kNear);
3544   STATIC_ASSERT(EQUAL == 0);
3545   STATIC_ASSERT(kSmiTag == 0);
3546   __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3547   __ bind(&done);
3548   __ ret(0);
3549 
3550   __ bind(&miss);
3551   GenerateMiss(masm);
3552 }
3553 
3554 
3555 void CompareICStub::GenerateStrings(MacroAssembler* masm) {
3556   DCHECK(state() == CompareICState::STRING);
3557   Label miss;
3558 
3559   bool equality = Token::IsEqualityOp(op());
3560 
3561   // Registers containing left and right operands respectively.
3562   Register left = edx;
3563   Register right = eax;
3564   Register tmp1 = ecx;
3565   Register tmp2 = ebx;
3566   Register tmp3 = edi;
3567 
3568   // Check that both operands are heap objects.
3569   __ mov(tmp1, left);
3570   STATIC_ASSERT(kSmiTag == 0);
3571   __ and_(tmp1, right);
3572   __ JumpIfSmi(tmp1, &miss);
3573 
3574   // Check that both operands are strings. This leaves the instance
3575   // types loaded in tmp1 and tmp2.
3576   __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
3577   __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
3578   __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
3579   __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
3580   __ mov(tmp3, tmp1);
3581   STATIC_ASSERT(kNotStringTag != 0);
3582   __ or_(tmp3, tmp2);
3583   __ test(tmp3, Immediate(kIsNotStringMask));
3584   __ j(not_zero, &miss);
3585 
3586   // Fast check for identical strings.
3587   Label not_same;
3588   __ cmp(left, right);
3589   __ j(not_equal, &not_same, Label::kNear);
3590   STATIC_ASSERT(EQUAL == 0);
3591   STATIC_ASSERT(kSmiTag == 0);
3592   __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3593   __ ret(0);
3594 
3595   // Handle not identical strings.
3596   __ bind(&not_same);
3597 
3598   // Check that both strings are internalized. If they are, we're done
3599   // because we already know they are not identical.  But in the case of
3600   // non-equality compare, we still need to determine the order. We
3601   // also know they are both strings.
3602   if (equality) {
3603     Label do_compare;
3604     STATIC_ASSERT(kInternalizedTag == 0);
3605     __ or_(tmp1, tmp2);
3606     __ test(tmp1, Immediate(kIsNotInternalizedMask));
3607     __ j(not_zero, &do_compare, Label::kNear);
3608     // Make sure eax is non-zero. At this point input operands are
3609     // guaranteed to be non-zero.
3610     DCHECK(right.is(eax));
3611     __ ret(0);
3612     __ bind(&do_compare);
3613   }
3614 
3615   // Check that both strings are sequential one-byte.
3616   Label runtime;
3617   __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);
3618 
3619   // Compare flat one byte strings. Returns when done.
3620   if (equality) {
3621     StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
3622                                                   tmp2);
3623   } else {
3624     StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
3625                                                     tmp2, tmp3);
3626   }
3627 
3628   // Handle more complex cases in runtime.
3629   __ bind(&runtime);
3630   __ pop(tmp1);  // Return address.
3631   __ push(left);
3632   __ push(right);
3633   __ push(tmp1);
3634   if (equality) {
3635     __ TailCallRuntime(Runtime::kStringEquals);
3636   } else {
3637     __ TailCallRuntime(Runtime::kStringCompare);
3638   }
3639 
3640   __ bind(&miss);
3641   GenerateMiss(masm);
3642 }
3643 
3644 
3645 void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
3646   DCHECK_EQ(CompareICState::RECEIVER, state());
3647   Label miss;
3648   __ mov(ecx, edx);
3649   __ and_(ecx, eax);
3650   __ JumpIfSmi(ecx, &miss, Label::kNear);
3651 
3652   STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
3653   __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
3654   __ j(below, &miss, Label::kNear);
3655   __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
3656   __ j(below, &miss, Label::kNear);
3657 
3658   DCHECK_EQ(equal, GetCondition());
3659   __ sub(eax, edx);
3660   __ ret(0);
3661 
3662   __ bind(&miss);
3663   GenerateMiss(masm);
3664 }
3665 
3666 
3667 void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
3668   Label miss;
3669   Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
3670   __ mov(ecx, edx);
3671   __ and_(ecx, eax);
3672   __ JumpIfSmi(ecx, &miss, Label::kNear);
3673 
3674   __ GetWeakValue(edi, cell);
3675   __ cmp(edi, FieldOperand(eax, HeapObject::kMapOffset));
3676   __ j(not_equal, &miss, Label::kNear);
3677   __ cmp(edi, FieldOperand(edx, HeapObject::kMapOffset));
3678   __ j(not_equal, &miss, Label::kNear);
3679 
3680   if (Token::IsEqualityOp(op())) {
3681     __ sub(eax, edx);
3682     __ ret(0);
3683   } else if (is_strong(strength())) {
3684     __ TailCallRuntime(Runtime::kThrowStrongModeImplicitConversion);
3685   } else {
3686     __ PopReturnAddressTo(ecx);
3687     __ Push(edx);
3688     __ Push(eax);
3689     __ Push(Immediate(Smi::FromInt(NegativeComparisonResult(GetCondition()))));
3690     __ PushReturnAddressFrom(ecx);
3691     __ TailCallRuntime(Runtime::kCompare);
3692   }
3693 
3694   __ bind(&miss);
3695   GenerateMiss(masm);
3696 }
3697 
3698 
3699 void CompareICStub::GenerateMiss(MacroAssembler* masm) {
3700   {
3701     // Call the runtime system in a fresh internal frame.
3702     FrameScope scope(masm, StackFrame::INTERNAL);
3703     __ push(edx);  // Preserve edx and eax.
3704     __ push(eax);
3705     __ push(edx);  // And also use them as the arguments.
3706     __ push(eax);
3707     __ push(Immediate(Smi::FromInt(op())));
3708     __ CallRuntime(Runtime::kCompareIC_Miss);
3709     // Compute the entry point of the rewritten stub.
3710     __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
3711     __ pop(eax);
3712     __ pop(edx);
3713   }
3714 
3715   // Do a tail call to the rewritten stub.
3716   __ jmp(edi);
3717 }
3718 
3719 
3720 // Helper function used to check that the dictionary doesn't contain
3721 // the property. This function may return false negatives, so miss_label
3722 // must always call a backup property check that is complete.
3723 // This function is safe to call if the receiver has fast properties.
3724 // Name must be a unique name and receiver must be a heap object.
3725 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
3726                                                       Label* miss,
3727                                                       Label* done,
3728                                                       Register properties,
3729                                                       Handle<Name> name,
3730                                                       Register r0) {
3731   DCHECK(name->IsUniqueName());
3732 
3733   // If names of slots in range from 1 to kProbes - 1 for the hash value are
3734   // not equal to the name and kProbes-th slot is not used (its name is the
3735   // undefined value), it guarantees the hash table doesn't contain the
3736   // property. It's true even if some slots represent deleted properties
3737   // (their names are the hole value).
3738   for (int i = 0; i < kInlinedProbes; i++) {
3739     // Compute the masked index: (hash + i + i * i) & mask.
3740     Register index = r0;
3741     // Capacity is smi 2^n.
3742     __ mov(index, FieldOperand(properties, kCapacityOffset));
3743     __ dec(index);
3744     __ and_(index,
3745             Immediate(Smi::FromInt(name->Hash() +
3746                                    NameDictionary::GetProbeOffset(i))));
3747 
3748     // Scale the index by multiplying by the entry size.
3749     STATIC_ASSERT(NameDictionary::kEntrySize == 3);
3750     __ lea(index, Operand(index, index, times_2, 0));  // index *= 3.
3751     Register entity_name = r0;
3752     // Having undefined at this place means the name is not contained.
3753     STATIC_ASSERT(kSmiTagSize == 1);
3754     __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
3755                                 kElementsStartOffset - kHeapObjectTag));
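    // {index} is still a smi here (value << 1), so scaling by
    // times_half_pointer_size yields a byte offset of
    // (3 * entry_index) * kPointerSize, matching the pointer-sized
    // dictionary slots.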
3756     __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
3757     __ j(equal, done);
3758 
3759     // Stop if found the property.
3760     __ cmp(entity_name, Handle<Name>(name));
3761     __ j(equal, miss);
3762 
3763     Label good;
3764     // Check for the hole and skip.
3765     __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
3766     __ j(equal, &good, Label::kNear);
3767 
3768     // Check if the entry name is not a unique name.
3769     __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
3770     __ JumpIfNotUniqueNameInstanceType(
3771         FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
3772     __ bind(&good);
3773   }
3774 
3775   NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
3776                                 NEGATIVE_LOOKUP);
3777   __ push(Immediate(Handle<Object>(name)));
3778   __ push(Immediate(name->Hash()));
3779   __ CallStub(&stub);
3780   __ test(r0, r0);
3781   __ j(not_zero, miss);
3782   __ jmp(done);
3783 }
3784 
3785 
3786 // Probe the name dictionary in the |elements| register. Jump to the
3787 // |done| label if a property with the given name is found leaving the
3788 // index into the dictionary in |r0|. Jump to the |miss| label
3789 // otherwise.
3790 void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
3791                                                       Label* miss,
3792                                                       Label* done,
3793                                                       Register elements,
3794                                                       Register name,
3795                                                       Register r0,
3796                                                       Register r1) {
3797   DCHECK(!elements.is(r0));
3798   DCHECK(!elements.is(r1));
3799   DCHECK(!name.is(r0));
3800   DCHECK(!name.is(r1));
3801 
3802   __ AssertName(name);
3803 
3804   __ mov(r1, FieldOperand(elements, kCapacityOffset));
3805   __ shr(r1, kSmiTagSize);  // convert smi to int
3806   __ dec(r1);
3807 
3808   // Generate an unrolled loop that performs a few probes before
3809   // giving up. Measurements done on Gmail indicate that 2 probes
3810   // cover ~93% of loads from dictionaries.
3811   for (int i = 0; i < kInlinedProbes; i++) {
3812     // Compute the masked index: (hash + i + i * i) & mask.
3813     __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
3814     __ shr(r0, Name::kHashShift);
3815     if (i > 0) {
3816       __ add(r0, Immediate(NameDictionary::GetProbeOffset(i)));
3817     }
3818     __ and_(r0, r1);
3819 
3820     // Scale the index by multiplying by the entry size.
3821     STATIC_ASSERT(NameDictionary::kEntrySize == 3);
3822     __ lea(r0, Operand(r0, r0, times_2, 0));  // r0 = r0 * 3
3823 
3824     // Check if the key is identical to the name.
3825     __ cmp(name, Operand(elements,
3826                          r0,
3827                          times_4,
3828                          kElementsStartOffset - kHeapObjectTag));
3829     __ j(equal, done);
3830   }
3831 
3832   NameDictionaryLookupStub stub(masm->isolate(), elements, r1, r0,
3833                                 POSITIVE_LOOKUP);
3834   __ push(name);
3835   __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
3836   __ shr(r0, Name::kHashShift);
3837   __ push(r0);
3838   __ CallStub(&stub);
3839 
3840   __ test(r1, r1);
3841   __ j(zero, miss);
3842   __ jmp(done);
3843 }
3844 
3845 
3846 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
3847   // This stub overrides SometimesSetsUpAFrame() to return false.  That means
3848   // we cannot call anything that could cause a GC from this stub.
3849   // Stack frame on entry:
3850   //  esp[0 * kPointerSize]: return address.
3851   //  esp[1 * kPointerSize]: key's hash.
3852   //  esp[2 * kPointerSize]: key.
3853   // Registers:
3854   //  dictionary_: NameDictionary to probe.
3855   //  result_: used as scratch.
3856   //  index_: will hold an index of entry if lookup is successful.
3857   //          might alias with result_.
3858   // Returns:
3859   //  result_ is zero if lookup failed, non zero otherwise.
3860 
3861   Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
3862 
3863   Register scratch = result();
3864 
3865   __ mov(scratch, FieldOperand(dictionary(), kCapacityOffset));
3866   __ dec(scratch);
3867   __ SmiUntag(scratch);
3868   __ push(scratch);
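  // After this push the stack layout is:
  //  esp[0 * kPointerSize]: capacity mask (capacity - 1, untagged).
  //  esp[1 * kPointerSize]: return address.
  //  esp[2 * kPointerSize]: key's hash.
  //  esp[3 * kPointerSize]: key.
  // Hence the probe loop below reads the hash and the key at offsets 2 and 3.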
3869 
3870   // If names of slots in range from 1 to kProbes - 1 for the hash value are
3871   // not equal to the name and kProbes-th slot is not used (its name is the
3872   // undefined value), it guarantees the hash table doesn't contain the
3873   // property. It's true even if some slots represent deleted properties
3874   // (their names are the hole value).
3875   for (int i = kInlinedProbes; i < kTotalProbes; i++) {
3876     // Compute the masked index: (hash + i + i * i) & mask.
3877     __ mov(scratch, Operand(esp, 2 * kPointerSize));
3878     if (i > 0) {
3879       __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
3880     }
3881     __ and_(scratch, Operand(esp, 0));
3882 
3883     // Scale the index by multiplying by the entry size.
3884     STATIC_ASSERT(NameDictionary::kEntrySize == 3);
3885     __ lea(index(), Operand(scratch, scratch, times_2, 0));  // index *= 3.
3886 
3887     // Having undefined at this place means the name is not contained.
3888     STATIC_ASSERT(kSmiTagSize == 1);
3889     __ mov(scratch, Operand(dictionary(), index(), times_pointer_size,
3890                             kElementsStartOffset - kHeapObjectTag));
3891     __ cmp(scratch, isolate()->factory()->undefined_value());
3892     __ j(equal, &not_in_dictionary);
3893 
3894     // Stop if found the property.
3895     __ cmp(scratch, Operand(esp, 3 * kPointerSize));
3896     __ j(equal, &in_dictionary);
3897 
3898     if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
3899       // If we hit a key that is not a unique name during negative
3900   // lookup we have to bail out, as this key might be equal to the
3901       // key we are looking for.
3902 
3903       // Check if the entry name is not a unique name.
3904       __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
3905       __ JumpIfNotUniqueNameInstanceType(
3906           FieldOperand(scratch, Map::kInstanceTypeOffset),
3907           &maybe_in_dictionary);
3908     }
3909   }
3910 
3911   __ bind(&maybe_in_dictionary);
3912   // If we are doing negative lookup then probing failure should be
3913   // treated as a lookup success. For positive lookup probing failure
3914   // should be treated as lookup failure.
3915   if (mode() == POSITIVE_LOOKUP) {
3916     __ mov(result(), Immediate(0));
3917     __ Drop(1);
3918     __ ret(2 * kPointerSize);
3919   }
3920 
3921   __ bind(&in_dictionary);
3922   __ mov(result(), Immediate(1));
3923   __ Drop(1);
3924   __ ret(2 * kPointerSize);
3925 
3926   __ bind(&not_in_dictionary);
3927   __ mov(result(), Immediate(0));
3928   __ Drop(1);
3929   __ ret(2 * kPointerSize);
3930 }
3931 
3932 
3933 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
3934     Isolate* isolate) {
3935   StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs);
3936   stub.GetCode();
3937   StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
3938   stub2.GetCode();
3939 }
3940 
3941 
3942 // Takes the input in 3 registers: address_, value_, and object_.  A pointer to
3943 // the value has just been written into the object, now this stub makes sure
3944 // we keep the GC informed.  The word in the object where the value has been
3945 // written is in the address register.
3946 void RecordWriteStub::Generate(MacroAssembler* masm) {
3947   Label skip_to_incremental_noncompacting;
3948   Label skip_to_incremental_compacting;
3949 
3950   // The first two instructions are generated with labels so as to get the
3951   // offset fixed up correctly by the bind(Label*) call.  We patch it back and
3952   // forth between a compare instructions (a nop in this position) and the
3953   // real branch when we start and stop incremental heap marking.
3954   __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
3955   __ jmp(&skip_to_incremental_compacting, Label::kFar);
3956 
3957   if (remembered_set_action() == EMIT_REMEMBERED_SET) {
3958     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
3959                            MacroAssembler::kReturnAtEnd);
3960   } else {
3961     __ ret(0);
3962   }
3963 
3964   __ bind(&skip_to_incremental_noncompacting);
3965   GenerateIncremental(masm, INCREMENTAL);
3966 
3967   __ bind(&skip_to_incremental_compacting);
3968   GenerateIncremental(masm, INCREMENTAL_COMPACTION);
3969 
3970   // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
3971   // Will be checked in IncrementalMarking::ActivateGeneratedStub.
3972   masm->set_byte_at(0, kTwoByteNopInstruction);
3973   masm->set_byte_at(2, kFiveByteNopInstruction);
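  // The stub thus starts with a two-byte and a five-byte instruction slot:
  // in STORE_BUFFER_ONLY mode both hold nops (installed above), and
  // activating incremental marking patches them back into the short and
  // near jumps emitted at the top of this function.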
3974 }
3975 
3976 
3977 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
3978   regs_.Save(masm);
3979 
3980   if (remembered_set_action() == EMIT_REMEMBERED_SET) {
3981     Label dont_need_remembered_set;
3982 
3983     __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
3984     __ JumpIfNotInNewSpace(regs_.scratch0(),  // Value.
3985                            regs_.scratch0(),
3986                            &dont_need_remembered_set);
3987 
3988     __ CheckPageFlag(regs_.object(),
3989                      regs_.scratch0(),
3990                      1 << MemoryChunk::SCAN_ON_SCAVENGE,
3991                      not_zero,
3992                      &dont_need_remembered_set);
3993 
3994     // First notify the incremental marker if necessary, then update the
3995     // remembered set.
3996     CheckNeedsToInformIncrementalMarker(
3997         masm,
3998         kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
3999         mode);
4000     InformIncrementalMarker(masm);
4001     regs_.Restore(masm);
4002     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
4003                            MacroAssembler::kReturnAtEnd);
4004 
4005     __ bind(&dont_need_remembered_set);
4006   }
4007 
4008   CheckNeedsToInformIncrementalMarker(
4009       masm,
4010       kReturnOnNoNeedToInformIncrementalMarker,
4011       mode);
4012   InformIncrementalMarker(masm);
4013   regs_.Restore(masm);
4014   __ ret(0);
4015 }
4016 
4017 
4018 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
4019   regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
4020   int argument_count = 3;
4021   __ PrepareCallCFunction(argument_count, regs_.scratch0());
4022   __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
4023   __ mov(Operand(esp, 1 * kPointerSize), regs_.address());  // Slot.
4024   __ mov(Operand(esp, 2 * kPointerSize),
4025          Immediate(ExternalReference::isolate_address(isolate())));
4026 
4027   AllowExternalCallThatCantCauseGC scope(masm);
4028   __ CallCFunction(
4029       ExternalReference::incremental_marking_record_write_function(isolate()),
4030       argument_count);
4031 
4032   regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
4033 }
4034 
4035 
4036 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
4037     MacroAssembler* masm,
4038     OnNoNeedToInformIncrementalMarker on_no_need,
4039     Mode mode) {
4040   Label object_is_black, need_incremental, need_incremental_pop_object;
4041 
4042   __ mov(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
4043   __ and_(regs_.scratch0(), regs_.object());
4044   __ mov(regs_.scratch1(),
4045          Operand(regs_.scratch0(),
4046                  MemoryChunk::kWriteBarrierCounterOffset));
4047   __ sub(regs_.scratch1(), Immediate(1));
4048   __ mov(Operand(regs_.scratch0(),
4049                  MemoryChunk::kWriteBarrierCounterOffset),
4050          regs_.scratch1());
4051   __ j(negative, &need_incremental);
4052 
4053   // Let's look at the color of the object:  If it is not black we don't have
4054   // to inform the incremental marker.
4055   __ JumpIfBlack(regs_.object(),
4056                  regs_.scratch0(),
4057                  regs_.scratch1(),
4058                  &object_is_black,
4059                  Label::kNear);
4060 
4061   regs_.Restore(masm);
4062   if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
4063     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
4064                            MacroAssembler::kReturnAtEnd);
4065   } else {
4066     __ ret(0);
4067   }
4068 
4069   __ bind(&object_is_black);
4070 
4071   // Get the value from the slot.
4072   __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
4073 
4074   if (mode == INCREMENTAL_COMPACTION) {
4075     Label ensure_not_white;
4076 
4077     __ CheckPageFlag(regs_.scratch0(),  // Contains value.
4078                      regs_.scratch1(),  // Scratch.
4079                      MemoryChunk::kEvacuationCandidateMask,
4080                      zero,
4081                      &ensure_not_white,
4082                      Label::kNear);
4083 
4084     __ CheckPageFlag(regs_.object(),
4085                      regs_.scratch1(),  // Scratch.
4086                      MemoryChunk::kSkipEvacuationSlotsRecordingMask,
4087                      not_zero,
4088                      &ensure_not_white,
4089                      Label::kNear);
4090 
4091     __ jmp(&need_incremental);
4092 
4093     __ bind(&ensure_not_white);
4094   }
4095 
4096   // We need an extra register for this, so we push the object register
4097   // temporarily.
4098   __ push(regs_.object());
4099   __ JumpIfWhite(regs_.scratch0(),  // The value.
4100                  regs_.scratch1(),  // Scratch.
4101                  regs_.object(),    // Scratch.
4102                  &need_incremental_pop_object, Label::kNear);
4103   __ pop(regs_.object());
4104 
4105   regs_.Restore(masm);
4106   if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
4107     __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
4108                            MacroAssembler::kReturnAtEnd);
4109   } else {
4110     __ ret(0);
4111   }
4112 
4113   __ bind(&need_incremental_pop_object);
4114   __ pop(regs_.object());
4115 
4116   __ bind(&need_incremental);
4117 
4118   // Fall through when we need to inform the incremental marker.
4119 }
4120 
4121 
4122 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
4123   CEntryStub ces(isolate(), 1, kSaveFPRegs);
4124   __ call(ces.GetCode(), RelocInfo::CODE_TARGET);
4125   int parameter_count_offset =
4126       StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
4127   __ mov(ebx, MemOperand(ebp, parameter_count_offset));
4128   masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
4129   __ pop(ecx);
4130   int additional_offset =
4131       function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
4132   __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset));
4133   __ jmp(ecx);  // Return to IC Miss stub, continuation still on stack.
4134 }
4135 
4136 
4137 void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
4138   __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
4139   LoadICStub stub(isolate(), state());
4140   stub.GenerateForTrampoline(masm);
4141 }
4142 
4143 
4144 void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
4145   __ EmitLoadTypeFeedbackVector(LoadWithVectorDescriptor::VectorRegister());
4146   KeyedLoadICStub stub(isolate(), state());
4147   stub.GenerateForTrampoline(masm);
4148 }
4149 
4150 
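// Polymorphic feedback is a FixedArray of {WeakCell(map), handler} pairs.
// The pair at elements 0/1 is tried first; the remaining pairs are scanned
// in a loop whose Smi counter advances by two elements per iteration.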
4151 static void HandleArrayCases(MacroAssembler* masm, Register receiver,
4152                              Register key, Register vector, Register slot,
4153                              Register feedback, bool is_polymorphic,
4154                              Label* miss) {
4155   // feedback initially contains the feedback array
4156   Label next, next_loop, prepare_next;
4157   Label load_smi_map, compare_map;
4158   Label start_polymorphic;
4159 
4160   __ push(receiver);
4161   __ push(vector);
4162 
4163   Register receiver_map = receiver;
4164   Register cached_map = vector;
4165 
4166   // Receiver might not be a heap object.
4167   __ JumpIfSmi(receiver, &load_smi_map);
4168   __ mov(receiver_map, FieldOperand(receiver, 0));
4169   __ bind(&compare_map);
4170   __ mov(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));
4171 
4172   // A named keyed load might have a 2-element array; all other cases can
4173   // count on an array with at least 2 {map, handler} pairs, so they can go
4174   // right into polymorphic array handling.
4175   __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
4176   __ j(not_equal, is_polymorphic ? &start_polymorphic : &next);
4177 
4178   // found, now call handler.
4179   Register handler = feedback;
4180   __ mov(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
4181   __ pop(vector);
4182   __ pop(receiver);
4183   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
4184   __ jmp(handler);
4185 
4186   if (!is_polymorphic) {
4187     __ bind(&next);
4188     __ cmp(FieldOperand(feedback, FixedArray::kLengthOffset),
4189            Immediate(Smi::FromInt(2)));
4190     __ j(not_equal, &start_polymorphic);
4191     __ pop(vector);
4192     __ pop(receiver);
4193     __ jmp(miss);
4194   }
4195 
4196   // Polymorphic, we have to loop from 2 to N
4197   __ bind(&start_polymorphic);
4198   __ push(key);
4199   Register counter = key;
4200   __ mov(counter, Immediate(Smi::FromInt(2)));
4201   __ bind(&next_loop);
4202   __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
4203                                   FixedArray::kHeaderSize));
4204   __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
4205   __ j(not_equal, &prepare_next);
4206   __ mov(handler, FieldOperand(feedback, counter, times_half_pointer_size,
4207                                FixedArray::kHeaderSize + kPointerSize));
4208   __ pop(key);
4209   __ pop(vector);
4210   __ pop(receiver);
4211   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
4212   __ jmp(handler);
4213 
4214   __ bind(&prepare_next);
4215   __ add(counter, Immediate(Smi::FromInt(2)));
4216   __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
4217   __ j(less, &next_loop);
4218 
4219   // We exhausted our array of map handler pairs.
4220   __ pop(key);
4221   __ pop(vector);
4222   __ pop(receiver);
4223   __ jmp(miss);
4224 
4225   __ bind(&load_smi_map);
4226   __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
4227   __ jmp(&compare_map);
4228 }
4229 
4230 
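// Monomorphic feedback is a WeakCell holding the expected map, with the
// handler in the following vector slot. A Smi receiver is matched against
// the heap number map instead of a map loaded from the object.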
4231 static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
4232                                   Register key, Register vector, Register slot,
4233                                   Register weak_cell, Label* miss) {
4234   // feedback initially contains the feedback array
4235   Label compare_smi_map;
4236 
4237   // Move the weak map into the weak_cell register.
4238   Register ic_map = weak_cell;
4239   __ mov(ic_map, FieldOperand(weak_cell, WeakCell::kValueOffset));
4240 
4241   // Receiver might not be a heap object.
4242   __ JumpIfSmi(receiver, &compare_smi_map);
4243   __ cmp(ic_map, FieldOperand(receiver, 0));
4244   __ j(not_equal, miss);
4245   Register handler = weak_cell;
4246   __ mov(handler, FieldOperand(vector, slot, times_half_pointer_size,
4247                                FixedArray::kHeaderSize + kPointerSize));
4248   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
4249   __ jmp(handler);
4250 
4251   // In microbenchmarks, it made sense to unroll this code so that the call to
4252   // the handler is duplicated for a HeapObject receiver and a Smi receiver.
4253   __ bind(&compare_smi_map);
4254   __ CompareRoot(ic_map, Heap::kHeapNumberMapRootIndex);
4255   __ j(not_equal, miss);
4256   __ mov(handler, FieldOperand(vector, slot, times_half_pointer_size,
4257                                FixedArray::kHeaderSize + kPointerSize));
4258   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
4259   __ jmp(handler);
4260 }
4261 
4262 
4263 void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }
4264 
4265 
4266 void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
4267   GenerateImpl(masm, true);
4268 }
4269 
4270 
4271 void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
4272   Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // edx
4273   Register name = LoadWithVectorDescriptor::NameRegister();          // ecx
4274   Register vector = LoadWithVectorDescriptor::VectorRegister();      // ebx
4275   Register slot = LoadWithVectorDescriptor::SlotRegister();          // eax
4276   Register scratch = edi;
4277   __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
4278                                FixedArray::kHeaderSize));
4279 
4280   // Is it a weak cell?
4281   Label try_array;
4282   Label not_array, smi_key, key_okay, miss;
4283   __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
4284   __ j(not_equal, &try_array);
4285   HandleMonomorphicCase(masm, receiver, name, vector, slot, scratch, &miss);
4286 
4287   // Is it a fixed array?
4288   __ bind(&try_array);
4289   __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
4290   __ j(not_equal, &not_array);
4291   HandleArrayCases(masm, receiver, name, vector, slot, scratch, true, &miss);
4292 
4293   __ bind(&not_array);
4294   __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
4295   __ j(not_equal, &miss);
4296   __ push(slot);
4297   __ push(vector);
4298   Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
4299       Code::ComputeHandlerFlags(Code::LOAD_IC));
4300   masm->isolate()->stub_cache()->GenerateProbe(masm, Code::LOAD_IC, code_flags,
4301                                                receiver, name, vector, scratch);
4302   __ pop(vector);
4303   __ pop(slot);
4304 
4305   __ bind(&miss);
4306   LoadIC::GenerateMiss(masm);
4307 }
4308 
4309 
4310 void KeyedLoadICStub::Generate(MacroAssembler* masm) {
4311   GenerateImpl(masm, false);
4312 }
4313 
4314 
4315 void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
4316   GenerateImpl(masm, true);
4317 }
4318 
4319 
4320 void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
4321   Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // edx
4322   Register key = LoadWithVectorDescriptor::NameRegister();           // ecx
4323   Register vector = LoadWithVectorDescriptor::VectorRegister();      // ebx
4324   Register slot = LoadWithVectorDescriptor::SlotRegister();          // eax
4325   Register feedback = edi;
4326   __ mov(feedback, FieldOperand(vector, slot, times_half_pointer_size,
4327                                 FixedArray::kHeaderSize));
4328   // Is it a weak cell?
4329   Label try_array;
4330   Label not_array, smi_key, key_okay, miss;
4331   __ CompareRoot(FieldOperand(feedback, 0), Heap::kWeakCellMapRootIndex);
4332   __ j(not_equal, &try_array);
4333   HandleMonomorphicCase(masm, receiver, key, vector, slot, feedback, &miss);
4334 
4335   __ bind(&try_array);
4336   // Is it a fixed array?
4337   __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
4338   __ j(not_equal, &not_array);
4339 
4340   // We have a polymorphic element handler.
4341   Label polymorphic, try_poly_name;
4342   __ bind(&polymorphic);
4343   HandleArrayCases(masm, receiver, key, vector, slot, feedback, true, &miss);
4344 
4345   __ bind(&not_array);
4346   // Is it generic?
4347   __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
4348   __ j(not_equal, &try_poly_name);
4349   Handle<Code> megamorphic_stub =
4350       KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
4351   __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);
4352 
4353   __ bind(&try_poly_name);
4354   // We might have a name in feedback, and a fixed array in the next slot.
4355   __ cmp(key, feedback);
4356   __ j(not_equal, &miss);
4357   // If the name comparison succeeded, we know we have a fixed array with
4358   // at least one map/handler pair.
4359   __ mov(feedback, FieldOperand(vector, slot, times_half_pointer_size,
4360                                 FixedArray::kHeaderSize + kPointerSize));
4361   HandleArrayCases(masm, receiver, key, vector, slot, feedback, false, &miss);
4362 
4363   __ bind(&miss);
4364   KeyedLoadIC::GenerateMiss(masm);
4365 }
4366 
4367 
4368 void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
4369   __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
4370   VectorStoreICStub stub(isolate(), state());
4371   stub.GenerateForTrampoline(masm);
4372 }
4373 
4374 
4375 void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
4376   __ EmitLoadTypeFeedbackVector(VectorStoreICDescriptor::VectorRegister());
4377   VectorKeyedStoreICStub stub(isolate(), state());
4378   stub.GenerateForTrampoline(masm);
4379 }
4380 
4381 
4382 void VectorStoreICStub::Generate(MacroAssembler* masm) {
4383   GenerateImpl(masm, false);
4384 }
4385 
4386 
4387 void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
4388   GenerateImpl(masm, true);
4389 }
4390 
4391 
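// Store handlers are reached through an external "virtual register" slot:
// the register that would carry the handler doubles as the value register
// (see the DCHECK below), so the handler address is parked in that slot,
// the value is popped back into its register, and the jump goes through
// the slot indirectly.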
4392 // value is on the stack already.
4393 static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register receiver,
4394                                        Register key, Register vector,
4395                                        Register slot, Register feedback,
4396                                        bool is_polymorphic, Label* miss) {
4397   // feedback initially contains the feedback array
4398   Label next, next_loop, prepare_next;
4399   Label load_smi_map, compare_map;
4400   Label start_polymorphic;
4401   Label pop_and_miss;
4402   ExternalReference virtual_register =
4403       ExternalReference::virtual_handler_register(masm->isolate());
4404 
4405   __ push(receiver);
4406   __ push(vector);
4407 
4408   Register receiver_map = receiver;
4409   Register cached_map = vector;
4410 
4411   // Receiver might not be a heap object.
4412   __ JumpIfSmi(receiver, &load_smi_map);
4413   __ mov(receiver_map, FieldOperand(receiver, 0));
4414   __ bind(&compare_map);
4415   __ mov(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));
4416 
4417   // A named keyed store might have a 2-element array; all other cases can
4418   // count on an array with at least 2 {map, handler} pairs, so they can go
4419   // right into polymorphic array handling.
4420   __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
4421   __ j(not_equal, &start_polymorphic);
4422 
4423   // found, now call handler.
4424   Register handler = feedback;
4425   DCHECK(handler.is(VectorStoreICDescriptor::ValueRegister()));
4426   __ mov(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
4427   __ pop(vector);
4428   __ pop(receiver);
4429   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
4430   __ mov(Operand::StaticVariable(virtual_register), handler);
4431   __ pop(handler);  // Pop "value".
4432   __ jmp(Operand::StaticVariable(virtual_register));
4433 
4434   // Polymorphic, we have to loop from 2 to N
4435   __ bind(&start_polymorphic);
4436   __ push(key);
4437   Register counter = key;
4438   __ mov(counter, Immediate(Smi::FromInt(2)));
4439 
4440   if (!is_polymorphic) {
4441     // If is_polymorphic is false, we may only have a two element array.
4442     // Check against length now in that case.
4443     __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
4444     __ j(greater_equal, &pop_and_miss);
4445   }
4446 
4447   __ bind(&next_loop);
4448   __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
4449                                   FixedArray::kHeaderSize));
4450   __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
4451   __ j(not_equal, &prepare_next);
4452   __ mov(handler, FieldOperand(feedback, counter, times_half_pointer_size,
4453                                FixedArray::kHeaderSize + kPointerSize));
4454   __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
4455   __ pop(key);
4456   __ pop(vector);
4457   __ pop(receiver);
4458   __ mov(Operand::StaticVariable(virtual_register), handler);
4459   __ pop(handler);  // Pop "value".
4460   __ jmp(Operand::StaticVariable(virtual_register));
4461 
4462   __ bind(&prepare_next);
4463   __ add(counter, Immediate(Smi::FromInt(2)));
4464   __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
4465   __ j(less, &next_loop);
4466 
4467   // We exhausted our array of map handler pairs.
4468   __ bind(&pop_and_miss);
4469   __ pop(key);
4470   __ pop(vector);
4471   __ pop(receiver);
4472   __ jmp(miss);
4473 
4474   __ bind(&load_smi_map);
4475   __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
4476   __ jmp(&compare_map);
4477 }
4478 
4479 
4480 static void HandleMonomorphicStoreCase(MacroAssembler* masm, Register receiver,
4481                                        Register key, Register vector,
4482                                        Register slot, Register weak_cell,
4483                                        Label* miss) {
4484   // The store ic value is on the stack.
4485   DCHECK(weak_cell.is(VectorStoreICDescriptor::ValueRegister()));
4486   ExternalReference virtual_register =
4487       ExternalReference::virtual_handler_register(masm->isolate());
4488 
4489   // feedback initially contains the feedback array
4490   Label compare_smi_map;
4491 
4492   // Move the weak map into the weak_cell register.
4493   Register ic_map = weak_cell;
4494   __ mov(ic_map, FieldOperand(weak_cell, WeakCell::kValueOffset));
4495 
4496   // Receiver might not be a heap object.
4497   __ JumpIfSmi(receiver, &compare_smi_map);
4498   __ cmp(ic_map, FieldOperand(receiver, 0));
4499   __ j(not_equal, miss);
4500   __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
4501                                  FixedArray::kHeaderSize + kPointerSize));
4502   __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
4503   // Put the store ic value back in its register.
4504   __ mov(Operand::StaticVariable(virtual_register), weak_cell);
4505   __ pop(weak_cell);  // Pop "value".
4506   // jump to the handler.
4507   __ jmp(Operand::StaticVariable(virtual_register));
4508 
4509   // In microbenchmarks, it made sense to unroll this code so that the call to
4510   // the handler is duplicated for a HeapObject receiver and a Smi receiver.
4511   __ bind(&compare_smi_map);
4512   __ CompareRoot(ic_map, Heap::kHeapNumberMapRootIndex);
4513   __ j(not_equal, miss);
4514   __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
4515                                  FixedArray::kHeaderSize + kPointerSize));
4516   __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
4517   __ mov(Operand::StaticVariable(virtual_register), weak_cell);
4518   __ pop(weak_cell);  // Pop "value".
4519   // jump to the handler.
4520   __ jmp(Operand::StaticVariable(virtual_register));
4521 }
4522 
4523 
4524 void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
4525   Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // edx
4526   Register key = VectorStoreICDescriptor::NameRegister();           // ecx
4527   Register value = VectorStoreICDescriptor::ValueRegister();        // eax
4528   Register vector = VectorStoreICDescriptor::VectorRegister();      // ebx
4529   Register slot = VectorStoreICDescriptor::SlotRegister();          // edi
4530   Label miss;
4531 
4532   __ push(value);
4533 
4534   Register scratch = value;
4535   __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
4536                                FixedArray::kHeaderSize));
4537 
4538   // Is it a weak cell?
4539   Label try_array;
4540   Label not_array, smi_key, key_okay;
4541   __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
4542   __ j(not_equal, &try_array);
4543   HandleMonomorphicStoreCase(masm, receiver, key, vector, slot, scratch, &miss);
4544 
4545   // Is it a fixed array?
4546   __ bind(&try_array);
4547   __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
4548   __ j(not_equal, &not_array);
4549   HandlePolymorphicStoreCase(masm, receiver, key, vector, slot, scratch, true,
4550                              &miss);
4551 
4552   __ bind(&not_array);
4553   __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
4554   __ j(not_equal, &miss);
4555 
4556   __ pop(value);
4557   __ push(slot);
4558   __ push(vector);
4559   Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
4560       Code::ComputeHandlerFlags(Code::STORE_IC));
4561   masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, code_flags,
4562                                                receiver, key, slot, no_reg);
4563   __ pop(vector);
4564   __ pop(slot);
4565   Label no_pop_miss;
4566   __ jmp(&no_pop_miss);
4567 
4568   __ bind(&miss);
4569   __ pop(value);
4570   __ bind(&no_pop_miss);
4571   StoreIC::GenerateMiss(masm);
4572 }
4573 
4574 
4575 void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) {
4576   GenerateImpl(masm, false);
4577 }
4578 
4579 
4580 void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
4581   GenerateImpl(masm, true);
4582 }
4583 
4584 
4585 static void HandlePolymorphicKeyedStoreCase(MacroAssembler* masm,
4586                                             Register receiver, Register key,
4587                                             Register vector, Register slot,
4588                                             Register feedback, Label* miss) {
4589   // feedback initially contains the feedback array
4590   Label next, next_loop, prepare_next;
4591   Label load_smi_map, compare_map;
4592   Label transition_call;
4593   Label pop_and_miss;
4594   ExternalReference virtual_register =
4595       ExternalReference::virtual_handler_register(masm->isolate());
4596   ExternalReference virtual_slot =
4597       ExternalReference::virtual_slot_register(masm->isolate());
4598 
4599   __ push(receiver);
4600   __ push(vector);
4601 
4602   Register receiver_map = receiver;
4603   Register cached_map = vector;
4604   Register value = StoreDescriptor::ValueRegister();
4605 
4606   // Receiver might not be a heap object.
4607   __ JumpIfSmi(receiver, &load_smi_map);
4608   __ mov(receiver_map, FieldOperand(receiver, 0));
4609   __ bind(&compare_map);
4610 
4611   // Polymorphic, we have to loop from 0 to N - 1
4612   __ push(key);
4613   // Current stack layout:
4614   // - esp[0]    -- key
4615   // - esp[4]    -- vector
4616   // - esp[8]    -- receiver
4617   // - esp[12]   -- value
4618   // - esp[16]   -- return address
4619   //
4620   // Required stack layout for handler call:
4621   // - esp[0]    -- return address
4622   // - receiver, key, value, vector, slot in registers.
4623   // - handler in virtual register.
4624   Register counter = key;
4625   __ mov(counter, Immediate(Smi::FromInt(0)));
4626   __ bind(&next_loop);
4627   __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
4628                                   FixedArray::kHeaderSize));
4629   __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
4630   __ j(not_equal, &prepare_next);
4631   __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
4632                                   FixedArray::kHeaderSize + kPointerSize));
4633   __ CompareRoot(cached_map, Heap::kUndefinedValueRootIndex);
4634   __ j(not_equal, &transition_call);
4635   __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
4636                                 FixedArray::kHeaderSize + 2 * kPointerSize));
4637   __ pop(key);
4638   __ pop(vector);
4639   __ pop(receiver);
4640   __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));
4641   __ mov(Operand::StaticVariable(virtual_register), feedback);
4642   __ pop(value);
4643   __ jmp(Operand::StaticVariable(virtual_register));
4644 
4645   __ bind(&transition_call);
4646   // Current stack layout:
4647   // - esp[0]    -- key
4648   // - esp[4]    -- vector
4649   // - esp[8]    -- receiver
4650   // - esp[12]   -- value
4651   // - esp[16]   -- return address
4652   //
4653   // Required stack layout for handler call:
4654   // - esp[0]    -- return address
4655   // - receiver, key, value, map, vector in registers.
4656   // - handler and slot in virtual registers.
4657   __ mov(Operand::StaticVariable(virtual_slot), slot);
4658   __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
4659                                 FixedArray::kHeaderSize + 2 * kPointerSize));
4660   __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));
4661   __ mov(Operand::StaticVariable(virtual_register), feedback);
4662 
4663   __ mov(cached_map, FieldOperand(cached_map, WeakCell::kValueOffset));
4664   // The weak cell may have been cleared.
4665   __ JumpIfSmi(cached_map, &pop_and_miss);
4666   DCHECK(!cached_map.is(VectorStoreTransitionDescriptor::MapRegister()));
4667   __ mov(VectorStoreTransitionDescriptor::MapRegister(), cached_map);
4668 
4669   // Pop key into place.
4670   __ pop(key);
4671   __ pop(vector);
4672   __ pop(receiver);
4673   __ pop(value);
4674   __ jmp(Operand::StaticVariable(virtual_register));
4675 
4676   __ bind(&prepare_next);
4677   __ add(counter, Immediate(Smi::FromInt(3)));
4678   __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
4679   __ j(less, &next_loop);
4680 
4681   // We exhausted our array of map handler pairs.
4682   __ bind(&pop_and_miss);
4683   __ pop(key);
4684   __ pop(vector);
4685   __ pop(receiver);
4686   __ jmp(miss);
4687 
4688   __ bind(&load_smi_map);
4689   __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
4690   __ jmp(&compare_map);
4691 }
4692 
4693 
4694 void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
4695   Register receiver = VectorStoreICDescriptor::ReceiverRegister();  // edx
4696   Register key = VectorStoreICDescriptor::NameRegister();           // ecx
4697   Register value = VectorStoreICDescriptor::ValueRegister();        // eax
4698   Register vector = VectorStoreICDescriptor::VectorRegister();      // ebx
4699   Register slot = VectorStoreICDescriptor::SlotRegister();          // edi
4700   Label miss;
4701 
4702   __ push(value);
4703 
4704   Register scratch = value;
4705   __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
4706                                FixedArray::kHeaderSize));
4707 
4708   // Is it a weak cell?
4709   Label try_array;
4710   Label not_array, smi_key, key_okay;
4711   __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
4712   __ j(not_equal, &try_array);
4713   HandleMonomorphicStoreCase(masm, receiver, key, vector, slot, scratch, &miss);
4714 
4715   // Is it a fixed array?
4716   __ bind(&try_array);
4717   __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
4718   __ j(not_equal, &not_array);
4719   HandlePolymorphicKeyedStoreCase(masm, receiver, key, vector, slot, scratch,
4720                                   &miss);
4721 
4722   __ bind(&not_array);
4723   Label try_poly_name;
4724   __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
4725   __ j(not_equal, &try_poly_name);
4726 
4727   __ pop(value);
4728 
4729   Handle<Code> megamorphic_stub =
4730       KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
4731   __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);
4732 
4733   __ bind(&try_poly_name);
4734   // We might have a name in feedback, and a fixed array in the next slot.
4735   __ cmp(key, scratch);
4736   __ j(not_equal, &miss);
4737   // If the name comparison succeeded, we know we have a fixed array with
4738   // at least one map/handler pair.
4739   __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
4740                                FixedArray::kHeaderSize + kPointerSize));
4741   HandlePolymorphicStoreCase(masm, receiver, key, vector, slot, scratch, false,
4742                              &miss);
4743 
4744   __ bind(&miss);
4745   __ pop(value);
4746   KeyedStoreIC::GenerateMiss(masm);
4747 }
4748 
4749 
4750 void CallICTrampolineStub::Generate(MacroAssembler* masm) {
4751   __ EmitLoadTypeFeedbackVector(ebx);
4752   CallICStub stub(isolate(), state());
4753   __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
4754 }
4755 
4756 
4757 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
4758   if (masm->isolate()->function_entry_hook() != NULL) {
4759     ProfileEntryHookStub stub(masm->isolate());
4760     masm->CallStub(&stub);
4761   }
4762 }
4763 
4764 
4765 void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
4766   // Save volatile registers.
4767   const int kNumSavedRegisters = 3;
4768   __ push(eax);
4769   __ push(ecx);
4770   __ push(edx);
4771 
4772   // Calculate and push the original stack pointer.
4773   __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
4774   __ push(eax);
4775 
4776   // Retrieve our return address and use it to calculate the calling
4777   // function's address.
4778   __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
4779   __ sub(eax, Immediate(Assembler::kCallInstructionLength));
4780   __ push(eax);
4781 
4782   // Call the entry hook.
4783   DCHECK(isolate()->function_entry_hook() != NULL);
4784   __ call(FUNCTION_ADDR(isolate()->function_entry_hook()),
4785           RelocInfo::RUNTIME_ENTRY);
4786   __ add(esp, Immediate(2 * kPointerSize));
4787 
4788   // Restore the saved volatile registers.
4789   __ pop(edx);
4790   __ pop(ecx);
4791   __ pop(eax);
4792 
4793   __ ret(0);
4794 }
4795 
4796 
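// Dispatches to the array constructor stub for the elements kind in edx by
// probing each fast kind in sequence order; with DISABLE_ALLOCATION_SITES a
// single stub specialized for the initial kind suffices.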
4797 template<class T>
4798 static void CreateArrayDispatch(MacroAssembler* masm,
4799                                 AllocationSiteOverrideMode mode) {
4800   if (mode == DISABLE_ALLOCATION_SITES) {
4801     T stub(masm->isolate(),
4802            GetInitialFastElementsKind(),
4803            mode);
4804     __ TailCallStub(&stub);
4805   } else if (mode == DONT_OVERRIDE) {
4806     int last_index = GetSequenceIndexFromFastElementsKind(
4807         TERMINAL_FAST_ELEMENTS_KIND);
4808     for (int i = 0; i <= last_index; ++i) {
4809       Label next;
4810       ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4811       __ cmp(edx, kind);
4812       __ j(not_equal, &next);
4813       T stub(masm->isolate(), kind);
4814       __ TailCallStub(&stub);
4815       __ bind(&next);
4816     }
4817 
4818     // If we reached this point there is a problem.
4819     __ Abort(kUnexpectedElementsKindInArrayConstructor);
4820   } else {
4821     UNREACHABLE();
4822   }
4823 }
4824 
4825 
4826 static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
4827                                            AllocationSiteOverrideMode mode) {
4828   // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
4829   // edx - kind (if mode != DISABLE_ALLOCATION_SITES)
4830   // eax - number of arguments
4831   // edi - constructor?
4832   // esp[0] - return address
4833   // esp[4] - last argument
4834   Label normal_sequence;
4835   if (mode == DONT_OVERRIDE) {
4836     STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
4837     STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
4838     STATIC_ASSERT(FAST_ELEMENTS == 2);
4839     STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
4840     STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
4841     STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
4842 
4843     // is the low bit set? If so, we are holey and that is good.
4844     __ test_b(edx, 1);
4845     __ j(not_zero, &normal_sequence);
4846   }
4847 
4848   // look at the first argument
4849   __ mov(ecx, Operand(esp, kPointerSize));
4850   __ test(ecx, ecx);
4851   __ j(zero, &normal_sequence);
4852 
4853   if (mode == DISABLE_ALLOCATION_SITES) {
4854     ElementsKind initial = GetInitialFastElementsKind();
4855     ElementsKind holey_initial = GetHoleyElementsKind(initial);
4856 
4857     ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
4858                                                   holey_initial,
4859                                                   DISABLE_ALLOCATION_SITES);
4860     __ TailCallStub(&stub_holey);
4861 
4862     __ bind(&normal_sequence);
4863     ArraySingleArgumentConstructorStub stub(masm->isolate(),
4864                                             initial,
4865                                             DISABLE_ALLOCATION_SITES);
4866     __ TailCallStub(&stub);
4867   } else if (mode == DONT_OVERRIDE) {
4868     // We are going to create a holey array, but our kind is non-holey.
4869     // Fix kind and retry.
4870     __ inc(edx);
4871 
4872     if (FLAG_debug_code) {
4873       Handle<Map> allocation_site_map =
4874           masm->isolate()->factory()->allocation_site_map();
4875       __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
4876       __ Assert(equal, kExpectedAllocationSite);
4877     }
4878 
4879     // Save the resulting elements kind in the type info. We can't just store
4880     // edx in the AllocationSite::transition_info field because the elements
4881     // kind is restricted to a portion of the field; upper bits must be left alone.
4882     STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
4883     __ add(FieldOperand(ebx, AllocationSite::kTransitionInfoOffset),
4884            Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));
4885 
4886     __ bind(&normal_sequence);
4887     int last_index = GetSequenceIndexFromFastElementsKind(
4888         TERMINAL_FAST_ELEMENTS_KIND);
4889     for (int i = 0; i <= last_index; ++i) {
4890       Label next;
4891       ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4892       __ cmp(edx, kind);
4893       __ j(not_equal, &next);
4894       ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
4895       __ TailCallStub(&stub);
4896       __ bind(&next);
4897     }
4898 
4899     // If we reached this point there is a problem.
4900     __ Abort(kUnexpectedElementsKindInArrayConstructor);
4901   } else {
4902     UNREACHABLE();
4903   }
4904 }
4905 
4906 
4907 template<class T>
4908 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
4909   int to_index = GetSequenceIndexFromFastElementsKind(
4910       TERMINAL_FAST_ELEMENTS_KIND);
4911   for (int i = 0; i <= to_index; ++i) {
4912     ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
4913     T stub(isolate, kind);
4914     stub.GetCode();
4915     if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
4916       T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
4917       stub1.GetCode();
4918     }
4919   }
4920 }
4921 
4922 
4923 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
4924   ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
4925       isolate);
4926   ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
4927       isolate);
4928   ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
4929       isolate);
4930 }
4931 
4932 
4933 void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
4934     Isolate* isolate) {
4935   ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
4936   for (int i = 0; i < 2; i++) {
4937     // For internal arrays we only need a few things
4938     InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
4939     stubh1.GetCode();
4940     InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
4941     stubh2.GetCode();
4942     InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
4943     stubh3.GetCode();
4944   }
4945 }
4946 
4947 
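// Routes the construct call by argument count: dedicated stubs exist for
// zero, one, and N arguments, and the ANY case dispatches on eax at runtime.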
4948 void ArrayConstructorStub::GenerateDispatchToArrayStub(
4949     MacroAssembler* masm,
4950     AllocationSiteOverrideMode mode) {
4951   if (argument_count() == ANY) {
4952     Label not_zero_case, not_one_case;
4953     __ test(eax, eax);
4954     __ j(not_zero, &not_zero_case);
4955     CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4956 
4957     __ bind(&not_zero_case);
4958     __ cmp(eax, 1);
4959     __ j(greater, &not_one_case);
4960     CreateArrayDispatchOneArgument(masm, mode);
4961 
4962     __ bind(&not_one_case);
4963     CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4964   } else if (argument_count() == NONE) {
4965     CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
4966   } else if (argument_count() == ONE) {
4967     CreateArrayDispatchOneArgument(masm, mode);
4968   } else if (argument_count() == MORE_THAN_ONE) {
4969     CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
4970   } else {
4971     UNREACHABLE();
4972   }
4973 }
4974 
4975 
4976 void ArrayConstructorStub::Generate(MacroAssembler* masm) {
4977   // ----------- S t a t e -------------
4978   //  -- eax : argc (only if argument_count() is ANY or MORE_THAN_ONE)
4979   //  -- ebx : AllocationSite or undefined
4980   //  -- edi : constructor
4981   //  -- edx : Original constructor
4982   //  -- esp[0] : return address
4983   //  -- esp[4] : last argument
4984   // -----------------------------------
4985   if (FLAG_debug_code) {
4986     // The array construct code is only set for the global and natives
4987     // builtin Array functions which always have maps.
4988 
4989     // Initial map for the builtin Array function should be a map.
4990     __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
4991     // This check catches both a NULL pointer and a Smi.
4992     __ test(ecx, Immediate(kSmiTagMask));
4993     __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
4994     __ CmpObjectType(ecx, MAP_TYPE, ecx);
4995     __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
4996 
4997     // We should either have undefined in ebx or a valid AllocationSite
4998     __ AssertUndefinedOrAllocationSite(ebx);
4999   }
5000 
5001   Label subclassing;
5002 
5003   // Enter the context of the Array function.
5004   __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
5005 
5006   __ cmp(edx, edi);
5007   __ j(not_equal, &subclassing);
5008 
5009   Label no_info;
5010   // If the feedback vector is the undefined value, call an array
5011   // constructor that doesn't use AllocationSites.
5012   __ cmp(ebx, isolate()->factory()->undefined_value());
5013   __ j(equal, &no_info);
5014 
5015   // Only look at the lower 16 bits of the transition info.
5016   __ mov(edx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset));
5017   __ SmiUntag(edx);
5018   STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
5019   __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
5020   GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
5021 
5022   __ bind(&no_info);
5023   GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
5024 
5025   // Subclassing.
5026   __ bind(&subclassing);
5027   switch (argument_count()) {
5028     case ANY:
5029     case MORE_THAN_ONE:
5030       __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
5031       __ add(eax, Immediate(3));
5032       break;
5033     case NONE:
5034       __ mov(Operand(esp, 1 * kPointerSize), edi);
5035       __ mov(eax, Immediate(3));
5036       break;
5037     case ONE:
5038       __ mov(Operand(esp, 2 * kPointerSize), edi);
5039       __ mov(eax, Immediate(4));
5040       break;
5041   }
5042   __ PopReturnAddressTo(ecx);
5043   __ Push(edx);
5044   __ Push(ebx);
5045   __ PushReturnAddressFrom(ecx);
5046   __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
5047 }
5048 
5049 
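// Picks the leanest internal-array stub: zero arguments, one argument (with
// a holey variant when the kind is packed but the requested length is
// nonzero), or the general N-argument case.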
5050 void InternalArrayConstructorStub::GenerateCase(
5051     MacroAssembler* masm, ElementsKind kind) {
5052   Label not_zero_case, not_one_case;
5053   Label normal_sequence;
5054 
5055   __ test(eax, eax);
5056   __ j(not_zero, &not_zero_case);
5057   InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
5058   __ TailCallStub(&stub0);
5059 
5060   __ bind(&not_zero_case);
5061   __ cmp(eax, 1);
5062   __ j(greater, &not_one_case);
5063 
5064   if (IsFastPackedElementsKind(kind)) {
5065     // We might need to create a holey array:
5066     // look at the first argument.
5067     __ mov(ecx, Operand(esp, kPointerSize));
5068     __ test(ecx, ecx);
5069     __ j(zero, &normal_sequence);
5070 
5071     InternalArraySingleArgumentConstructorStub
5072         stub1_holey(isolate(), GetHoleyElementsKind(kind));
5073     __ TailCallStub(&stub1_holey);
5074   }
5075 
5076   __ bind(&normal_sequence);
5077   InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
5078   __ TailCallStub(&stub1);
5079 
5080   __ bind(&not_one_case);
5081   InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
5082   __ TailCallStub(&stubN);
5083 }
5084 
5085 
5086 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
5087   // ----------- S t a t e -------------
5088   //  -- eax : argc
5089   //  -- edi : constructor
5090   //  -- esp[0] : return address
5091   //  -- esp[4] : last argument
5092   // -----------------------------------
5093 
5094   if (FLAG_debug_code) {
5095     // The array construct code is only set for the global and natives
5096     // builtin Array functions which always have maps.
5097 
5098     // Initial map for the builtin Array function should be a map.
5099     __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
5100     // This check catches both a NULL pointer and a Smi.
5101     __ test(ecx, Immediate(kSmiTagMask));
5102     __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
5103     __ CmpObjectType(ecx, MAP_TYPE, ecx);
5104     __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
5105   }
5106 
5107   // Figure out the right elements kind
5108   __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
5109 
5110   // Load the map's "bit field 2" into ecx. We only need the first byte,
5111   // but the following masking takes care of that anyway.
5112   __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
5113   // Retrieve elements_kind from bit field 2.
5114   __ DecodeField<Map::ElementsKindBits>(ecx);
5115 
5116   if (FLAG_debug_code) {
5117     Label done;
5118     __ cmp(ecx, Immediate(FAST_ELEMENTS));
5119     __ j(equal, &done);
5120     __ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS));
5121     __ Assert(equal,
5122               kInvalidElementsKindForInternalArrayOrInternalPackedArray);
5123     __ bind(&done);
5124   }
5125 
5126   Label fast_elements_case;
5127   __ cmp(ecx, Immediate(FAST_ELEMENTS));
5128   __ j(equal, &fast_elements_case);
5129   GenerateCase(masm, FAST_HOLEY_ELEMENTS);
5130 
5131   __ bind(&fast_elements_case);
5132   GenerateCase(masm, FAST_ELEMENTS);
5133 }
5134 
5135 
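// Walks up depth() contexts to the script context, loads the PropertyCell
// value at the given slot, and falls back to the runtime when the cell
// holds the_hole.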
5136 void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
5137   Register context_reg = esi;
5138   Register slot_reg = ebx;
5139   Register result_reg = eax;
5140   Label slow_case;
5141 
5142   // Go up context chain to the script context.
5143   for (int i = 0; i < depth(); ++i) {
5144     __ mov(result_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
5145     context_reg = result_reg;
5146   }
5147 
5148   // Load the PropertyCell value at the specified slot.
5149   __ mov(result_reg, ContextOperand(context_reg, slot_reg));
5150   __ mov(result_reg, FieldOperand(result_reg, PropertyCell::kValueOffset));
5151 
5152   // Check that value is not the_hole.
5153   __ CompareRoot(result_reg, Heap::kTheHoleValueRootIndex);
5154   __ j(equal, &slow_case, Label::kNear);
5155   __ Ret();
5156 
5157   // Fallback to the runtime.
5158   __ bind(&slow_case);
5159   __ SmiTag(slot_reg);
5160   __ Pop(result_reg);  // Pop return address.
5161   __ Push(slot_reg);
5162   __ Push(result_reg);  // Push return address.
5163   __ TailCallRuntime(Runtime::kLoadGlobalViaContext);
5164 }
5165 
5166 
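// The store fast paths mirror the PropertyCell types: mutable data cells
// take a plain write (with a write barrier for heap objects), constant-style
// cells accept only the identical value, and ConstantType cells additionally
// allow a new value of the same representation (Smi, or heap object with the
// same map). Anything else, including READ_ONLY cells, defers to the runtime.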
5167 void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
5168   Register context_reg = esi;
5169   Register slot_reg = ebx;
5170   Register value_reg = eax;
5171   Register cell_reg = edi;
5172   Register cell_details_reg = edx;
5173   Register cell_value_reg = ecx;
5174   Label fast_heapobject_case, fast_smi_case, slow_case;
5175 
5176   if (FLAG_debug_code) {
5177     __ CompareRoot(value_reg, Heap::kTheHoleValueRootIndex);
5178     __ Check(not_equal, kUnexpectedValue);
5179   }
5180 
5181   // Go up context chain to the script context.
5182   for (int i = 0; i < depth(); ++i) {
5183     __ mov(cell_reg, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
5184     context_reg = cell_reg;
5185   }
5186 
5187   // Load the PropertyCell at the specified slot.
5188   __ mov(cell_reg, ContextOperand(context_reg, slot_reg));
5189 
5190   // Load PropertyDetails for the cell (actually only the cell_type and kind).
5191   __ mov(cell_details_reg,
5192          FieldOperand(cell_reg, PropertyCell::kDetailsOffset));
5193   __ SmiUntag(cell_details_reg);
5194   __ and_(cell_details_reg,
5195           Immediate(PropertyDetails::PropertyCellTypeField::kMask |
5196                     PropertyDetails::KindField::kMask |
5197                     PropertyDetails::kAttributesReadOnlyMask));
5198 
5199   // Check if PropertyCell holds mutable data.
5200   Label not_mutable_data;
5201   __ cmp(cell_details_reg,
5202          Immediate(PropertyDetails::PropertyCellTypeField::encode(
5203                        PropertyCellType::kMutable) |
5204                    PropertyDetails::KindField::encode(kData)));
5205   __ j(not_equal, &not_mutable_data);
5206   __ JumpIfSmi(value_reg, &fast_smi_case);
5207   __ bind(&fast_heapobject_case);
5208   __ mov(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
5209   __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
5210                       cell_details_reg, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
5211                       OMIT_SMI_CHECK);
5212   // RecordWriteField clobbers the value register, so we need to reload.
5213   __ mov(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
5214   __ Ret();
5215   __ bind(&not_mutable_data);
5216 
5217   // Check if PropertyCell value matches the new value (relevant for Constant,
5218   // ConstantType and Undefined cells).
5219   Label not_same_value;
5220   __ mov(cell_value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
5221   __ cmp(cell_value_reg, value_reg);
5222   __ j(not_equal, &not_same_value,
5223        FLAG_debug_code ? Label::kFar : Label::kNear);
5224   // Make sure the PropertyCell is not marked READ_ONLY.
5225   __ test(cell_details_reg,
5226           Immediate(PropertyDetails::kAttributesReadOnlyMask));
5227   __ j(not_zero, &slow_case);
5228   if (FLAG_debug_code) {
5229     Label done;
5230     // This can only be true for Constant, ConstantType and Undefined cells,
5231     // because we never store the_hole via this stub.
5232     __ cmp(cell_details_reg,
5233            Immediate(PropertyDetails::PropertyCellTypeField::encode(
5234                          PropertyCellType::kConstant) |
5235                      PropertyDetails::KindField::encode(kData)));
5236     __ j(equal, &done);
5237     __ cmp(cell_details_reg,
5238            Immediate(PropertyDetails::PropertyCellTypeField::encode(
5239                          PropertyCellType::kConstantType) |
5240                      PropertyDetails::KindField::encode(kData)));
5241     __ j(equal, &done);
5242     __ cmp(cell_details_reg,
5243            Immediate(PropertyDetails::PropertyCellTypeField::encode(
5244                          PropertyCellType::kUndefined) |
5245                      PropertyDetails::KindField::encode(kData)));
5246     __ Check(equal, kUnexpectedValue);
5247     __ bind(&done);
5248   }
5249   __ Ret();
5250   __ bind(&not_same_value);
5251 
5252   // Check if PropertyCell contains data with constant type (and is not
5253   // READ_ONLY).
5254   __ cmp(cell_details_reg,
5255          Immediate(PropertyDetails::PropertyCellTypeField::encode(
5256                        PropertyCellType::kConstantType) |
5257                    PropertyDetails::KindField::encode(kData)));
5258   __ j(not_equal, &slow_case, Label::kNear);
5259 
5260   // Now either both old and new values must be SMIs or both must be heap
5261   // objects with same map.
5262   Label value_is_heap_object;
5263   __ JumpIfNotSmi(value_reg, &value_is_heap_object, Label::kNear);
5264   __ JumpIfNotSmi(cell_value_reg, &slow_case, Label::kNear);
5265   // Old and new values are SMIs, no need for a write barrier here.
5266   __ bind(&fast_smi_case);
5267   __ mov(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
5268   __ Ret();
5269   __ bind(&value_is_heap_object);
5270   __ JumpIfSmi(cell_value_reg, &slow_case, Label::kNear);
5271   Register cell_value_map_reg = cell_value_reg;
5272   __ mov(cell_value_map_reg,
5273          FieldOperand(cell_value_reg, HeapObject::kMapOffset));
5274   __ cmp(cell_value_map_reg, FieldOperand(value_reg, HeapObject::kMapOffset));
5275   __ j(equal, &fast_heapobject_case);
5276 
5277   // Fallback to the runtime.
5278   __ bind(&slow_case);
5279   __ SmiTag(slot_reg);
5280   __ Pop(cell_reg);  // Pop return address.
5281   __ Push(slot_reg);
5282   __ Push(value_reg);
5283   __ Push(cell_reg);  // Push return address.
5284   __ TailCallRuntime(is_strict(language_mode())
5285                          ? Runtime::kStoreGlobalViaContext_Strict
5286                          : Runtime::kStoreGlobalViaContext_Sloppy);
5287 }
5288 
5289 
5290 // Generates an Operand for saving parameters after PrepareCallApiFunction.
5291 static Operand ApiParameterOperand(int index) {
5292   return Operand(esp, index * kPointerSize);
5293 }
5294 
5295 
5296 // Prepares stack to put arguments (aligns and so on). Reserves
5297 // space for return value if needed (assumes the return value is a handle).
5298 // Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
5299 // etc. Saves context (esi). If space was reserved for return value then
5300 // stores the pointer to the reserved slot into esi.
5301 static void PrepareCallApiFunction(MacroAssembler* masm, int argc) {
5302   __ EnterApiExitFrame(argc);
5303   if (__ emit_debug_code()) {
5304     __ mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
5305   }
5306 }
5307 
5308 
5309 // Calls an API function.  Allocates HandleScope, extracts returned value
5310 // from handle and propagates exceptions.  Clobbers ebx, edi and
5311 // caller-save registers.  Restores context.  On return removes
5312 // stack_space * kPointerSize (GCed).
5313 static void CallApiFunctionAndReturn(MacroAssembler* masm,
5314                                      Register function_address,
5315                                      ExternalReference thunk_ref,
5316                                      Operand thunk_last_arg, int stack_space,
5317                                      Operand* stack_space_operand,
5318                                      Operand return_value_operand,
5319                                      Operand* context_restore_operand) {
5320   Isolate* isolate = masm->isolate();
5321 
5322   ExternalReference next_address =
5323       ExternalReference::handle_scope_next_address(isolate);
5324   ExternalReference limit_address =
5325       ExternalReference::handle_scope_limit_address(isolate);
5326   ExternalReference level_address =
5327       ExternalReference::handle_scope_level_address(isolate);
5328 
5329   DCHECK(edx.is(function_address));
5330   // Allocate HandleScope in callee-save registers.
5331   __ mov(ebx, Operand::StaticVariable(next_address));
5332   __ mov(edi, Operand::StaticVariable(limit_address));
5333   __ add(Operand::StaticVariable(level_address), Immediate(1));
5334 
5335   if (FLAG_log_timer_events) {
5336     FrameScope frame(masm, StackFrame::MANUAL);
5337     __ PushSafepointRegisters();
5338     __ PrepareCallCFunction(1, eax);
5339     __ mov(Operand(esp, 0),
5340            Immediate(ExternalReference::isolate_address(isolate)));
5341     __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
5342                      1);
5343     __ PopSafepointRegisters();
5344   }
5345 
5346 
5347   Label profiler_disabled;
5348   Label end_profiler_check;
5349   __ mov(eax, Immediate(ExternalReference::is_profiling_address(isolate)));
5350   __ cmpb(Operand(eax, 0), 0);
5351   __ j(zero, &profiler_disabled);
5352 
5353   // Additional parameter is the address of the actual getter function.
5354   __ mov(thunk_last_arg, function_address);
5355   // Call the api function.
5356   __ mov(eax, Immediate(thunk_ref));
5357   __ call(eax);
5358   __ jmp(&end_profiler_check);
5359 
5360   __ bind(&profiler_disabled);
5361   // Call the api function.
5362   __ call(function_address);
5363   __ bind(&end_profiler_check);

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1, eax);
    __ mov(Operand(esp, 0),
           Immediate(ExternalReference::isolate_address(isolate)));
    __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  Label prologue;
  // Load the value from ReturnValue.
  __ mov(eax, return_value_operand);

  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  __ bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  __ mov(Operand::StaticVariable(next_address), ebx);
  __ sub(Operand::StaticVariable(level_address), Immediate(1));
  __ Assert(above_equal, kInvalidHandleScopeLevel);
  __ cmp(edi, Operand::StaticVariable(limit_address));
  __ j(not_equal, &delete_allocated_handles);

  // Leave the API exit frame.
  __ bind(&leave_exit_frame);
  bool restore_context = context_restore_operand != nullptr;
  if (restore_context) {
    __ mov(esi, *context_restore_operand);
  }
  if (stack_space_operand != nullptr) {
    __ mov(ebx, *stack_space_operand);
  }
  __ LeaveApiExitFrame(!restore_context);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate);
  __ cmp(Operand::StaticVariable(scheduled_exception_address),
         Immediate(isolate->factory()->the_hole_value()));
  __ j(not_equal, &promote_scheduled_exception);

#if DEBUG
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = eax;
  Register map = ecx;

  __ JumpIfSmi(return_value, &ok, Label::kNear);
  __ mov(map, FieldOperand(return_value, HeapObject::kMapOffset));

  __ CmpInstanceType(map, LAST_NAME_TYPE);
  __ j(below_equal, &ok, Label::kNear);

  __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
  __ j(above_equal, &ok, Label::kNear);

  __ cmp(map, isolate->factory()->heap_number_map());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->undefined_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->true_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->false_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->null_value());
  __ j(equal, &ok, Label::kNear);

  __ Abort(kAPICallReturnedInvalidObject);

  __ bind(&ok);
#endif

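  // Two ways to return to the caller: a plain ret when the slot count is a
  // compile-time constant, or a manual pop/add/jmp when the byte count was
  // loaded into ebx from *stack_space_operand above.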
  if (stack_space_operand != nullptr) {
    DCHECK_EQ(0, stack_space);
    __ pop(ecx);
    __ add(esp, ebx);
    __ jmp(ecx);
  } else {
    __ ret(stack_space * kPointerSize);
  }

  // Re-throw by promoting a scheduled exception.
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException);

  // HandleScope limit has changed. Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate);
  __ bind(&delete_allocated_handles);
  __ mov(Operand::StaticVariable(limit_address), edi);
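  // Preserve the callback's return value in edi across the C call; edi's
  // previous contents (the saved limit) were just written back above.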
  __ mov(edi, eax);
  __ mov(Operand(esp, 0),
         Immediate(ExternalReference::isolate_address(isolate)));
  __ mov(eax, Immediate(delete_extensions));
  __ call(eax);
  __ mov(eax, edi);
  __ jmp(&leave_exit_frame);
}


static void CallApiFunctionStubHelper(MacroAssembler* masm,
                                      const ParameterCount& argc,
                                      bool return_first_arg,
                                      bool call_data_undefined) {
  // ----------- S t a t e -------------
  //  -- edi                 : callee
  //  -- ebx                 : call_data
  //  -- ecx                 : holder
  //  -- edx                 : api_function_address
  //  -- esi                 : context
  //  -- eax                 : number of arguments if argc is a register
  //  --
  //  -- esp[0]              : return address
  //  -- esp[4]              : last argument
  //  -- ...
  //  -- esp[argc * 4]       : first argument
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  Register callee = edi;
  Register call_data = ebx;
  Register holder = ecx;
  Register api_function_address = edx;
  Register context = esi;
  Register return_address = eax;

  typedef FunctionCallbackArguments FCA;

  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
  STATIC_ASSERT(FCA::kCalleeIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);
  STATIC_ASSERT(FCA::kArgsLength == 7);

  DCHECK(argc.is_immediate() || eax.is(argc.reg()));

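  // Pop the return address out of the way, then push the implicit
  // FunctionCallbackArguments slots in decreasing index order, from
  // kContextSaveIndex (pushed first) down to kHolderIndex (top of stack).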
  if (argc.is_immediate()) {
    __ pop(return_address);
    // context save.
    __ push(context);
  } else {
    // Pop the return address and save the context in one swap.
    __ xchg(context, Operand(esp, 0));
    return_address = context;
  }

  // callee
  __ push(callee);

  // call data
  __ push(call_data);

  Register scratch = call_data;
  if (!call_data_undefined) {
    // return value
    __ push(Immediate(masm->isolate()->factory()->undefined_value()));
    // return value default
    __ push(Immediate(masm->isolate()->factory()->undefined_value()));
  } else {
    // call_data already holds undefined, so reuse the register for both
    // return value slots.
    // return value
    __ push(scratch);
    // return value default
    __ push(scratch);
  }
  // isolate
  __ push(Immediate(reinterpret_cast<int>(masm->isolate())));
  // holder
  __ push(holder);

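  // esp now points at the kHolderIndex slot, the base of the implicit
  // argument array; remember it before the return address is pushed back.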
  __ mov(scratch, esp);

  // Push the return address back on the stack.
  __ push(return_address);

  // Load the context from the callee.
  __ mov(context, FieldOperand(callee, JSFunction::kContextOffset));

  // The API function gets a reference to the v8::Arguments. If the CPU
  // profiler is enabled, the wrapper function is called instead and the
  // address of the callback must be passed as an additional parameter, so
  // always allocate space for it.
  const int kApiArgc = 1 + 1;

  // Allocate the v8::Arguments structure in the arguments' space, since
  // it's not controlled by the GC.
  const int kApiStackSpace = 4;
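  // The four words hold FunctionCallbackInfo's implicit_args_, values_,
  // length_ and is_construct_call_ fields, which are filled in below.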

  PrepareCallApiFunction(masm, kApiArgc + kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  __ mov(ApiParameterOperand(2), scratch);
  if (argc.is_immediate()) {
    __ add(scratch,
           Immediate((argc.immediate() + FCA::kArgsLength - 1) * kPointerSize));
    // FunctionCallbackInfo::values_.
    __ mov(ApiParameterOperand(3), scratch);
    // FunctionCallbackInfo::length_.
    __ Move(ApiParameterOperand(4), Immediate(argc.immediate()));
    // FunctionCallbackInfo::is_construct_call_.
    __ Move(ApiParameterOperand(5), Immediate(0));
  } else {
    __ lea(scratch, Operand(scratch, argc.reg(), times_pointer_size,
                            (FCA::kArgsLength - 1) * kPointerSize));
    // FunctionCallbackInfo::values_.
    __ mov(ApiParameterOperand(3), scratch);
    // FunctionCallbackInfo::length_.
    __ mov(ApiParameterOperand(4), argc.reg());
    // FunctionCallbackInfo::is_construct_call_: the number of bytes to drop
    // on return, (argc + FCA::kArgsLength + 1) * kPointerSize.
    __ lea(argc.reg(), Operand(argc.reg(), times_pointer_size,
                               (FCA::kArgsLength + 1) * kPointerSize));
    __ mov(ApiParameterOperand(5), argc.reg());
  }

  // v8::InvocationCallback's argument.
  __ lea(scratch, ApiParameterOperand(2));
  __ mov(ApiParameterOperand(0), scratch);

  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(masm->isolate());

  Operand context_restore_operand(ebp,
                                  (2 + FCA::kContextSaveIndex) * kPointerSize);
  // Stores return the first JS argument.
  int return_value_offset = 0;
  if (return_first_arg) {
    return_value_offset = 2 + FCA::kArgsLength;
  } else {
    return_value_offset = 2 + FCA::kReturnValueOffset;
  }
  Operand return_value_operand(ebp, return_value_offset * kPointerSize);
  int stack_space = 0;
  Operand is_construct_call_operand = ApiParameterOperand(5);
  Operand* stack_space_operand = &is_construct_call_operand;
  if (argc.is_immediate()) {
    stack_space = argc.immediate() + FCA::kArgsLength + 1;
    stack_space_operand = nullptr;
  }
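  // With an immediate argc the frame size is a compile-time constant, so
  // stack_space is passed and stack_space_operand is null; otherwise the
  // byte count stored in is_construct_call_ above is popped dynamically.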
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           ApiParameterOperand(1), stack_space,
                           stack_space_operand, return_value_operand,
                           &context_restore_operand);
}


void CallApiFunctionStub::Generate(MacroAssembler* masm) {
  bool call_data_undefined = this->call_data_undefined();
  CallApiFunctionStubHelper(masm, ParameterCount(eax), false,
                            call_data_undefined);
}


void CallApiAccessorStub::Generate(MacroAssembler* masm) {
  bool is_store = this->is_store();
  int argc = this->argc();
  bool call_data_undefined = this->call_data_undefined();
  CallApiFunctionStubHelper(masm, ParameterCount(argc), is_store,
                            call_data_undefined);
}


void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- esp[0]                  : return address
  //  -- esp[4]                  : name
  //  -- esp[8 - kArgsLength*4]  : PropertyCallbackArguments object
  //  -- ...
  //  -- edx                     : api_function_address
  // -----------------------------------
  DCHECK(edx.is(ApiGetterDescriptor::function_address()));

  // Space for the array backing v8::Arguments::values_, a handle for the
  // name, and a pointer to the values (treated as a smi by the GC).
  const int kStackSpace = PropertyCallbackArguments::kArgsLength + 2;
  // Allocate space for an optional callback address parameter in case the
  // CPU profiler is active.
  const int kApiArgc = 2 + 1;
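  // Two real arguments (the name handle and the arguments pointer) plus
  // one spare word for the callback address when the profiler thunk is in
  // use.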

  Register api_function_address = edx;
  Register scratch = ebx;

  // Load the address of the name.
  __ lea(scratch, Operand(esp, 1 * kPointerSize));

  PrepareCallApiFunction(masm, kApiArgc);
  __ mov(ApiParameterOperand(0), scratch);  // name.
  __ add(scratch, Immediate(kPointerSize));
  __ mov(ApiParameterOperand(1), scratch);  // arguments pointer.

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           ApiParameterOperand(2), kStackSpace, nullptr,
                           Operand(ebp, 7 * kPointerSize), nullptr);
}


#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_IA32