// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

28 #include "v8.h"
29 
30 #if V8_TARGET_ARCH_IA32
31 
32 #include "bootstrapper.h"
33 #include "code-stubs.h"
34 #include "isolate.h"
35 #include "jsregexp.h"
36 #include "regexp-macro-assembler.h"
37 #include "runtime.h"
38 #include "stub-cache.h"
39 #include "codegen.h"
40 #include "runtime.h"
41 
42 namespace v8 {
43 namespace internal {
44 
45 
void FastNewClosureStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { ebx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kNewClosureFromStubFailure)->entry;
}
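
// Every InitializeInterfaceDescriptor in this file follows the pattern
// above: the stub records which registers carry its parameters and which
// runtime entry (or IC miss handler) the generated code falls back to on a
// stub failure. A NULL deoptimization_handler_ (as in ToNumberStub below)
// means the stub has no such bailout.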


void ToNumberStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void NumberToStringStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kNumberToString)->entry;
}


void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax, ebx, ecx };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kCreateArrayLiteralStubBailout)->entry;
}


void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax, ebx, ecx, edx };
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry;
}


void CreateAllocationSiteStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { ebx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
}


void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
}


void LoadFieldStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedArrayCallStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { ecx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->continuation_type_ = TAIL_CALL_CONTINUATION;
  descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedCallIC_MissFromStubFailure);
}


void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx, eax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure);
}


void TransitionElementsKindStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax, ebx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kTransitionElementsKind)->entry;
}


static void InitializeArrayConstructorDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // Register state:
  //   eax -- number of arguments
  //   edi -- function
  //   ebx -- type info cell with elements kind
  static Register registers_variable_args[] = { edi, ebx, eax };
  static Register registers_no_args[] = { edi, ebx };

  if (constant_stack_parameter_count == 0) {
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers_no_args;
  } else {
    // The stack parameter count covers the constructor pointer and the
    // single argument.
    descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
    descriptor->stack_parameter_count_ = eax;
    descriptor->register_param_count_ = 3;
    descriptor->register_params_ = registers_variable_args;
  }

  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kArrayConstructor)->entry;
}
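
// constant_stack_parameter_count distinguishes the stub flavors wired up
// below: 0 means no stack arguments, so only the function (edi) and the
// type info cell (ebx) are register parameters; 1 and -1 ("N arguments")
// make the argument count dynamic, carried in eax, with the handler also
// receiving the stack arguments (PASS_ARGUMENTS).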


static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // Register state:
  //   eax -- number of arguments
  //   edi -- constructor function
  static Register registers_variable_args[] = { edi, eax };
  static Register registers_no_args[] = { edi };

  if (constant_stack_parameter_count == 0) {
    descriptor->register_param_count_ = 1;
    descriptor->register_params_ = registers_no_args;
  } else {
    // The stack parameter count covers the constructor pointer and the
    // single argument.
    descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
    descriptor->stack_parameter_count_ = eax;
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers_variable_args;
  }

  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kInternalArrayConstructor)->entry;
}


void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, 0);
}


void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, -1);
}


void InternalArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 0);
}


void InternalArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1);
}


void CompareNilICStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(CompareNilIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate));
}


void ToBooleanStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(ToBooleanIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate));
}


void StoreGlobalStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx, eax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(StoreIC_MissFromStubFailure);
}


void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax, ebx, ecx, edx };
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(ElementsTransitionAndStoreIC_Miss);
}


void BinaryOpICStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, eax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate));
}


void NewStringAddStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, eax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kStringAdd)->entry;
}


#define __ ACCESS_MASM(masm)


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
  // Update the static counter each time a new code stub is generated.
  Isolate* isolate = masm->isolate();
  isolate->counters()->code_stubs()->Increment();

  CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
  int param_count = descriptor->register_param_count_;
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    ASSERT(descriptor->register_param_count_ == 0 ||
           eax.is(descriptor->register_params_[param_count - 1]));
    // Push arguments.
    for (int i = 0; i < param_count; ++i) {
      __ push(descriptor->register_params_[i]);
    }
    ExternalReference miss = descriptor->miss_handler();
    __ CallExternalReference(miss, descriptor->register_param_count_);
  }

  __ ret(0);
}


void FastNewContextStub::Generate(MacroAssembler* masm) {
  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize,
              eax, ebx, ecx, &gc, TAG_OBJECT);

  // Get the function from the stack.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));

  // Set up the object header.
  Factory* factory = masm->isolate()->factory();
  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
         factory->function_context_map());
  __ mov(FieldOperand(eax, Context::kLengthOffset),
         Immediate(Smi::FromInt(length)));

  // Set up the fixed slots.
  __ Set(ebx, Immediate(0));  // Set to NULL.
  __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx);
  __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), esi);
  __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);

  // Copy the global object from the previous context.
  __ mov(ebx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)), ebx);

  // Initialize the rest of the slots to undefined.
  __ mov(ebx, factory->undefined_value());
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
    __ mov(Operand(eax, Context::SlotOffset(i)), ebx);
  }

  // Return and remove the on-stack parameter.
  __ mov(esi, eax);
  __ ret(1 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
}
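
// The function context built above is a FixedArray-shaped object; a sketch
// of the resulting layout:
//   header: map = function_context_map, length = slots_ + MIN_CONTEXT_SLOTS
//   slots:  CLOSURE_INDEX       -> the function taken from the stack
//           PREVIOUS_INDEX      -> esi (the caller's context)
//           EXTENSION_INDEX     -> 0
//           GLOBAL_OBJECT_INDEX -> copied from the previous context
//           remaining slots     -> undefined
// On the fast path the new context also becomes the current context (esi).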


void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [esp + (1 * kPointerSize)]: function
  // [esp + (2 * kPointerSize)]: serialized scope info

  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ Allocate(FixedArray::SizeFor(length), eax, ebx, ecx, &gc, TAG_OBJECT);

  // Get the function or sentinel from the stack.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));

  // Get the serialized scope info from the stack.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));

  // Set up the object header.
  Factory* factory = masm->isolate()->factory();
  __ mov(FieldOperand(eax, HeapObject::kMapOffset),
         factory->block_context_map());
  __ mov(FieldOperand(eax, Context::kLengthOffset),
         Immediate(Smi::FromInt(length)));

  // If this block context is nested in the native context we get a smi
  // sentinel instead of a function. The block context should get the
  // canonical empty function of the native context as its closure which
  // we still have to look up.
  Label after_sentinel;
  __ JumpIfNotSmi(ecx, &after_sentinel, Label::kNear);
  if (FLAG_debug_code) {
    __ cmp(ecx, 0);
    __ Assert(equal, kExpected0AsASmiSentinel);
  }
  __ mov(ecx, GlobalObjectOperand());
  __ mov(ecx, FieldOperand(ecx, GlobalObject::kNativeContextOffset));
  __ mov(ecx, ContextOperand(ecx, Context::CLOSURE_INDEX));
  __ bind(&after_sentinel);

  // Set up the fixed slots.
  __ mov(ContextOperand(eax, Context::CLOSURE_INDEX), ecx);
  __ mov(ContextOperand(eax, Context::PREVIOUS_INDEX), esi);
  __ mov(ContextOperand(eax, Context::EXTENSION_INDEX), ebx);

  // Copy the global object from the previous context.
  __ mov(ebx, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
  __ mov(ContextOperand(eax, Context::GLOBAL_OBJECT_INDEX), ebx);

  // Initialize the rest of the slots to the hole value.
  if (slots_ == 1) {
    __ mov(ContextOperand(eax, Context::MIN_CONTEXT_SLOTS),
           factory->the_hole_value());
  } else {
    __ mov(ebx, factory->the_hole_value());
    for (int i = 0; i < slots_; i++) {
      __ mov(ContextOperand(eax, i + Context::MIN_CONTEXT_SLOTS), ebx);
    }
  }

  // Return and remove the on-stack parameters.
  __ mov(esi, eax);
  __ ret(2 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
}

void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  __ pushad();
  if (save_doubles_ == kSaveFPRegs) {
    CpuFeatureScope scope(masm, SSE2);
    __ sub(esp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(Operand(esp, i * kDoubleSize), reg);
    }
  }
  const int argument_count = 1;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, ecx);
  __ mov(Operand(esp, 0 * kPointerSize),
         Immediate(ExternalReference::isolate_address(masm->isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(masm->isolate()),
      argument_count);
  if (save_doubles_ == kSaveFPRegs) {
    CpuFeatureScope scope(masm, SSE2);
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(reg, Operand(esp, i * kDoubleSize));
    }
    __ add(esp, Immediate(kDoubleSize * XMMRegister::kNumRegisters));
  }
  __ popad();
  __ ret(0);
}


class FloatingPointHelper : public AllStatic {
 public:
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. The input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // the operand is in the register |number|. Returns the operand as a
  // floating point number on the FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);

  // Test if operands are numbers (smi or HeapNumber objects), and load
  // them into xmm0 and xmm1 if they are.  Jump to label not_numbers if
  // either operand is not a number.  Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);
};


void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  ASSERT(is_truncating());

  Label check_negative, process_64_bits, done, done_no_stash;

  int double_offset = offset();

  // Account for return address and saved regs if input is esp.
  if (input_reg.is(esp)) double_offset += 3 * kPointerSize;

  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

  Register scratch1;
  {
    Register scratch_candidates[3] = { ebx, edx, edi };
    for (int i = 0; i < 3; i++) {
      scratch1 = scratch_candidates[i];
      if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
    }
  }
  // Since we must use ecx for shifts below, use some other register (eax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg.is(ecx) ? eax : final_result_reg;
  // Save ecx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead
  // for the result.
  Register save_reg = final_result_reg.is(ecx) ? eax : ecx;
  __ push(scratch1);
  __ push(save_reg);

  bool stash_exponent_copy = !input_reg.is(esp);
  __ mov(scratch1, mantissa_operand);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    // Load x87 register with heap number.
    __ fld_d(mantissa_operand);
  }
  __ mov(ecx, exponent_operand);
  if (stash_exponent_copy) __ push(ecx);

  __ and_(ecx, HeapNumber::kExponentMask);
  __ shr(ecx, HeapNumber::kExponentShift);
  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa.
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  if (CpuFeatures::IsSupported(SSE3)) {
    __ fstp(0);
  }
  __ sub(ecx, Immediate(delta));
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ j(above, &done);
  __ shl_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    if (stash_exponent_copy) {
      // Already a copy of the exponent on the stack, overwrite it.
      STATIC_ASSERT(kDoubleSize == 2 * kPointerSize);
      __ sub(esp, Immediate(kDoubleSize / 2));
    } else {
      // Reserve space for 64 bit answer.
      __ sub(esp, Immediate(kDoubleSize));  // Nolint.
    }
    // Do conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(result_reg, Operand(esp, 0));  // Load low word of answer as result.
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done_no_stash);
  } else {
    // Result must be extracted from shifted 32-bit mantissa.
    __ sub(ecx, Immediate(delta));
    __ neg(ecx);
    if (stash_exponent_copy) {
      __ mov(result_reg, MemOperand(esp, 0));
    } else {
      __ mov(result_reg, exponent_operand);
    }
    __ and_(result_reg,
            Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
    __ add(result_reg,
           Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
    __ shrd(result_reg, scratch1);
    __ shr_cl(result_reg);
    __ test(ecx, Immediate(32));
    if (CpuFeatures::IsSupported(CMOV)) {
      CpuFeatureScope use_cmov(masm, CMOV);
      __ cmov(not_equal, scratch1, result_reg);
    } else {
      Label skip_mov;
      __ j(equal, &skip_mov, Label::kNear);
      __ mov(scratch1, result_reg);
      __ bind(&skip_mov);
    }
  }

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  __ neg(result_reg);
  if (stash_exponent_copy) {
    __ cmp(MemOperand(esp, 0), Immediate(0));
  } else {
    __ cmp(exponent_operand, Immediate(0));
  }
  if (CpuFeatures::IsSupported(CMOV)) {
    CpuFeatureScope use_cmov(masm, CMOV);
    __ cmov(greater, result_reg, scratch1);
  } else {
    Label skip_mov;
    __ j(less_equal, &skip_mov, Label::kNear);
    __ mov(result_reg, scratch1);
    __ bind(&skip_mov);
  }

  // Restore registers.
  __ bind(&done);
  if (stash_exponent_copy) {
    __ add(esp, Immediate(kDoubleSize / 2));
  }
  __ bind(&done_no_stash);
  if (!final_result_reg.is(result_reg)) {
    ASSERT(final_result_reg.is(ecx));
    __ mov(final_result_reg, result_reg);
  }
  __ pop(save_reg);
  __ pop(scratch1);
  __ ret(0);
}
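
// A roughly equivalent scalar version of the truncation implemented above
// (an illustrative sketch only; it glosses over the shift-count edge cases
// that the stub handles explicitly with its 64-bit paths):
//
//   int32_t TruncateDoubleToI(double d) {
//     uint64_t bits;
//     memcpy(&bits, &d, sizeof(bits));
//     int exponent = static_cast<int>((bits >> 52) & 0x7FF) - 1023;
//     if (exponent < 0) return 0;  // |d| < 1.
//     uint64_t mantissa = (bits & ((1ull << 52) - 1)) | (1ull << 52);
//     int shift = exponent - 52;  // Scale the 53-bit mantissa by 2^shift.
//     uint32_t result = shift >= 0
//         ? static_cast<uint32_t>(mantissa << shift)   // Truncates mod 2^32.
//         : static_cast<uint32_t>(mantissa >> -shift);
//     return (bits >> 63) ? -static_cast<int32_t>(result)
//                         : static_cast<int32_t>(result);
//   }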


void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
  // TAGGED case:
  //   Input:
  //     esp[4]: tagged number input argument (should be number).
  //     esp[0]: return address.
  //   Output:
  //     eax: tagged double result.
  // UNTAGGED case:
  //   Input:
  //     esp[0]: return address.
  //     xmm1: untagged double input argument.
  //   Output:
  //     xmm1: untagged double result.

  Label runtime_call;
  Label runtime_call_clear_stack;
  Label skip_cache;
  const bool tagged = (argument_type_ == TAGGED);
  if (tagged) {
    // Test that eax is a number.
    Label input_not_smi;
    Label loaded;
    __ mov(eax, Operand(esp, kPointerSize));
    __ JumpIfNotSmi(eax, &input_not_smi, Label::kNear);
    // Input is a smi. Untag and load it onto the FPU stack.
    // Then load the low and high words of the double into ebx, edx.
    STATIC_ASSERT(kSmiTagSize == 1);
    __ sar(eax, 1);
    __ sub(esp, Immediate(2 * kPointerSize));
    __ mov(Operand(esp, 0), eax);
    __ fild_s(Operand(esp, 0));
    __ fst_d(Operand(esp, 0));
    __ pop(edx);
    __ pop(ebx);
    __ jmp(&loaded, Label::kNear);
    __ bind(&input_not_smi);
    // Check if input is a HeapNumber.
    __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
    Factory* factory = masm->isolate()->factory();
    __ cmp(ebx, Immediate(factory->heap_number_map()));
    __ j(not_equal, &runtime_call);
    // Input is a HeapNumber. Push it on the FPU stack and load its
    // low and high words into ebx, edx.
    __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
    __ mov(edx, FieldOperand(eax, HeapNumber::kExponentOffset));
    __ mov(ebx, FieldOperand(eax, HeapNumber::kMantissaOffset));

    __ bind(&loaded);
  } else {  // UNTAGGED.
    CpuFeatureScope scope(masm, SSE2);
    if (CpuFeatures::IsSupported(SSE4_1)) {
      CpuFeatureScope sse4_scope(masm, SSE4_1);
      __ pextrd(edx, xmm1, 0x1);  // copy xmm1[63..32] to edx.
    } else {
      __ pshufd(xmm0, xmm1, 0x1);
      __ movd(edx, xmm0);
    }
    __ movd(ebx, xmm1);
  }

  // ST[0] or xmm1  == double value
  // ebx = low 32 bits of double value
  // edx = high 32 bits of double value
  // Compute hash (the shifts are arithmetic):
  //   h = (low ^ high); h ^= h >> 16; h ^= h >> 8; h = h & (cacheSize - 1);
  __ mov(ecx, ebx);
  __ xor_(ecx, edx);
  __ mov(eax, ecx);
  __ sar(eax, 16);
  __ xor_(ecx, eax);
  __ mov(eax, ecx);
  __ sar(eax, 8);
  __ xor_(ecx, eax);
  ASSERT(IsPowerOf2(TranscendentalCache::SubCache::kCacheSize));
  __ and_(ecx,
          Immediate(TranscendentalCache::SubCache::kCacheSize - 1));
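  // In scalar form (a sketch; the sar instructions above make the shifts
  // arithmetic):
  //   uint32_t h = low ^ high;
  //   h ^= static_cast<uint32_t>(static_cast<int32_t>(h) >> 16);
  //   h ^= static_cast<uint32_t>(static_cast<int32_t>(h) >> 8);
  //   h &= TranscendentalCache::SubCache::kCacheSize - 1;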
774 
775   // ST[0] or xmm1 == double value.
776   // ebx = low 32 bits of double value.
777   // edx = high 32 bits of double value.
778   // ecx = TranscendentalCache::hash(double value).
779   ExternalReference cache_array =
780       ExternalReference::transcendental_cache_array_address(masm->isolate());
781   __ mov(eax, Immediate(cache_array));
782   int cache_array_index =
783       type_ * sizeof(masm->isolate()->transcendental_cache()->caches_[0]);
784   __ mov(eax, Operand(eax, cache_array_index));
785   // Eax points to the cache for the type type_.
786   // If NULL, the cache hasn't been initialized yet, so go through runtime.
787   __ test(eax, eax);
788   __ j(zero, &runtime_call_clear_stack);
789 #ifdef DEBUG
790   // Check that the layout of cache elements match expectations.
791   { TranscendentalCache::SubCache::Element test_elem[2];
792     char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
793     char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
794     char* elem_in0  = reinterpret_cast<char*>(&(test_elem[0].in[0]));
795     char* elem_in1  = reinterpret_cast<char*>(&(test_elem[0].in[1]));
796     char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
797     CHECK_EQ(12, elem2_start - elem_start);  // Two uint_32's and a pointer.
798     CHECK_EQ(0, elem_in0 - elem_start);
799     CHECK_EQ(kIntSize, elem_in1 - elem_start);
800     CHECK_EQ(2 * kIntSize, elem_out - elem_start);
801   }
802 #endif
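  // The DEBUG block above pins down the element layout this code relies on;
  // in effect (on ia32, where pointers are 4 bytes):
  //   struct Element { uint32_t in[2]; Object* output; };  // 12 bytes.
  // An entry hits when in[0]/in[1] equal the low/high words of the input.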
  // Find the address of the ecx'th entry in the cache, i.e., &eax[ecx*12].
  __ lea(ecx, Operand(ecx, ecx, times_2, 0));
  __ lea(ecx, Operand(eax, ecx, times_4, 0));
  // Check if cache matches: Double value is stored in uint32_t[2] array.
  Label cache_miss;
  __ cmp(ebx, Operand(ecx, 0));
  __ j(not_equal, &cache_miss, Label::kNear);
  __ cmp(edx, Operand(ecx, kIntSize));
  __ j(not_equal, &cache_miss, Label::kNear);
  // Cache hit!
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->transcendental_cache_hit(), 1);
  __ mov(eax, Operand(ecx, 2 * kIntSize));
  if (tagged) {
    __ fstp(0);
    __ ret(kPointerSize);
  } else {  // UNTAGGED.
    CpuFeatureScope scope(masm, SSE2);
    __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
    __ Ret();
  }

  __ bind(&cache_miss);
  __ IncrementCounter(counters->transcendental_cache_miss(), 1);
  // Update cache with new value.
  // We are short on registers, so use no_reg as scratch.
  // This gives slightly larger code.
  if (tagged) {
    __ AllocateHeapNumber(eax, edi, no_reg, &runtime_call_clear_stack);
  } else {  // UNTAGGED.
    CpuFeatureScope scope(masm, SSE2);
    __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache);
    __ sub(esp, Immediate(kDoubleSize));
    __ movsd(Operand(esp, 0), xmm1);
    __ fld_d(Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));
  }
  GenerateOperation(masm, type_);
  __ mov(Operand(ecx, 0), ebx);
  __ mov(Operand(ecx, kIntSize), edx);
  __ mov(Operand(ecx, 2 * kIntSize), eax);
  __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
  if (tagged) {
    __ ret(kPointerSize);
  } else {  // UNTAGGED.
    CpuFeatureScope scope(masm, SSE2);
    __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
    __ Ret();

    // Skip cache and return answer directly, only in untagged case.
    __ bind(&skip_cache);
    __ sub(esp, Immediate(kDoubleSize));
    __ movsd(Operand(esp, 0), xmm1);
    __ fld_d(Operand(esp, 0));
    GenerateOperation(masm, type_);
    __ fstp_d(Operand(esp, 0));
    __ movsd(xmm1, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));
    // We return the value in xmm1 without adding it to the cache, but
    // we cause a scavenging GC so that future allocations will succeed.
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      // Allocate an unused object bigger than a HeapNumber.
      __ push(Immediate(Smi::FromInt(2 * kDoubleSize)));
      __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
    }
    __ Ret();
  }

  // Call runtime, doing whatever allocation and cleanup is necessary.
  if (tagged) {
    __ bind(&runtime_call_clear_stack);
    __ fstp(0);
    __ bind(&runtime_call);
    ExternalReference runtime =
        ExternalReference(RuntimeFunction(), masm->isolate());
    __ TailCallExternalReference(runtime, 1, 1);
  } else {  // UNTAGGED.
    CpuFeatureScope scope(masm, SSE2);
    __ bind(&runtime_call_clear_stack);
    __ bind(&runtime_call);
    __ AllocateHeapNumber(eax, edi, no_reg, &skip_cache);
    __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), xmm1);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ push(eax);
      __ CallRuntime(RuntimeFunction(), 1);
    }
    __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
    __ Ret();
  }
}


Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
  switch (type_) {
    case TranscendentalCache::SIN: return Runtime::kMath_sin;
    case TranscendentalCache::COS: return Runtime::kMath_cos;
    case TranscendentalCache::TAN: return Runtime::kMath_tan;
    case TranscendentalCache::LOG: return Runtime::kMath_log;
    default:
      UNIMPLEMENTED();
      return Runtime::kAbort;
  }
}


void TranscendentalCacheStub::GenerateOperation(
    MacroAssembler* masm, TranscendentalCache::Type type) {
  // Only free register is edi.
  // Input value is on FP stack, and also in ebx/edx.
  // Input value is possibly in xmm1.
  // Address of result (a newly allocated HeapNumber) may be in eax.
  if (type == TranscendentalCache::SIN ||
      type == TranscendentalCache::COS ||
      type == TranscendentalCache::TAN) {
    // Both fsin and fcos require arguments in the range +/-2^63 and
    // return NaN for infinities and NaN. They can share all code except
    // the actual fsin/fcos operation.
    Label in_range, done;
    // If argument is outside the range -2^63..2^63, fsin/cos doesn't
    // work. We must reduce it to the appropriate range.
    __ mov(edi, edx);
    __ and_(edi, Immediate(0x7ff00000));  // Exponent only.
    int supported_exponent_limit =
        (63 + HeapNumber::kExponentBias) << HeapNumber::kExponentShift;
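    // (63 + kExponentBias) << kExponentShift is 0x43E00000, the high word
    // of 2^63, so the comparison below takes the fast path exactly when
    // |input| < 2^63.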
    __ cmp(edi, Immediate(supported_exponent_limit));
    __ j(below, &in_range, Label::kNear);
    // Check for infinity and NaN. Both return NaN for sin.
    __ cmp(edi, Immediate(0x7ff00000));
    Label non_nan_result;
    __ j(not_equal, &non_nan_result, Label::kNear);
    // Input is +/-Infinity or NaN. Result is NaN.
    __ fstp(0);
    // NaN is represented by 0x7ff8000000000000.
    __ push(Immediate(0x7ff80000));
    __ push(Immediate(0));
    __ fld_d(Operand(esp, 0));
    __ add(esp, Immediate(2 * kPointerSize));
    __ jmp(&done, Label::kNear);

    __ bind(&non_nan_result);

    // Use fprem1 to restrict the argument to the range +/-2*PI.
    __ mov(edi, eax);  // Save eax before using fnstsw_ax.
    __ fldpi();
    __ fadd(0);
    __ fld(1);
    // FPU Stack: input, 2*pi, input.
    {
      Label no_exceptions;
      __ fwait();
      __ fnstsw_ax();
      // Clear if Illegal Operand or Zero Division exceptions are set.
      __ test(eax, Immediate(5));
      __ j(zero, &no_exceptions, Label::kNear);
      __ fnclex();
      __ bind(&no_exceptions);
    }

    // Compute st(0) % st(1).
    {
      Label partial_remainder_loop;
      __ bind(&partial_remainder_loop);
      __ fprem1();
      __ fwait();
      __ fnstsw_ax();
      __ test(eax, Immediate(0x400 /* C2 */));
      // If C2 is set, computation only has partial result. Loop to
      // continue computation.
      __ j(not_zero, &partial_remainder_loop);
    }
    // FPU Stack: input, 2*pi, input % 2*pi
    __ fstp(2);
    __ fstp(0);
    __ mov(eax, edi);  // Restore eax (allocated HeapNumber pointer).

    // FPU Stack: input % 2*pi
    __ bind(&in_range);
    switch (type) {
      case TranscendentalCache::SIN:
        __ fsin();
        break;
      case TranscendentalCache::COS:
        __ fcos();
        break;
      case TranscendentalCache::TAN:
        // FPTAN calculates tangent onto st(0) and pushes 1.0 onto the
        // FP register stack.
        __ fptan();
        __ fstp(0);  // Pop FP register stack.
        break;
      default:
        UNREACHABLE();
    }
    __ bind(&done);
  } else {
    ASSERT(type == TranscendentalCache::LOG);
    __ fldln2();
    __ fxch();
    __ fyl2x();
  }
}


void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ JumpIfSmi(number, &load_smi, Label::kNear);
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi);
  __ SmiUntag(number);
  __ push(number);
  __ fild_s(Operand(esp, 0));
  __ pop(number);

  __ bind(&done);
}


void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
                                           Label* not_numbers) {
  Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
  // Load operand in edx into xmm0, or branch to not_numbers.
  __ JumpIfSmi(edx, &load_smi_edx, Label::kNear);
  Factory* factory = masm->isolate()->factory();
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(not_equal, not_numbers);  // Argument in edx is not a number.
  __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
  __ bind(&load_eax);
  // Load operand in eax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(eax, &load_smi_eax, Label::kNear);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(equal, &load_float_eax, Label::kNear);
  __ jmp(not_numbers);  // Argument in eax is not a number.
  __ bind(&load_smi_edx);
  __ SmiUntag(edx);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm0, edx);
  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
  __ jmp(&load_eax);
  __ bind(&load_smi_eax);
  __ SmiUntag(eax);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm1, eax);
  __ SmiTag(eax);  // Retag smi for heap number overwriting test.
  __ jmp(&done, Label::kNear);
  __ bind(&load_float_eax);
  __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ bind(&done);
}


void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
                                             Label* non_float,
                                             Register scratch) {
  Label test_other, done;
  // Test if both operands are floats or smi -> scratch=k_is_float;
  // Otherwise scratch = k_not_float.
  __ JumpIfSmi(edx, &test_other, Label::kNear);
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in edx is not a number -> NaN

  __ bind(&test_other);
  __ JumpIfSmi(eax, &done, Label::kNear);
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in eax is not a number -> NaN

  // Fall-through: Both operands are numbers.
  __ bind(&done);
}


void MathPowStub::Generate(MacroAssembler* masm) {
  CpuFeatureScope use_sse2(masm, SSE2);
  Factory* factory = masm->isolate()->factory();
  const Register exponent = eax;
  const Register base = edx;
  const Register scratch = ecx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ mov(scratch, Immediate(1));
  __ Cvtsi2sd(double_result, scratch);

  if (exponent_type_ == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack.
    __ mov(base, Operand(esp, 2 * kPointerSize));
    __ mov(exponent, Operand(esp, 1 * kPointerSize));

    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    __ cmp(FieldOperand(base, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(not_equal, &call_runtime);

    __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiUntag(base);
    __ Cvtsi2sd(double_base, base);

    __ bind(&unpack_exponent);
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ cmp(FieldOperand(exponent, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(not_equal, &call_runtime);
    __ movsd(double_exponent,
             FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type_ == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ movsd(double_exponent,
             FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type_ != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cvttsd2si(exponent, Operand(double_exponent));
    __ cmp(exponent, Immediate(0x80000000u));
    __ j(equal, &call_runtime);

    if (exponent_type_ == ON_STACK) {
      // Detect square root case.  Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead.  We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label continue_sqrt, continue_rsqrt, not_plus_half;
      // Test for 0.5.
      // Load double_scratch with 0.5.
      __ mov(scratch, Immediate(0x3F000000u));
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculates square root of base.  Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // According to IEEE-754, single-precision -Infinity has the highest
      // 9 bits set and the lowest 23 bits cleared.
      __ mov(scratch, 0xFF800000u);
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      __ ucomisd(double_base, double_scratch);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal.  However, it also sets the carry flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case.
      __ xorps(double_result, double_result);
      __ subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
      __ xorps(double_scratch, double_scratch);
      __ addsd(double_scratch, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      // Test for -0.5.
      __ bind(&not_plus_half);
      // Load double_scratch with -0.5 by subtracting 1.
      __ subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculates reciprocal of square root of base.  Check for the special
      // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      // According to IEEE-754, single-precision -Infinity has the highest
      // 9 bits set and the lowest 23 bits cleared.
      __ mov(scratch, 0xFF800000u);
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      __ ucomisd(double_base, double_scratch);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal.  However, it also sets the carry flag.
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ xorps(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0.  ECMA spec requires +0.
      __ xorps(double_exponent, double_exponent);
      __ addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_exponent, double_exponent);
      __ divsd(double_result, double_exponent);
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ sub(esp, Immediate(kDoubleSize));
    __ movsd(Operand(esp, 0), double_exponent);
    __ fld_d(Operand(esp, 0));  // E
    __ movsd(Operand(esp, 0), double_base);
    __ fld_d(Operand(esp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();   // 2^X, rnd(X)
    __ fstp(1);    // 2^X
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ test_b(eax, 0x5F);  // We check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }
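
  // Worked example of the identity used above: 3^2.5 = 2^(2.5 * log2(3)) =
  // 2^3.9624... FRNDINT splits X = 3.9624 into rnd(X) = 4 and the fraction
  // -0.0376; F2XM1 produces 2^(-0.0376) - 1, adding 1 back gives 0.9743,
  // and FSCALE scales it by 2^4 to ~15.588 (= 9 * sqrt(3)).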

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  __ mov(scratch, exponent);  // Back up exponent.
  __ movsd(double_scratch, double_base);  // Back up base.
  __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ test(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ neg(scratch);
  __ bind(&no_neg);

  __ j(zero, &while_false, Label::kNear);
  __ shr(scratch, 1);
  // Above condition means CF==0 && ZF==0.  This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shr(scratch, 1);
  __ mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);
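  // The loop above is the classic square-and-multiply scheme, consuming the
  // exponent's bits from least to most significant. For |exponent| = 5
  // (binary 101), for example: result = base after the first set bit,
  // double_scratch squares to base^2 and then base^4, and the final set bit
  // multiplies base^4 into result, giving base^5.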

  __ bind(&while_false);
  // scratch has the original value of the exponent - if the exponent is
  // negative, return 1/result.
  __ test(exponent, exponent);
  __ j(positive, &done);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  // Test whether result is zero.  Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);  // Result cannot be NaN.
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // exponent is a smi.  We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  Counters* counters = masm->isolate()->counters();
  if (exponent_type_ == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in exponent.
    __ bind(&done);
    __ AllocateHeapNumber(eax, scratch, base, &call_runtime);
    __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), double_result);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(2 * kPointerSize);
  } else {
    __ bind(&call_runtime);
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(4, scratch);
      __ movsd(Operand(esp, 0 * kDoubleSize), double_base);
      __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent);
      __ CallCFunction(
          ExternalReference::power_double_double_function(masm->isolate()), 4);
    }
    // Return value is in st(0) on ia32.
    // Store it into the (fixed) result register.
    __ sub(esp, Immediate(kDoubleSize));
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));

    __ bind(&done);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(0);
  }
}
1346 
1347 
void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  if (kind() == Code::KEYED_LOAD_IC) {
    __ cmp(ecx, Immediate(masm->isolate()->factory()->prototype_string()));
    __ j(not_equal, &miss);
  }

  StubCompiler::GenerateLoadFunctionPrototype(masm, edx, eax, ebx, &miss);
  __ bind(&miss);
  StubCompiler::TailCallBuiltin(
      masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
}


void StringLengthStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  if (kind() == Code::KEYED_LOAD_IC) {
    __ cmp(ecx, Immediate(masm->isolate()->factory()->length_string()));
    __ j(not_equal, &miss);
  }

  StubCompiler::GenerateLoadStringLength(masm, edx, eax, ebx, &miss);
  __ bind(&miss);
  StubCompiler::TailCallBuiltin(
      masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
}


void StoreArrayLengthStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  //
  // This accepts as a receiver anything JSArray::SetElementsLength accepts
  // (currently anything except for external arrays, which means anything with
  // elements of FixedArray type).  The value must be a number, but only smis
  // are handled here, as they are the most common case.

  Label miss;

  Register receiver = edx;
  Register value = eax;
  Register scratch = ebx;

  if (kind() == Code::KEYED_STORE_IC) {
    __ cmp(ecx, Immediate(masm->isolate()->factory()->length_string()));
    __ j(not_equal, &miss);
  }

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the object is a JS array.
  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &miss);

  // Check that elements are FixedArray.
  // We rely on StoreIC_ArrayLength below to deal with all types of
  // fast elements (including COW).
  __ mov(scratch, FieldOperand(receiver, JSArray::kElementsOffset));
  __ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch);
  __ j(not_equal, &miss);

  // Check that the array has fast properties, otherwise the length
  // property might have been redefined.
  __ mov(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset));
  __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(equal, &miss);

  // Check that value is a smi.
  __ JumpIfNotSmi(value, &miss);

  // Prepare tail call to StoreIC_ArrayLength.
  __ pop(scratch);
  __ push(receiver);
  __ push(value);
  __ push(scratch);  // return address
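  // Stack after the shuffle above (the callee then returns straight to our
  // caller):
  //   esp[0] : return address (pushed back last)
  //   esp[4] : value
  //   esp[8] : receiver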

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);

  __ bind(&miss);

  StubCompiler::TailCallBuiltin(
      masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
}


void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in edx and the parameter count is in eax.

  // The displacement is used for skipping the frame pointer on the
  // stack. It is the offset of the last parameter (if any) relative
  // to the frame pointer.
  static const int kDisplacement = 1 * kPointerSize;

  // Check that the key is a smi.
  Label slow;
  __ JumpIfNotSmi(edx, &slow, Label::kNear);

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor;
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor, Label::kNear);

  // Check index against formal parameters count limit passed in
  // through register eax. Use unsigned comparison to get negative
  // check for free.
  __ cmp(edx, eax);
  __ j(above_equal, &slow, Label::kNear);

  // Read the argument from the stack and return it.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
  __ lea(ebx, Operand(ebp, eax, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);
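  // In C terms the lea/neg/mov sequence computes (sketch):
  //   eax = *(ebp + kDisplacement + (count - key) * kPointerSize)
  // Both count (eax) and key (edx) are smis, i.e. already multiplied by 2,
  // so the times_2 scale factors complete the times-kPointerSize scaling,
  // and negating the key turns the indexed addition into a subtraction.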

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmp(edx, ecx);
  __ j(above_equal, &slow, Label::kNear);

  // Read the argument from the stack and return it.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
  __ lea(ebx, Operand(ebx, ecx, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ pop(ebx);  // Return address.
  __ push(edx);
  __ push(ebx);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}


void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
  // esp[0] : return address
  // esp[4] : number of parameters
  // esp[8] : receiver displacement
  // esp[12] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &runtime, Label::kNear);

  // Patch the arguments.length and the parameters pointer.
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ lea(edx, Operand(edx, ecx, times_2,
              StandardFrameConstants::kCallerSPOffset));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}


void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
  Isolate* isolate = masm->isolate();

  // esp[0] : return address
  // esp[4] : number of parameters (tagged)
  // esp[8] : receiver displacement
  // esp[12] : function

  // ebx = parameter count (tagged)
  __ mov(ebx, Operand(esp, 1 * kPointerSize));

  // Check if the calling frame is an arguments adaptor frame.
  // TODO(rossberg): Factor out some of the bits that are shared with the other
  // Generate* functions.
  Label runtime;
  Label adaptor_frame, try_allocate;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor_frame, Label::kNear);

  // No adaptor, parameter count = argument count.
  __ mov(ecx, ebx);
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame. Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  // ebx = parameter count (tagged)
  // ecx = argument count (tagged)
  // esp[4] = parameter count (tagged)
  // esp[8] = address of receiver argument
  // Compute the mapped parameter count = min(ebx, ecx) in ebx.
  __ cmp(ebx, ecx);
  __ j(less_equal, &try_allocate, Label::kNear);
  __ mov(ebx, ecx);

  __ bind(&try_allocate);

  // Save mapped parameter count.
  __ push(ebx);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  Label no_parameter_map;
  __ test(ebx, ebx);
  __ j(zero, &no_parameter_map, Label::kNear);
  __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize));
  __ bind(&no_parameter_map);

  // 2. Backing store.
  __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ add(ebx, Immediate(Heap::kArgumentsObjectSize));
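  // ebx now holds the total allocation size in bytes:
  //   Heap::kArgumentsObjectSize
  //   + FixedArray::kHeaderSize + argument_count * kPointerSize
  //   + kParameterMapHeaderSize + mapped_count * kPointerSize  (if mapped > 0)
  // The times_2 scaling above suffices because the counts are smi-tagged,
  // i.e. already multiplied by 2.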

  // Do the allocation of all three objects in one go.
  __ Allocate(ebx, eax, edx, edi, &runtime, TAG_OBJECT);

  // eax = address of new object(s) (tagged)
  // ecx = argument count (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[8] = parameter count (tagged)
  // esp[12] = address of receiver argument
  // (esp[4] holds the return address and is skipped here.)
  // Get the arguments boilerplate from the current native context into edi.
  Label has_mapped_parameters, copy;
  __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(edi, FieldOperand(edi, GlobalObject::kNativeContextOffset));
  __ mov(ebx, Operand(esp, 0 * kPointerSize));
  __ test(ebx, ebx);
  __ j(not_zero, &has_mapped_parameters, Label::kNear);
  __ mov(edi, Operand(edi,
         Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX)));
  __ jmp(&copy, Label::kNear);

  __ bind(&has_mapped_parameters);
  __ mov(edi, Operand(edi,
            Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX)));
  __ bind(&copy);

  // eax = address of new object (tagged)
  // ebx = mapped parameter count (tagged)
  // ecx = argument count (tagged)
  // edi = address of boilerplate object (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[8] = parameter count (tagged)
  // esp[12] = address of receiver argument
  // Copy the JS object part.
  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
    __ mov(edx, FieldOperand(edi, i));
    __ mov(FieldOperand(eax, i), edx);
  }

  // Set up the callee in-object property.
  STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
  __ mov(edx, Operand(esp, 4 * kPointerSize));
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                      Heap::kArgumentsCalleeIndex * kPointerSize),
         edx);

  // Use the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                      Heap::kArgumentsLengthIndex * kPointerSize),
         ecx);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, edi will point there, otherwise to the
  // backing store.
  __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);

  // eax = address of new object (tagged)
  // ebx = mapped parameter count (tagged)
  // ecx = argument count (tagged)
  // edi = address of parameter map or backing store (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[8] = parameter count (tagged)
  // esp[12] = address of receiver argument
  // Free a register.
  __ push(eax);

  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  __ test(ebx, ebx);
  __ j(zero, &skip_parameter_map);

  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate->factory()->non_strict_arguments_elements_map()));
  __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
  __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize));
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameters thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
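  // Rough C-style sketch of the loop below (illustrative names; the two
  // header slots of the parameter map are ignored here):
  //
  //   index = Context::MIN_CONTEXT_SLOTS + parameter_count - mapped_count;
  //   for (int i = mapped_count - 1; i >= 0; i--) {
  //     parameter_map[i] = Smi::FromInt(index);  // Context slot for arg i.
  //     backing_store[i] = the_hole;             // Hole marks a mapped slot.
  //     index++;
  //   }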
  Label parameters_loop, parameters_test;
  __ push(ecx);
  __ mov(eax, Operand(esp, 2 * kPointerSize));
  __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
  __ add(ebx, Operand(esp, 4 * kPointerSize));
  __ sub(ebx, eax);
  __ mov(ecx, isolate->factory()->the_hole_value());
  __ mov(edx, edi);
  __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
  // eax = loop variable (tagged)
  // ebx = mapping index (tagged)
  // ecx = the hole value
  // edx = address of parameter map (tagged)
  // edi = address of backing store (tagged)
  // esp[0] = argument count (tagged)
  // esp[4] = address of new object (tagged)
  // esp[8] = mapped parameter count (tagged)
  // esp[16] = parameter count (tagged)
  // esp[20] = address of receiver argument
  __ jmp(&parameters_test, Label::kNear);

  __ bind(&parameters_loop);
  __ sub(eax, Immediate(Smi::FromInt(1)));
  __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx);
  __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx);
  __ add(ebx, Immediate(Smi::FromInt(1)));
  __ bind(&parameters_test);
  __ test(eax, eax);
  __ j(not_zero, &parameters_loop, Label::kNear);
  __ pop(ecx);

  __ bind(&skip_parameter_map);

  // ecx = argument count (tagged)
  // edi = address of backing store (tagged)
  // esp[0] = address of new object (tagged)
  // esp[4] = mapped parameter count (tagged)
  // esp[12] = parameter count (tagged)
  // esp[16] = address of receiver argument
  // Copy arguments header and remaining slots (if there are any).
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate->factory()->fixed_array_map()));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);

  Label arguments_loop, arguments_test;
  __ mov(ebx, Operand(esp, 1 * kPointerSize));
  __ mov(edx, Operand(esp, 4 * kPointerSize));
  __ sub(edx, ebx);  // Is there a smarter way to do negative scaling?
  __ sub(edx, ebx);
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ sub(edx, Immediate(kPointerSize));
  __ mov(eax, Operand(edx, 0));
  __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax);
  __ add(ebx, Immediate(Smi::FromInt(1)));

  __ bind(&arguments_test);
  __ cmp(ebx, ecx);
  __ j(less, &arguments_loop, Label::kNear);

  // Restore.
  __ pop(eax);  // Address of arguments object.
  __ pop(ebx);  // Parameter count.

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ pop(eax);  // Remove saved parameter count.
  __ mov(Operand(esp, 1 * kPointerSize), ecx);  // Patch argument count.
  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}


void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  Isolate* isolate = masm->isolate();

  // esp[0] : return address
  // esp[4] : number of parameters
  // esp[8] : receiver displacement
  // esp[12] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor_frame, Label::kNear);

  // Get the length from the frame.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ jmp(&try_allocate, Label::kNear);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ test(ecx, ecx);
  __ j(zero, &add_arguments_object, Label::kNear);
  __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ add(ecx, Immediate(Heap::kArgumentsObjectSizeStrict));
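  // ecx now holds the total allocation size in bytes:
  //   Heap::kArgumentsObjectSizeStrict
  //   + FixedArray::kHeaderSize + argument_count * kPointerSize,
  // where the elements part is omitted entirely when argument_count == 0.
  // The times_2 scaling suffices because ecx is smi-tagged.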

  // Do the allocation of both objects in one go.
  __ Allocate(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);

  // Get the arguments boilerplate from the current native context.
  __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(edi, FieldOperand(edi, GlobalObject::kNativeContextOffset));
  const int offset =
      Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
  __ mov(edi, Operand(edi, offset));

  // Copy the JS object part.
  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
    __ mov(ebx, FieldOperand(edi, i));
    __ mov(FieldOperand(eax, i), ebx);
  }

  // Get the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                      Heap::kArgumentsLengthIndex * kPointerSize),
         ecx);

  // If there are no actual arguments, we're done.
  Label done;
  __ test(ecx, ecx);
  __ j(zero, &done, Label::kNear);

  // Get the parameters pointer from the stack.
  __ mov(edx, Operand(esp, 2 * kPointerSize));

  // Set up the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ lea(edi, Operand(eax, Heap::kArgumentsObjectSizeStrict));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate->factory()->fixed_array_map()));

  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
  // Untag the length for the loop below.
  __ SmiUntag(ecx);

  // Copy the fixed array slots.
  Label loop;
  __ bind(&loop);
  __ mov(ebx, Operand(edx, -1 * kPointerSize));  // Skip receiver.
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
  __ add(edi, Immediate(kPointerSize));
  __ sub(edx, Immediate(kPointerSize));
  __ dec(ecx);
  __ j(not_zero, &loop);

  // Return and remove the on-stack parameters.
  __ bind(&done);
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
}


void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Just jump directly to the runtime if native RegExp is not selected at
  // compile time, or if the regexp entry in generated code has been turned
  // off by a runtime switch or at compilation.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: last_match_info (expected JSArray)
  //  esp[8]: previous index
  //  esp[12]: subject string
  //  esp[16]: JSRegExp object

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;
  static const int kSubjectOffset = 3 * kPointerSize;
  static const int kJSRegExpOffset = 4 * kPointerSize;

  Label runtime;
  Factory* factory = masm->isolate()->factory();

  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(
          masm->isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(masm->isolate());
  __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ test(ebx, ebx);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
  __ j(not_equal, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    __ test(ecx, Immediate(kSmiTagMask));
    __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  // ecx: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
  __ cmp(ebx, Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
  __ j(not_equal, &runtime);

  // ecx: RegExp data (FixedArray)
  // Check that the number of captures fits in the static offsets vector
  // buffer.
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Check (number_of_captures + 1) * 2 <= offsets vector size
  // Or          number_of_captures * 2 <= offsets vector size - 2
  // Multiplying by 2 comes for free since edx is smi-tagged.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
  __ cmp(edx, Isolate::kJSRegexpStaticOffsetsVectorSize - 2);
  __ j(above, &runtime);
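  // Since edx is a smi it already holds number_of_captures * 2, so the single
  // unsigned cmp above implements number_of_captures * 2 <= vector_size - 2,
  // which is equivalent to (number_of_captures + 1) * 2 <= vector_size.
  // E.g. a 32-entry offsets vector would admit at most 15 captures.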

  // Reset offset for possibly sliced string.
  __ Set(edi, Immediate(0));
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ JumpIfSmi(eax, &runtime);
  __ mov(edx, eax);  // Make a copy of the original subject string.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));

  // eax: subject string
  // edx: subject string
  // ebx: subject string instance type
  // ecx: RegExp data (FixedArray)
  // Handle subject string according to its encoding and representation:
  // (1) Sequential two byte?  If yes, go to (9).
  // (2) Sequential one byte?  If yes, go to (6).
  // (3) Anything but sequential or cons?  If yes, go to (7).
  // (4) Cons string.  If the string is flat, replace subject with first string.
  //     Otherwise bailout.
  // (5a) Is subject sequential two byte?  If yes, go to (9).
  // (5b) Is subject external?  If yes, go to (8).
  // (6) One byte sequential.  Load regexp code for one byte.
  // (E) Carry on.
  /// [...]

  // Deferred code at the end of the stub:
  // (7) Not a long external string?  If yes, go to (10).
  // (8) External string.  Make it, offset-wise, look like a sequential string.
  // (8a) Is the external string one byte?  If yes, go to (6).
  // (9) Two byte sequential.  Load regexp code for two byte. Go to (E).
  // (10) Short external string or not a string?  If yes, bail out to runtime.
  // (11) Sliced string.  Replace subject with parent. Go to (5a).

  Label seq_one_byte_string /* 6 */, seq_two_byte_string /* 9 */,
        external_string /* 8 */, check_underlying /* 5a */,
        not_seq_nor_cons /* 7 */, check_code /* E */,
        not_long_external /* 10 */;

  // (1) Sequential two byte?  If yes, go to (9).
  __ and_(ebx, kIsNotStringMask |
               kStringRepresentationMask |
               kStringEncodingMask |
               kShortExternalStringMask);
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).

  // (2) Sequential one byte?  If yes, go to (6).
  // Any other sequential string must be one byte.
  __ and_(ebx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kShortExternalStringMask));
  __ j(zero, &seq_one_byte_string, Label::kNear);  // Go to (6).

  // (3) Anything but sequential or cons?  If yes, go to (7).
  // We check whether the subject string is a cons, since sequential strings
  // have already been covered.
  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
  STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
  STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
  __ cmp(ebx, Immediate(kExternalStringTag));
  __ j(greater_equal, &not_seq_nor_cons);  // Go to (7).

  // (4) Cons string.  Check that it's flat.
  // Replace subject with first string and reload instance type.
  __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string());
  __ j(not_equal, &runtime);
  __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
  __ bind(&check_underlying);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ mov(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));

  // (5a) Is subject sequential two byte?  If yes, go to (9).
  __ test_b(ebx, kStringRepresentationMask | kStringEncodingMask);
  STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).
  // (5b) Is subject external?  If yes, go to (8).
  __ test_b(ebx, kStringRepresentationMask);
  // The underlying external string is never a short external string.
  STATIC_CHECK(ExternalString::kMaxShortLength < ConsString::kMinLength);
  STATIC_CHECK(ExternalString::kMaxShortLength < SlicedString::kMinLength);
  __ j(not_zero, &external_string);  // Go to (8).

  // eax: sequential subject string (or look-alike, external string)
  // edx: original subject string
  // ecx: RegExp data (FixedArray)
  // (6) One byte sequential.  Load regexp code for one byte.
  __ bind(&seq_one_byte_string);
  // Load previous index and check range before edx is overwritten.  We have
  // to use edx instead of eax here because it might have been only made to
  // look like a sequential string when it actually is an external string.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataAsciiCodeOffset));
  __ Set(ecx, Immediate(1));  // Type is one byte.

  // (E) Carry on.  String handling is done.
  __ bind(&check_code);
  // edx: irregexp code
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object; otherwise it
  // contains a smi (code flushing support).
  __ JumpIfSmi(edx, &runtime);

  // eax: subject string
  // ebx: previous index (smi)
  // edx: code
  // ecx: encoding of subject string (1 if ASCII, 0 if two_byte)
  // All checks done. Now push arguments for native regexp code.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  // Isolates: note we add an additional parameter here (isolate pointer).
  static const int kRegExpExecuteArguments = 9;
  __ EnterApiExitFrame(kRegExpExecuteArguments);

  // Argument 9: Pass current isolate address.
  __ mov(Operand(esp, 8 * kPointerSize),
      Immediate(ExternalReference::isolate_address(masm->isolate())));

  // Argument 8: Indicate that this is a direct call from JavaScript.
  __ mov(Operand(esp, 7 * kPointerSize), Immediate(1));

  // Argument 7: Start (high end) of backtracking stack memory area.
  __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
  __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ mov(Operand(esp, 6 * kPointerSize), esi);

  // Argument 6: Set the number of capture registers to zero to force global
  // regexps to behave as non-global.  This does not affect non-global regexps.
  __ mov(Operand(esp, 5 * kPointerSize), Immediate(0));

  // Argument 5: static offsets vector buffer.
  __ mov(Operand(esp, 4 * kPointerSize),
         Immediate(ExternalReference::address_of_static_offsets_vector(
             masm->isolate())));

  // Argument 2: Previous index.
  __ SmiUntag(ebx);
  __ mov(Operand(esp, 1 * kPointerSize), ebx);

  // Argument 1: Original subject string.
  // The original subject is in the previous stack frame. Therefore we have to
  // use ebp, which points exactly to one pointer size below the previous esp.
  // (Because creating a new stack frame pushes the previous ebp onto the stack
  // and thereby moves up esp by one kPointerSize.)
  __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
  __ mov(Operand(esp, 0 * kPointerSize), esi);

  // esi: original subject string
  // eax: underlying subject string
  // ebx: previous index
  // ecx: encoding of subject string (1 if ASCII, 0 if two_byte)
  // edx: code
  // Argument 4: End of string data
  // Argument 3: Start of string data
  // Prepare start and end index of the input.
  // Load the length from the original sliced string if that is the case.
  __ mov(esi, FieldOperand(esi, String::kLengthOffset));
  __ add(esi, edi);  // Calculate input end wrt offset.
  __ SmiUntag(edi);
  __ add(ebx, edi);  // Calculate input start wrt offset.

  // ebx: start index of the input string
  // esi: end index of the input string
  Label setup_two_byte, setup_rest;
  __ test(ecx, ecx);
  __ j(zero, &setup_two_byte, Label::kNear);
  __ SmiUntag(esi);
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqOneByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqOneByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.
  __ jmp(&setup_rest, Label::kNear);

  __ bind(&setup_two_byte);
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);  // esi is a smi (already multiplied by 2).
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.

  __ bind(&setup_rest);

  // Locate the code entry and call it.
  __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(edx);

  // Drop arguments and come back to JS mode.
  __ LeaveApiExitFrame(true);

  // Check the result.
  Label success;
  __ cmp(eax, 1);
  // We expect exactly one result since we force the called regexp to behave
  // as non-global.
  __ j(equal, &success);
  Label failure;
  __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
  __ j(equal, &failure);
  __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
  // If it is not an exception, it can only be retry. Handle that in the
  // runtime system.
  __ j(not_equal, &runtime);
  // Result must now be exception. If there is no pending exception already, a
  // stack overflow (on the backtrack stack) was detected in RegExp code, but
  // the exception has not been created yet. Handle that in the runtime system.
  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                      masm->isolate());
  __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
  __ mov(eax, Operand::StaticVariable(pending_exception));
  __ cmp(edx, eax);
  __ j(equal, &runtime);
  // For exception, throw the exception again.

  // Clear the pending exception variable.
  __ mov(Operand::StaticVariable(pending_exception), edx);

  // Special handling of termination exceptions, which are uncatchable
  // by javascript code.
  __ cmp(eax, factory->termination_exception());
  Label throw_termination_exception;
  __ j(equal, &throw_termination_exception, Label::kNear);

  // Handle normal exception by following handler chain.
  __ Throw(eax);

  __ bind(&throw_termination_exception);
  __ ThrowUncatchable(eax);

  __ bind(&failure);
  // For failure to match, return null.
  __ mov(eax, factory->null_value());
  __ ret(4 * kPointerSize);

  // Load RegExp data.
  __ bind(&success);
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  __ add(edx, Immediate(2));  // edx was a smi.
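  // edx held number_of_captures as a smi, i.e. number_of_captures * 2;
  // adding 2 yields the untagged value (number_of_captures + 1) * 2.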

  // edx: Number of capture registers
  // Load last_match_info which is still known to be a fast case JSArray.
  // Check that the fourth object is a JSArray object.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  __ j(not_equal, &runtime);
  // Check that the JSArray is in fast case.
  __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
  __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
  __ cmp(eax, factory->fixed_array_map());
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information.
  __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ SmiUntag(eax);
  __ sub(eax, Immediate(RegExpImpl::kLastMatchOverhead));
  __ cmp(edx, eax);
  __ j(greater, &runtime);

  // ebx: last_match_info backing store (FixedArray)
  // edx: number of capture registers
  // Store the capture count.
  __ SmiTag(edx);  // Number of capture registers to smi.
  __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
  __ SmiUntag(edx);  // Number of capture registers back from smi.
  // Store last subject and last input.
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ mov(ecx, eax);
  __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
  __ RecordWriteField(ebx,
                      RegExpImpl::kLastSubjectOffset,
                      eax,
                      edi,
                      kDontSaveFPRegs);
  __ mov(eax, ecx);
  __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
  __ RecordWriteField(ebx,
                      RegExpImpl::kLastInputOffset,
                      eax,
                      edi,
                      kDontSaveFPRegs);

  // Get the static offsets vector filled by the native regexp code.
  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector(masm->isolate());
  __ mov(ecx, Immediate(address_of_static_offsets_vector));

  // ebx: last_match_info backing store (FixedArray)
  // ecx: offsets vector
  // edx: number of capture registers
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ sub(edx, Immediate(1));
  __ j(negative, &done, Label::kNear);
  // Read the value from the static offsets vector buffer.
  __ mov(edi, Operand(ecx, edx, times_int_size, 0));
  __ SmiTag(edi);
  // Store the smi value in the last match info.
  __ mov(FieldOperand(ebx,
                      edx,
                      times_pointer_size,
                      RegExpImpl::kFirstCaptureOffset),
                      edi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ mov(eax, Operand(esp, kLastMatchInfoOffset));
  __ ret(4 * kPointerSize);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);

  // Deferred code for string handling.
  // (7) Not a long external string?  If yes, go to (10).
  __ bind(&not_seq_nor_cons);
  // Compare flags are still set from (3).
  __ j(greater, &not_long_external, Label::kNear);  // Go to (10).

  // (8) External string.  Short external strings have been ruled out.
  __ bind(&external_string);
  // Reload instance type.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ test_b(ebx, kIsIndirectStringMask);
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ sub(eax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kTwoByteStringTag == 0);
  // (8a) Is the external string one byte?  If yes, go to (6).
  __ test_b(ebx, kStringEncodingMask);
  __ j(not_zero, &seq_one_byte_string);  // Go to (6).

  // eax: sequential subject string (or look-alike, external string)
  // edx: original subject string
  // ecx: RegExp data (FixedArray)
  // (9) Two byte sequential.  Load regexp code for two byte. Go to (E).
  __ bind(&seq_two_byte_string);
  // Load previous index and check range before edx is overwritten.  We have
  // to use edx instead of eax here because it might have been only made to
  // look like a sequential string when it actually is an external string.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
  __ Set(ecx, Immediate(0));  // Type is two byte.
  __ jmp(&check_code);  // Go to (E).

  // (10) Not a string or a short external string?  If yes, bail out to runtime.
  __ bind(&not_long_external);
  // Catch non-string subject or short external string.
  STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
  __ test(ebx, Immediate(kIsNotStringMask | kShortExternalStringTag));
  __ j(not_zero, &runtime);

  // (11) Sliced string.  Replace subject with parent.  Go to (5a).
  // Load offset into edi and replace subject string with parent.
  __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
  __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
  __ jmp(&check_underlying);  // Go to (5a).
#endif  // V8_INTERPRETED_REGEXP
}


void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
  const int kMaxInlineLength = 100;
  Label slowcase;
  Label done;
  __ mov(ebx, Operand(esp, kPointerSize * 3));
  __ JumpIfNotSmi(ebx, &slowcase);
  __ cmp(ebx, Immediate(Smi::FromInt(kMaxInlineLength)));
  __ j(above, &slowcase);
  // Smi-tagging is equivalent to multiplying by 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  // Allocate RegExpResult followed by FixedArray with size in ebx.
  // JSArray:   [Map][empty properties][Elements][Length-smi][index][input]
  // Elements:  [Map][Length][..elements..]
  __ Allocate(JSRegExpResult::kSize + FixedArray::kHeaderSize,
              times_pointer_size,
              ebx,  // In: Number of elements as a smi
              REGISTER_VALUE_IS_SMI,
              eax,  // Out: Start of allocation (tagged).
              ecx,  // Out: End of allocation.
              edx,  // Scratch register
              &slowcase,
              TAG_OBJECT);
  // eax: Start of allocated area, object-tagged.

  // Set JSArray map to global.regexp_result_map().
  // Set empty properties FixedArray.
  // Set elements to point to FixedArray allocated right after the JSArray.
  // Interleave operations for better latency.
  __ mov(edx, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
  Factory* factory = masm->isolate()->factory();
  __ mov(ecx, Immediate(factory->empty_fixed_array()));
  __ lea(ebx, Operand(eax, JSRegExpResult::kSize));
  __ mov(edx, FieldOperand(edx, GlobalObject::kNativeContextOffset));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), ebx);
  __ mov(FieldOperand(eax, JSObject::kPropertiesOffset), ecx);
  __ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX));
  __ mov(FieldOperand(eax, HeapObject::kMapOffset), edx);

  // Set input, index and length fields from arguments.
  __ mov(ecx, Operand(esp, kPointerSize * 1));
  __ mov(FieldOperand(eax, JSRegExpResult::kInputOffset), ecx);
  __ mov(ecx, Operand(esp, kPointerSize * 2));
  __ mov(FieldOperand(eax, JSRegExpResult::kIndexOffset), ecx);
  __ mov(ecx, Operand(esp, kPointerSize * 3));
  __ mov(FieldOperand(eax, JSArray::kLengthOffset), ecx);

  // Fill out the elements FixedArray.
  // eax: JSArray.
  // ebx: FixedArray.
  // ecx: Number of elements in array, as smi.

  // Set map.
  __ mov(FieldOperand(ebx, HeapObject::kMapOffset),
         Immediate(factory->fixed_array_map()));
  // Set length.
  __ mov(FieldOperand(ebx, FixedArray::kLengthOffset), ecx);
  // Fill contents of fixed-array with undefined.
  __ SmiUntag(ecx);
  __ mov(edx, Immediate(factory->undefined_value()));
  __ lea(ebx, FieldOperand(ebx, FixedArray::kHeaderSize));
  // Fill fixed array elements with undefined.
  // eax: JSArray.
  // ecx: Number of elements to fill.
  // ebx: Start of elements in FixedArray.
  // edx: undefined.
  Label loop;
  __ test(ecx, ecx);
  __ bind(&loop);
  __ j(less_equal, &done, Label::kNear);  // Jump if ecx is negative or zero.
  __ sub(ecx, Immediate(1));
  __ mov(Operand(ebx, ecx, times_pointer_size, 0), edx);
  __ jmp(&loop);

  __ bind(&done);
  __ ret(3 * kPointerSize);

  __ bind(&slowcase);
  __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
}


static int NegativeComparisonResult(Condition cc) {
  ASSERT(cc != equal);
  ASSERT((cc == less) || (cc == less_equal)
      || (cc == greater) || (cc == greater_equal));
  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}
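// Example: NegativeComparisonResult(greater) == LESS. The value returned is
// the comparison outcome on which the condition cc is false, which is what a
// comparison involving undefined or NaN operands must produce.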


static void CheckInputType(MacroAssembler* masm,
                           Register input,
                           CompareIC::State expected,
                           Label* fail) {
  Label ok;
  if (expected == CompareIC::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareIC::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ cmp(FieldOperand(input, HeapObject::kMapOffset),
           Immediate(masm->isolate()->factory()->heap_number_map()));
    __ j(not_equal, fail);
  }
  // We could be strict about internalized/non-internalized here, but as long as
  // hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}


static void BranchIfNotInternalizedString(MacroAssembler* masm,
                                          Label* label,
                                          Register object,
                                          Register scratch) {
  __ JumpIfSmi(object, label);
  __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ test(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, label);
}


void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
  Label check_unequal_objects;
  Condition cc = GetCondition();

  Label miss;
  CheckInputType(masm, edx, left_, &miss);
  CheckInputType(masm, eax, right_, &miss);

  // Compare two smis.
  Label non_smi, smi_done;
  __ mov(ecx, edx);
  __ or_(ecx, eax);
  __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
  __ sub(edx, eax);  // Return on the result of the subtraction.
  __ j(no_overflow, &smi_done, Label::kNear);
  __ not_(edx);  // Correct sign in case of overflow. edx is never 0 here.
  __ bind(&smi_done);
  __ mov(eax, edx);
  __ ret(0);
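  // Rough C-style sketch of the smi fast path above (illustrative; the
  // overflow check stands in for the no_overflow flag test):
  //
  //   int32_t diff;
  //   if (!__builtin_sub_overflow(left, right, &diff)) return diff;
  //   // On overflow the sign of diff is wrong; ~diff flips it and can never
  //   // be zero, since overflow implies the operands were not equal.
  //   return ~diff;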
  __ bind(&non_smi);

  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Identical objects can be compared fast, but there are some tricky cases
  // for NaN and undefined.
  Label generic_heap_number_comparison;
  {
    Label not_identical;
    __ cmp(eax, edx);
    __ j(not_equal, &not_identical);

    if (cc != equal) {
      // Check for undefined.  undefined OP undefined is false even though
      // undefined == undefined.
      Label check_for_nan;
      __ cmp(edx, masm->isolate()->factory()->undefined_value());
      __ j(not_equal, &check_for_nan, Label::kNear);
      __ Set(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
      __ ret(0);
      __ bind(&check_for_nan);
    }

    // Test for NaN. Compare heap numbers in a general way,
    // to handle NaNs correctly.
    __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
           Immediate(masm->isolate()->factory()->heap_number_map()));
    __ j(equal, &generic_heap_number_comparison, Label::kNear);
    if (cc != equal) {
      // Call runtime on identical JSObjects.  Otherwise return equal.
      __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
      __ j(above_equal, &not_identical);
    }
    __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
    __ ret(0);


    __ bind(&not_identical);
  }

  // Strict equality can quickly decide whether objects are equal.
  // Non-strict object equality is slower, so it is handled later in the stub.
  if (cc == equal && strict()) {
    Label slow;  // Fallthrough label.
    Label not_smis;
    // If we're doing a strict equality comparison, we don't have to do
    // type conversion, so we generate code to do fast comparison for objects
    // and oddballs. Non-smi numbers and strings still go through the usual
    // slow-case code.
    // If either is a Smi (we know that not both are), then they can only
    // be equal if the other is a HeapNumber. If so, use the slow case.
    STATIC_ASSERT(kSmiTag == 0);
    ASSERT_EQ(0, Smi::FromInt(0));
    __ mov(ecx, Immediate(kSmiTagMask));
    __ and_(ecx, eax);
    __ test(ecx, edx);
    __ j(not_zero, &not_smis, Label::kNear);
    // One operand is a smi.

    // Check whether the non-smi is a heap number.
    STATIC_ASSERT(kSmiTagMask == 1);
    // ecx still holds eax & kSmiTag, which is either zero or one.
    __ sub(ecx, Immediate(0x01));
    __ mov(ebx, edx);
    __ xor_(ebx, eax);
    __ and_(ebx, ecx);  // ebx holds either 0 or eax ^ edx.
    __ xor_(ebx, eax);
    // If eax was a smi, ebx is now edx, otherwise eax.
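    // The mov/sub/xor/and/xor sequence above is a branchless select:
    //   mask = (eax & kSmiTagMask) - 1;    // all ones if eax is a smi, else 0
    //   ebx  = ((edx ^ eax) & mask) ^ eax; // edx if eax is a smi, else eax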

    // Check if the non-smi operand is a heap number.
    __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
           Immediate(masm->isolate()->factory()->heap_number_map()));
    // If heap number, handle it in the slow case.
    __ j(equal, &slow, Label::kNear);
    // Return non-equal (ebx is not zero)
    __ mov(eax, ebx);
    __ ret(0);

    __ bind(&not_smis);
    // If either operand is a JSObject or an oddball value, then they are not
    // equal since their pointers are different.
    // There is no test for undetectability in strict equality.

    // Get the type of the first operand.
    // If the first object is a JS object, we have done pointer comparison.
    Label first_non_object;
    STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
    __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
    __ j(below, &first_non_object, Label::kNear);

    // Return non-zero (eax is not zero)
    Label return_not_equal;
    STATIC_ASSERT(kHeapObjectTag != 0);
    __ bind(&return_not_equal);
    __ ret(0);

    __ bind(&first_non_object);
    // Check for oddballs: true, false, null, undefined.
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(equal, &return_not_equal);

    __ CmpObjectType(edx, FIRST_SPEC_OBJECT_TYPE, ecx);
    __ j(above_equal, &return_not_equal);

    // Check for oddballs: true, false, null, undefined.
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(equal, &return_not_equal);

    // Fall through to the general case.
    __ bind(&slow);
  }

  // Generate the number comparison code.
  Label non_number_comparison;
  Label unordered;
  __ bind(&generic_heap_number_comparison);
  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatureScope use_sse2(masm, SSE2);
    CpuFeatureScope use_cmov(masm, CMOV);

    FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
    __ ucomisd(xmm0, xmm1);

    // Don't base result on EFLAGS when a NaN is involved.
    __ j(parity_even, &unordered, Label::kNear);
    // Return a result of -1, 0, or 1, based on EFLAGS.
    __ mov(eax, 0);  // equal
    __ mov(ecx, Immediate(Smi::FromInt(1)));
    __ cmov(above, eax, ecx);
    __ mov(ecx, Immediate(Smi::FromInt(-1)));
    __ cmov(below, eax, ecx);
    __ ret(0);
  } else {
    FloatingPointHelper::CheckFloatOperands(
        masm, &non_number_comparison, ebx);
    FloatingPointHelper::LoadFloatOperand(masm, eax);
    FloatingPointHelper::LoadFloatOperand(masm, edx);
    __ FCmp();

    // Don't base result on EFLAGS when a NaN is involved.
    __ j(parity_even, &unordered, Label::kNear);

    Label below_label, above_label;
    // Return a result of -1, 0, or 1, based on EFLAGS.
    __ j(below, &below_label, Label::kNear);
    __ j(above, &above_label, Label::kNear);

    __ Set(eax, Immediate(0));
    __ ret(0);

    __ bind(&below_label);
    __ mov(eax, Immediate(Smi::FromInt(-1)));
    __ ret(0);

    __ bind(&above_label);
    __ mov(eax, Immediate(Smi::FromInt(1)));
    __ ret(0);
  }

  // If one of the numbers was NaN, then the result is always false.
  // The cc is never not-equal.
  __ bind(&unordered);
  ASSERT(cc != not_equal);
  if (cc == less || cc == less_equal) {
    __ mov(eax, Immediate(Smi::FromInt(1)));
  } else {
    __ mov(eax, Immediate(Smi::FromInt(-1)));
  }
  __ ret(0);
2621 
2622   // The number comparison code did not provide a valid result.
2623   __ bind(&non_number_comparison);
2624 
2625   // Fast negative check for internalized-to-internalized equality.
2626   Label check_for_strings;
2627   if (cc == equal) {
2628     BranchIfNotInternalizedString(masm, &check_for_strings, eax, ecx);
2629     BranchIfNotInternalizedString(masm, &check_for_strings, edx, ecx);
2630 
2631     // We've already checked for object identity, so if both operands
2632     // are internalized they aren't equal. Register eax already holds a
2633     // non-zero value, which indicates not equal, so just return.
2634     __ ret(0);
2635   }
2636 
2637   __ bind(&check_for_strings);
2638 
2639   __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx,
2640                                          &check_unequal_objects);
2641 
2642   // Inline comparison of ASCII strings.
2643   if (cc == equal) {
2644     StringCompareStub::GenerateFlatAsciiStringEquals(masm,
2645                                                      edx,
2646                                                      eax,
2647                                                      ecx,
2648                                                      ebx);
2649   } else {
2650     StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
2651                                                        edx,
2652                                                        eax,
2653                                                        ecx,
2654                                                        ebx,
2655                                                        edi);
2656   }
2657 #ifdef DEBUG
2658   __ Abort(kUnexpectedFallThroughFromStringComparison);
2659 #endif
2660 
2661   __ bind(&check_unequal_objects);
2662   if (cc == equal && !strict()) {
2663     // Non-strict equality.  Objects are unequal if
2664     // they are both JSObjects and not undetectable,
2665     // and their pointers are different.
2666     Label not_both_objects;
2667     Label return_unequal;
2668     // At most one is a smi, so we can test for smi by adding the two.
2669     // A smi plus a heap object has the low bit set, a heap object plus
2670     // a heap object has the low bit clear.
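    // Worked example of the tag trick (kSmiTag == 0, heap objects have the
    // low bit set): smi + heap object = ...0 + ...1 ==> low bit 1, while
    // heap object + heap object = ...1 + ...1 ==> low bit 0 (the ones carry).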
2671     STATIC_ASSERT(kSmiTag == 0);
2672     STATIC_ASSERT(kSmiTagMask == 1);
2673     __ lea(ecx, Operand(eax, edx, times_1, 0));
2674     __ test(ecx, Immediate(kSmiTagMask));
2675     __ j(not_zero, &not_both_objects, Label::kNear);
2676     __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
2677     __ j(below, &not_both_objects, Label::kNear);
2678     __ CmpObjectType(edx, FIRST_SPEC_OBJECT_TYPE, ebx);
2679     __ j(below, &not_both_objects, Label::kNear);
2680     // We do not bail out after this point.  Both are JSObjects, and
2681     // they are equal if and only if both are undetectable.
2682     // The and of the undetectable flags is 1 if and only if they are equal.
2683     __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
2684               1 << Map::kIsUndetectable);
2685     __ j(zero, &return_unequal, Label::kNear);
2686     __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
2687               1 << Map::kIsUndetectable);
2688     __ j(zero, &return_unequal, Label::kNear);
2689     // The objects are both undetectable, so they both compare as the value
2690     // undefined, and are equal.
2691     __ Set(eax, Immediate(EQUAL));
2692     __ bind(&return_unequal);
2693     // Return non-equal by returning the non-zero object pointer in eax,
2694     // or return equal if we fell through to here.
2695     __ ret(0);  // eax, edx were pushed
2696     __ bind(&not_both_objects);
2697   }
2698 
2699   // Push arguments below the return address.
2700   __ pop(ecx);
2701   __ push(edx);
2702   __ push(eax);
2703 
2704   // Figure out which native to call and set up the arguments.
2705   Builtins::JavaScript builtin;
2706   if (cc == equal) {
2707     builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
2708   } else {
2709     builtin = Builtins::COMPARE;
2710     __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
2711   }
2712 
2713   // Restore return address on the stack.
2714   __ push(ecx);
2715 
2716   // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
2717   // tagged as a small integer.
2718   __ InvokeBuiltin(builtin, JUMP_FUNCTION);
2719 
2720   __ bind(&miss);
2721   GenerateMiss(masm);
2722 }
2723 
2724 
2725 static void GenerateRecordCallTarget(MacroAssembler* masm) {
2726   // Cache the called function in a global property cell.  Cache states
2727   // are uninitialized, monomorphic (indicated by a JSFunction), and
2728   // megamorphic.
2729   // eax : number of arguments to the construct function
2730   // ebx : cache cell for call target
2731   // edi : the function to call
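  //
  // The cache cell implements a small state machine (a sketch of the
  // transitions coded below):
  //   uninitialized --> JSFunction         (monomorphic call target)
  //   uninitialized --> AllocationSite     (monomorphic Array constructor)
  //   any mismatch  --> megamorphic sentinel (terminal)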
2732   Isolate* isolate = masm->isolate();
2733   Label initialize, done, miss, megamorphic, not_array_function;
2734 
2735   // Load the cache state into ecx.
2736   __ mov(ecx, FieldOperand(ebx, Cell::kValueOffset));
2737 
2738   // A monomorphic cache hit or an already megamorphic state: invoke the
2739   // function without changing the state.
2740   __ cmp(ecx, edi);
2741   __ j(equal, &done);
2742   __ cmp(ecx, Immediate(TypeFeedbackCells::MegamorphicSentinel(isolate)));
2743   __ j(equal, &done);
2744 
2745   // If we came here, we need to see if we are the array function.
2746   // If we didn't have a matching function, and we didn't find the
2747   // megamorphic sentinel, then the cell holds either some other function
2748   // or an AllocationSite. Do a map check on the object in ecx.
2749   Handle<Map> allocation_site_map =
2750       masm->isolate()->factory()->allocation_site_map();
2751   __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
2752   __ j(not_equal, &miss);
2753 
2754   // Load the global or builtins object from the current context
2755   __ LoadGlobalContext(ecx);
2756   // Make sure the function is the Array() function
2757   __ cmp(edi, Operand(ecx,
2758                       Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
2759   __ j(not_equal, &megamorphic);
2760   __ jmp(&done);
2761 
2762   __ bind(&miss);
2763 
2764   // A monomorphic miss (i.e. here the cache is not uninitialized) goes
2765   // megamorphic.
2766   __ cmp(ecx, Immediate(TypeFeedbackCells::UninitializedSentinel(isolate)));
2767   __ j(equal, &initialize);
2768   // MegamorphicSentinel is an immortal immovable object (undefined) so no
2769   // write-barrier is needed.
2770   __ bind(&megamorphic);
2771   __ mov(FieldOperand(ebx, Cell::kValueOffset),
2772          Immediate(TypeFeedbackCells::MegamorphicSentinel(isolate)));
2773   __ jmp(&done, Label::kNear);
2774 
2775   // An uninitialized cache is patched with the function, or with an
2776   // AllocationSite tracking the ElementsKind if it is the Array constructor.
2777   __ bind(&initialize);
2778   __ LoadGlobalContext(ecx);
2779   // Make sure the function is the Array() function
2780   __ cmp(edi, Operand(ecx,
2781                       Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
2782   __ j(not_equal, &not_array_function);
2783 
2784   // The target function is the Array constructor.
2785   // Create an AllocationSite if we don't already have it; store it in the cell.
2786   {
2787     FrameScope scope(masm, StackFrame::INTERNAL);
2788 
2789     // Arguments register must be smi-tagged to call out.
2790     __ SmiTag(eax);
2791     __ push(eax);
2792     __ push(edi);
2793     __ push(ebx);
2794 
2795     CreateAllocationSiteStub create_stub;
2796     __ CallStub(&create_stub);
2797 
2798     __ pop(ebx);
2799     __ pop(edi);
2800     __ pop(eax);
2801     __ SmiUntag(eax);
2802   }
2803   __ jmp(&done);
2804 
2805   __ bind(&not_array_function);
2806   __ mov(FieldOperand(ebx, Cell::kValueOffset), edi);
2807   // No need for a write barrier here - cells are rescanned.
2808 
2809   __ bind(&done);
2810 }
2811 
2812 
2813 void CallFunctionStub::Generate(MacroAssembler* masm) {
2814   // ebx : cache cell for call target
2815   // edi : the function to call
2816   Isolate* isolate = masm->isolate();
2817   Label slow, non_function;
2818 
2819   // The receiver might implicitly be the global object. This is
2820   // indicated by passing the hole as the receiver to the call
2821   // function stub.
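  // E.g. a bare call `f()` has no explicit receiver, so the hole is pushed
  // and patched to the global receiver below, while `o.f()` passes `o` and
  // is left untouched.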
2822   if (ReceiverMightBeImplicit()) {
2823     Label receiver_ok;
2824     // Get the receiver from the stack.
2825     // +1 ~ return address
2826     __ mov(eax, Operand(esp, (argc_ + 1) * kPointerSize));
2827     // Call as function is indicated with the hole.
2828     __ cmp(eax, isolate->factory()->the_hole_value());
2829     __ j(not_equal, &receiver_ok, Label::kNear);
2830     // Patch the receiver on the stack with the global receiver object.
2831     __ mov(ecx, GlobalObjectOperand());
2832     __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalReceiverOffset));
2833     __ mov(Operand(esp, (argc_ + 1) * kPointerSize), ecx);
2834     __ bind(&receiver_ok);
2835   }
2836 
2837   // Check that the function really is a JavaScript function.
2838   __ JumpIfSmi(edi, &non_function);
2839   // Go to the slow case if we do not have a function.
2840   __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2841   __ j(not_equal, &slow);
2842 
2843   if (RecordCallTarget()) {
2844     GenerateRecordCallTarget(masm);
2845   }
2846 
2847   // Fast-case: Just invoke the function.
2848   ParameterCount actual(argc_);
2849 
2850   if (ReceiverMightBeImplicit()) {
2851     Label call_as_function;
2852     __ cmp(eax, isolate->factory()->the_hole_value());
2853     __ j(equal, &call_as_function);
2854     __ InvokeFunction(edi,
2855                       actual,
2856                       JUMP_FUNCTION,
2857                       NullCallWrapper(),
2858                       CALL_AS_METHOD);
2859     __ bind(&call_as_function);
2860   }
2861   __ InvokeFunction(edi,
2862                     actual,
2863                     JUMP_FUNCTION,
2864                     NullCallWrapper(),
2865                     CALL_AS_FUNCTION);
2866 
2867   // Slow-case: Non-function called.
2868   __ bind(&slow);
2869   if (RecordCallTarget()) {
2870     // If there is a call target cache, mark it megamorphic in the
2871     // non-function case.  MegamorphicSentinel is an immortal immovable
2872     // object (undefined) so no write barrier is needed.
2873     __ mov(FieldOperand(ebx, Cell::kValueOffset),
2874            Immediate(TypeFeedbackCells::MegamorphicSentinel(isolate)));
2875   }
2876   // Check for function proxy.
2877   __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
2878   __ j(not_equal, &non_function);
2879   __ pop(ecx);
2880   __ push(edi);  // put proxy as additional argument under return address
2881   __ push(ecx);
2882   __ Set(eax, Immediate(argc_ + 1));
2883   __ Set(ebx, Immediate(0));
2884   __ SetCallKind(ecx, CALL_AS_FUNCTION);
2885   __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
2886   {
2887     Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
2888     __ jmp(adaptor, RelocInfo::CODE_TARGET);
2889   }
2890 
2891   // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
2892   // of the original receiver from the call site).
2893   __ bind(&non_function);
2894   __ mov(Operand(esp, (argc_ + 1) * kPointerSize), edi);
2895   __ Set(eax, Immediate(argc_));
2896   __ Set(ebx, Immediate(0));
2897   __ SetCallKind(ecx, CALL_AS_METHOD);
2898   __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
2899   Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
2900   __ jmp(adaptor, RelocInfo::CODE_TARGET);
2901 }
2902 
2903 
2904 void CallConstructStub::Generate(MacroAssembler* masm) {
2905   // eax : number of arguments
2906   // ebx : cache cell for call target
2907   // edi : constructor function
2908   Label slow, non_function_call;
2909 
2910   // Check that function is not a smi.
2911   __ JumpIfSmi(edi, &non_function_call);
2912   // Check that function is a JSFunction.
2913   __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2914   __ j(not_equal, &slow);
2915 
2916   if (RecordCallTarget()) {
2917     GenerateRecordCallTarget(masm);
2918   }
2919 
2920   // Jump to the function-specific construct stub.
2921   Register jmp_reg = ecx;
2922   __ mov(jmp_reg, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2923   __ mov(jmp_reg, FieldOperand(jmp_reg,
2924                                SharedFunctionInfo::kConstructStubOffset));
2925   __ lea(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize));
2926   __ jmp(jmp_reg);
2927 
2928   // edi: called object
2929   // eax: number of arguments
2930   // ecx: object map
2931   Label do_call;
2932   __ bind(&slow);
2933   __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
2934   __ j(not_equal, &non_function_call);
2935   __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
2936   __ jmp(&do_call);
2937 
2938   __ bind(&non_function_call);
2939   __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
2940   __ bind(&do_call);
2941   // Set expected number of arguments to zero (not changing eax).
2942   __ Set(ebx, Immediate(0));
2943   Handle<Code> arguments_adaptor =
2944       masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
2945   __ SetCallKind(ecx, CALL_AS_METHOD);
2946   __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET);
2947 }
2948 
2949 
2950 bool CEntryStub::NeedsImmovableCode() {
2951   return false;
2952 }
2953 
2954 
2955 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
2956   CEntryStub::GenerateAheadOfTime(isolate);
2957   StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
2958   StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
2959   // It is important that the store buffer overflow stubs are generated first.
2960   ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
2961   CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
2962   if (Serializer::enabled()) {
2963     PlatformFeatureScope sse2(SSE2);
2964     BinaryOpICStub::GenerateAheadOfTime(isolate);
2965   } else {
2966     BinaryOpICStub::GenerateAheadOfTime(isolate);
2967   }
2968 }
2969 
2970 
2971 void CodeStub::GenerateFPStubs(Isolate* isolate) {
2972   if (CpuFeatures::IsSupported(SSE2)) {
2973     CEntryStub save_doubles(1, kSaveFPRegs);
2974     // Stubs might already be in the snapshot; detect that and don't regenerate,
2975     // which would leave the code stub initialization state inconsistent.
2976     Code* save_doubles_code;
2977     if (!save_doubles.FindCodeInCache(&save_doubles_code, isolate)) {
2978       save_doubles_code = *(save_doubles.GetCode(isolate));
2979     }
2980     isolate->set_fp_stubs_generated(true);
2981   }
2982 }
2983 
2984 
2985 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
2986   CEntryStub stub(1, kDontSaveFPRegs);
2987   stub.GetCode(isolate);
2988 }
2989 
2990 
2991 static void JumpIfOOM(MacroAssembler* masm,
2992                       Register value,
2993                       Register scratch,
2994                       Label* oom_label) {
2995   __ mov(scratch, value);
2996   STATIC_ASSERT(Failure::OUT_OF_MEMORY_EXCEPTION == 3);
2997   STATIC_ASSERT(kFailureTag == 3);
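  // Hence the low nibble of an out-of-memory failure is
  //   (OUT_OF_MEMORY_EXCEPTION << kFailureTagSize) | kFailureTag
  //     == (3 << 2) | 3 == 0xf,
  // which is what the mask-and-compare below tests for.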
2998   __ and_(scratch, 0xf);
2999   __ cmp(scratch, 0xf);
3000   __ j(equal, oom_label);
3001 }
3002 
3003 
3004 void CEntryStub::GenerateCore(MacroAssembler* masm,
3005                               Label* throw_normal_exception,
3006                               Label* throw_termination_exception,
3007                               Label* throw_out_of_memory_exception,
3008                               bool do_gc,
3009                               bool always_allocate_scope) {
3010   // eax: result parameter for PerformGC, if any
3011   // ebx: pointer to C function  (C callee-saved)
3012   // ebp: frame pointer  (restored after C call)
3013   // esp: stack pointer  (restored after C call)
3014   // edi: number of arguments including receiver  (C callee-saved)
3015   // esi: pointer to the first argument (C callee-saved)
3016 
3017   // Result returned in eax, or eax+edx if result_size_ is 2.
3018 
3019   // Check stack alignment.
3020   if (FLAG_debug_code) {
3021     __ CheckStackAlignment();
3022   }
3023 
3024   if (do_gc) {
3025     // Pass failure code returned from last attempt as first argument to
3026     // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
3027     // stack alignment is known to be correct. This function takes one argument
3028     // which is passed on the stack, and we know that the stack has been
3029     // prepared to pass at least one argument.
3030     __ mov(Operand(esp, 1 * kPointerSize),
3031            Immediate(ExternalReference::isolate_address(masm->isolate())));
3032     __ mov(Operand(esp, 0 * kPointerSize), eax);  // Result.
3033     __ call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
3034   }
3035 
3036   ExternalReference scope_depth =
3037       ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
3038   if (always_allocate_scope) {
3039     __ inc(Operand::StaticVariable(scope_depth));
3040   }
3041 
3042   // Call C function.
3043   __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
3044   __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
3045   __ mov(Operand(esp, 2 * kPointerSize),
3046          Immediate(ExternalReference::isolate_address(masm->isolate())));
3047   __ call(ebx);
3048   // Result is in eax or edx:eax - do not destroy these registers!
3049 
3050   if (always_allocate_scope) {
3051     __ dec(Operand::StaticVariable(scope_depth));
3052   }
3053 
3054   // Runtime functions should not return 'the hole'.  Allowing it to escape may
3055   // lead to crashes in the IC code later.
3056   if (FLAG_debug_code) {
3057     Label okay;
3058     __ cmp(eax, masm->isolate()->factory()->the_hole_value());
3059     __ j(not_equal, &okay, Label::kNear);
3060     __ int3();
3061     __ bind(&okay);
3062   }
3063 
3064   // Check for failure result.
3065   Label failure_returned;
3066   STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
3067   __ lea(ecx, Operand(eax, 1));
3068   // Lower 2 bits of ecx are 0 iff eax has failure tag.
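  // (Failure pointers carry kFailureTag == 0b11 in their low two bits;
  // adding 1 makes those bits carry out to 0b00, while any other tag leaves
  // at least one of the low two bits set.)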
3069   __ test(ecx, Immediate(kFailureTagMask));
3070   __ j(zero, &failure_returned);
3071 
3072   ExternalReference pending_exception_address(
3073       Isolate::kPendingExceptionAddress, masm->isolate());
3074 
3075   // Check that there is no pending exception, otherwise we
3076   // should have returned some failure value.
3077   if (FLAG_debug_code) {
3078     __ push(edx);
3079     __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
3080     Label okay;
3081     __ cmp(edx, Operand::StaticVariable(pending_exception_address));
3082     // Cannot use Check() here, as it attempts to generate a call into the runtime.
3083     __ j(equal, &okay, Label::kNear);
3084     __ int3();
3085     __ bind(&okay);
3086     __ pop(edx);
3087   }
3088 
3089   // Exit the JavaScript to C++ exit frame.
3090   __ LeaveExitFrame(save_doubles_ == kSaveFPRegs);
3091   __ ret(0);
3092 
3093   // Handling of failure.
3094   __ bind(&failure_returned);
3095 
3096   Label retry;
3097   // If the returned exception is RETRY_AFTER_GC, continue at the retry label.
3098   STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
3099   __ test(eax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
3100   __ j(zero, &retry, Label::kNear);
3101 
3102   // Special handling of out of memory exceptions.
3103   JumpIfOOM(masm, eax, ecx, throw_out_of_memory_exception);
3104 
3105   // Retrieve the pending exception.
3106   __ mov(eax, Operand::StaticVariable(pending_exception_address));
3107 
3108   // See if we just retrieved an OOM exception.
3109   JumpIfOOM(masm, eax, ecx, throw_out_of_memory_exception);
3110 
3111   // Clear the pending exception.
3112   __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
3113   __ mov(Operand::StaticVariable(pending_exception_address), edx);
3114 
3115   // Special handling of termination exceptions, which are uncatchable
3116   // by JavaScript code.
3117   __ cmp(eax, masm->isolate()->factory()->termination_exception());
3118   __ j(equal, throw_termination_exception);
3119 
3120   // Handle normal exception.
3121   __ jmp(throw_normal_exception);
3122 
3123   // Retry.
3124   __ bind(&retry);
3125 }
3126 
3127 
3128 void CEntryStub::Generate(MacroAssembler* masm) {
3129   // eax: number of arguments including receiver
3130   // ebx: pointer to C function  (C callee-saved)
3131   // ebp: frame pointer  (restored after C call)
3132   // esp: stack pointer  (restored after C call)
3133   // esi: current context (C callee-saved)
3134   // edi: JS function of the caller (C callee-saved)
3135 
3136   ProfileEntryHookStub::MaybeCallEntryHook(masm);
3137 
3138   // NOTE: Invocations of builtins may return failure objects instead
3139   // of a proper result. The builtin entry handles this by performing
3140   // a garbage collection and retrying the builtin (twice).
3141 
3142   // Enter the exit frame that transitions from JavaScript to C++.
3143   __ EnterExitFrame(save_doubles_ == kSaveFPRegs);
3144 
3145   // eax: result parameter for PerformGC, if any (setup below)
3146   // ebx: pointer to builtin function  (C callee-saved)
3147   // ebp: frame pointer  (restored after C call)
3148   // esp: stack pointer  (restored after C call)
3149   // edi: number of arguments including receiver (C callee-saved)
3150   // esi: argv pointer (C callee-saved)
3151 
3152   Label throw_normal_exception;
3153   Label throw_termination_exception;
3154   Label throw_out_of_memory_exception;
3155 
3156   // Call into the runtime system.
3157   GenerateCore(masm,
3158                &throw_normal_exception,
3159                &throw_termination_exception,
3160                &throw_out_of_memory_exception,
3161                false,
3162                false);
3163 
3164   // Do space-specific GC and retry runtime call.
3165   GenerateCore(masm,
3166                &throw_normal_exception,
3167                &throw_termination_exception,
3168                &throw_out_of_memory_exception,
3169                true,
3170                false);
3171 
3172   // Do full GC and retry runtime call one final time.
3173   Failure* failure = Failure::InternalError();
3174   __ mov(eax, Immediate(reinterpret_cast<int32_t>(failure)));
3175   GenerateCore(masm,
3176                &throw_normal_exception,
3177                &throw_termination_exception,
3178                &throw_out_of_memory_exception,
3179                true,
3180                true);
3181 
3182   __ bind(&throw_out_of_memory_exception);
3183   // Set external caught exception to false.
3184   Isolate* isolate = masm->isolate();
3185   ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
3186                                     isolate);
3187   __ mov(Operand::StaticVariable(external_caught), Immediate(false));
3188 
3189   // Set pending exception and eax to out of memory exception.
3190   ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
3191                                       isolate);
3192   Label already_have_failure;
3193   JumpIfOOM(masm, eax, ecx, &already_have_failure);
3194   __ mov(eax, reinterpret_cast<int32_t>(Failure::OutOfMemoryException(0x1)));
3195   __ bind(&already_have_failure);
3196   __ mov(Operand::StaticVariable(pending_exception), eax);
3197   // Fall through to the next label.
3198 
3199   __ bind(&throw_termination_exception);
3200   __ ThrowUncatchable(eax);
3201 
3202   __ bind(&throw_normal_exception);
3203   __ Throw(eax);
3204 }
3205 
3206 
3207 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
3208   Label invoke, handler_entry, exit;
3209   Label not_outermost_js, not_outermost_js_2;
3210 
3211   ProfileEntryHookStub::MaybeCallEntryHook(masm);
3212 
3213   // Set up frame.
3214   __ push(ebp);
3215   __ mov(ebp, esp);
3216 
3217   // Push marker in two places.
3218   int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
3219   __ push(Immediate(Smi::FromInt(marker)));  // context slot
3220   __ push(Immediate(Smi::FromInt(marker)));  // function slot
3221   // Save callee-saved registers (C calling conventions).
3222   __ push(edi);
3223   __ push(esi);
3224   __ push(ebx);
3225 
3226   // Save copies of the top frame descriptor on the stack.
3227   ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, masm->isolate());
3228   __ push(Operand::StaticVariable(c_entry_fp));
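  // At this point the entry frame looks like this (a sketch; lower
  // addresses at the bottom):
  //   ebp +  0 : saved ebp
  //   ebp -  4 : marker (context slot)
  //   ebp -  8 : marker (function slot)
  //   ebp - 12 : saved edi
  //   ebp - 16 : saved esi
  //   ebp - 20 : saved ebx
  //   ebp - 24 : saved c_entry_fp   <-- esp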
3229 
3230   // If this is the outermost JS call, set js_entry_sp value.
3231   ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress,
3232                                 masm->isolate());
3233   __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
3234   __ j(not_equal, &not_outermost_js, Label::kNear);
3235   __ mov(Operand::StaticVariable(js_entry_sp), ebp);
3236   __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
3237   __ jmp(&invoke, Label::kNear);
3238   __ bind(&not_outermost_js);
3239   __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
3240 
3241   // Jump to a faked try block that does the invoke, with a faked catch
3242   // block that sets the pending exception.
3243   __ jmp(&invoke);
3244   __ bind(&handler_entry);
3245   handler_offset_ = handler_entry.pos();
3246   // Caught exception: Store result (exception) in the pending exception
3247   // field in the JSEnv and return a failure sentinel.
3248   ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
3249                                       masm->isolate());
3250   __ mov(Operand::StaticVariable(pending_exception), eax);
3251   __ mov(eax, reinterpret_cast<int32_t>(Failure::Exception()));
3252   __ jmp(&exit);
3253 
3254   // Invoke: Link this frame into the handler chain.  There's only one
3255   // handler block in this code object, so its index is 0.
3256   __ bind(&invoke);
3257   __ PushTryHandler(StackHandler::JS_ENTRY, 0);
3258 
3259   // Clear any pending exceptions.
3260   __ mov(edx, Immediate(masm->isolate()->factory()->the_hole_value()));
3261   __ mov(Operand::StaticVariable(pending_exception), edx);
3262 
3263   // Fake a receiver (NULL).
3264   __ push(Immediate(0));  // receiver
3265 
3266   // Invoke the function by calling through JS entry trampoline builtin and
3267   // pop the faked function when we return. Notice that we cannot store a
3268   // reference to the trampoline code directly in this stub, because the
3269   // builtin stubs may not have been generated yet.
3270   if (is_construct) {
3271     ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
3272                                       masm->isolate());
3273     __ mov(edx, Immediate(construct_entry));
3274   } else {
3275     ExternalReference entry(Builtins::kJSEntryTrampoline,
3276                             masm->isolate());
3277     __ mov(edx, Immediate(entry));
3278   }
3279   __ mov(edx, Operand(edx, 0));  // deref address
3280   __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
3281   __ call(edx);
3282 
3283   // Unlink this frame from the handler chain.
3284   __ PopTryHandler();
3285 
3286   __ bind(&exit);
3287   // Check if the current stack frame is marked as the outermost JS frame.
3288   __ pop(ebx);
3289   __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
3290   __ j(not_equal, &not_outermost_js_2);
3291   __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
3292   __ bind(&not_outermost_js_2);
3293 
3294   // Restore the top frame descriptor from the stack.
3295   __ pop(Operand::StaticVariable(ExternalReference(
3296       Isolate::kCEntryFPAddress,
3297       masm->isolate())));
3298 
3299   // Restore callee-saved registers (C calling conventions).
3300   __ pop(ebx);
3301   __ pop(esi);
3302   __ pop(edi);
3303   __ add(esp, Immediate(2 * kPointerSize));  // remove markers
3304 
3305   // Restore frame pointer and return.
3306   __ pop(ebp);
3307   __ ret(0);
3308 }
3309 
3310 
3311 // Generate stub code for instanceof.
3312 // This code can patch a call site's inlined cache for the instanceof check,
3313 // which looks like this.
3314 //
3315 //   81 ff XX XX XX XX   cmp    edi, <the hole, patched to a map>
3316 //   75 0a               jne    <some near label>
3317 //   b8 XX XX XX XX      mov    eax, <the hole, patched to either true or false>
3318 //
3319 // If call site patching is requested, the stack will have the delta from the
3320 // return address to the cmp instruction just below the return address. This
3321 // also means that call site patching can only take place with arguments in
3322 // registers. TOS looks like this when call site patching is requested:
3323 //
3324 //   esp[0] : return address
3325 //   esp[4] : delta from return address to cmp instruction
3326 //
3327 void InstanceofStub::Generate(MacroAssembler* masm) {
3328   // Call site inlining and patching implies arguments in registers.
3329   ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
3330 
3331   // Fixed register usage throughout the stub.
3332   Register object = eax;  // Object (lhs).
3333   Register map = ebx;  // Map of the object.
3334   Register function = edx;  // Function (rhs).
3335   Register prototype = edi;  // Prototype of the function.
3336   Register scratch = ecx;
3337 
3338   // Constants describing the call site code to patch.
3339   static const int kDeltaToCmpImmediate = 2;
3340   static const int kDeltaToMov = 8;
3341   static const int kDeltaToMovImmediate = 9;
3342   static const int8_t kCmpEdiOperandByte1 = BitCast<int8_t, uint8_t>(0x3b);
3343   static const int8_t kCmpEdiOperandByte2 = BitCast<int8_t, uint8_t>(0x3d);
3344   static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);
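  // Reading these constants against the patch site sketched above (an
  // interpretation, not normative): the cmp's 4-byte operand sits at
  // +kDeltaToCmpImmediate (2), after its two opcode bytes; the mov opcode
  // sits at +kDeltaToMov (8), past the 6-byte cmp and the 2-byte jne; and
  // the mov's imm32 (the cached answer) at +kDeltaToMovImmediate (9).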
3345 
3346   ASSERT_EQ(object.code(), InstanceofStub::left().code());
3347   ASSERT_EQ(function.code(), InstanceofStub::right().code());
3348 
3349   // Get the object and function - they are always both needed.
3350   Label slow, not_js_object;
3351   if (!HasArgsInRegisters()) {
3352     __ mov(object, Operand(esp, 2 * kPointerSize));
3353     __ mov(function, Operand(esp, 1 * kPointerSize));
3354   }
3355 
3356   // Check that the left-hand side is a JS object.
3357   __ JumpIfSmi(object, &not_js_object);
3358   __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
3359 
3360   // If there is a call site cache, don't look in the global cache, but do the
3361   // real lookup and update the call site cache.
3362   if (!HasCallSiteInlineCheck()) {
3363     // Look up the function and the map in the instanceof cache.
3364     Label miss;
3365     __ CompareRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
3366     __ j(not_equal, &miss, Label::kNear);
3367     __ CompareRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex);
3368     __ j(not_equal, &miss, Label::kNear);
3369     __ LoadRoot(eax, Heap::kInstanceofCacheAnswerRootIndex);
3370     __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
3371     __ bind(&miss);
3372   }
3373 
3374   // Get the prototype of the function.
3375   __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
3376 
3377   // Check that the function prototype is a JS object.
3378   __ JumpIfSmi(prototype, &slow);
3379   __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
3380 
3381   // Update the global instanceof or call site inlined cache with the current
3382   // map and function. The cached answer will be set when it is known below.
3383   if (!HasCallSiteInlineCheck()) {
3384     __ StoreRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex);
3385     __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
3386   } else {
3387     // The constants for the code patching are based on no push instructions
3388     // at the call site.
3389     ASSERT(HasArgsInRegisters());
3390     // Get return address and delta to inlined map check.
3391     __ mov(scratch, Operand(esp, 0 * kPointerSize));
3392     __ sub(scratch, Operand(esp, 1 * kPointerSize));
3393     if (FLAG_debug_code) {
3394       __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1);
3395       __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp1);
3396       __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2);
3397       __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp2);
3398     }
3399     __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
3400     __ mov(Operand(scratch, 0), map);
3401   }
3402 
3403   // Loop through the prototype chain of the object looking for the function
3404   // prototype.
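  // In JavaScript terms the loop below is roughly (a sketch of the
  // semantics, not generated code):
  //   var p = Object.getPrototypeOf(object);
  //   while (true) {
  //     if (p === prototype) return true;   // is_instance
  //     if (p === null) return false;       // is_not_instance
  //     p = Object.getPrototypeOf(p);
  //   }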
3405   __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset));
3406   Label loop, is_instance, is_not_instance;
3407   __ bind(&loop);
3408   __ cmp(scratch, prototype);
3409   __ j(equal, &is_instance, Label::kNear);
3410   Factory* factory = masm->isolate()->factory();
3411   __ cmp(scratch, Immediate(factory->null_value()));
3412   __ j(equal, &is_not_instance, Label::kNear);
3413   __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
3414   __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
3415   __ jmp(&loop);
3416 
3417   __ bind(&is_instance);
3418   if (!HasCallSiteInlineCheck()) {
3419     __ mov(eax, Immediate(0));
3420     __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
3421   } else {
3422     // Get return address and delta to inlined map check.
3423     __ mov(eax, factory->true_value());
3424     __ mov(scratch, Operand(esp, 0 * kPointerSize));
3425     __ sub(scratch, Operand(esp, 1 * kPointerSize));
3426     if (FLAG_debug_code) {
3427       __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
3428       __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
3429     }
3430     __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
3431     if (!ReturnTrueFalseObject()) {
3432       __ Set(eax, Immediate(0));
3433     }
3434   }
3435   __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
3436 
3437   __ bind(&is_not_instance);
3438   if (!HasCallSiteInlineCheck()) {
3439     __ mov(eax, Immediate(Smi::FromInt(1)));
3440     __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
3441   } else {
3442     // Get return address and delta to inlined map check.
3443     __ mov(eax, factory->false_value());
3444     __ mov(scratch, Operand(esp, 0 * kPointerSize));
3445     __ sub(scratch, Operand(esp, 1 * kPointerSize));
3446     if (FLAG_debug_code) {
3447       __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
3448       __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
3449     }
3450     __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
3451     if (!ReturnTrueFalseObject()) {
3452       __ Set(eax, Immediate(Smi::FromInt(1)));
3453     }
3454   }
3455   __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
3456 
3457   Label object_not_null, object_not_null_or_smi;
3458   __ bind(&not_js_object);
3459   // Before the null, smi and string value checks, check that the rhs is a
3460   // function; for a non-function rhs an exception needs to be thrown.
3461   __ JumpIfSmi(function, &slow, Label::kNear);
3462   __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
3463   __ j(not_equal, &slow, Label::kNear);
3464 
3465   // Null is not an instance of anything.
3466   __ cmp(object, factory->null_value());
3467   __ j(not_equal, &object_not_null, Label::kNear);
3468   __ Set(eax, Immediate(Smi::FromInt(1)));
3469   __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
3470 
3471   __ bind(&object_not_null);
3472   // Smi values are not instances of anything.
3473   __ JumpIfNotSmi(object, &object_not_null_or_smi, Label::kNear);
3474   __ Set(eax, Immediate(Smi::FromInt(1)));
3475   __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
3476 
3477   __ bind(&object_not_null_or_smi);
3478   // String values are not instances of anything.
3479   Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
3480   __ j(NegateCondition(is_string), &slow, Label::kNear);
3481   __ Set(eax, Immediate(Smi::FromInt(1)));
3482   __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
3483 
3484   // Slow-case: Go through the JavaScript implementation.
3485   __ bind(&slow);
3486   if (!ReturnTrueFalseObject()) {
3487     // Tail call the builtin which returns 0 or 1.
3488     if (HasArgsInRegisters()) {
3489       // Push arguments below return address.
3490       __ pop(scratch);
3491       __ push(object);
3492       __ push(function);
3493       __ push(scratch);
3494     }
3495     __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
3496   } else {
3497     // Call the builtin and convert 0/1 to true/false.
3498     {
3499       FrameScope scope(masm, StackFrame::INTERNAL);
3500       __ push(object);
3501       __ push(function);
3502       __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
3503     }
3504     Label true_value, done;
3505     __ test(eax, eax);
3506     __ j(zero, &true_value, Label::kNear);
3507     __ mov(eax, factory->false_value());
3508     __ jmp(&done, Label::kNear);
3509     __ bind(&true_value);
3510     __ mov(eax, factory->true_value());
3511     __ bind(&done);
3512     __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
3513   }
3514 }
3515 
3516 
3517 Register InstanceofStub::left() { return eax; }
3518 
3519 
3520 Register InstanceofStub::right() { return edx; }
3521 
3522 
3523 // -------------------------------------------------------------------------
3524 // StringCharCodeAtGenerator
3525 
3526 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
3527   // If the receiver is a smi, trigger the non-string case.
3528   STATIC_ASSERT(kSmiTag == 0);
3529   __ JumpIfSmi(object_, receiver_not_string_);
3530 
3531   // Fetch the instance type of the receiver into result register.
3532   __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
3533   __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
3534   // If the receiver is not a string, trigger the non-string case.
3535   __ test(result_, Immediate(kIsNotStringMask));
3536   __ j(not_zero, receiver_not_string_);
3537 
3538   // If the index is not a smi, trigger the non-smi case.
3539   STATIC_ASSERT(kSmiTag == 0);
3540   __ JumpIfNotSmi(index_, &index_not_smi_);
3541   __ bind(&got_smi_index_);
3542 
3543   // Check for index out of range.
3544   __ cmp(index_, FieldOperand(object_, String::kLengthOffset));
3545   __ j(above_equal, index_out_of_range_);
3546 
3547   __ SmiUntag(index_);
3548 
3549   Factory* factory = masm->isolate()->factory();
3550   StringCharLoadGenerator::Generate(
3551       masm, factory, object_, index_, result_, &call_runtime_);
3552 
3553   __ SmiTag(result_);
3554   __ bind(&exit_);
3555 }
3556 
3557 
3558 void StringCharCodeAtGenerator::GenerateSlow(
3559     MacroAssembler* masm,
3560     const RuntimeCallHelper& call_helper) {
3561   __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
3562 
3563   // Index is not a smi.
3564   __ bind(&index_not_smi_);
3565   // If index is a heap number, try converting it to an integer.
3566   __ CheckMap(index_,
3567               masm->isolate()->factory()->heap_number_map(),
3568               index_not_number_,
3569               DONT_DO_SMI_CHECK);
3570   call_helper.BeforeCall(masm);
3571   __ push(object_);
3572   __ push(index_);  // Consumed by runtime conversion function.
3573   if (index_flags_ == STRING_INDEX_IS_NUMBER) {
3574     __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
3575   } else {
3576     ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
3577     // NumberToSmi discards numbers that are not exact integers.
3578     __ CallRuntime(Runtime::kNumberToSmi, 1);
3579   }
3580   if (!index_.is(eax)) {
3581     // Save the conversion result before the pop instructions below
3582     // have a chance to overwrite it.
3583     __ mov(index_, eax);
3584   }
3585   __ pop(object_);
3586   // Reload the instance type.
3587   __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
3588   __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
3589   call_helper.AfterCall(masm);
3590   // If index is still not a smi, it must be out of range.
3591   STATIC_ASSERT(kSmiTag == 0);
3592   __ JumpIfNotSmi(index_, index_out_of_range_);
3593   // Otherwise, return to the fast path.
3594   __ jmp(&got_smi_index_);
3595 
3596   // Call runtime. We get here when the receiver is a string and the
3597   // index is a number, but the code for getting the actual character
3598   // is too complex (e.g., when the string needs to be flattened).
3599   __ bind(&call_runtime_);
3600   call_helper.BeforeCall(masm);
3601   __ push(object_);
3602   __ SmiTag(index_);
3603   __ push(index_);
3604   __ CallRuntime(Runtime::kStringCharCodeAt, 2);
3605   if (!result_.is(eax)) {
3606     __ mov(result_, eax);
3607   }
3608   call_helper.AfterCall(masm);
3609   __ jmp(&exit_);
3610 
3611   __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
3612 }
3613 
3614 
3615 // -------------------------------------------------------------------------
3616 // StringCharFromCodeGenerator
3617 
3618 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
3619   // Fast case of Heap::LookupSingleCharacterStringFromCode.
3620   STATIC_ASSERT(kSmiTag == 0);
3621   STATIC_ASSERT(kSmiShiftSize == 0);
3622   ASSERT(IsPowerOf2(String::kMaxOneByteCharCode + 1));
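  // Because kMaxOneByteCharCode + 1 is a power of two, a single test covers
  // both requirements: the value must be a smi (low bit clear) and the
  // untagged char code must not exceed String::kMaxOneByteCharCode.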
3623   __ test(code_,
3624           Immediate(kSmiTagMask |
3625                     ((~String::kMaxOneByteCharCode) << kSmiTagSize)));
3626   __ j(not_zero, &slow_case_);
3627 
3628   Factory* factory = masm->isolate()->factory();
3629   __ Set(result_, Immediate(factory->single_character_string_cache()));
3630   STATIC_ASSERT(kSmiTag == 0);
3631   STATIC_ASSERT(kSmiTagSize == 1);
3632   STATIC_ASSERT(kSmiShiftSize == 0);
3633   // At this point the code register contains a smi-tagged ASCII char code.
3634   __ mov(result_, FieldOperand(result_,
3635                                code_, times_half_pointer_size,
3636                                FixedArray::kHeaderSize));
3637   __ cmp(result_, factory->undefined_value());
3638   __ j(equal, &slow_case_);
3639   __ bind(&exit_);
3640 }
3641 
3642 
3643 void StringCharFromCodeGenerator::GenerateSlow(
3644     MacroAssembler* masm,
3645     const RuntimeCallHelper& call_helper) {
3646   __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
3647 
3648   __ bind(&slow_case_);
3649   call_helper.BeforeCall(masm);
3650   __ push(code_);
3651   __ CallRuntime(Runtime::kCharFromCode, 1);
3652   if (!result_.is(eax)) {
3653     __ mov(result_, eax);
3654   }
3655   call_helper.AfterCall(masm);
3656   __ jmp(&exit_);
3657 
3658   __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
3659 }
3660 
3661 
3662 void StringAddStub::Generate(MacroAssembler* masm) {
3663   Label call_runtime, call_builtin;
3664   Builtins::JavaScript builtin_id = Builtins::ADD;
3665 
3666   // Load the two arguments.
3667   __ mov(eax, Operand(esp, 2 * kPointerSize));  // First argument.
3668   __ mov(edx, Operand(esp, 1 * kPointerSize));  // Second argument.
3669 
3670   // Make sure that both arguments are strings if not known in advance.
3671   // Otherwise, at least one of the arguments is definitely a string,
3672   // and we convert the one that is not known to be a string.
3673   if ((flags_ & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
3674     ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT);
3675     ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT);
3676     __ JumpIfSmi(eax, &call_runtime);
3677     __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ebx);
3678     __ j(above_equal, &call_runtime);
3679 
3680     // First argument is a string; test the second.
3681     __ JumpIfSmi(edx, &call_runtime);
3682     __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ebx);
3683     __ j(above_equal, &call_runtime);
3684   } else if ((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
3685     ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == 0);
3686     GenerateConvertArgument(masm, 2 * kPointerSize, eax, ebx, ecx, edi,
3687                             &call_builtin);
3688     builtin_id = Builtins::STRING_ADD_RIGHT;
3689   } else if ((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
3690     ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == 0);
3691     GenerateConvertArgument(masm, 1 * kPointerSize, edx, ebx, ecx, edi,
3692                             &call_builtin);
3693     builtin_id = Builtins::STRING_ADD_LEFT;
3694   }
3695 
3696   // Both arguments are strings.
3697   // eax: first string
3698   // edx: second string
3699   // Check if either of the strings is empty. In that case return the other.
3700   Label second_not_zero_length, both_not_zero_length;
3701   __ mov(ecx, FieldOperand(edx, String::kLengthOffset));
3702   STATIC_ASSERT(kSmiTag == 0);
3703   __ test(ecx, ecx);
3704   __ j(not_zero, &second_not_zero_length, Label::kNear);
3705   // Second string is empty, result is first string which is already in eax.
3706   Counters* counters = masm->isolate()->counters();
3707   __ IncrementCounter(counters->string_add_native(), 1);
3708   __ ret(2 * kPointerSize);
3709   __ bind(&second_not_zero_length);
3710   __ mov(ebx, FieldOperand(eax, String::kLengthOffset));
3711   STATIC_ASSERT(kSmiTag == 0);
3712   __ test(ebx, ebx);
3713   __ j(not_zero, &both_not_zero_length, Label::kNear);
3714   // First string is empty, result is second string which is in edx.
3715   __ mov(eax, edx);
3716   __ IncrementCounter(counters->string_add_native(), 1);
3717   __ ret(2 * kPointerSize);
3718 
3719   // Both strings are non-empty.
3720   // eax: first string
3721   // ebx: length of first string as a smi
3722   // ecx: length of second string as a smi
3723   // edx: second string
3724   // Look at the length of the result of adding the two strings.
3725   Label string_add_flat_result, longer_than_two;
3726   __ bind(&both_not_zero_length);
3727   __ add(ebx, ecx);
3728   STATIC_ASSERT(Smi::kMaxValue == String::kMaxLength);
3729   // Handle exceptionally long strings in the runtime system.
3730   __ j(overflow, &call_runtime);
3731   // Use the string table when adding two one-character strings, as it
3732   // helps later optimizations to return an internalized string here.
3733   __ cmp(ebx, Immediate(Smi::FromInt(2)));
3734   __ j(not_equal, &longer_than_two);
3735 
3736   // Check that both strings are non-external ASCII strings.
3737   __ JumpIfNotBothSequentialAsciiStrings(eax, edx, ebx, ecx, &call_runtime);
3738 
3739   // Get the two characters forming the new string.
3740   __ movzx_b(ebx, FieldOperand(eax, SeqOneByteString::kHeaderSize));
3741   __ movzx_b(ecx, FieldOperand(edx, SeqOneByteString::kHeaderSize));
3742 
3743   // Try to look up the two-character string in the string table. If it is
3744   // not found, just allocate a new one.
3745   Label make_two_character_string, make_two_character_string_no_reload;
3746   StringHelper::GenerateTwoCharacterStringTableProbe(
3747       masm, ebx, ecx, eax, edx, edi,
3748       &make_two_character_string_no_reload, &make_two_character_string);
3749   __ IncrementCounter(counters->string_add_native(), 1);
3750   __ ret(2 * kPointerSize);
3751 
3752   // Allocate a two character string.
3753   __ bind(&make_two_character_string);
3754   // Reload the arguments.
3755   __ mov(eax, Operand(esp, 2 * kPointerSize));  // First argument.
3756   __ mov(edx, Operand(esp, 1 * kPointerSize));  // Second argument.
3757   // Get the two characters forming the new string.
3758   __ movzx_b(ebx, FieldOperand(eax, SeqOneByteString::kHeaderSize));
3759   __ movzx_b(ecx, FieldOperand(edx, SeqOneByteString::kHeaderSize));
3760   __ bind(&make_two_character_string_no_reload);
3761   __ IncrementCounter(counters->string_add_make_two_char(), 1);
3762   __ AllocateAsciiString(eax, 2, edi, edx, &call_runtime);
3763   // Pack both characters in ebx.
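  // For example, adding "a" (ebx == 0x61) and "b" (ecx == 0x62): after the
  // shift/or below ebx holds 0x6261, and the 16-bit store writes the bytes
  // 0x61, 0x62 in order (little-endian), i.e. the string "ab".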
3764   __ shl(ecx, kBitsPerByte);
3765   __ or_(ebx, ecx);
3766   // Set the characters in the new string.
3767   __ mov_w(FieldOperand(eax, SeqOneByteString::kHeaderSize), ebx);
3768   __ IncrementCounter(counters->string_add_native(), 1);
3769   __ ret(2 * kPointerSize);
3770 
3771   __ bind(&longer_than_two);
3772   // Check if resulting string will be flat.
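  // Below ConsString::kMinLength characters, copying into a flat string is
  // presumed cheaper than allocating a cons cell and paying for flattening
  // later.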
3773   __ cmp(ebx, Immediate(Smi::FromInt(ConsString::kMinLength)));
3774   __ j(below, &string_add_flat_result);
3775 
3776   // If the result is not supposed to be flat, allocate a cons string object.
3777   // If both strings are ASCII, the result is an ASCII cons string.
3778   Label non_ascii, allocated, ascii_data;
3779   __ mov(edi, FieldOperand(eax, HeapObject::kMapOffset));
3780   __ movzx_b(ecx, FieldOperand(edi, Map::kInstanceTypeOffset));
3781   __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
3782   __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset));
3783   __ and_(ecx, edi);
3784   STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
3785   STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
3786   __ test(ecx, Immediate(kStringEncodingMask));
3787   __ j(zero, &non_ascii);
3788   __ bind(&ascii_data);
3789   // Allocate an ASCII cons string.
3790   __ AllocateAsciiConsString(ecx, edi, no_reg, &call_runtime);
3791   __ bind(&allocated);
3792   // Fill the fields of the cons string.
3793   __ AssertSmi(ebx);
3794   __ mov(FieldOperand(ecx, ConsString::kLengthOffset), ebx);
3795   __ mov(FieldOperand(ecx, ConsString::kHashFieldOffset),
3796          Immediate(String::kEmptyHashField));
3797 
3798   Label skip_write_barrier, after_writing;
3799   ExternalReference high_promotion_mode = ExternalReference::
3800       new_space_high_promotion_mode_active_address(masm->isolate());
3801   __ test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
3802   __ j(zero, &skip_write_barrier);
3803 
3804   __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
3805   __ RecordWriteField(ecx,
3806                      ConsString::kFirstOffset,
3807                      eax,
3808                      ebx,
3809                      kDontSaveFPRegs);
3810   __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
3811   __ RecordWriteField(ecx,
3812                      ConsString::kSecondOffset,
3813                      edx,
3814                      ebx,
3815                      kDontSaveFPRegs);
3816   __ jmp(&after_writing);
3817 
3818   __ bind(&skip_write_barrier);
3819   __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
3820   __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
3821 
3822   __ bind(&after_writing);
3823 
3824   __ mov(eax, ecx);
3825   __ IncrementCounter(counters->string_add_native(), 1);
3826   __ ret(2 * kPointerSize);
3827   __ bind(&non_ascii);
3828   // At least one of the strings is two-byte. Check whether it happens
3829   // to contain only one-byte characters.
3830   // ecx: first instance type AND second instance type.
3831   // edi: second instance type.
3832   __ test(ecx, Immediate(kOneByteDataHintMask));
3833   __ j(not_zero, &ascii_data);
3834   __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
3835   __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
3836   __ xor_(edi, ecx);
3837   STATIC_ASSERT(kOneByteStringTag != 0 && kOneByteDataHintTag != 0);
3838   __ and_(edi, kOneByteStringTag | kOneByteDataHintTag);
3839   __ cmp(edi, kOneByteStringTag | kOneByteDataHintTag);
3840   __ j(equal, &ascii_data);
3841   // Allocate a two byte cons string.
3842   __ AllocateTwoByteConsString(ecx, edi, no_reg, &call_runtime);
3843   __ jmp(&allocated);
3844 
3845   // We cannot encounter sliced strings or cons strings here since:
3846   STATIC_ASSERT(SlicedString::kMinLength >= ConsString::kMinLength);
3847   // Handle creating a flat result from either external or sequential strings.
3848   // Locate the first characters' locations.
3849   // eax: first string
3850   // ebx: length of resulting flat string as a smi
3851   // edx: second string
3852   Label first_prepared, second_prepared;
3853   Label first_is_sequential, second_is_sequential;
3854   __ bind(&string_add_flat_result);
3855   __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
3856   __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
3857   // ecx: instance type of first string
3858   STATIC_ASSERT(kSeqStringTag == 0);
3859   __ test_b(ecx, kStringRepresentationMask);
3860   __ j(zero, &first_is_sequential, Label::kNear);
  // Rule out short external string and load string resource.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ test_b(ecx, kShortExternalStringMask);
  __ j(not_zero, &call_runtime);
  __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
  STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
  __ jmp(&first_prepared, Label::kNear);
  __ bind(&first_is_sequential);
  __ add(eax, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ bind(&first_prepared);

  __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
  __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset));
  // Check whether both strings have same encoding.
  // edi: instance type of second string
  __ xor_(ecx, edi);
  __ test_b(ecx, kStringEncodingMask);
  __ j(not_zero, &call_runtime);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ test_b(edi, kStringRepresentationMask);
  __ j(zero, &second_is_sequential, Label::kNear);
  // Rule out short external string and load string resource.
  STATIC_ASSERT(kShortExternalStringTag != 0);
  __ test_b(edi, kShortExternalStringMask);
  __ j(not_zero, &call_runtime);
  __ mov(edx, FieldOperand(edx, ExternalString::kResourceDataOffset));
  STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
  __ jmp(&second_prepared, Label::kNear);
  __ bind(&second_is_sequential);
  __ add(edx, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  __ bind(&second_prepared);

  // Push the addresses of both strings' first characters onto the stack.
  __ push(edx);
  __ push(eax);

  Label non_ascii_string_add_flat_result, call_runtime_drop_two;
  // edi: instance type of second string
  // First string and second string have the same encoding.
  STATIC_ASSERT(kTwoByteStringTag == 0);
  __ test_b(edi, kStringEncodingMask);
  __ j(zero, &non_ascii_string_add_flat_result);

  // Both strings are ASCII strings.
  // ebx: length of resulting flat string as a smi
  __ SmiUntag(ebx);
  __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &call_runtime_drop_two);
  // eax: result string
  __ mov(ecx, eax);
  // Locate first character of result.
  __ add(ecx, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  // Load first argument's length and first character location.  Account for
  // values currently on the stack when fetching arguments from it.
  __ mov(edx, Operand(esp, 4 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ pop(edx);
  // eax: result string
  // ecx: first character of result
  // edx: first char of first argument
  // edi: length of first argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
  // Load second argument's length and first character location.  Account for
  // values currently on the stack when fetching arguments from it.
  __ mov(edx, Operand(esp, 2 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ pop(edx);
  // eax: result string
  // ecx: next character of result
  // edx: first char of second argument
  // edi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Handle creating a flat two byte result.
  // eax: first string - known to be two byte
  // ebx: length of resulting flat string as a smi
  // edx: second string
  __ bind(&non_ascii_string_add_flat_result);
  // Both strings are two byte strings.
  __ SmiUntag(ebx);
  __ AllocateTwoByteString(eax, ebx, ecx, edx, edi, &call_runtime_drop_two);
  // eax: result string
  __ mov(ecx, eax);
  // Locate first character of result.
  __ add(ecx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Load first argument's length and first character location.  Account for
  // values currently on the stack when fetching arguments from it.
  __ mov(edx, Operand(esp, 4 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ pop(edx);
  // eax: result string
  // ecx: first character of result
  // edx: first char of first argument
  // edi: length of first argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
  // Load second argument's length and first character location.  Account for
  // values currently on the stack when fetching arguments from it.
  __ mov(edx, Operand(esp, 2 * kPointerSize));
  __ mov(edi, FieldOperand(edx, String::kLengthOffset));
  __ SmiUntag(edi);
  __ pop(edx);
  // eax: result string
  // ecx: next character of result
  // edx: first char of second argument
  // edi: length of second argument
  StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
  __ IncrementCounter(counters->string_add_native(), 1);
  __ ret(2 * kPointerSize);

  // Recover stack pointer before jumping to runtime.
  __ bind(&call_runtime_drop_two);
  __ Drop(2);
  // Just jump to runtime to add the two strings.
  __ bind(&call_runtime);
  __ TailCallRuntime(Runtime::kStringAdd, 2, 1);

  if (call_builtin.is_linked()) {
    __ bind(&call_builtin);
    __ InvokeBuiltin(builtin_id, JUMP_FUNCTION);
  }
}

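// The two flat-result paths above boil down to this C-level sketch
// (illustrative only; AllocateSeqString/CopyChars stand in for the
// macro-assembler helpers and are not literal V8 APIs):
//
//   result = AllocateSeqString(len1 + len2, encoding);
//   CopyChars(result->chars(),        first->chars(),  len1);
//   CopyChars(result->chars() + len1, second->chars(), len2);
//   return result;
//
// with a bailout to Runtime::kStringAdd whenever a short external string
// is seen or allocation fails.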

void StringAddStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
  __ push(eax);
  __ push(edx);
}


void StringAddStub::GenerateRegisterArgsPop(MacroAssembler* masm,
                                            Register temp) {
  __ pop(temp);
  __ pop(edx);
  __ pop(eax);
  __ push(temp);
}

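// A rough sketch of what GenerateConvertArgument below does (illustrative
// pseudo-C; LookupNumberStringCache names the macro-assembler helper, which
// jumps to |slow| when the cache has no entry for the argument):
//
//   if (!arg->IsString()) {
//     arg = LookupNumberStringCache(arg);   // or jump to |slow|
//     stack[stack_offset] = arg;            // keep the stack copy in sync
//   }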
void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
                                            int stack_offset,
                                            Register arg,
                                            Register scratch1,
                                            Register scratch2,
                                            Register scratch3,
                                            Label* slow) {
  // First check if the argument is already a string.
  Label not_string, done;
  __ JumpIfSmi(arg, &not_string);
  __ CmpObjectType(arg, FIRST_NONSTRING_TYPE, scratch1);
  __ j(below, &done);

  // Check the number to string cache.
  __ bind(&not_string);
  // Puts the cached result into scratch1.
  __ LookupNumberStringCache(arg, scratch1, scratch2, scratch3, slow);
  __ mov(arg, scratch1);
  __ mov(Operand(esp, stack_offset), arg);
  __ bind(&done);
}

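// The copy below is one character per iteration; in C, roughly (sketch,
// with uint8_t/uint16_t selected by |ascii|):
//
//   do {
//     *dest++ = *src++;
//   } while (--count != 0);
//
// Note the body runs before the count check, so count must be non-zero on
// entry.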
void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
                                          Register dest,
                                          Register src,
                                          Register count,
                                          Register scratch,
                                          bool ascii) {
  Label loop;
  __ bind(&loop);
  // This loop just copies one character at a time, as it is only used for very
  // short strings.
  if (ascii) {
    __ mov_b(scratch, Operand(src, 0));
    __ mov_b(Operand(dest, 0), scratch);
    __ add(src, Immediate(1));
    __ add(dest, Immediate(1));
  } else {
    __ mov_w(scratch, Operand(src, 0));
    __ mov_w(Operand(dest, 0), scratch);
    __ add(src, Immediate(2));
    __ add(dest, Immediate(2));
  }
  __ sub(count, Immediate(1));
  __ j(not_zero, &loop);
}

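// The REP-based copy below is, at the C level, roughly (sketch):
//
//   int bytes = ascii ? count : count * 2;
//   // rep movsd: whole doublewords first ...
//   memcpy(dest, src, bytes & ~3);
//   // ... then the 0-3 trailing bytes one at a time.
//   for (int i = bytes & ~3; i < bytes; i++) dest[i] = src[i];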
void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
                                             Register dest,
                                             Register src,
                                             Register count,
                                             Register scratch,
                                             bool ascii) {
  // Copy characters using rep movs of doublewords.
  // The destination is aligned on a 4 byte boundary because we are
  // copying to the beginning of a newly allocated string.
  ASSERT(dest.is(edi));  // rep movs destination
  ASSERT(src.is(esi));  // rep movs source
  ASSERT(count.is(ecx));  // rep movs count
  ASSERT(!scratch.is(dest));
  ASSERT(!scratch.is(src));
  ASSERT(!scratch.is(count));

  // Nothing to do for zero characters.
  Label done;
  __ test(count, count);
  __ j(zero, &done);

  // Make count the number of bytes to copy.
  if (!ascii) {
    __ shl(count, 1);
  }

  // Don't enter the rep movs if there are fewer than 4 bytes to copy.
  Label last_bytes;
  __ test(count, Immediate(~3));
  __ j(zero, &last_bytes, Label::kNear);

  // Copy from esi to edi using the rep movs instruction.
  __ mov(scratch, count);
  __ sar(count, 2);  // Number of doublewords to copy.
  __ cld();
  __ rep_movs();

  // Find number of bytes left.
  __ mov(count, scratch);
  __ and_(count, 3);

  // Check if there are more bytes to copy.
  __ bind(&last_bytes);
  __ test(count, count);
  __ j(zero, &done);

  // Copy remaining characters.
  Label loop;
  __ bind(&loop);
  __ mov_b(scratch, Operand(src, 0));
  __ mov_b(Operand(dest, 0), scratch);
  __ add(src, Immediate(1));
  __ add(dest, Immediate(1));
  __ sub(count, Immediate(1));
  __ j(not_zero, &loop);

  __ bind(&done);
}

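// The probing below is open addressing over the string table; in C,
// roughly (sketch; accessor names are illustrative, not V8 APIs):
//
//   uint32_t mask = table->capacity() - 1;
//   for (int i = 0; i < kProbes; i++) {
//     Object* entry = table->get((hash + GetProbeOffset(i)) & mask);
//     if (entry == undefined) return not_found;   // hash chain ends here
//     if (entry == the_hole) continue;            // deleted entry
//     if (IsSeqAsciiString(entry) && entry->length() == 2 &&
//         entry->Get(0) == c1 && entry->Get(1) == c2) return entry;
//   }
//   return not_found;  // give up after kProbes probes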
void StringHelper::GenerateTwoCharacterStringTableProbe(MacroAssembler* masm,
                                                        Register c1,
                                                        Register c2,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3,
                                                        Label* not_probed,
                                                        Label* not_found) {
  // Register scratch3 is the general scratch register in this function.
  Register scratch = scratch3;

  // Make sure that both characters are not digits, as such strings have a
  // different hash algorithm. Don't try to look for these in the string table.
  Label not_array_index;
  __ mov(scratch, c1);
  __ sub(scratch, Immediate(static_cast<int>('0')));
  __ cmp(scratch, Immediate(static_cast<int>('9' - '0')));
  __ j(above, &not_array_index, Label::kNear);
  __ mov(scratch, c2);
  __ sub(scratch, Immediate(static_cast<int>('0')));
  __ cmp(scratch, Immediate(static_cast<int>('9' - '0')));
  __ j(below_equal, not_probed);

  __ bind(&not_array_index);
  // Calculate the two character string hash.
  Register hash = scratch1;
  GenerateHashInit(masm, hash, c1, scratch);
  GenerateHashAddCharacter(masm, hash, c2, scratch);
  GenerateHashGetHash(masm, hash, scratch);

  // Collect the two characters in a register.
  Register chars = c1;
  __ shl(c2, kBitsPerByte);
  __ or_(chars, c2);

  // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash:  hash of two character string.

  // Load the string table.
  Register string_table = c2;
  __ LoadRoot(string_table, Heap::kStringTableRootIndex);

  // Calculate capacity mask from the string table capacity.
  Register mask = scratch2;
  __ mov(mask, FieldOperand(string_table, StringTable::kCapacityOffset));
  __ SmiUntag(mask);
  __ sub(mask, Immediate(1));

  // Registers
  // chars:        two character string, char 1 in byte 0 and char 2 in byte 1.
  // hash:         hash of two character string
  // string_table: string table
  // mask:         capacity mask
  // scratch:      -

  // Perform a number of probes in the string table.
  static const int kProbes = 4;
  Label found_in_string_table;
  Label next_probe[kProbes], next_probe_pop_mask[kProbes];
  Register candidate = scratch;  // Scratch register contains candidate.
  for (int i = 0; i < kProbes; i++) {
    // Calculate entry in string table.
    __ mov(scratch, hash);
    if (i > 0) {
      __ add(scratch, Immediate(StringTable::GetProbeOffset(i)));
    }
    __ and_(scratch, mask);

    // Load the entry from the string table.
    STATIC_ASSERT(StringTable::kEntrySize == 1);
    __ mov(candidate,
           FieldOperand(string_table,
                        scratch,
                        times_pointer_size,
                        StringTable::kElementsStartOffset));

    // If the entry is undefined, no string with this hash can be found.
    Factory* factory = masm->isolate()->factory();
    __ cmp(candidate, factory->undefined_value());
    __ j(equal, not_found);
    __ cmp(candidate, factory->the_hole_value());
    __ j(equal, &next_probe[i]);

    // If length is not 2 the string is not a candidate.
    __ cmp(FieldOperand(candidate, String::kLengthOffset),
           Immediate(Smi::FromInt(2)));
    __ j(not_equal, &next_probe[i]);

    // As we are out of registers, save the mask on the stack and use that
    // register as a temporary.
    __ push(mask);
    Register temp = mask;

    // Check that the candidate is a non-external ASCII string.
    __ mov(temp, FieldOperand(candidate, HeapObject::kMapOffset));
    __ movzx_b(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
    __ JumpIfInstanceTypeIsNotSequentialAscii(
        temp, temp, &next_probe_pop_mask[i]);

    // Check if the two characters match.
    __ mov(temp, FieldOperand(candidate, SeqOneByteString::kHeaderSize));
    __ and_(temp, 0x0000ffff);
    __ cmp(chars, temp);
    __ j(equal, &found_in_string_table);
    __ bind(&next_probe_pop_mask[i]);
    __ pop(mask);
    __ bind(&next_probe[i]);
  }

  // No matching 2 character string found by probing.
  __ jmp(not_found);

  // Scratch register contains result when we fall through to here.
  Register result = candidate;
  __ bind(&found_in_string_table);
  __ pop(mask);  // Pop saved mask from the stack.
  if (!result.is(eax)) {
    __ mov(eax, result);
  }
}

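// GenerateHashInit/GenerateHashAddCharacter/GenerateHashGetHash below
// implement V8's incremental string hash (a Jenkins one-at-a-time style
// hash); as straight C the three stages are:
//
//   hash = seed + c0;  hash += hash << 10;  hash ^= hash >> 6;   // init
//   hash += c;         hash += hash << 10;  hash ^= hash >> 6;   // per char
//   hash += hash << 3; hash ^= hash >> 11;  hash += hash << 15;  // finalize
//   hash &= String::kHashBitMask;
//   if (hash == 0) hash = StringHasher::kZeroHash;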
void StringHelper::GenerateHashInit(MacroAssembler* masm,
                                    Register hash,
                                    Register character,
                                    Register scratch) {
  // hash = (seed + character) + ((seed + character) << 10);
  if (Serializer::enabled()) {
    __ LoadRoot(scratch, Heap::kHashSeedRootIndex);
    __ SmiUntag(scratch);
    __ add(scratch, character);
    __ mov(hash, scratch);
    __ shl(scratch, 10);
    __ add(hash, scratch);
  } else {
    int32_t seed = masm->isolate()->heap()->HashSeed();
    __ lea(scratch, Operand(character, seed));
    __ shl(scratch, 10);
    __ lea(hash, Operand(scratch, character, times_1, seed));
  }
  // hash ^= hash >> 6;
  __ mov(scratch, hash);
  __ shr(scratch, 6);
  __ xor_(hash, scratch);
}


void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
                                            Register hash,
                                            Register character,
                                            Register scratch) {
  // hash += character;
  __ add(hash, character);
  // hash += hash << 10;
  __ mov(scratch, hash);
  __ shl(scratch, 10);
  __ add(hash, scratch);
  // hash ^= hash >> 6;
  __ mov(scratch, hash);
  __ shr(scratch, 6);
  __ xor_(hash, scratch);
}


void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
                                       Register hash,
                                       Register scratch) {
  // hash += hash << 3;
  __ mov(scratch, hash);
  __ shl(scratch, 3);
  __ add(hash, scratch);
  // hash ^= hash >> 11;
  __ mov(scratch, hash);
  __ shr(scratch, 11);
  __ xor_(hash, scratch);
  // hash += hash << 15;
  __ mov(scratch, hash);
  __ shl(scratch, 15);
  __ add(hash, scratch);

  __ and_(hash, String::kHashBitMask);

  // if (hash == 0) hash = 27;
  Label hash_not_zero;
  __ j(not_zero, &hash_not_zero, Label::kNear);
  __ mov(hash, Immediate(StringHasher::kZeroHash));
  __ bind(&hash_not_zero);
}

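// Fast paths taken by the stub below, in the order they are tried (a
// summary of the control flow, not additional behavior):
//   1. the range covers the whole string -> return the original string;
//   2. length == 1                       -> StringCharAtGenerator;
//   3. length >= SlicedString::kMinLength and slices are enabled
//                                        -> allocate a SlicedString that
//                                           points into the parent;
//   4. otherwise                         -> copy into a fresh sequential
//                                           string.
// Non-flat cons strings, short external strings and non-smi indices all
// bail out to Runtime::kSubString.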
void SubStringStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: to
  //  esp[8]: from
  //  esp[12]: string

  // Make sure first argument is a string.
  __ mov(eax, Operand(esp, 3 * kPointerSize));
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(eax, &runtime);
  Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
  __ j(NegateCondition(is_string), &runtime);

  // eax: string
  // ebx: instance type

  // Calculate length of sub string using the smi values.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));  // To index.
  __ JumpIfNotSmi(ecx, &runtime);
  __ mov(edx, Operand(esp, 2 * kPointerSize));  // From index.
  __ JumpIfNotSmi(edx, &runtime);
  __ sub(ecx, edx);
  __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
  Label not_original_string;
  // Shorter than original string's length: an actual substring.
  __ j(below, &not_original_string, Label::kNear);
  // Longer than original string's length or negative: unsafe arguments.
  __ j(above, &runtime);
  // Return original string.
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(3 * kPointerSize);
  __ bind(&not_original_string);

  Label single_char;
  __ cmp(ecx, Immediate(Smi::FromInt(1)));
  __ j(equal, &single_char);

  // eax: string
  // ebx: instance type
  // ecx: sub string length (smi)
  // edx: from index (smi)
  // Deal with different string types: update the index if necessary
  // and put the underlying string into edi.
  Label underlying_unpacked, sliced_string, seq_or_external_string;
  // If the string is not indirect, it can only be sequential or external.
  STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
  STATIC_ASSERT(kIsIndirectStringMask != 0);
  __ test(ebx, Immediate(kIsIndirectStringMask));
  __ j(zero, &seq_or_external_string, Label::kNear);

  Factory* factory = masm->isolate()->factory();
  __ test(ebx, Immediate(kSlicedNotConsMask));
  __ j(not_zero, &sliced_string, Label::kNear);
  // Cons string.  Check whether it is flat, then fetch first part.
  // Flat cons strings have an empty second part.
  __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
         factory->empty_string());
  __ j(not_equal, &runtime);
  __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
  // Update instance type.
  __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  __ jmp(&underlying_unpacked, Label::kNear);

  __ bind(&sliced_string);
  // Sliced string.  Fetch parent and adjust start index by offset.
  __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
  __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
  // Update instance type.
  __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  __ jmp(&underlying_unpacked, Label::kNear);

  __ bind(&seq_or_external_string);
  // Sequential or external string.  Just move string to the expected register.
  __ mov(edi, eax);

  __ bind(&underlying_unpacked);

  if (FLAG_string_slices) {
    Label copy_routine;
    // edi: underlying subject string
    // ebx: instance type of underlying subject string
    // edx: adjusted start index (smi)
    // ecx: length (smi)
    __ cmp(ecx, Immediate(Smi::FromInt(SlicedString::kMinLength)));
    // Short slice.  Copy instead of slicing.
    __ j(less, &copy_routine);
    // Allocate new sliced string.  At this point we do not reload the instance
    // type including the string encoding because we simply rely on the info
    // provided by the original string.  It does not matter if the original
    // string's encoding is wrong because we always have to recheck encoding of
    // the newly created string's parent anyway due to externalized strings.
    Label two_byte_slice, set_slice_header;
    STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
    STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
    __ test(ebx, Immediate(kStringEncodingMask));
    __ j(zero, &two_byte_slice, Label::kNear);
    __ AllocateAsciiSlicedString(eax, ebx, no_reg, &runtime);
    __ jmp(&set_slice_header, Label::kNear);
    __ bind(&two_byte_slice);
    __ AllocateTwoByteSlicedString(eax, ebx, no_reg, &runtime);
    __ bind(&set_slice_header);
    __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
    __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
           Immediate(String::kEmptyHashField));
    __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
    __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
    __ IncrementCounter(counters->sub_string_native(), 1);
    __ ret(3 * kPointerSize);

    __ bind(&copy_routine);
  }

  // edi: underlying subject string
  // ebx: instance type of underlying subject string
  // edx: adjusted start index (smi)
  // ecx: length (smi)
  // At this point the subject string can only be an external or sequential
  // string of either encoding.
  Label two_byte_sequential, runtime_drop_two, sequential_string;
  STATIC_ASSERT(kExternalStringTag != 0);
  STATIC_ASSERT(kSeqStringTag == 0);
  __ test_b(ebx, kExternalStringTag);
  __ j(zero, &sequential_string);

  // Handle external string.
  // Rule out short external strings.
  STATIC_CHECK(kShortExternalStringTag != 0);
  __ test_b(ebx, kShortExternalStringMask);
  __ j(not_zero, &runtime);
  __ mov(edi, FieldOperand(edi, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ sub(edi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));

  __ bind(&sequential_string);
  // Stash away (adjusted) index and (underlying) string.
  __ push(edx);
  __ push(edi);
  __ SmiUntag(ecx);
  STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
  __ test_b(ebx, kStringEncodingMask);
  __ j(zero, &two_byte_sequential);

  // Sequential ASCII string.  Allocate the result.
  __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime_drop_two);

  // eax: result string
  // ecx: result string length
  __ mov(edx, esi);  // esi used by following code.
  // Locate first character of result.
  __ mov(edi, eax);
  __ add(edi, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
  // Load string argument and locate character of sub string start.
  __ pop(esi);
  __ pop(ebx);
  __ SmiUntag(ebx);
  __ lea(esi, FieldOperand(esi, ebx, times_1, SeqOneByteString::kHeaderSize));

  // eax: result string
  // ecx: result length
  // edx: original value of esi
  // edi: first character of result
  // esi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, true);
  __ mov(esi, edx);  // Restore esi.
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(3 * kPointerSize);

  __ bind(&two_byte_sequential);
  // Sequential two-byte string.  Allocate the result.
  __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);

  // eax: result string
  // ecx: result string length
  __ mov(edx, esi);  // esi used by following code.
  // Locate first character of result.
  __ mov(edi, eax);
  __ add(edi,
         Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  // Load string argument and locate character of sub string start.
  __ pop(esi);
  __ pop(ebx);
  // Because the from index is a smi, it is already multiplied by 2, which
  // matches the size of a two-byte character.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  __ lea(esi, FieldOperand(esi, ebx, times_1, SeqTwoByteString::kHeaderSize));

  // eax: result string
  // ecx: result length
  // edx: original value of esi
  // edi: first character of result
  // esi: character of sub string start
  StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, false);
  __ mov(esi, edx);  // Restore esi.
  __ IncrementCounter(counters->sub_string_native(), 1);
  __ ret(3 * kPointerSize);

  // Drop pushed values on the stack before tail call.
  __ bind(&runtime_drop_two);
  __ Drop(2);

  // Just jump to runtime to create the sub string.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kSubString, 3, 1);

  __ bind(&single_char);
  // eax: string
  // ebx: instance type
  // ecx: sub string length (smi)
  // edx: from index (smi)
  StringCharAtGenerator generator(
      eax, edx, ecx, eax, &runtime, &runtime, &runtime, STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm);
  __ ret(3 * kPointerSize);
  generator.SkipSlow(masm, &runtime);
}


void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
                                                      Register left,
                                                      Register right,
                                                      Register scratch1,
                                                      Register scratch2) {
  Register length = scratch1;

  // Compare lengths.
  Label strings_not_equal, check_zero_length;
  __ mov(length, FieldOperand(left, String::kLengthOffset));
  __ cmp(length, FieldOperand(right, String::kLengthOffset));
  __ j(equal, &check_zero_length, Label::kNear);
  __ bind(&strings_not_equal);
  __ Set(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
  __ ret(0);

  // Check if the length is zero.
  Label compare_chars;
  __ bind(&check_zero_length);
  STATIC_ASSERT(kSmiTag == 0);
  __ test(length, length);
  __ j(not_zero, &compare_chars, Label::kNear);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  // Compare characters.
  __ bind(&compare_chars);
  GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2,
                                &strings_not_equal, Label::kNear);

  // Characters are equal.
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);
}

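// The ordered comparison below has memcmp-then-length semantics; in C,
// roughly (sketch):
//
//   int min_length = Min(len1, len2);
//   for (int i = 0; i < min_length; i++) {
//     if (s1[i] != s2[i]) return s1[i] < s2[i] ? LESS : GREATER;
//   }
//   if (len1 == len2) return EQUAL;
//   return len1 < len2 ? LESS : GREATER;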
void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
                                                        Register left,
                                                        Register right,
                                                        Register scratch1,
                                                        Register scratch2,
                                                        Register scratch3) {
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->string_compare_native(), 1);

  // Find minimum length.
  Label left_shorter;
  __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
  __ mov(scratch3, scratch1);
  __ sub(scratch3, FieldOperand(right, String::kLengthOffset));

  Register length_delta = scratch3;

  __ j(less_equal, &left_shorter, Label::kNear);
  // Right string is shorter. Change scratch1 to be length of right string.
  __ sub(scratch1, length_delta);
  __ bind(&left_shorter);

  Register min_length = scratch1;

  // If either length is zero, just compare lengths.
  Label compare_lengths;
  __ test(min_length, min_length);
  __ j(zero, &compare_lengths, Label::kNear);

  // Compare characters.
  Label result_not_equal;
  GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2,
                                &result_not_equal, Label::kNear);

  // Compare lengths - strings up to min-length are equal.
  __ bind(&compare_lengths);
  __ test(length_delta, length_delta);
  Label length_not_equal;
  __ j(not_zero, &length_not_equal, Label::kNear);

  // Result is EQUAL.
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  Label result_greater;
  Label result_less;
  __ bind(&length_not_equal);
  __ j(greater, &result_greater, Label::kNear);
  __ jmp(&result_less, Label::kNear);
  __ bind(&result_not_equal);
  __ j(above, &result_greater, Label::kNear);
  __ bind(&result_less);

  // Result is LESS.
  __ Set(eax, Immediate(Smi::FromInt(LESS)));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Set(eax, Immediate(Smi::FromInt(GREATER)));
  __ ret(0);
}

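// The loop below uses the negative-index trick; in C, roughly (sketch):
//
//   left += length;  right += length;  // one past the last character
//   for (int index = -length; index != 0; index++) {
//     if (left[index] != right[index]) goto chars_not_equal;
//   }
//
// Counting up towards zero lets the inc instruction set the zero flag, so
// the loop needs no separate bounds compare.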
void StringCompareStub::GenerateAsciiCharsCompareLoop(
    MacroAssembler* masm,
    Register left,
    Register right,
    Register length,
    Register scratch,
    Label* chars_not_equal,
    Label::Distance chars_not_equal_near) {
  // Change index to run from -length to -1 by adding length to the string
  // start. This means the loop ends when the index reaches zero, which
  // doesn't need an additional compare.
  __ SmiUntag(length);
  __ lea(left,
         FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
  __ lea(right,
         FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
  __ neg(length);
  Register index = length;  // index = -length;

  // Compare loop.
  Label loop;
  __ bind(&loop);
  __ mov_b(scratch, Operand(left, index, times_1, 0));
  __ cmpb(scratch, Operand(right, index, times_1, 0));
  __ j(not_equal, chars_not_equal, chars_not_equal_near);
  __ inc(index);
  __ j(not_zero, &loop);
}


void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: right string
  //  esp[8]: left string

  __ mov(edx, Operand(esp, 2 * kPointerSize));  // left
  __ mov(eax, Operand(esp, 1 * kPointerSize));  // right

  Label not_same;
  __ cmp(edx, eax);
  __ j(not_equal, &not_same, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ IncrementCounter(masm->isolate()->counters()->string_compare_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&not_same);

  // Check that both objects are sequential ASCII strings.
  __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime);

  // Compare flat ASCII strings.
  // Drop arguments from the stack.
  __ pop(ecx);
  __ add(esp, Immediate(2 * kPointerSize));
  __ push(ecx);
  GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}

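// For ordered comparisons the stub below computes left - right and, if the
// subtraction overflows, repairs the sign with a NOT; in C terms, roughly
// (sketch; SubOverflow is illustrative):
//
//   int32_t diff;
//   if (!SubOverflow(left, right, &diff)) return diff;
//   return ~diff;  // overflow flipped the sign bit; NOT flips it back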
void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::SMI);
  Label miss;
  __ mov(ecx, edx);
  __ or_(ecx, eax);
  __ JumpIfNotSmi(ecx, &miss, Label::kNear);

  if (GetCondition() == equal) {
    // For equality we do not care about the sign of the result.
    __ sub(eax, edx);
  } else {
    Label done;
    __ sub(edx, eax);
    __ j(no_overflow, &done, Label::kNear);
    // Correct sign of result in case of overflow.
    __ not_(edx);
    __ bind(&done);
    __ mov(eax, edx);
  }
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}

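// The SSE2 fast path below loads both operands as doubles (untagging smis
// on the fly) and then, roughly (sketch):
//
//   if (isnan(left) || isnan(right)) goto unordered;  // ucomisd sets PF
//   return (left > right) - (left < right);           // 1, 0 or -1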
void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::NUMBER);

  Label generic_stub;
  Label unordered, maybe_undefined1, maybe_undefined2;
  Label miss;

  if (left_ == CompareIC::SMI) {
    __ JumpIfNotSmi(edx, &miss);
  }
  if (right_ == CompareIC::SMI) {
    __ JumpIfNotSmi(eax, &miss);
  }

  // Inline the double comparison and fall back to the general compare stub
  // if NaN is involved or if SSE2 or CMOV is unsupported.
  if (CpuFeatures::IsSupported(SSE2) && CpuFeatures::IsSupported(CMOV)) {
    CpuFeatureScope scope1(masm, SSE2);
    CpuFeatureScope scope2(masm, CMOV);

    // Load left and right operand.
    Label done, left, left_smi, right_smi;
    __ JumpIfSmi(eax, &right_smi, Label::kNear);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           masm->isolate()->factory()->heap_number_map());
    __ j(not_equal, &maybe_undefined1, Label::kNear);
    __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
    __ jmp(&left, Label::kNear);
    __ bind(&right_smi);
    __ mov(ecx, eax);  // Can't clobber eax because we can still jump away.
    __ SmiUntag(ecx);
    __ Cvtsi2sd(xmm1, ecx);

    __ bind(&left);
    __ JumpIfSmi(edx, &left_smi, Label::kNear);
    __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
           masm->isolate()->factory()->heap_number_map());
    __ j(not_equal, &maybe_undefined2, Label::kNear);
    __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
    __ jmp(&done);
    __ bind(&left_smi);
    __ mov(ecx, edx);  // Can't clobber edx because we can still jump away.
    __ SmiUntag(ecx);
    __ Cvtsi2sd(xmm0, ecx);

    __ bind(&done);
    // Compare operands.
    __ ucomisd(xmm0, xmm1);

    // Don't base result on EFLAGS when a NaN is involved.
    __ j(parity_even, &unordered, Label::kNear);

    // Return a result of -1, 0, or 1, based on EFLAGS.
    // Performing mov, because xor would destroy the flag register.
    __ mov(eax, 0);  // equal
    __ mov(ecx, Immediate(Smi::FromInt(1)));
    __ cmov(above, eax, ecx);
    __ mov(ecx, Immediate(Smi::FromInt(-1)));
    __ cmov(below, eax, ecx);
    __ ret(0);
  } else {
    __ mov(ecx, edx);
    __ and_(ecx, eax);
    __ JumpIfSmi(ecx, &generic_stub, Label::kNear);

    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           masm->isolate()->factory()->heap_number_map());
    __ j(not_equal, &maybe_undefined1, Label::kNear);
    __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
           masm->isolate()->factory()->heap_number_map());
    __ j(not_equal, &maybe_undefined2, Label::kNear);
  }

  __ bind(&unordered);
  __ bind(&generic_stub);
  ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
                     CompareIC::GENERIC);
  __ jmp(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);

  __ bind(&maybe_undefined1);
  if (Token::IsOrderedRelationalCompareOp(op_)) {
    __ cmp(eax, Immediate(masm->isolate()->factory()->undefined_value()));
    __ j(not_equal, &miss);
    __ JumpIfSmi(edx, &unordered);
    __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
    __ j(not_equal, &maybe_undefined2, Label::kNear);
    __ jmp(&unordered);
  }

  __ bind(&maybe_undefined2);
  if (Token::IsOrderedRelationalCompareOp(op_)) {
    __ cmp(edx, Immediate(masm->isolate()->factory()->undefined_value()));
    __ j(equal, &unordered);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::INTERNALIZED_STRING);
  ASSERT(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;

  // Check that both operands are heap objects.
  Label miss;
  __ mov(tmp1, left);
  STATIC_ASSERT(kSmiTag == 0);
  __ and_(tmp1, right);
  __ JumpIfSmi(tmp1, &miss, Label::kNear);

  // Check that both operands are internalized strings.
  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ or_(tmp1, tmp2);
  __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, &miss, Label::kNear);

  // Internalized strings are compared by identity.
  Label done;
  __ cmp(left, right);
  // Make sure eax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  ASSERT(right.is(eax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::UNIQUE_NAME);
  ASSERT(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;

  // Check that both operands are heap objects.
  Label miss;
  __ mov(tmp1, left);
  STATIC_ASSERT(kSmiTag == 0);
  __ and_(tmp1, right);
  __ JumpIfSmi(tmp1, &miss, Label::kNear);

  // Check that both operands are unique names. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));

  __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear);
  __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear);

  // Unique names are compared by identity.
  Label done;
  __ cmp(left, right);
  // Make sure eax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  ASSERT(right.is(eax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::STRING);
  Label miss;

  bool equality = Token::IsEqualityOp(op_);

  // Registers containing left and right operands respectively.
  Register left = edx;
  Register right = eax;
  Register tmp1 = ecx;
  Register tmp2 = ebx;
  Register tmp3 = edi;

  // Check that both operands are heap objects.
  __ mov(tmp1, left);
  STATIC_ASSERT(kSmiTag == 0);
  __ and_(tmp1, right);
  __ JumpIfSmi(tmp1, &miss);

  // Check that both operands are strings. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  __ mov(tmp3, tmp1);
  STATIC_ASSERT(kNotStringTag != 0);
  __ or_(tmp3, tmp2);
  __ test(tmp3, Immediate(kIsNotStringMask));
  __ j(not_zero, &miss);

  // Fast check for identical strings.
  Label not_same;
  __ cmp(left, right);
  __ j(not_equal, &not_same, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Set(eax, Immediate(Smi::FromInt(EQUAL)));
  __ ret(0);

  // Handle not identical strings.
  __ bind(&not_same);

  // Check that both strings are internalized. If they are, we're done
  // because we already know they are not identical.  But in the case of
  // non-equality compare, we still need to determine the order. We
  // also know they are both strings.
  if (equality) {
    Label do_compare;
    STATIC_ASSERT(kInternalizedTag == 0);
    __ or_(tmp1, tmp2);
    __ test(tmp1, Immediate(kIsNotInternalizedMask));
    __ j(not_zero, &do_compare, Label::kNear);
    // Make sure eax is non-zero. At this point input operands are
    // guaranteed to be non-zero.
    ASSERT(right.is(eax));
    __ ret(0);
    __ bind(&do_compare);
  }

  // Check that both strings are sequential ASCII.
  Label runtime;
  __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);

  // Compare flat ASCII strings. Returns when done.
  if (equality) {
    StringCompareStub::GenerateFlatAsciiStringEquals(
        masm, left, right, tmp1, tmp2);
  } else {
    StringCompareStub::GenerateCompareFlatAsciiStrings(
        masm, left, right, tmp1, tmp2, tmp3);
  }

  // Handle more complex cases in runtime.
  __ bind(&runtime);
  __ pop(tmp1);  // Return address.
  __ push(left);
  __ push(right);
  __ push(tmp1);
  if (equality) {
    __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
  } else {
    __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
  ASSERT(state_ == CompareIC::OBJECT);
  Label miss;
  __ mov(ecx, edx);
  __ and_(ecx, eax);
  __ JumpIfSmi(ecx, &miss, Label::kNear);

  __ CmpObjectType(eax, JS_OBJECT_TYPE, ecx);
  __ j(not_equal, &miss, Label::kNear);
  __ CmpObjectType(edx, JS_OBJECT_TYPE, ecx);
  __ j(not_equal, &miss, Label::kNear);

  ASSERT(GetCondition() == equal);
  __ sub(eax, edx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
  Label miss;
  __ mov(ecx, edx);
  __ and_(ecx, eax);
  __ JumpIfSmi(ecx, &miss, Label::kNear);

  __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
  __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
  __ cmp(ecx, known_map_);
  __ j(not_equal, &miss, Label::kNear);
  __ cmp(ebx, known_map_);
  __ j(not_equal, &miss, Label::kNear);

  __ sub(eax, edx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
  {
    // Call the runtime system in a fresh internal frame.
    ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
                                               masm->isolate());
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(edx);  // Preserve edx and eax.
    __ push(eax);
    __ push(edx);  // And also use them as the arguments.
    __ push(eax);
    __ push(Immediate(Smi::FromInt(op_)));
    __ CallExternalReference(miss, 3);
    // Compute the entry point of the rewritten stub.
    __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
    __ pop(eax);
    __ pop(edx);
  }

  // Do a tail call to the rewritten stub.
  __ jmp(edi);
}


// Helper function used to check that the dictionary doesn't contain
// the property. This function may return false negatives, so miss_label
// must always call a backup property check that is complete.
// This function is safe to call if the receiver has fast properties.
// Name must be a unique name and receiver must be a heap object.
void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register properties,
                                                      Handle<Name> name,
                                                      Register r0) {
  ASSERT(name->IsUniqueName());

  // If the names of the slots in the range from 1 to kProbes - 1 for the hash
  // value are not equal to the name and the kProbes-th slot is not used (its
  // name is the undefined value), the hash table is guaranteed not to contain
  // the property. This holds even if some slots represent deleted properties
  // (their names are the hole value).
  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = r0;
    // Capacity is smi 2^n.
    __ mov(index, FieldOperand(properties, kCapacityOffset));
    __ dec(index);
    __ and_(index,
            Immediate(Smi::FromInt(name->Hash() +
                                   NameDictionary::GetProbeOffset(i))));

    // Scale the index by multiplying by the entry size.
    ASSERT(NameDictionary::kEntrySize == 3);
    __ lea(index, Operand(index, index, times_2, 0));  // index *= 3.
    Register entity_name = r0;
    // Having undefined at this place means the name is not contained.
    ASSERT_EQ(kSmiTagSize, 1);
    __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
                                kElementsStartOffset - kHeapObjectTag));
    __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
    __ j(equal, done);

    // Stop if found the property.
    __ cmp(entity_name, Handle<Name>(name));
    __ j(equal, miss);

    Label good;
    // Check for the hole and skip.
    __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
    __ j(equal, &good, Label::kNear);

    // Check if the entry name is not a unique name.
    __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
    __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset),
                           miss);
    __ bind(&good);
  }

  NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP);
  __ push(Immediate(Handle<Object>(name)));
  __ push(Immediate(name->Hash()));
  __ CallStub(&stub);
  __ test(r0, r0);
  __ j(not_zero, miss);
  __ jmp(done);
}

// Probe the name dictionary in the |elements| register. Jump to the
// |done| label if a property with the given name is found, leaving the
// index into the dictionary in |r0|. Jump to the |miss| label
// otherwise.
void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register elements,
                                                      Register name,
                                                      Register r0,
                                                      Register r1) {
  ASSERT(!elements.is(r0));
  ASSERT(!elements.is(r1));
  ASSERT(!name.is(r0));
  ASSERT(!name.is(r1));

  __ AssertName(name);

  __ mov(r1, FieldOperand(elements, kCapacityOffset));
  __ shr(r1, kSmiTagSize);  // convert smi to int
  __ dec(r1);

  // Generate an unrolled loop that performs a few probes before
  // giving up. Measurements done on Gmail indicate that 2 probes
  // cover ~93% of loads from dictionaries.
  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
    __ shr(r0, Name::kHashShift);
    if (i > 0) {
      __ add(r0, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ and_(r0, r1);

    // Scale the index by multiplying by the entry size.
    ASSERT(NameDictionary::kEntrySize == 3);
    __ lea(r0, Operand(r0, r0, times_2, 0));  // r0 = r0 * 3

    // Check if the key is identical to the name.
    __ cmp(name, Operand(elements,
                         r0,
                         times_4,
                         kElementsStartOffset - kHeapObjectTag));
    __ j(equal, done);
  }

  NameDictionaryLookupStub stub(elements, r1, r0, POSITIVE_LOOKUP);
  __ push(name);
  __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
  __ shr(r0, Name::kHashShift);
  __ push(r0);
  __ CallStub(&stub);

  __ test(r1, r1);
  __ j(zero, miss);
  __ jmp(done);
}

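// The out-of-line probe loop below continues where the inlined probes gave
// up; in C, roughly (sketch; KeyAt is illustrative for the indexed load,
// and each entry occupies kEntrySize == 3 words):
//
//   for (int i = kInlinedProbes; i < kTotalProbes; i++) {
//     int index = (hash + GetProbeOffset(i)) & (capacity - 1);
//     Object* key = dictionary->KeyAt(index);
//     if (key == undefined) return kNotFound;
//     if (key == name) return index;
//     // NEGATIVE_LOOKUP only: a non-unique-name key forces a bailout,
//     // since it could be a non-internalized string equal to |name|.
//   }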
void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // This stub overrides SometimesSetsUpAFrame() to return false.  That means
  // we cannot call anything that could cause a GC from this stub.
  // Stack frame on entry:
  //  esp[0 * kPointerSize]: return address.
  //  esp[1 * kPointerSize]: key's hash.
  //  esp[2 * kPointerSize]: key.
  // Registers:
  //  dictionary_: NameDictionary to probe.
  //  result_: used as scratch.
  //  index_: will hold an index of entry if lookup is successful.
  //          might alias with result_.
  // Returns:
  //  result_ is zero if lookup failed, non zero otherwise.

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result_;

  __ mov(scratch, FieldOperand(dictionary_, kCapacityOffset));
  __ dec(scratch);
  __ SmiUntag(scratch);
  __ push(scratch);

  // If the names of the slots in the range from 1 to kProbes - 1 for the hash
  // value are not equal to the name and the kProbes-th slot is not used (its
  // name is the undefined value), the hash table is guaranteed not to contain
  // the property. This holds even if some slots represent deleted properties
  // (their names are the hole value).
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ mov(scratch, Operand(esp, 2 * kPointerSize));
    if (i > 0) {
      __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ and_(scratch, Operand(esp, 0));

    // Scale the index by multiplying by the entry size.
    ASSERT(NameDictionary::kEntrySize == 3);
    __ lea(index_, Operand(scratch, scratch, times_2, 0));  // index *= 3.

    // Having undefined at this place means the name is not contained.
    ASSERT_EQ(kSmiTagSize, 1);
    __ mov(scratch, Operand(dictionary_,
                            index_,
                            times_pointer_size,
                            kElementsStartOffset - kHeapObjectTag));
    __ cmp(scratch, masm->isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    // Stop if found the property.
    __ cmp(scratch, Operand(esp, 3 * kPointerSize));
    __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
      // If we hit a key that is not a unique name during negative
      // lookup we have to bailout as this key might be equal to the
      // key we are looking for.

      // Check if the entry name is not a unique name.
      __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
      __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset),
                             &maybe_in_dictionary);
    }
  }

  __ bind(&maybe_in_dictionary);
  // If we are doing negative lookup then probing failure should be
  // treated as a lookup success. For positive lookup probing failure
  // should be treated as lookup failure.
  if (mode_ == POSITIVE_LOOKUP) {
    __ mov(result_, Immediate(0));
    __ Drop(1);
    __ ret(2 * kPointerSize);
  }

  __ bind(&in_dictionary);
  __ mov(result_, Immediate(1));
  __ Drop(1);
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ mov(result_, Immediate(0));
  __ Drop(1);
  __ ret(2 * kPointerSize);
}


void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
    Isolate* isolate) {
  StoreBufferOverflowStub stub(kDontSaveFPRegs);
  stub.GetCode(isolate);
  if (CpuFeatures::IsSafeForSnapshot(SSE2)) {
    StoreBufferOverflowStub stub2(kSaveFPRegs);
    stub2.GetCode(isolate);
  }
}


bool CodeStub::CanUseFPRegisters() {
  return CpuFeatures::IsSupported(SSE2);
}

// Takes the input in 3 registers: address_, value_ and object_.  A pointer to
// the value has just been written into the object; now this stub makes sure
// we keep the GC informed.  The word in the object where the value has been
// written is in the address register.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call.  We patch them back
  // and forth between a compare instruction (a nop in this position) and the
  // real branch when we start and stop incremental heap marking.
5309   __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
5310   __ jmp(&skip_to_incremental_compacting, Label::kFar);

  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
  masm->set_byte_at(0, kTwoByteNopInstruction);
  masm->set_byte_at(2, kFiveByteNopInstruction);
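  // kTwoByteNopInstruction and kFiveByteNopInstruction (see
  // code-stubs-ia32.h) are really cmp opcodes whose immediate operands
  // swallow the two jump offsets above, so in STORE_BUFFER_ONLY mode both
  // jumps degenerate into fall-through code; activating incremental marking
  // patches the jump opcodes back in.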
}


void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),  // Value.
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch0(),
                     1 << MemoryChunk::SCAN_ON_SCAVENGE,
                     not_zero,
                     &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm,
        kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
        mode);
    InformIncrementalMarker(masm, mode);
    regs_.Restore(masm);
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  CheckNeedsToInformIncrementalMarker(
      masm,
      kReturnOnNoNeedToInformIncrementalMarker,
      mode);
  InformIncrementalMarker(masm, mode);
  regs_.Restore(masm);
  __ ret(0);
}


void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
  int argument_count = 3;
  __ PrepareCallCFunction(argument_count, regs_.scratch0());
  __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
  __ mov(Operand(esp, 1 * kPointerSize), regs_.address());  // Slot.
  __ mov(Operand(esp, 2 * kPointerSize),
         Immediate(ExternalReference::isolate_address(masm->isolate())));
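  // The three outgoing words follow the cdecl convention expected by the
  // record-write C entry points, which (modulo naming) look like:
  //   void RecordWriteFromCode(HeapObject* object, Object** slot,
  //                            Isolate* isolate);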

  AllowExternalCallThatCantCauseGC scope(masm);
  if (mode == INCREMENTAL_COMPACTION) {
    __ CallCFunction(
        ExternalReference::incremental_evacuation_record_write_function(
            masm->isolate()),
        argument_count);
  } else {
    ASSERT(mode == INCREMENTAL);
    __ CallCFunction(
        ExternalReference::incremental_marking_record_write_function(
            masm->isolate()),
        argument_count);
  }
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
}


void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label object_is_black, need_incremental, need_incremental_pop_object;

  __ mov(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
  __ and_(regs_.scratch0(), regs_.object());
  __ mov(regs_.scratch1(),
         Operand(regs_.scratch0(),
                 MemoryChunk::kWriteBarrierCounterOffset));
  __ sub(regs_.scratch1(), Immediate(1));
  __ mov(Operand(regs_.scratch0(),
                 MemoryChunk::kWriteBarrierCounterOffset),
         regs_.scratch1());
  __ j(negative, &need_incremental);
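  // Masking with ~kPageAlignmentMask turns the object pointer into its
  // MemoryChunk header, which holds the write barrier counter. The counter
  // limits how many record-writes may take the fast path before we fall
  // into the slow path and give the incremental marker a chance to make
  // progress.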

  // Let's look at the color of the object:  If it is not black we don't have
  // to inform the incremental marker.
  __ JumpIfBlack(regs_.object(),
                 regs_.scratch0(),
                 regs_.scratch1(),
                 &object_is_black,
                 Label::kNear);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&object_is_black);

  // Get the value from the slot.
  __ mov(regs_.scratch0(), Operand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     zero,
                     &ensure_not_white,
                     Label::kNear);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     not_zero,
                     &ensure_not_white,
                     Label::kNear);

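    // Reaching here means the value is on an evacuation candidate page and
    // slot recording is not skipped for this object, so the incremental
    // marker must be told about the slot.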
    __ jmp(&need_incremental);

    __ bind(&ensure_not_white);
  }

  // We need an extra register for this, so we push the object register
  // temporarily.
  __ push(regs_.object());
  __ EnsureNotWhite(regs_.scratch0(),  // The value.
                    regs_.scratch1(),  // Scratch.
                    regs_.object(),  // Scratch.
                    &need_incremental_pop_object,
                    Label::kNear);
  __ pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&need_incremental_pop_object);
  __ pop(regs_.object());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}


void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : element value to store
  //  -- ecx    : element index as smi
  //  -- esp[0] : return address
  //  -- esp[4] : array literal index in function
  //  -- esp[8] : array literal
  // clobbers ebx, edx, edi
  // -----------------------------------

  Label element_done;
  Label double_elements;
  Label smi_element;
  Label slow_elements;
  Label slow_elements_from_double;
  Label fast_elements;

  // Get array literal index, array literal and its map.
  __ mov(edx, Operand(esp, 1 * kPointerSize));
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(edi, FieldOperand(ebx, JSObject::kMapOffset));

  __ CheckFastElements(edi, &double_elements);

  // Check for FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS elements
  __ JumpIfSmi(eax, &smi_element);
  __ CheckFastSmiElements(edi, &fast_elements, Label::kNear);

  // Storing into the array literal requires an elements transition. Call into
  // the runtime.

  __ bind(&slow_elements);
  __ pop(edi);  // Pop return address and remember to put back later for tail
                // call.
  __ push(ebx);
  __ push(ecx);
  __ push(eax);
  __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
  __ push(edx);
  __ push(edi);  // Push the return address back so that the tail call
                 // returns to the right place.
  __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);

  __ bind(&slow_elements_from_double);
  __ pop(edx);
  __ jmp(&slow_elements);

  // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
  __ bind(&fast_elements);
  __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
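  // ecx holds the element index as a smi (i.e. index << 1), so scaling by
  // times_half_pointer_size yields index * kPointerSize.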
  __ lea(ecx, FieldOperand(ebx, ecx, times_half_pointer_size,
                           FixedArrayBase::kHeaderSize));
  __ mov(Operand(ecx, 0), eax);
  // Update the write barrier for the array store.
  __ RecordWrite(ebx, ecx, eax,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 OMIT_SMI_CHECK);
  __ ret(0);

  // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS,
  // and value is Smi.
  __ bind(&smi_element);
  __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
  __ mov(FieldOperand(ebx, ecx, times_half_pointer_size,
                      FixedArrayBase::kHeaderSize), eax);
  __ ret(0);

  // Array literal has ElementsKind of FAST_*_DOUBLE_ELEMENTS.
  __ bind(&double_elements);

  __ push(edx);
  __ mov(edx, FieldOperand(ebx, JSObject::kElementsOffset));
  __ StoreNumberToDoubleElements(eax,
                                 edx,
                                 ecx,
                                 edi,
                                 xmm0,
                                 &slow_elements_from_double,
                                 false);
  __ pop(edx);
  __ ret(0);
}


void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
  __ call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ mov(ebx, MemOperand(ebp, parameter_count_offset));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ pop(ecx);
  int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE
      ? kPointerSize
      : 0;
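  // ebx holds the caller's stack parameter count; the lea below drops those
  // parameters (plus the receiver when in JS_FUNCTION_STUB_MODE) now that
  // the return address has been popped into ecx.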
  __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset));
  __ jmp(ecx);  // Return to IC Miss stub, continuation still on stack.
}


void StubFailureTailCallTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
  __ call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
  __ mov(edi, eax);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ mov(eax, MemOperand(ebp, parameter_count_offset));
  // The parameter count above includes the receiver for the arguments passed
  // to the deoptimization handler. Subtract the receiver to get the parameter
  // count for the call.
  __ sub(eax, Immediate(1));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  ParameterCount argument_count(eax);
  __ InvokeFunction(
      edi, argument_count, JUMP_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
}


void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    ProfileEntryHookStub stub;
    masm->CallStub(&stub);
  }
}


void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // Save volatile registers.
  const int kNumSavedRegisters = 3;
  __ push(eax);
  __ push(ecx);
  __ push(edx);

  // Calculate and push the original stack pointer.
  __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
  __ push(eax);

  // Retrieve our return address and use it to calculate the calling
  // function's address.
  __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
  __ sub(eax, Immediate(Assembler::kCallInstructionLength));
  __ push(eax);
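  // The two words now on the stack match V8's FunctionEntryHook callback,
  // which is declared (approximately) in v8.h as:
  //   typedef void (*FunctionEntryHook)(uintptr_t function,
  //                                     uintptr_t return_addr_location);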

  // Call the entry hook.
  ASSERT(masm->isolate()->function_entry_hook() != NULL);
  __ call(FUNCTION_ADDR(masm->isolate()->function_entry_hook()),
          RelocInfo::RUNTIME_ENTRY);
  __ add(esp, Immediate(2 * kPointerSize));

  // Restore the volatile registers.
  __ pop(edx);
  __ pop(ecx);
  __ pop(eax);

  __ ret(0);
}


template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    T stub(GetInitialFastElementsKind(),
           CONTEXT_CHECK_REQUIRED,
           mode);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmp(edx, kind);
      __ j(not_equal, &next);
      T stub(kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}


static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  // ebx - type info cell (if mode != DISABLE_ALLOCATION_SITES)
  // edx - kind (if mode != DISABLE_ALLOCATION_SITES)
  // eax - number of arguments
  // edi - constructor?
  // esp[0] - return address
  // esp[4] - last argument
  Label normal_sequence;
  if (mode == DONT_OVERRIDE) {
    ASSERT(FAST_SMI_ELEMENTS == 0);
    ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    ASSERT(FAST_ELEMENTS == 2);
    ASSERT(FAST_HOLEY_ELEMENTS == 3);
    ASSERT(FAST_DOUBLE_ELEMENTS == 4);
    ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);

    // Is the low bit set? If so, we are holey and that is good.
    __ test_b(edx, 1);
    __ j(not_zero, &normal_sequence);
  }

  // Look at the first argument.
  __ mov(ecx, Operand(esp, kPointerSize));
  __ test(ecx, ecx);
  __ j(zero, &normal_sequence);

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    ArraySingleArgumentConstructorStub stub_holey(holey_initial,
                                                  CONTEXT_CHECK_REQUIRED,
                                                  DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);

    __ bind(&normal_sequence);
    ArraySingleArgumentConstructorStub stub(initial,
                                            CONTEXT_CHECK_REQUIRED,
                                            DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    // We are going to create a holey array, but our kind is non-holey.
    // Fix kind and retry.
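    // (With the enum layout asserted above, a packed kind + 1 is the
    // corresponding holey kind.)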
    __ inc(edx);
    __ mov(ecx, FieldOperand(ebx, Cell::kValueOffset));
    if (FLAG_debug_code) {
      Handle<Map> allocation_site_map =
          masm->isolate()->factory()->allocation_site_map();
      __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
      __ Assert(equal, kExpectedAllocationSiteInCell);
    }

    // Save the resulting elements kind in type info. We can't just store edx
    // in the AllocationSite::transition_info field because elements kind is
    // restricted to a portion of the field; upper bits need to be left alone.
    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
    __ add(FieldOperand(ecx, AllocationSite::kTransitionInfoOffset),
           Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));
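    // kFastElementsKindPackedToHoley is the packed-to-holey distance in the
    // kind enumeration, and adding it as a smi leaves the rest of the tagged
    // transition_info word untouched.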

    __ bind(&normal_sequence);
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmp(edx, kind);
      __ j(not_equal, &next);
      ArraySingleArgumentConstructorStub stub(kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}


template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
  ElementsKind initial_kind = GetInitialFastElementsKind();
  ElementsKind initial_holey_kind = GetHoleyElementsKind(initial_kind);

  int to_index = GetSequenceIndexFromFastElementsKind(
      TERMINAL_FAST_ELEMENTS_KIND);
  for (int i = 0; i <= to_index; ++i) {
    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    T stub(kind);
    stub.GetCode(isolate);
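    // Also pre-generate the DISABLE_ALLOCATION_SITES variant for kinds whose
    // allocation sites would be tracked, or for the initial packed/holey
    // kinds when site tracking is disabled.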
    if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE ||
        (!FLAG_track_allocation_sites &&
         (kind == initial_kind || kind == initial_holey_kind))) {
      T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES);
      stub1.GetCode(isolate);
    }
  }
}


void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
      isolate);
}


void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
    Isolate* isolate) {
  ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
  for (int i = 0; i < 2; i++) {
    // For internal arrays we only need a few things.
    InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
    stubh1.GetCode(isolate);
    InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
    stubh2.GetCode(isolate);
    InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
    stubh3.GetCode(isolate);
  }
}


void ArrayConstructorStub::GenerateDispatchToArrayStub(
    MacroAssembler* masm,
    AllocationSiteOverrideMode mode) {
  if (argument_count_ == ANY) {
    Label not_zero_case, not_one_case;
    __ test(eax, eax);
    __ j(not_zero, &not_zero_case);
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);

    __ bind(&not_zero_case);
    __ cmp(eax, 1);
    __ j(greater, &not_one_case);
    CreateArrayDispatchOneArgument(masm, mode);

    __ bind(&not_one_case);
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else if (argument_count_ == NONE) {
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
  } else if (argument_count_ == ONE) {
    CreateArrayDispatchOneArgument(masm, mode);
  } else if (argument_count_ == MORE_THAN_ONE) {
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else {
    UNREACHABLE();
  }
}


void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argc (only if argument_count_ == ANY)
  //  -- ebx : type info cell
  //  -- edi : constructor
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------
  Handle<Object> undefined_sentinel(
      masm->isolate()->heap()->undefined_value(),
      masm->isolate());

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // This test will catch both a NULL pointer and a Smi.
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ecx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);

    // We should have either undefined in ebx or a valid cell.
    Label okay_here;
    Handle<Map> cell_map = masm->isolate()->factory()->cell_map();
    __ cmp(ebx, Immediate(undefined_sentinel));
    __ j(equal, &okay_here);
    __ cmp(FieldOperand(ebx, 0), Immediate(cell_map));
    __ Assert(equal, kExpectedPropertyCellInRegisterEbx);
    __ bind(&okay_here);
  }

  Label no_info;
  // If the type cell is undefined, or contains anything other than an
  // AllocationSite, call an array constructor that doesn't use
  // AllocationSites.
  __ cmp(ebx, Immediate(undefined_sentinel));
  __ j(equal, &no_info);
  __ mov(edx, FieldOperand(ebx, Cell::kValueOffset));
  __ cmp(FieldOperand(edx, 0), Immediate(
      masm->isolate()->factory()->allocation_site_map()));
  __ j(not_equal, &no_info);

  // Only look at the lower 16 bits of the transition info.
  __ mov(edx, FieldOperand(edx, AllocationSite::kTransitionInfoOffset));
  __ SmiUntag(edx);
  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
  __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
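  // edx now holds the ElementsKind recorded on the AllocationSite.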
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
}


void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ test(eax, eax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);
  __ cmp(eax, 1);
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array; look at the first argument.
    __ mov(ecx, Operand(esp, kPointerSize));
    __ test(ecx, ecx);
    __ j(zero, &normal_sequence);

    InternalArraySingleArgumentConstructorStub
        stub1_holey(GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey);
  }

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  InternalArrayNArgumentsConstructorStub stubN(kind);
  __ TailCallStub(&stubN);
}


void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argc
  //  -- ebx : type info cell
  //  -- edi : constructor
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // This test will catch both a NULL pointer and a Smi.
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ecx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind.
  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into ecx. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ and_(ecx, Map::kElementsKindMask);
  __ shr(ecx, Map::kElementsKindShift);

  if (FLAG_debug_code) {
    Label done;
    __ cmp(ecx, Immediate(FAST_ELEMENTS));
    __ j(equal, &done);
    __ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS));
    __ Assert(equal,
              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmp(ecx, Immediate(FAST_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32