// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_IA32

#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/isolate.h"
#include "src/jsregexp.h"
#include "src/regexp-macro-assembler.h"
#include "src/runtime.h"
#include "src/stub-cache.h"
#include "src/codegen.h"
17 #include "src/runtime.h"

namespace v8 {
namespace internal {


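// Each InitializeInterfaceDescriptor below records, for one code stub, the
// registers in which the stub expects its parameters and the runtime entry
// (deoptimization_handler_) used when the stub bails out; a NULL handler
// means no bailout entry is registered for the stub.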
void FastNewClosureStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { ebx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenNewClosureFromStubFailure)->entry;
}


void FastNewContextStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edi };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void ToNumberStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void NumberToStringStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenNumberToString)->entry;
}


void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax, ebx, ecx };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  static Representation representations[] = {
      Representation::Tagged(),
      Representation::Smi(),
      Representation::Tagged() };
  descriptor->register_param_representations_ = representations;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(
          Runtime::kHiddenCreateArrayLiteralStubBailout)->entry;
}


void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax, ebx, ecx, edx };
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenCreateObjectLiteral)->entry;
}


void CreateAllocationSiteStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { ebx, edx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
}


void KeyedLoadDictionaryElementStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
}


void RegExpConstructResultStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { ecx, ebx, eax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenRegExpConstructResult)->entry;
}


void KeyedLoadGenericElementStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kKeyedGetProperty)->entry;
}


void LoadFieldStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void StringLengthStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedStringLengthStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx, eax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure);
}


void TransitionElementsKindStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax, ebx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kTransitionElementsKind)->entry;
}


static void InitializeArrayConstructorDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // register state
  // eax -- number of arguments
  // edi -- function
  // ebx -- allocation site with elements kind
  static Register registers_variable_args[] = { edi, ebx, eax };
  static Register registers_no_args[] = { edi, ebx };

  if (constant_stack_parameter_count == 0) {
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers_no_args;
  } else {
    // stack param count needs (constructor pointer, and single argument)
    descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
    descriptor->stack_parameter_count_ = eax;
    descriptor->register_param_count_ = 3;
    descriptor->register_params_ = registers_variable_args;
    static Representation representations[] = {
        Representation::Tagged(),
        Representation::Tagged(),
        Representation::Integer32() };
    descriptor->register_param_representations_ = representations;
  }

  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenArrayConstructor)->entry;
}
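// Note that in the variable-argument case above, eax does double duty: it is
// both a register parameter (the actual argument count) and the dynamic
// stack_parameter_count_, so the number of stack arguments popped when the
// handler is invoked is not a compile-time constant.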


static void InitializeInternalArrayConstructorDescriptor(
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // register state
  // eax -- number of arguments
  // edi -- constructor function
  static Register registers_variable_args[] = { edi, eax };
  static Register registers_no_args[] = { edi };

  if (constant_stack_parameter_count == 0) {
    descriptor->register_param_count_ = 1;
    descriptor->register_params_ = registers_no_args;
  } else {
    // stack param count needs (constructor pointer, and single argument)
    descriptor->handler_arguments_mode_ = PASS_ARGUMENTS;
    descriptor->stack_parameter_count_ = eax;
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers_variable_args;
    static Representation representations[] = {
        Representation::Tagged(),
        Representation::Integer32() };
    descriptor->register_param_representations_ = representations;
  }

  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenInternalArrayConstructor)->entry;
}


void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
}


void InternalArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(descriptor, 0);
}


void InternalArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(descriptor, -1);
}

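// The IC stubs below also register a miss handler: SetMissHandler stores the
// ExternalReference that HydrogenCodeStub::GenerateLightweightMiss (defined
// later in this file) calls when the stub misses.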
void CompareNilICStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(CompareNilIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate()));
}


void ToBooleanStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(ToBooleanIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate()));
}


void StoreGlobalStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, ecx, eax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(StoreIC_MissFromStubFailure);
}


void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { eax, ebx, ecx, edx };
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(ElementsTransitionAndStoreIC_Miss);
}


void BinaryOpICStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, eax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate()));
}


void BinaryOpWithAllocationSiteStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { ecx, edx, eax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(BinaryOpIC_MissWithAllocationSite);
}


void StringAddStub::InitializeInterfaceDescriptor(
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { edx, eax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kHiddenStringAdd)->entry;
}


void CallDescriptors::InitializeForIsolate(Isolate* isolate) {
  {
    CallInterfaceDescriptor* descriptor =
        isolate->call_descriptor(Isolate::ArgumentAdaptorCall);
    static Register registers[] = { edi,  // JSFunction
                                    esi,  // context
                                    eax,  // actual number of arguments
                                    ebx,  // expected number of arguments
    };
    static Representation representations[] = {
        Representation::Tagged(),     // JSFunction
        Representation::Tagged(),     // context
        Representation::Integer32(),  // actual number of arguments
        Representation::Integer32(),  // expected number of arguments
    };
    descriptor->register_param_count_ = 4;
    descriptor->register_params_ = registers;
    descriptor->param_representations_ = representations;
  }
  {
    CallInterfaceDescriptor* descriptor =
        isolate->call_descriptor(Isolate::KeyedCall);
    static Register registers[] = { esi,  // context
                                    ecx,  // key
    };
    static Representation representations[] = {
        Representation::Tagged(),  // context
        Representation::Tagged(),  // key
    };
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers;
    descriptor->param_representations_ = representations;
  }
  {
    CallInterfaceDescriptor* descriptor =
        isolate->call_descriptor(Isolate::NamedCall);
    static Register registers[] = { esi,  // context
                                    ecx,  // name
    };
    static Representation representations[] = {
        Representation::Tagged(),  // context
        Representation::Tagged(),  // name
    };
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers;
    descriptor->param_representations_ = representations;
  }
  {
    CallInterfaceDescriptor* descriptor =
        isolate->call_descriptor(Isolate::CallHandler);
    static Register registers[] = { esi,  // context
                                    edx,  // receiver
    };
    static Representation representations[] = {
        Representation::Tagged(),  // context
        Representation::Tagged(),  // receiver
    };
    descriptor->register_param_count_ = 2;
    descriptor->register_params_ = registers;
    descriptor->param_representations_ = representations;
  }
  {
    CallInterfaceDescriptor* descriptor =
        isolate->call_descriptor(Isolate::ApiFunctionCall);
    static Register registers[] = { eax,  // callee
                                    ebx,  // call_data
                                    ecx,  // holder
                                    edx,  // api_function_address
                                    esi,  // context
    };
    static Representation representations[] = {
        Representation::Tagged(),    // callee
        Representation::Tagged(),    // call_data
        Representation::Tagged(),    // holder
        Representation::External(),  // api_function_address
        Representation::Tagged(),    // context
    };
    descriptor->register_param_count_ = 5;
    descriptor->register_params_ = registers;
    descriptor->param_representations_ = representations;
  }
}


#define __ ACCESS_MASM(masm)
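// The '__' shorthand expands to ACCESS_MASM(masm), so the stub bodies below
// read like plain ia32 assembly listings.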


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor();
  int param_count = descriptor->register_param_count_;
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    ASSERT(descriptor->register_param_count_ == 0 ||
           eax.is(descriptor->register_params_[param_count - 1]));
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ push(descriptor->register_params_[i]);
    }
    ExternalReference miss = descriptor->miss_handler();
    __ CallExternalReference(miss, descriptor->register_param_count_);
  }

  __ ret(0);
}


void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  __ pushad();
  if (save_doubles_ == kSaveFPRegs) {
    __ sub(esp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(Operand(esp, i * kDoubleSize), reg);
    }
  }
  const int argument_count = 1;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, ecx);
  __ mov(Operand(esp, 0 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  if (save_doubles_ == kSaveFPRegs) {
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      __ movsd(reg, Operand(esp, i * kDoubleSize));
    }
    __ add(esp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
  }
  __ popad();
  __ ret(0);
}


class FloatingPointHelper : public AllStatic {
 public:
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. Input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // operand in register number. Returns operand as floating point number
  // on FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Test if operands are smi or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);

  // Test if operands are numbers (smi or HeapNumber objects), and load
  // them into xmm0 and xmm1 if they are. Jump to label not_numbers if
  // either operand is not a number. Operands are in edx and eax.
  // Leaves operands unchanged.
  static void LoadSSE2Operands(MacroAssembler* masm, Label* not_numbers);
};


void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  ASSERT(is_truncating());

  Label check_negative, process_64_bits, done, done_no_stash;

  int double_offset = offset();

  // Account for return address and saved regs if input is esp.
  if (input_reg.is(esp)) double_offset += 3 * kPointerSize;

  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));
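  // ia32 is little-endian, so the low 32 mantissa bits of the double are at
  // double_offset and the word with the sign, exponent, and upper mantissa
  // bits is at double_offset + kDoubleSize / 2.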

  Register scratch1;
  {
    Register scratch_candidates[3] = { ebx, edx, edi };
    for (int i = 0; i < 3; i++) {
      scratch1 = scratch_candidates[i];
      if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
    }
  }
  // Since we must use ecx for shifts below, use some other register (eax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg.is(ecx) ? eax : final_result_reg;
  // Save ecx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead
  // for the result.
  Register save_reg = final_result_reg.is(ecx) ? eax : ecx;
  __ push(scratch1);
  __ push(save_reg);

  bool stash_exponent_copy = !input_reg.is(esp);
  __ mov(scratch1, mantissa_operand);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    // Load x87 register with heap number.
    __ fld_d(mantissa_operand);
  }
  __ mov(ecx, exponent_operand);
  if (stash_exponent_copy) __ push(ecx);

  __ and_(ecx, HeapNumber::kExponentMask);
  __ shr(ecx, HeapNumber::kExponentShift);
  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  if (CpuFeatures::IsSupported(SSE3)) {
    __ fstp(0);
  }
  __ sub(ecx, Immediate(delta));
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ j(above, &done);
  __ shl_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    if (stash_exponent_copy) {
      // Already a copy of the exponent on the stack, overwrite it.
      STATIC_ASSERT(kDoubleSize == 2 * kPointerSize);
      __ sub(esp, Immediate(kDoubleSize / 2));
    } else {
      // Reserve space for 64 bit answer.
      __ sub(esp, Immediate(kDoubleSize));  // Nolint.
    }
    // Do conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(result_reg, Operand(esp, 0));  // Load low word of answer as result
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done_no_stash);
  } else {
    // Result must be extracted from shifted 32-bit mantissa
    __ sub(ecx, Immediate(delta));
    __ neg(ecx);
    if (stash_exponent_copy) {
      __ mov(result_reg, MemOperand(esp, 0));
    } else {
      __ mov(result_reg, exponent_operand);
    }
    __ and_(result_reg,
            Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
    __ add(result_reg,
           Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
    __ shrd(result_reg, scratch1);
    __ shr_cl(result_reg);
    __ test(ecx, Immediate(32));
    __ cmov(not_equal, scratch1, result_reg);
  }

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  __ neg(result_reg);
  if (stash_exponent_copy) {
    __ cmp(MemOperand(esp, 0), Immediate(0));
  } else {
    __ cmp(exponent_operand, Immediate(0));
  }
  __ cmov(greater, result_reg, scratch1);

  // Restore registers
  __ bind(&done);
  if (stash_exponent_copy) {
    __ add(esp, Immediate(kDoubleSize / 2));
  }
  __ bind(&done_no_stash);
  if (!final_result_reg.is(result_reg)) {
    ASSERT(final_result_reg.is(ecx));
    __ mov(final_result_reg, result_reg);
  }
  __ pop(save_reg);
  __ pop(scratch1);
  __ ret(0);
}


void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ JumpIfSmi(number, &load_smi, Label::kNear);
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi);
  __ SmiUntag(number);
  __ push(number);
  __ fild_s(Operand(esp, 0));
  __ pop(number);

  __ bind(&done);
}


void FloatingPointHelper::LoadSSE2Operands(MacroAssembler* masm,
                                           Label* not_numbers) {
  Label load_smi_edx, load_eax, load_smi_eax, load_float_eax, done;
  // Load operand in edx into xmm0, or branch to not_numbers.
  __ JumpIfSmi(edx, &load_smi_edx, Label::kNear);
  Factory* factory = masm->isolate()->factory();
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(not_equal, not_numbers);  // Argument in edx is not a number.
  __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
  __ bind(&load_eax);
  // Load operand in eax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(eax, &load_smi_eax, Label::kNear);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset), factory->heap_number_map());
  __ j(equal, &load_float_eax, Label::kNear);
  __ jmp(not_numbers);  // Argument in eax is not a number.
  __ bind(&load_smi_edx);
  __ SmiUntag(edx);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm0, edx);
  __ SmiTag(edx);  // Retag smi for heap number overwriting test.
  __ jmp(&load_eax);
  __ bind(&load_smi_eax);
  __ SmiUntag(eax);  // Untag smi before converting to float.
  __ Cvtsi2sd(xmm1, eax);
  __ SmiTag(eax);  // Retag smi for heap number overwriting test.
  __ jmp(&done, Label::kNear);
  __ bind(&load_float_eax);
  __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
  __ bind(&done);
}


void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
                                             Label* non_float,
                                             Register scratch) {
  Label test_other, done;
  // Test if both operands are floats or smi -> scratch=k_is_float;
  // Otherwise scratch = k_not_float.
  __ JumpIfSmi(edx, &test_other, Label::kNear);
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in edx is not a number -> NaN

  __ bind(&test_other);
  __ JumpIfSmi(eax, &done, Label::kNear);
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // argument in eax is not a number -> NaN

  // Fall-through: Both operands are numbers.
  __ bind(&done);
}


void MathPowStub::Generate(MacroAssembler* masm) {
  Factory* factory = isolate()->factory();
  const Register exponent = eax;
  const Register base = edx;
  const Register scratch = ecx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ mov(scratch, Immediate(1));
  __ Cvtsi2sd(double_result, scratch);

  if (exponent_type_ == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack.
    __ mov(base, Operand(esp, 2 * kPointerSize));
    __ mov(exponent, Operand(esp, 1 * kPointerSize));

    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    __ cmp(FieldOperand(base, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(not_equal, &call_runtime);

    __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiUntag(base);
    __ Cvtsi2sd(double_base, base);

    __ bind(&unpack_exponent);
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ cmp(FieldOperand(exponent, HeapObject::kMapOffset),
           factory->heap_number_map());
    __ j(not_equal, &call_runtime);
    __ movsd(double_exponent,
             FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type_ == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiUntag(exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ movsd(double_exponent,
             FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type_ != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cvttsd2si(exponent, Operand(double_exponent));
    __ cmp(exponent, Immediate(0x1));
    __ j(overflow, &call_runtime);

    if (exponent_type_ == ON_STACK) {
      // Detect square root case. Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead. We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label continue_sqrt, continue_rsqrt, not_plus_half;
      // Test for 0.5.
      // Load double_scratch with 0.5.
      __ mov(scratch, Immediate(0x3F000000u));
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculates square root of base. Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // According to IEEE-754, single-precision -Infinity has the highest
      // 9 bits set and the lowest 23 bits cleared.
      __ mov(scratch, 0xFF800000u);
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      __ ucomisd(double_base, double_scratch);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case.
      __ xorps(double_result, double_result);
      __ subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ xorps(double_scratch, double_scratch);
      __ addsd(double_scratch, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      // Test for -0.5.
      __ bind(&not_plus_half);
      // Load double_scratch with -0.5 by subtracting 1.
      __ subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculates reciprocal of square root of base. Check for the special
      // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      // According to IEEE-754, single-precision -Infinity has the highest
      // 9 bits set and the lowest 23 bits cleared.
      __ mov(scratch, 0xFF800000u);
      __ movd(double_scratch, scratch);
      __ cvtss2sd(double_scratch, double_scratch);
      __ ucomisd(double_base, double_scratch);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ xorps(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ xorps(double_exponent, double_exponent);
      __ addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_exponent, double_exponent);
      __ divsd(double_result, double_exponent);
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ sub(esp, Immediate(kDoubleSize));
    __ movsd(Operand(esp, 0), double_exponent);
    __ fld_d(Operand(esp, 0));  // E
    __ movsd(Operand(esp, 0), double_base);
    __ fld_d(Operand(esp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();   // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();    // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);  // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();  // 2^X, rnd(X)
    __ fstp(1);   // 2^X
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ test_b(eax, 0x5F);  // We check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  __ mov(scratch, exponent);  // Back up exponent.
  __ movsd(double_scratch, double_base);  // Back up base.
  __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ test(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ neg(scratch);
  __ bind(&no_neg);

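  // Exponentiation by squaring: double_scratch holds base^(2^i) and is
  // squared once per exponent bit; it is folded into double_result for each
  // set bit, with the first set bit peeled off below to seed double_result.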
  __ j(zero, &while_false, Label::kNear);
  __ shr(scratch, 1);
  // Above condition means CF==0 && ZF==0. This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shr(scratch, 1);
  __ mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // scratch has the original value of the exponent - if the exponent is
  // negative, return 1/result.
  __ test(exponent, exponent);
  __ j(positive, &done);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);  // Result cannot be NaN.
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // exponent is a smi. We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  Counters* counters = isolate()->counters();
  if (exponent_type_ == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kHiddenMathPow, 2, 1);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in exponent.
    __ bind(&done);
    __ AllocateHeapNumber(eax, scratch, base, &call_runtime);
    __ movsd(FieldOperand(eax, HeapNumber::kValueOffset), double_result);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(2 * kPointerSize);
  } else {
    __ bind(&call_runtime);
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(4, scratch);
      __ movsd(Operand(esp, 0 * kDoubleSize), double_base);
      __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()), 4);
    }
    // Return value is in st(0) on ia32.
    // Store it into the (fixed) result register.
    __ sub(esp, Immediate(kDoubleSize));
    __ fstp_d(Operand(esp, 0));
    __ movsd(double_result, Operand(esp, 0));
    __ add(esp, Immediate(kDoubleSize));

    __ bind(&done);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(0);
  }
}


void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  if (kind() == Code::KEYED_LOAD_IC) {
    __ cmp(ecx, Immediate(isolate()->factory()->prototype_string()));
    __ j(not_equal, &miss);
  }

  StubCompiler::GenerateLoadFunctionPrototype(masm, edx, eax, ebx, &miss);
  __ bind(&miss);
  StubCompiler::TailCallBuiltin(
      masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
}


void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in edx and the parameter count is in eax.

  // The displacement is used for skipping the frame pointer on the
  // stack. It is the offset of the last parameter (if any) relative
  // to the frame pointer.
  static const int kDisplacement = 1 * kPointerSize;

  // Check that the key is a smi.
  Label slow;
  __ JumpIfNotSmi(edx, &slow, Label::kNear);

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor;
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(ebx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor, Label::kNear);

  // Check index against formal parameters count limit passed in
  // through register eax. Use unsigned comparison to get negative
  // check for free.
  __ cmp(edx, eax);
  __ j(above_equal, &slow, Label::kNear);

  // Read the argument from the stack and return it.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
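  // eax holds a smi, i.e. the value already shifted left by one, so scaling
  // it with times_2 multiplies the untagged value by 4 = kPointerSize.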
  __ lea(ebx, Operand(ebp, eax, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ mov(ecx, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmp(edx, ecx);
  __ j(above_equal, &slow, Label::kNear);

  // Read the argument from the stack and return it.
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kSmiTag == 0);  // Shifting code depends on these.
  __ lea(ebx, Operand(ebx, ecx, times_2, 0));
  __ neg(edx);
  __ mov(eax, Operand(ebx, edx, times_2, kDisplacement));
  __ ret(0);

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ pop(ebx);  // Return address.
  __ push(edx);
  __ push(ebx);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}


void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
  // esp[0] : return address
  // esp[4] : number of parameters
  // esp[8] : receiver displacement
  // esp[12] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &runtime, Label::kNear);

  // Patch the arguments.length and the parameters pointer.
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kHiddenNewSloppyArguments, 3, 1);
}


void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
  // esp[0] : return address
  // esp[4] : number of parameters (tagged)
  // esp[8] : receiver displacement
  // esp[12] : function

  // ebx = parameter count (tagged)
  __ mov(ebx, Operand(esp, 1 * kPointerSize));

  // Check if the calling frame is an arguments adaptor frame.
  // TODO(rossberg): Factor out some of the bits that are shared with the other
  // Generate* functions.
  Label runtime;
  Label adaptor_frame, try_allocate;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor_frame, Label::kNear);

  // No adaptor, parameter count = argument count.
  __ mov(ecx, ebx);
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame. Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  // ebx = parameter count (tagged)
  // ecx = argument count (tagged)
  // esp[4] = parameter count (tagged)
  // esp[8] = address of receiver argument
  // Compute the mapped parameter count = min(ebx, ecx) in ebx.
  __ cmp(ebx, ecx);
  __ j(less_equal, &try_allocate, Label::kNear);
  __ mov(ebx, ecx);

  __ bind(&try_allocate);

  // Save mapped parameter count.
  __ push(ebx);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  Label no_parameter_map;
  __ test(ebx, ebx);
  __ j(zero, &no_parameter_map, Label::kNear);
  __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize));
  __ bind(&no_parameter_map);

  // 2. Backing store.
  __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ add(ebx, Immediate(Heap::kSloppyArgumentsObjectSize));

  // Do the allocation of all three objects in one go.
  __ Allocate(ebx, eax, edx, edi, &runtime, TAG_OBJECT);

  // eax = address of new object(s) (tagged)
  // ecx = argument count (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[8] = parameter count (tagged)
  // esp[12] = address of receiver argument
  // Get the arguments boilerplate from the current native context into edi.
  Label has_mapped_parameters, copy;
  __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(edi, FieldOperand(edi, GlobalObject::kNativeContextOffset));
  __ mov(ebx, Operand(esp, 0 * kPointerSize));
  __ test(ebx, ebx);
  __ j(not_zero, &has_mapped_parameters, Label::kNear);
  __ mov(edi, Operand(edi,
      Context::SlotOffset(Context::SLOPPY_ARGUMENTS_BOILERPLATE_INDEX)));
  __ jmp(&copy, Label::kNear);

  __ bind(&has_mapped_parameters);
  __ mov(edi, Operand(edi,
      Context::SlotOffset(Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX)));
  __ bind(&copy);

  // eax = address of new object (tagged)
  // ebx = mapped parameter count (tagged)
  // ecx = argument count (tagged)
  // edi = address of boilerplate object (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[8] = parameter count (tagged)
  // esp[12] = address of receiver argument
  // Copy the JS object part.
  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
    __ mov(edx, FieldOperand(edi, i));
    __ mov(FieldOperand(eax, i), edx);
  }

  // Set up the callee in-object property.
  STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
  __ mov(edx, Operand(esp, 4 * kPointerSize));
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                      Heap::kArgumentsCalleeIndex * kPointerSize),
         edx);

  // Use the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                      Heap::kArgumentsLengthIndex * kPointerSize),
         ecx);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, edi will point there, otherwise to the
  // backing store.
  __ lea(edi, Operand(eax, Heap::kSloppyArgumentsObjectSize));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);

  // eax = address of new object (tagged)
  // ebx = mapped parameter count (tagged)
  // ecx = argument count (tagged)
  // edi = address of parameter map or backing store (tagged)
  // esp[0] = mapped parameter count (tagged)
  // esp[8] = parameter count (tagged)
  // esp[12] = address of receiver argument
  // Free a register.
  __ push(eax);

  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  __ test(ebx, ebx);
  __ j(zero, &skip_parameter_map);

  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate()->factory()->sloppy_arguments_elements_map()));
  __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
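  // Adding Smi::FromInt(2) via lea computes, still as a smi, the parameter
  // map's length: the mapped parameter count plus the two header words that
  // hold the context and the backing store pointer.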
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
  __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize));
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameters thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
  Label parameters_loop, parameters_test;
  __ push(ecx);
  __ mov(eax, Operand(esp, 2 * kPointerSize));
  __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
  __ add(ebx, Operand(esp, 4 * kPointerSize));
  __ sub(ebx, eax);
  __ mov(ecx, isolate()->factory()->the_hole_value());
  __ mov(edx, edi);
  __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
  // eax = loop variable (tagged)
  // ebx = mapping index (tagged)
  // ecx = the hole value
  // edx = address of parameter map (tagged)
  // edi = address of backing store (tagged)
  // esp[0] = argument count (tagged)
  // esp[4] = address of new object (tagged)
  // esp[8] = mapped parameter count (tagged)
  // esp[16] = parameter count (tagged)
  // esp[20] = address of receiver argument
  __ jmp(&parameters_test, Label::kNear);

  __ bind(&parameters_loop);
  __ sub(eax, Immediate(Smi::FromInt(1)));
  __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx);
  __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx);
  __ add(ebx, Immediate(Smi::FromInt(1)));
  __ bind(&parameters_test);
  __ test(eax, eax);
  __ j(not_zero, &parameters_loop, Label::kNear);
  __ pop(ecx);

  __ bind(&skip_parameter_map);

  // ecx = argument count (tagged)
  // edi = address of backing store (tagged)
  // esp[0] = address of new object (tagged)
  // esp[4] = mapped parameter count (tagged)
  // esp[12] = parameter count (tagged)
  // esp[16] = address of receiver argument
  // Copy arguments header and remaining slots (if there are any).
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate()->factory()->fixed_array_map()));
  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);

  Label arguments_loop, arguments_test;
  __ mov(ebx, Operand(esp, 1 * kPointerSize));
  __ mov(edx, Operand(esp, 4 * kPointerSize));
  __ sub(edx, ebx);  // Is there a smarter way to do negative scaling?
  __ sub(edx, ebx);
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ sub(edx, Immediate(kPointerSize));
  __ mov(eax, Operand(edx, 0));
  __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax);
  __ add(ebx, Immediate(Smi::FromInt(1)));

  __ bind(&arguments_test);
  __ cmp(ebx, ecx);
  __ j(less, &arguments_loop, Label::kNear);

  // Restore.
  __ pop(eax);  // Address of arguments object.
  __ pop(ebx);  // Parameter count.

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ pop(eax);  // Remove saved parameter count.
  __ mov(Operand(esp, 1 * kPointerSize), ecx);  // Patch argument count.
  __ TailCallRuntime(Runtime::kHiddenNewSloppyArguments, 3, 1);
}


void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  // esp[0] : return address
  // esp[4] : number of parameters
  // esp[8] : receiver displacement
  // esp[12] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
  __ cmp(ecx, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(equal, &adaptor_frame, Label::kNear);

  // Get the length from the frame.
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ jmp(&try_allocate, Label::kNear);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ mov(Operand(esp, 1 * kPointerSize), ecx);
  __ lea(edx, Operand(edx, ecx, times_2,
                      StandardFrameConstants::kCallerSPOffset));
  __ mov(Operand(esp, 2 * kPointerSize), edx);

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ test(ecx, ecx);
  __ j(zero, &add_arguments_object, Label::kNear);
  __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ add(ecx, Immediate(Heap::kStrictArgumentsObjectSize));

  // Do the allocation of both objects in one go.
  __ Allocate(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);

  // Get the arguments boilerplate from the current native context.
  __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(edi, FieldOperand(edi, GlobalObject::kNativeContextOffset));
  const int offset =
      Context::SlotOffset(Context::STRICT_ARGUMENTS_BOILERPLATE_INDEX);
  __ mov(edi, Operand(edi, offset));

  // Copy the JS object part.
  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
    __ mov(ebx, FieldOperand(edi, i));
    __ mov(FieldOperand(eax, i), ebx);
  }

  // Get the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ mov(ecx, Operand(esp, 1 * kPointerSize));
  __ mov(FieldOperand(eax, JSObject::kHeaderSize +
                      Heap::kArgumentsLengthIndex * kPointerSize),
         ecx);

  // If there are no actual arguments, we're done.
  Label done;
  __ test(ecx, ecx);
  __ j(zero, &done, Label::kNear);

  // Get the parameters pointer from the stack.
  __ mov(edx, Operand(esp, 2 * kPointerSize));

  // Set up the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ lea(edi, Operand(eax, Heap::kStrictArgumentsObjectSize));
  __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
  __ mov(FieldOperand(edi, FixedArray::kMapOffset),
         Immediate(isolate()->factory()->fixed_array_map()));

  __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
  // Untag the length for the loop below.
  __ SmiUntag(ecx);

  // Copy the fixed array slots.
  Label loop;
  __ bind(&loop);
  __ mov(ebx, Operand(edx, -1 * kPointerSize));  // Skip receiver.
  __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
  __ add(edi, Immediate(kPointerSize));
  __ sub(edx, Immediate(kPointerSize));
  __ dec(ecx);
  __ j(not_zero, &loop);

  // Return and remove the on-stack parameters.
  __ bind(&done);
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kHiddenNewStrictArguments, 3, 1);
}


Generate(MacroAssembler * masm)1425 void RegExpExecStub::Generate(MacroAssembler* masm) {
1426 // Just jump directly to runtime if native RegExp is not selected at compile
1427 // time, or if regexp entry in generated code has been turned off by a
1428 // runtime switch or at compilation.
1429 #ifdef V8_INTERPRETED_REGEXP
1430 __ TailCallRuntime(Runtime::kHiddenRegExpExec, 4, 1);
1431 #else // V8_INTERPRETED_REGEXP
1432
1433 // Stack frame on entry.
1434 // esp[0]: return address
1435 // esp[4]: last_match_info (expected JSArray)
1436 // esp[8]: previous index
1437 // esp[12]: subject string
1438 // esp[16]: JSRegExp object
1439
1440 static const int kLastMatchInfoOffset = 1 * kPointerSize;
1441 static const int kPreviousIndexOffset = 2 * kPointerSize;
1442 static const int kSubjectOffset = 3 * kPointerSize;
1443 static const int kJSRegExpOffset = 4 * kPointerSize;
1444
1445 Label runtime;
1446 Factory* factory = isolate()->factory();
1447
1448 // Ensure that a RegExp stack is allocated.
1449 ExternalReference address_of_regexp_stack_memory_address =
1450 ExternalReference::address_of_regexp_stack_memory_address(isolate());
1451 ExternalReference address_of_regexp_stack_memory_size =
1452 ExternalReference::address_of_regexp_stack_memory_size(isolate());
1453 __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
1454 __ test(ebx, ebx);
1455 __ j(zero, &runtime);
1456
1457 // Check that the first argument is a JSRegExp object.
1458 __ mov(eax, Operand(esp, kJSRegExpOffset));
1459 STATIC_ASSERT(kSmiTag == 0);
1460 __ JumpIfSmi(eax, &runtime);
1461 __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
1462 __ j(not_equal, &runtime);
1463
1464 // Check that the RegExp has been compiled (data contains a fixed array).
1465 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
1466 if (FLAG_debug_code) {
1467 __ test(ecx, Immediate(kSmiTagMask));
1468 __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected);
1469 __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
1470 __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
1471 }
1472
1473 // ecx: RegExp data (FixedArray)
1474 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
1475 __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
1476 __ cmp(ebx, Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
1477 __ j(not_equal, &runtime);
1478
1479 // ecx: RegExp data (FixedArray)
1480 // Check that the number of captures fit in the static offsets vector buffer.
1481 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
1482 // Check (number_of_captures + 1) * 2 <= offsets vector size
1483 // Or number_of_captures * 2 <= offsets vector size - 2
1484 // Multiplying by 2 comes for free since edx is smi-tagged.
1485 STATIC_ASSERT(kSmiTag == 0);
1486 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
1487 STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
1488 __ cmp(edx, Isolate::kJSRegexpStaticOffsetsVectorSize - 2);
1489 __ j(above, &runtime);
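// Illustration (not part of the stub): edx holds the capture count as a
// smi, which reads as capture_count * 2 as a plain integer, so the check
// above implements (capture_count + 1) * 2 <= offsets vector size. If,
// say, the static offsets vector had 32 entries (an assumption here; the
// value is isolate-defined), regexps with more than 15 captures would
// take the runtime path.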
1490
1491 // Reset offset for possibly sliced string.
1492 __ Move(edi, Immediate(0));
1493 __ mov(eax, Operand(esp, kSubjectOffset));
1494 __ JumpIfSmi(eax, &runtime);
1495 __ mov(edx, eax); // Make a copy of the original subject string.
1496 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
1497 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
1498
1499 // eax: subject string
1500 // edx: subject string
1501 // ebx: subject string instance type
1502 // ecx: RegExp data (FixedArray)
1503 // Handle subject string according to its encoding and representation:
1504 // (1) Sequential two byte? If yes, go to (9).
1505 // (2) Sequential one byte? If yes, go to (6).
1506 // (3) Anything but sequential or cons? If yes, go to (7).
1507 // (4) Cons string. If the string is flat, replace subject with first string.
1508 // Otherwise bailout.
1509 // (5a) Is subject sequential two byte? If yes, go to (9).
1510 // (5b) Is subject external? If yes, go to (8).
1511 // (6) One byte sequential. Load regexp code for one byte.
1512 // (E) Carry on.
1513 /// [...]
1514
1515 // Deferred code at the end of the stub:
1516 // (7) Not a long external string? If yes, go to (10).
1517 // (8) External string. Make it, offset-wise, look like a sequential string.
1518 // (8a) Is the external string one byte? If yes, go to (6).
1519 // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
1520 // (10) Short external string or not a string? If yes, bail out to runtime.
1521 // (11) Sliced string. Replace subject with parent. Go to (5a).
1522
1523 Label seq_one_byte_string /* 6 */, seq_two_byte_string /* 9 */,
1524 external_string /* 8 */, check_underlying /* 5a */,
1525 not_seq_nor_cons /* 7 */, check_code /* E */,
1526 not_long_external /* 10 */;
1527
1528 // (1) Sequential two byte? If yes, go to (9).
1529 __ and_(ebx, kIsNotStringMask |
1530 kStringRepresentationMask |
1531 kStringEncodingMask |
1532 kShortExternalStringMask);
1533 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
1534 __ j(zero, &seq_two_byte_string); // Go to (9).
1535
1536 // (2) Sequential one byte? If yes, go to (6).
1537 // Any other sequential string must be one byte.
1538 __ and_(ebx, Immediate(kIsNotStringMask |
1539 kStringRepresentationMask |
1540 kShortExternalStringMask));
1541 __ j(zero, &seq_one_byte_string, Label::kNear); // Go to (6).
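// Illustration (not part of the stub) of the two mask tests above, using
// the usual instance-type layout (the string tag, sequential tag and
// two-byte tag are all zero): a sequential two-byte string leaves nothing
// behind under the first mask, while a sequential one-byte string leaves
// only the encoding bit, which the second, narrower mask discards. Any
// surviving representation bits mean cons, sliced or external.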
1542
1543 // (3) Anything but sequential or cons? If yes, go to (7).
1544 // We check whether the subject string is a cons, since sequential strings
1545 // have already been covered.
1546 STATIC_ASSERT(kConsStringTag < kExternalStringTag);
1547 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
1548 STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
1549 STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
1550 __ cmp(ebx, Immediate(kExternalStringTag));
1551 __ j(greater_equal, &not_seq_nor_cons); // Go to (7).
1552
1553 // (4) Cons string. Check that it's flat.
1554 // Replace subject with first string and reload instance type.
1555 __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string());
1556 __ j(not_equal, &runtime);
1557 __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
1558 __ bind(&check_underlying);
1559 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
1560 __ mov(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
1561
1562 // (5a) Is subject sequential two byte? If yes, go to (9).
1563 __ test_b(ebx, kStringRepresentationMask | kStringEncodingMask);
1564 STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
1565 __ j(zero, &seq_two_byte_string); // Go to (9).
1566 // (5b) Is subject external? If yes, go to (8).
1567 __ test_b(ebx, kStringRepresentationMask);
1568 // The underlying external string is never a short external string.
1569 STATIC_ASSERT(ExternalString::kMaxShortLength < ConsString::kMinLength);
1570 STATIC_ASSERT(ExternalString::kMaxShortLength < SlicedString::kMinLength);
1571 __ j(not_zero, &external_string); // Go to (8).
1572
1573 // eax: sequential subject string (or look-alike, external string)
1574 // edx: original subject string
1575 // ecx: RegExp data (FixedArray)
1576 // (6) One byte sequential. Load regexp code for one byte.
1577 __ bind(&seq_one_byte_string);
1578 // Load previous index and check range before edx is overwritten. We have
1579 // to use edx instead of eax here because it might have been only made to
1580 // look like a sequential string when it actually is an external string.
1581 __ mov(ebx, Operand(esp, kPreviousIndexOffset));
1582 __ JumpIfNotSmi(ebx, &runtime);
1583 __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
1584 __ j(above_equal, &runtime);
1585 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataAsciiCodeOffset));
1586 __ Move(ecx, Immediate(1)); // Type is one byte.
1587
1588 // (E) Carry on. String handling is done.
1589 __ bind(&check_code);
1590 // edx: irregexp code
1591 // Check that the irregexp code has been generated for the actual string
1592 // encoding. If it has, the field contains a code object; otherwise it
1593 // contains a smi (code flushing support).
1594 __ JumpIfSmi(edx, &runtime);
1595
1596 // eax: subject string
1597 // ebx: previous index (smi)
1598 // edx: code
1599 // ecx: encoding of subject string (1 if ASCII, 0 if two_byte);
1600 // All checks done. Now push arguments for native regexp code.
1601 Counters* counters = isolate()->counters();
1602 __ IncrementCounter(counters->regexp_entry_native(), 1);
1603
1604 // Isolates: note we add an additional parameter here (isolate pointer).
1605 static const int kRegExpExecuteArguments = 9;
1606 __ EnterApiExitFrame(kRegExpExecuteArguments);
1607
1608 // Argument 9: Pass current isolate address.
1609 __ mov(Operand(esp, 8 * kPointerSize),
1610 Immediate(ExternalReference::isolate_address(isolate())));
1611
1612 // Argument 8: Indicate that this is a direct call from JavaScript.
1613 __ mov(Operand(esp, 7 * kPointerSize), Immediate(1));
1614
1615 // Argument 7: Start (high end) of backtracking stack memory area.
1616 __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
1617 __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
1618 __ mov(Operand(esp, 6 * kPointerSize), esi);
1619
1620 // Argument 6: Set the number of capture registers to zero to force global
1621 // regexps to behave as non-global. This does not affect non-global regexps.
1622 __ mov(Operand(esp, 5 * kPointerSize), Immediate(0));
1623
1624 // Argument 5: static offsets vector buffer.
1625 __ mov(Operand(esp, 4 * kPointerSize),
1626 Immediate(ExternalReference::address_of_static_offsets_vector(
1627 isolate())));
1628
1629 // Argument 2: Previous index.
1630 __ SmiUntag(ebx);
1631 __ mov(Operand(esp, 1 * kPointerSize), ebx);
1632
1633 // Argument 1: Original subject string.
1634 // The original subject is in the previous stack frame. Therefore we have to
1635 // use ebp, which points exactly to one pointer size below the previous esp.
1636 // (Because creating a new stack frame pushes the previous ebp onto the stack
1637 // and thereby moves up esp by one kPointerSize.)
1638 __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
1639 __ mov(Operand(esp, 0 * kPointerSize), esi);
1640
1641 // esi: original subject string
1642 // eax: underlying subject string
1643 // ebx: previous index
1644 // ecx: encoding of subject string (1 if ASCII 0 if two_byte);
1645 // edx: code
1646 // Argument 4: End of string data
1647 // Argument 3: Start of string data
1648 // Prepare start and end index of the input.
1649 // Load the length from the original sliced string if that is the case.
1650 __ mov(esi, FieldOperand(esi, String::kLengthOffset));
1651 __ add(esi, edi); // Calculate input end wrt offset.
1652 __ SmiUntag(edi);
1653 __ add(ebx, edi); // Calculate input start wrt offset.
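// Illustration (not part of the stub): for a sliced subject, edi was
// loaded with the slice offset in (11) and eax was replaced with the
// parent string, so both indices are rebased onto the parent. E.g. a
// slice starting at parent offset 4 with previous index 1 scans the
// parent from index 5 up to offset + slice length.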
1654
1655 // ebx: start index of the input string
1656 // esi: end index of the input string
1657 Label setup_two_byte, setup_rest;
1658 __ test(ecx, ecx);
1659 __ j(zero, &setup_two_byte, Label::kNear);
1660 __ SmiUntag(esi);
1661 __ lea(ecx, FieldOperand(eax, esi, times_1, SeqOneByteString::kHeaderSize));
1662 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
1663 __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqOneByteString::kHeaderSize));
1664 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
1665 __ jmp(&setup_rest, Label::kNear);
1666
1667 __ bind(&setup_two_byte);
1668 STATIC_ASSERT(kSmiTag == 0);
1669 STATIC_ASSERT(kSmiTagSize == 1); // esi is a smi, i.e. already doubled.
1670 __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize));
1671 __ mov(Operand(esp, 3 * kPointerSize), ecx); // Argument 4.
1672 __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
1673 __ mov(Operand(esp, 2 * kPointerSize), ecx); // Argument 3.
1674
1675 __ bind(&setup_rest);
1676
1677 // Locate the code entry and call it.
1678 __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
1679 __ call(edx);
1680
1681 // Drop arguments and come back to JS mode.
1682 __ LeaveApiExitFrame(true);
1683
1684 // Check the result.
1685 Label success;
1686 __ cmp(eax, 1);
1687 // We expect exactly one result since we force the called regexp to behave
1688 // as non-global.
1689 __ j(equal, &success);
1690 Label failure;
1691 __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
1692 __ j(equal, &failure);
1693 __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
1694 // If not exception it can only be retry. Handle that in the runtime system.
1695 __ j(not_equal, &runtime);
1696 // Result must now be exception. If there is no pending exception already, a
1697 // stack overflow (on the backtrack stack) was detected in RegExp code, but
1698 // the exception has not yet been created. Handle that in the runtime system.
1699 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
1700 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
1701 isolate());
1702 __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
1703 __ mov(eax, Operand::StaticVariable(pending_exception));
1704 __ cmp(edx, eax);
1705 __ j(equal, &runtime);
1706 // For exception, throw the exception again.
1707
1708 // Clear the pending exception variable.
1709 __ mov(Operand::StaticVariable(pending_exception), edx);
1710
1711 // Special handling of termination exceptions which are uncatchable
1712 // by javascript code.
1713 __ cmp(eax, factory->termination_exception());
1714 Label throw_termination_exception;
1715 __ j(equal, &throw_termination_exception, Label::kNear);
1716
1717 // Handle normal exception by following handler chain.
1718 __ Throw(eax);
1719
1720 __ bind(&throw_termination_exception);
1721 __ ThrowUncatchable(eax);
1722
1723 __ bind(&failure);
1724 // For failure to match, return null.
1725 __ mov(eax, factory->null_value());
1726 __ ret(4 * kPointerSize);
1727
1728 // Load RegExp data.
1729 __ bind(&success);
1730 __ mov(eax, Operand(esp, kJSRegExpOffset));
1731 __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
1732 __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
1733 // Calculate number of capture registers (number_of_captures + 1) * 2.
1734 STATIC_ASSERT(kSmiTag == 0);
1735 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
1736 __ add(edx, Immediate(2)); // edx was a smi.
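// Illustration (not part of the stub): the smi in edx reads as
// capture_count * 2, so adding 2 yields (capture_count + 1) * 2, the
// number of capture registers. E.g. capture_count == 3 gives 8 registers:
// one (start, end) pair for the whole match plus one pair per capture.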
1737
1738 // edx: Number of capture registers
1739 // Load last_match_info which is still known to be a fast case JSArray.
1740 // Check that the fourth object is a JSArray object.
1741 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
1742 __ JumpIfSmi(eax, &runtime);
1743 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
1744 __ j(not_equal, &runtime);
1745 // Check that the JSArray is in fast case.
1746 __ mov(ebx, FieldOperand(eax, JSArray::kElementsOffset));
1747 __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
1748 __ cmp(eax, factory->fixed_array_map());
1749 __ j(not_equal, &runtime);
1750 // Check that the last match info has space for the capture registers and the
1751 // additional information.
1752 __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
1753 __ SmiUntag(eax);
1754 __ sub(eax, Immediate(RegExpImpl::kLastMatchOverhead));
1755 __ cmp(edx, eax);
1756 __ j(greater, &runtime);
1757
1758 // ebx: last_match_info backing store (FixedArray)
1759 // edx: number of capture registers
1760 // Store the capture count.
1761 __ SmiTag(edx); // Number of capture registers to smi.
1762 __ mov(FieldOperand(ebx, RegExpImpl::kLastCaptureCountOffset), edx);
1763 __ SmiUntag(edx); // Number of capture registers back from smi.
1764 // Store last subject and last input.
1765 __ mov(eax, Operand(esp, kSubjectOffset));
1766 __ mov(ecx, eax);
1767 __ mov(FieldOperand(ebx, RegExpImpl::kLastSubjectOffset), eax);
1768 __ RecordWriteField(ebx,
1769 RegExpImpl::kLastSubjectOffset,
1770 eax,
1771 edi,
1772 kDontSaveFPRegs);
1773 __ mov(eax, ecx);
1774 __ mov(FieldOperand(ebx, RegExpImpl::kLastInputOffset), eax);
1775 __ RecordWriteField(ebx,
1776 RegExpImpl::kLastInputOffset,
1777 eax,
1778 edi,
1779 kDontSaveFPRegs);
1780
1781 // Get the static offsets vector filled by the native regexp code.
1782 ExternalReference address_of_static_offsets_vector =
1783 ExternalReference::address_of_static_offsets_vector(isolate());
1784 __ mov(ecx, Immediate(address_of_static_offsets_vector));
1785
1786 // ebx: last_match_info backing store (FixedArray)
1787 // ecx: offsets vector
1788 // edx: number of capture registers
1789 Label next_capture, done;
1790 // Capture register counter starts from number of capture registers and
1791 // counts down until wrapping after zero.
1792 __ bind(&next_capture);
1793 __ sub(edx, Immediate(1));
1794 __ j(negative, &done, Label::kNear);
1795 // Read the value from the static offsets vector buffer.
1796 __ mov(edi, Operand(ecx, edx, times_int_size, 0));
1797 __ SmiTag(edi);
1798 // Store the smi value in the last match info.
1799 __ mov(FieldOperand(ebx,
1800 edx,
1801 times_pointer_size,
1802 RegExpImpl::kFirstCaptureOffset),
1803 edi);
1804 __ jmp(&next_capture);
1805 __ bind(&done);
1806
1807 // Return last match info.
1808 __ mov(eax, Operand(esp, kLastMatchInfoOffset));
1809 __ ret(4 * kPointerSize);
1810
1811 // Do the runtime call to execute the regexp.
1812 __ bind(&runtime);
1813 __ TailCallRuntime(Runtime::kHiddenRegExpExec, 4, 1);
1814
1815 // Deferred code for string handling.
1816 // (7) Not a long external string? If yes, go to (10).
1817 __ bind(&not_seq_nor_cons);
1818 // Compare flags are still set from (3).
1819 __ j(greater, &not_long_external, Label::kNear); // Go to (10).
1820
1821 // (8) External string. Short external strings have been ruled out.
1822 __ bind(&external_string);
1823 // Reload instance type.
1824 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
1825 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
1826 if (FLAG_debug_code) {
1827 // Assert that we do not have a cons or slice (indirect strings) here.
1828 // Sequential strings have already been ruled out.
1829 __ test_b(ebx, kIsIndirectStringMask);
1830 __ Assert(zero, kExternalStringExpectedButNotFound);
1831 }
1832 __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
1833 // Move the pointer so that offset-wise, it looks like a sequential string.
1834 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
1835 __ sub(eax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
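// Illustration (not part of the stub): after the subtraction eax no
// longer points at a real object, but later accesses of the form
//
//   FieldOperand(eax, index, times_n, SeqString::kHeaderSize)
//     == eax + index * n + kHeaderSize - kHeapObjectTag
//
// land exactly on resource_data + index * n, the same arithmetic used
// for genuinely sequential strings.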
1836 STATIC_ASSERT(kTwoByteStringTag == 0);
1837 // (8a) Is the external string one byte? If yes, go to (6).
1838 __ test_b(ebx, kStringEncodingMask);
1839 __ j(not_zero, &seq_one_byte_string); // Goto (6).
1840
1841 // eax: sequential subject string (or look-alike, external string)
1842 // edx: original subject string
1843 // ecx: RegExp data (FixedArray)
1844 // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
1845 __ bind(&seq_two_byte_string);
1846 // Load previous index and check range before edx is overwritten. We have
1847 // to use edx instead of eax here because it might have been only made to
1848 // look like a sequential string when it actually is an external string.
1849 __ mov(ebx, Operand(esp, kPreviousIndexOffset));
1850 __ JumpIfNotSmi(ebx, &runtime);
1851 __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
1852 __ j(above_equal, &runtime);
1853 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
1854 __ Move(ecx, Immediate(0)); // Type is two byte.
1855 __ jmp(&check_code); // Go to (E).
1856
1857 // (10) Not a string or a short external string? If yes, bail out to runtime.
1858 __ bind(&not_long_external);
1859 // Catch non-string subject or short external string.
1860 STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
1861 __ test(ebx, Immediate(kIsNotStringMask | kShortExternalStringTag));
1862 __ j(not_zero, &runtime);
1863
1864 // (11) Sliced string. Replace subject with parent. Go to (5a).
1865 // Load offset into edi and replace subject string with parent.
1866 __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
1867 __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
1868 __ jmp(&check_underlying); // Go to (5a).
1869 #endif // V8_INTERPRETED_REGEXP
1870 }
1871
1872
1873 static int NegativeComparisonResult(Condition cc) {
1874 ASSERT(cc != equal);
1875 ASSERT((cc == less) || (cc == less_equal)
1876 || (cc == greater) || (cc == greater_equal));
1877 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
1878 }
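// Illustration (not part of the original source): the helper picks the
// answer that makes the comparison come out false, which is what is
// needed when an operand is undefined or NaN:
//
//   NegativeComparisonResult(less)          == GREATER  // x <  y is false
//   NegativeComparisonResult(greater_equal) == LESS     // x >= y is false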
1879
1880
1881 static void CheckInputType(MacroAssembler* masm,
1882 Register input,
1883 CompareIC::State expected,
1884 Label* fail) {
1885 Label ok;
1886 if (expected == CompareIC::SMI) {
1887 __ JumpIfNotSmi(input, fail);
1888 } else if (expected == CompareIC::NUMBER) {
1889 __ JumpIfSmi(input, &ok);
1890 __ cmp(FieldOperand(input, HeapObject::kMapOffset),
1891 Immediate(masm->isolate()->factory()->heap_number_map()));
1892 __ j(not_equal, fail);
1893 }
1894 // We could be strict about internalized/non-internalized here, but as long as
1895 // hydrogen doesn't care, the stub doesn't have to care either.
1896 __ bind(&ok);
1897 }
1898
1899
1900 static void BranchIfNotInternalizedString(MacroAssembler* masm,
1901 Label* label,
1902 Register object,
1903 Register scratch) {
1904 __ JumpIfSmi(object, label);
1905 __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
1906 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
1907 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
1908 __ test(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
1909 __ j(not_zero, label);
1910 }
1911
1912
1913 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
1914 Label check_unequal_objects;
1915 Condition cc = GetCondition();
1916
1917 Label miss;
1918 CheckInputType(masm, edx, left_, &miss);
1919 CheckInputType(masm, eax, right_, &miss);
1920
1921 // Compare two smis.
1922 Label non_smi, smi_done;
1923 __ mov(ecx, edx);
1924 __ or_(ecx, eax);
1925 __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
1926 __ sub(edx, eax); // Return on the result of the subtraction.
1927 __ j(no_overflow, &smi_done, Label::kNear);
1928 __ not_(edx); // Correct sign in case of overflow. edx is never 0 here.
1929 __ bind(&smi_done);
1930 __ mov(eax, edx);
1931 __ ret(0);
1932 __ bind(&non_smi);
1933
1934 // NOTICE! This code is only reached after a smi-fast-case check, so
1935 // it is certain that at least one operand isn't a smi.
1936
1937 // Identical objects can be compared fast, but there are some tricky cases
1938 // for NaN and undefined.
1939 Label generic_heap_number_comparison;
1940 {
1941 Label not_identical;
1942 __ cmp(eax, edx);
1943 __ j(not_equal, &not_identical);
1944
1945 if (cc != equal) {
1946 // Check for undefined. undefined OP undefined is false even though
1947 // undefined == undefined.
1948 Label check_for_nan;
1949 __ cmp(edx, isolate()->factory()->undefined_value());
1950 __ j(not_equal, &check_for_nan, Label::kNear);
1951 __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
1952 __ ret(0);
1953 __ bind(&check_for_nan);
1954 }
1955
1956 // Test for NaN. Compare heap numbers in a general way,
1957 // to handle NaNs correctly.
1958 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
1959 Immediate(isolate()->factory()->heap_number_map()));
1960 __ j(equal, &generic_heap_number_comparison, Label::kNear);
1961 if (cc != equal) {
1962 // Call runtime on identical JSObjects. Otherwise return equal.
1963 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
1964 __ j(above_equal, &not_identical);
1965 }
1966 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
1967 __ ret(0);
1968
1969
1970 __ bind(&not_identical);
1971 }
1972
1973 // Strict equality can quickly decide whether objects are equal.
1974 // Non-strict object equality is slower, so it is handled later in the stub.
1975 if (cc == equal && strict()) {
1976 Label slow; // Fallthrough label.
1977 Label not_smis;
1978 // If we're doing a strict equality comparison, we don't have to do
1979 // type conversion, so we generate code to do fast comparison for objects
1980 // and oddballs. Non-smi numbers and strings still go through the usual
1981 // slow-case code.
1982 // If either is a Smi (we know that not both are), then they can only
1983 // be equal if the other is a HeapNumber. If so, use the slow case.
1984 STATIC_ASSERT(kSmiTag == 0);
1985 ASSERT_EQ(0, Smi::FromInt(0));
1986 __ mov(ecx, Immediate(kSmiTagMask));
1987 __ and_(ecx, eax);
1988 __ test(ecx, edx);
1989 __ j(not_zero, &not_smis, Label::kNear);
1990 // One operand is a smi.
1991
1992 // Check whether the non-smi is a heap number.
1993 STATIC_ASSERT(kSmiTagMask == 1);
1994 // ecx still holds eax & kSmiTagMask, which is either zero or one.
1995 __ sub(ecx, Immediate(0x01));
1996 __ mov(ebx, edx);
1997 __ xor_(ebx, eax);
1998 __ and_(ebx, ecx); // ebx holds either 0 or eax ^ edx.
1999 __ xor_(ebx, eax);
2000 // if eax was smi, ebx is now edx, else eax.
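// A minimal C sketch (illustrative only) of the branchless select above,
// assuming kSmiTagMask == 1 and that exactly one of a, b is a smi:
//
//   uint32_t mask = (a & 1) - 1;           // ~0 if a is a smi, else 0
//   uint32_t sel  = a ^ ((a ^ b) & mask);  // b if a is a smi, else a
//
// so sel always ends up holding the non-smi operand.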
2001
2002 // Check if the non-smi operand is a heap number.
2003 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
2004 Immediate(isolate()->factory()->heap_number_map()));
2005 // If heap number, handle it in the slow case.
2006 __ j(equal, &slow, Label::kNear);
2007 // Return non-equal (ebx is not zero)
2008 __ mov(eax, ebx);
2009 __ ret(0);
2010
2011 __ bind(&not_smis);
2012 // If either operand is a JSObject or an oddball value, then they are not
2013 // equal since their pointers are different
2014 // There is no test for undetectability in strict equality.
2015
2016 // Get the type of the first operand.
2017 // If the first object is a JS object, we have done pointer comparison.
2018 Label first_non_object;
2019 STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
2020 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
2021 __ j(below, &first_non_object, Label::kNear);
2022
2023 // Return non-zero (eax is not zero)
2024 Label return_not_equal;
2025 STATIC_ASSERT(kHeapObjectTag != 0);
2026 __ bind(&return_not_equal);
2027 __ ret(0);
2028
2029 __ bind(&first_non_object);
2030 // Check for oddballs: true, false, null, undefined.
2031 __ CmpInstanceType(ecx, ODDBALL_TYPE);
2032 __ j(equal, &return_not_equal);
2033
2034 __ CmpObjectType(edx, FIRST_SPEC_OBJECT_TYPE, ecx);
2035 __ j(above_equal, &return_not_equal);
2036
2037 // Check for oddballs: true, false, null, undefined.
2038 __ CmpInstanceType(ecx, ODDBALL_TYPE);
2039 __ j(equal, &return_not_equal);
2040
2041 // Fall through to the general case.
2042 __ bind(&slow);
2043 }
2044
2045 // Generate the number comparison code.
2046 Label non_number_comparison;
2047 Label unordered;
2048 __ bind(&generic_heap_number_comparison);
2049
2050 FloatingPointHelper::LoadSSE2Operands(masm, &non_number_comparison);
2051 __ ucomisd(xmm0, xmm1);
2052 // Don't base result on EFLAGS when a NaN is involved.
2053 __ j(parity_even, &unordered, Label::kNear);
2054
2055 __ mov(eax, 0); // equal
2056 __ mov(ecx, Immediate(Smi::FromInt(1)));
2057 __ cmov(above, eax, ecx);
2058 __ mov(ecx, Immediate(Smi::FromInt(-1)));
2059 __ cmov(below, eax, ecx);
2060 __ ret(0);
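// Illustration (not part of the stub): the two cmovs materialize the
// ucomisd outcome as a smi in eax:
//
//   xmm0 >  xmm1  ->  eax = Smi::FromInt(1)   (above)
//   xmm0 <  xmm1  ->  eax = Smi::FromInt(-1)  (below)
//   xmm0 == xmm1  ->  eax = 0                 (neither cmov fires)
//
// NaN operands never reach this point; parity_even routed them to
// &unordered above.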
2061
2062 // If one of the numbers was NaN, then the result is always false.
2063 // The cc is never not-equal.
2064 __ bind(&unordered);
2065 ASSERT(cc != not_equal);
2066 if (cc == less || cc == less_equal) {
2067 __ mov(eax, Immediate(Smi::FromInt(1)));
2068 } else {
2069 __ mov(eax, Immediate(Smi::FromInt(-1)));
2070 }
2071 __ ret(0);
2072
2073 // The number comparison code did not provide a valid result.
2074 __ bind(&non_number_comparison);
2075
2076 // Fast negative check for internalized-to-internalized equality.
2077 Label check_for_strings;
2078 if (cc == equal) {
2079 BranchIfNotInternalizedString(masm, &check_for_strings, eax, ecx);
2080 BranchIfNotInternalizedString(masm, &check_for_strings, edx, ecx);
2081
2082 // We've already checked for object identity, so if both operands
2083 // are internalized they aren't equal. Register eax already holds a
2084 // non-zero value, which indicates not equal, so just return.
2085 __ ret(0);
2086 }
2087
2088 __ bind(&check_for_strings);
2089
2090 __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx,
2091 &check_unequal_objects);
2092
2093 // Inline comparison of ASCII strings.
2094 if (cc == equal) {
2095 StringCompareStub::GenerateFlatAsciiStringEquals(masm,
2096 edx,
2097 eax,
2098 ecx,
2099 ebx);
2100 } else {
2101 StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
2102 edx,
2103 eax,
2104 ecx,
2105 ebx,
2106 edi);
2107 }
2108 #ifdef DEBUG
2109 __ Abort(kUnexpectedFallThroughFromStringComparison);
2110 #endif
2111
2112 __ bind(&check_unequal_objects);
2113 if (cc == equal && !strict()) {
2114 // Non-strict equality. Objects are unequal if
2115 // they are both JSObjects and not undetectable,
2116 // and their pointers are different.
2117 Label not_both_objects;
2118 Label return_unequal;
2119 // At most one is a smi, so we can test for smi by adding the two.
2120 // A smi plus a heap object has the low bit set, a heap object plus
2121 // a heap object has the low bit clear.
2122 STATIC_ASSERT(kSmiTag == 0);
2123 STATIC_ASSERT(kSmiTagMask == 1);
2124 __ lea(ecx, Operand(eax, edx, times_1, 0));
2125 __ test(ecx, Immediate(kSmiTagMask));
2126 __ j(not_zero, &not_both_objects, Label::kNear);
2127 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
2128 __ j(below, &not_both_objects, Label::kNear);
2129 __ CmpObjectType(edx, FIRST_SPEC_OBJECT_TYPE, ebx);
2130 __ j(below, &not_both_objects, Label::kNear);
2131 // We do not bail out after this point. Both are JSObjects, and
2132 // they are equal if and only if both are undetectable.
2133 // The and of the undetectable flags is 1 if and only if they are equal.
2134 __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
2135 1 << Map::kIsUndetectable);
2136 __ j(zero, &return_unequal, Label::kNear);
2137 __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
2138 1 << Map::kIsUndetectable);
2139 __ j(zero, &return_unequal, Label::kNear);
2140 // The objects are both undetectable, so they both compare as the value
2141 // undefined, and are equal.
2142 __ Move(eax, Immediate(EQUAL));
2143 __ bind(&return_unequal);
2144 // Return non-equal by returning the non-zero object pointer in eax,
2145 // or return equal if we fell through to here.
2146 __ ret(0);
2147 __ bind(&not_both_objects);
2148 }
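// Illustration (not part of the stub): undetectable objects let host
// objects such as document.all masquerade as undefined, e.g.
//
//   document.all == undefined   // true, document.all is undetectable
//
// and, as generated here, two distinct undetectable objects also compare
// equal under ==, since both behave like undefined.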
2149
2150 // Push arguments below the return address.
2151 __ pop(ecx);
2152 __ push(edx);
2153 __ push(eax);
2154
2155 // Figure out which native to call and setup the arguments.
2156 Builtins::JavaScript builtin;
2157 if (cc == equal) {
2158 builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
2159 } else {
2160 builtin = Builtins::COMPARE;
2161 __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
2162 }
2163
2164 // Restore return address on the stack.
2165 __ push(ecx);
2166
2167 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
2168 // tagged as a small integer.
2169 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
2170
2171 __ bind(&miss);
2172 GenerateMiss(masm);
2173 }
2174
2175
2176 static void GenerateRecordCallTarget(MacroAssembler* masm) {
2177 // Cache the called function in a feedback vector slot. Cache states
2178 // are uninitialized, monomorphic (indicated by a JSFunction), and
2179 // megamorphic.
2180 // eax : number of arguments to the construct function
2181 // ebx : Feedback vector
2182 // edx : slot in feedback vector (Smi)
2183 // edi : the function to call
2184 Isolate* isolate = masm->isolate();
2185 Label initialize, done, miss, megamorphic, not_array_function;
2186
2187 // Load the cache state into ecx.
2188 __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
2189 FixedArray::kHeaderSize));
2190
2191 // A monomorphic cache hit or an already megamorphic state: invoke the
2192 // function without changing the state.
2193 __ cmp(ecx, edi);
2194 __ j(equal, &done, Label::kFar);
2195 __ cmp(ecx, Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
2196 __ j(equal, &done, Label::kFar);
2197
2198 if (!FLAG_pretenuring_call_new) {
2199 // If we came here, we need to see if we are the array function.
2200 // If we didn't have a matching function, and we didn't find the megamorphic
2201 // sentinel, then we have in the slot either some other function or an
2202 // AllocationSite. Do a map check on the object in ecx.
2203 Handle<Map> allocation_site_map = isolate->factory()->allocation_site_map();
2204 __ cmp(FieldOperand(ecx, 0), Immediate(allocation_site_map));
2205 __ j(not_equal, &miss);
2206
2207 // Make sure the function is the Array() function
2208 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
2209 __ cmp(edi, ecx);
2210 __ j(not_equal, &megamorphic);
2211 __ jmp(&done, Label::kFar);
2212 }
2213
2214 __ bind(&miss);
2215
2216 // A monomorphic miss (i.e., here the cache is not uninitialized) goes
2217 // megamorphic.
2218 __ cmp(ecx, Immediate(TypeFeedbackInfo::UninitializedSentinel(isolate)));
2219 __ j(equal, &initialize);
2220 // MegamorphicSentinel is an immortal immovable object (undefined) so no
2221 // write-barrier is needed.
2222 __ bind(&megamorphic);
2223 __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
2224 FixedArray::kHeaderSize),
2225 Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
2226 __ jmp(&done, Label::kFar);
2227
2228 // An uninitialized cache is patched with the function or sentinel to
2229 // indicate the ElementsKind if function is the Array constructor.
2230 __ bind(&initialize);
2231 if (!FLAG_pretenuring_call_new) {
2232 // Make sure the function is the Array() function
2233 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
2234 __ cmp(edi, ecx);
2235 __ j(not_equal, &not_array_function);
2236
2237 // The target function is the Array constructor,
2238 // Create an AllocationSite if we don't already have it, store it in the
2239 // slot.
2240 {
2241 FrameScope scope(masm, StackFrame::INTERNAL);
2242
2243 // Arguments register must be smi-tagged to call out.
2244 __ SmiTag(eax);
2245 __ push(eax);
2246 __ push(edi);
2247 __ push(edx);
2248 __ push(ebx);
2249
2250 CreateAllocationSiteStub create_stub(isolate);
2251 __ CallStub(&create_stub);
2252
2253 __ pop(ebx);
2254 __ pop(edx);
2255 __ pop(edi);
2256 __ pop(eax);
2257 __ SmiUntag(eax);
2258 }
2259 __ jmp(&done);
2260
2261 __ bind(&not_array_function);
2262 }
2263
2264 __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
2265 FixedArray::kHeaderSize),
2266 edi);
2267 // We won't need edx or ebx anymore, just save edi
2268 __ push(edi);
2269 __ push(ebx);
2270 __ push(edx);
2271 __ RecordWriteArray(ebx, edi, edx, kDontSaveFPRegs,
2272 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
2273 __ pop(edx);
2274 __ pop(ebx);
2275 __ pop(edi);
2276
2277 __ bind(&done);
2278 }
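// Sketch (not part of the original source) of the feedback-slot state
// machine implemented above:
//
//   uninitialized --first call-------> monomorphic (JSFunction, or an
//                                      AllocationSite for Array())
//   monomorphic   --same target------> monomorphic (no write needed)
//   monomorphic   --different target-> megamorphic (sentinel, no barrier)
//   megamorphic   --any call---------> megamorphic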
2279
2280
2281 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
2282 // Do not transform the receiver for strict mode functions.
2283 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2284 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset),
2285 1 << SharedFunctionInfo::kStrictModeBitWithinByte);
2286 __ j(not_equal, cont);
2287
2288 // Do not transform the receiver for natives (shared already in ecx).
2289 __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
2290 1 << SharedFunctionInfo::kNativeBitWithinByte);
2291 __ j(not_equal, cont);
2292 }
2293
2294
2295 static void EmitSlowCase(Isolate* isolate,
2296 MacroAssembler* masm,
2297 int argc,
2298 Label* non_function) {
2299 // Check for function proxy.
2300 __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
2301 __ j(not_equal, non_function);
2302 __ pop(ecx);
2303 __ push(edi); // put proxy as additional argument under return address
2304 __ push(ecx);
2305 __ Move(eax, Immediate(argc + 1));
2306 __ Move(ebx, Immediate(0));
2307 __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
2308 {
2309 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
2310 __ jmp(adaptor, RelocInfo::CODE_TARGET);
2311 }
2312
2313 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
2314 // of the original receiver from the call site).
2315 __ bind(non_function);
2316 __ mov(Operand(esp, (argc + 1) * kPointerSize), edi);
2317 __ Move(eax, Immediate(argc));
2318 __ Move(ebx, Immediate(0));
2319 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
2320 Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
2321 __ jmp(adaptor, RelocInfo::CODE_TARGET);
2322 }
2323
2324
2325 static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
2326 // Wrap the receiver and patch it back onto the stack.
2327 { FrameScope frame_scope(masm, StackFrame::INTERNAL);
2328 __ push(edi);
2329 __ push(eax);
2330 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
2331 __ pop(edi);
2332 }
2333 __ mov(Operand(esp, (argc + 1) * kPointerSize), eax);
2334 __ jmp(cont);
2335 }
2336
2337
2338 static void CallFunctionNoFeedback(MacroAssembler* masm,
2339 int argc, bool needs_checks,
2340 bool call_as_method) {
2341 // edi : the function to call
2342 Label slow, non_function, wrap, cont;
2343
2344 if (needs_checks) {
2345 // Check that the function really is a JavaScript function.
2346 __ JumpIfSmi(edi, &non_function);
2347
2348 // Goto slow case if we do not have a function.
2349 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2350 __ j(not_equal, &slow);
2351 }
2352
2353 // Fast-case: Just invoke the function.
2354 ParameterCount actual(argc);
2355
2356 if (call_as_method) {
2357 if (needs_checks) {
2358 EmitContinueIfStrictOrNative(masm, &cont);
2359 }
2360
2361 // Load the receiver from the stack.
2362 __ mov(eax, Operand(esp, (argc + 1) * kPointerSize));
2363
2364 if (needs_checks) {
2365 __ JumpIfSmi(eax, &wrap);
2366
2367 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
2368 __ j(below, &wrap);
2369 } else {
2370 __ jmp(&wrap);
2371 }
2372
2373 __ bind(&cont);
2374 }
2375
2376 __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());
2377
2378 if (needs_checks) {
2379 // Slow-case: Non-function called.
2380 __ bind(&slow);
2381 // (non_function is bound in EmitSlowCase)
2382 EmitSlowCase(masm->isolate(), masm, argc, &non_function);
2383 }
2384
2385 if (call_as_method) {
2386 __ bind(&wrap);
2387 EmitWrapCase(masm, argc, &cont);
2388 }
2389 }
2390
2391
2392 void CallFunctionStub::Generate(MacroAssembler* masm) {
2393 CallFunctionNoFeedback(masm, argc_, NeedsChecks(), CallAsMethod());
2394 }
2395
2396
2397 void CallConstructStub::Generate(MacroAssembler* masm) {
2398 // eax : number of arguments
2399 // ebx : feedback vector
2400 // edx : (only if ebx is not the megamorphic symbol) slot in feedback
2401 // vector (Smi)
2402 // edi : constructor function
2403 Label slow, non_function_call;
2404
2405 // Check that function is not a smi.
2406 __ JumpIfSmi(edi, &non_function_call);
2407 // Check that function is a JSFunction.
2408 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2409 __ j(not_equal, &slow);
2410
2411 if (RecordCallTarget()) {
2412 GenerateRecordCallTarget(masm);
2413
2414 if (FLAG_pretenuring_call_new) {
2415 // Put the AllocationSite from the feedback vector into ebx.
2416 // By adding kPointerSize we encode that we know the AllocationSite
2417 // entry is at the feedback vector slot given by edx + 1.
2418 __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
2419 FixedArray::kHeaderSize + kPointerSize));
2420 } else {
2421 Label feedback_register_initialized;
2422 // Put the AllocationSite from the feedback vector into ebx, or undefined.
2423 __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
2424 FixedArray::kHeaderSize));
2425 Handle<Map> allocation_site_map =
2426 isolate()->factory()->allocation_site_map();
2427 __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
2428 __ j(equal, &feedback_register_initialized);
2429 __ mov(ebx, isolate()->factory()->undefined_value());
2430 __ bind(&feedback_register_initialized);
2431 }
2432
2433 __ AssertUndefinedOrAllocationSite(ebx);
2434 }
2435
2436 // Jump to the function-specific construct stub.
2437 Register jmp_reg = ecx;
2438 __ mov(jmp_reg, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2439 __ mov(jmp_reg, FieldOperand(jmp_reg,
2440 SharedFunctionInfo::kConstructStubOffset));
2441 __ lea(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize));
2442 __ jmp(jmp_reg);
2443
2444 // edi: called object
2445 // eax: number of arguments
2446 // ecx: object map
2447 Label do_call;
2448 __ bind(&slow);
2449 __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
2450 __ j(not_equal, &non_function_call);
2451 __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
2452 __ jmp(&do_call);
2453
2454 __ bind(&non_function_call);
2455 __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
2456 __ bind(&do_call);
2457 // Set expected number of arguments to zero (not changing eax).
2458 __ Move(ebx, Immediate(0));
2459 Handle<Code> arguments_adaptor =
2460 isolate()->builtins()->ArgumentsAdaptorTrampoline();
2461 __ jmp(arguments_adaptor, RelocInfo::CODE_TARGET);
2462 }
2463
2464
2465 static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
2466 __ mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
2467 __ mov(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
2468 __ mov(vector, FieldOperand(vector,
2469 SharedFunctionInfo::kFeedbackVectorOffset));
2470 }
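// Illustration (not part of the stub): callers index the vector with the
// smi slot id in edx, so times_half_pointer_size is the right scale:
//
//   FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize)
//     == ebx + (slot << 1) * 2 + kHeaderSize - kHeapObjectTag
//     == ebx + slot * kPointerSize + kHeaderSize - kHeapObjectTag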
2471
2472
2473 void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
2474 // edi - function
2475 // edx - slot id
2476 Label miss;
2477 int argc = state_.arg_count();
2478 ParameterCount actual(argc);
2479
2480 EmitLoadTypeFeedbackVector(masm, ebx);
2481
2482 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
2483 __ cmp(edi, ecx);
2484 __ j(not_equal, &miss);
2485
2486 __ mov(eax, arg_count());
2487 __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
2488 FixedArray::kHeaderSize));
2489
2490 // Verify that ecx contains an AllocationSite
2491 Factory* factory = masm->isolate()->factory();
2492 __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
2493 factory->allocation_site_map());
2494 __ j(not_equal, &miss);
2495
2496 __ mov(ebx, ecx);
2497 ArrayConstructorStub stub(masm->isolate(), arg_count());
2498 __ TailCallStub(&stub);
2499
2500 __ bind(&miss);
2501 GenerateMiss(masm, IC::kCallIC_Customization_Miss);
2502
2503 // The slow case: we need this no matter what, to complete a call after a miss.
2504 CallFunctionNoFeedback(masm,
2505 arg_count(),
2506 true,
2507 CallAsMethod());
2508
2509 // Unreachable.
2510 __ int3();
2511 }
2512
2513
2514 void CallICStub::Generate(MacroAssembler* masm) {
2515 // edi - function
2516 // edx - slot id
2517 Isolate* isolate = masm->isolate();
2518 Label extra_checks_or_miss, slow_start;
2519 Label slow, non_function, wrap, cont;
2520 Label have_js_function;
2521 int argc = state_.arg_count();
2522 ParameterCount actual(argc);
2523
2524 EmitLoadTypeFeedbackVector(masm, ebx);
2525
2526 // The checks. First, does edi match the recorded monomorphic target?
2527 __ cmp(edi, FieldOperand(ebx, edx, times_half_pointer_size,
2528 FixedArray::kHeaderSize));
2529 __ j(not_equal, &extra_checks_or_miss);
2530
2531 __ bind(&have_js_function);
2532 if (state_.CallAsMethod()) {
2533 EmitContinueIfStrictOrNative(masm, &cont);
2534
2535 // Load the receiver from the stack.
2536 __ mov(eax, Operand(esp, (argc + 1) * kPointerSize));
2537
2538 __ JumpIfSmi(eax, &wrap);
2539
2540 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
2541 __ j(below, &wrap);
2542
2543 __ bind(&cont);
2544 }
2545
2546 __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());
2547
2548 __ bind(&slow);
2549 EmitSlowCase(isolate, masm, argc, &non_function);
2550
2551 if (state_.CallAsMethod()) {
2552 __ bind(&wrap);
2553 EmitWrapCase(masm, argc, &cont);
2554 }
2555
2556 __ bind(&extra_checks_or_miss);
2557 Label miss;
2558
2559 __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
2560 FixedArray::kHeaderSize));
2561 __ cmp(ecx, Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
2562 __ j(equal, &slow_start);
2563 __ cmp(ecx, Immediate(TypeFeedbackInfo::UninitializedSentinel(isolate)));
2564 __ j(equal, &miss);
2565
2566 if (!FLAG_trace_ic) {
2567 // We are going megamorphic. If the feedback is a JSFunction, it is fine
2568 // to handle it here. More complex cases are dealt with in the runtime.
2569 __ AssertNotSmi(ecx);
2570 __ CmpObjectType(ecx, JS_FUNCTION_TYPE, ecx);
2571 __ j(not_equal, &miss);
2572 __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
2573 FixedArray::kHeaderSize),
2574 Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
2575 __ jmp(&slow_start);
2576 }
2577
2578 // We are here because tracing is on or we are going monomorphic.
2579 __ bind(&miss);
2580 GenerateMiss(masm, IC::kCallIC_Miss);
2581
2582 // the slow case
2583 __ bind(&slow_start);
2584
2585 // Check that the function really is a JavaScript function.
2586 __ JumpIfSmi(edi, &non_function);
2587
2588 // Goto slow case if we do not have a function.
2589 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
2590 __ j(not_equal, &slow);
2591 __ jmp(&have_js_function);
2592
2593 // Unreachable
2594 __ int3();
2595 }
2596
2597
2598 void CallICStub::GenerateMiss(MacroAssembler* masm, IC::UtilityId id) {
2599 // Get the receiver of the function from the stack; 1 ~ return address.
2600 __ mov(ecx, Operand(esp, (state_.arg_count() + 1) * kPointerSize));
2601
2602 {
2603 FrameScope scope(masm, StackFrame::INTERNAL);
2604
2605 // Push the receiver and the function and feedback info.
2606 __ push(ecx);
2607 __ push(edi);
2608 __ push(ebx);
2609 __ push(edx);
2610
2611 // Call the entry.
2612 ExternalReference miss = ExternalReference(IC_Utility(id),
2613 masm->isolate());
2614 __ CallExternalReference(miss, 4);
2615
2616 // Move result to edi and exit the internal frame.
2617 __ mov(edi, eax);
2618 }
2619 }
2620
2621
2622 bool CEntryStub::NeedsImmovableCode() {
2623 return false;
2624 }
2625
2626
2627 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
2628 CEntryStub::GenerateAheadOfTime(isolate);
2629 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
2630 StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
2631 // It is important that the store buffer overflow stubs are generated first.
2632 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
2633 CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
2634 BinaryOpICStub::GenerateAheadOfTime(isolate);
2635 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
2636 }
2637
2638
2639 void CodeStub::GenerateFPStubs(Isolate* isolate) {
2640 CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
2641 // Stubs might already be in the snapshot, detect that and don't regenerate,
2642 // which would lead to code stub initialization state being messed up.
2643 Code* save_doubles_code;
2644 if (!save_doubles.FindCodeInCache(&save_doubles_code)) {
2645 save_doubles_code = *(save_doubles.GetCode());
2646 }
2647 isolate->set_fp_stubs_generated(true);
2648 }
2649
2650
2651 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
2652 CEntryStub stub(isolate, 1, kDontSaveFPRegs);
2653 stub.GetCode();
2654 }
2655
2656
2657 void CEntryStub::Generate(MacroAssembler* masm) {
2658 // eax: number of arguments including receiver
2659 // ebx: pointer to C function (C callee-saved)
2660 // ebp: frame pointer (restored after C call)
2661 // esp: stack pointer (restored after C call)
2662 // esi: current context (C callee-saved)
2663 // edi: JS function of the caller (C callee-saved)
2664
2665 ProfileEntryHookStub::MaybeCallEntryHook(masm);
2666
2667 // Enter the exit frame that transitions from JavaScript to C++.
2668 __ EnterExitFrame(save_doubles_ == kSaveFPRegs);
2669
2670 // ebx: pointer to C function (C callee-saved)
2671 // ebp: frame pointer (restored after C call)
2672 // esp: stack pointer (restored after C call)
2673 // edi: number of arguments including receiver (C callee-saved)
2674 // esi: pointer to the first argument (C callee-saved)
2675
2676 // Result returned in eax, or eax+edx if result_size_ is 2.
2677
2678 // Check stack alignment.
2679 if (FLAG_debug_code) {
2680 __ CheckStackAlignment();
2681 }
2682
2683 // Call C function.
2684 __ mov(Operand(esp, 0 * kPointerSize), edi); // argc.
2685 __ mov(Operand(esp, 1 * kPointerSize), esi); // argv.
2686 __ mov(Operand(esp, 2 * kPointerSize),
2687 Immediate(ExternalReference::isolate_address(isolate())));
2688 __ call(ebx);
2689 // Result is in eax or edx:eax - do not destroy these registers!
2690
2691 // Runtime functions should not return 'the hole'. Allowing it to escape may
2692 // lead to crashes in the IC code later.
2693 if (FLAG_debug_code) {
2694 Label okay;
2695 __ cmp(eax, isolate()->factory()->the_hole_value());
2696 __ j(not_equal, &okay, Label::kNear);
2697 __ int3();
2698 __ bind(&okay);
2699 }
2700
2701 // Check result for exception sentinel.
2702 Label exception_returned;
2703 __ cmp(eax, isolate()->factory()->exception());
2704 __ j(equal, &exception_returned);
2705
2706 ExternalReference pending_exception_address(
2707 Isolate::kPendingExceptionAddress, isolate());
2708
2709 // Check that there is no pending exception, otherwise we
2710 // should have returned the exception sentinel.
2711 if (FLAG_debug_code) {
2712 __ push(edx);
2713 __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
2714 Label okay;
2715 __ cmp(edx, Operand::StaticVariable(pending_exception_address));
2716 // Cannot use Check here, as it attempts to generate a call into the runtime.
2717 __ j(equal, &okay, Label::kNear);
2718 __ int3();
2719 __ bind(&okay);
2720 __ pop(edx);
2721 }
2722
2723 // Exit the JavaScript to C++ exit frame.
2724 __ LeaveExitFrame(save_doubles_ == kSaveFPRegs);
2725 __ ret(0);
2726
2727 // Handling of exception.
2728 __ bind(&exception_returned);
2729
2730 // Retrieve the pending exception.
2731 __ mov(eax, Operand::StaticVariable(pending_exception_address));
2732
2733 // Clear the pending exception.
2734 __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
2735 __ mov(Operand::StaticVariable(pending_exception_address), edx);
2736
2737 // Special handling of termination exceptions which are uncatchable
2738 // by javascript code.
2739 Label throw_termination_exception;
2740 __ cmp(eax, isolate()->factory()->termination_exception());
2741 __ j(equal, &throw_termination_exception);
2742
2743 // Handle normal exception.
2744 __ Throw(eax);
2745
2746 __ bind(&throw_termination_exception);
2747 __ ThrowUncatchable(eax);
2748 }
2749
2750
2751 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
2752 Label invoke, handler_entry, exit;
2753 Label not_outermost_js, not_outermost_js_2;
2754
2755 ProfileEntryHookStub::MaybeCallEntryHook(masm);
2756
2757 // Set up frame.
2758 __ push(ebp);
2759 __ mov(ebp, esp);
2760
2761 // Push marker in two places.
2762 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
2763 __ push(Immediate(Smi::FromInt(marker))); // context slot
2764 __ push(Immediate(Smi::FromInt(marker))); // function slot
2765 // Save callee-saved registers (C calling conventions).
2766 __ push(edi);
2767 __ push(esi);
2768 __ push(ebx);
2769
2770 // Save copies of the top frame descriptor on the stack.
2771 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
2772 __ push(Operand::StaticVariable(c_entry_fp));
2773
2774 // If this is the outermost JS call, set js_entry_sp value.
2775 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
2776 __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
2777 __ j(not_equal, &not_outermost_js, Label::kNear);
2778 __ mov(Operand::StaticVariable(js_entry_sp), ebp);
2779 __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
2780 __ jmp(&invoke, Label::kNear);
2781 __ bind(&not_outermost_js);
2782 __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
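// Sketch (not part of the original source) of the entry frame built above,
// offsets relative to the new ebp (stack grows down):
//
//   ebp +  0 : saved ebp
//   ebp -  4 : marker (context slot)
//   ebp -  8 : marker (function slot)
//   ebp - 12 : saved edi
//   ebp - 16 : saved esi
//   ebp - 20 : saved ebx
//   ebp - 24 : saved c_entry_fp
//   ebp - 28 : OUTERMOST_JSENTRY_FRAME or INNER_JSENTRY_FRAME marker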
2783
2784 // Jump to a faked try block that does the invoke, with a faked catch
2785 // block that sets the pending exception.
2786 __ jmp(&invoke);
2787 __ bind(&handler_entry);
2788 handler_offset_ = handler_entry.pos();
2789 // Caught exception: Store result (exception) in the pending exception
2790 // field in the JSEnv and return a failure sentinel.
2791 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
2792 isolate());
2793 __ mov(Operand::StaticVariable(pending_exception), eax);
2794 __ mov(eax, Immediate(isolate()->factory()->exception()));
2795 __ jmp(&exit);
2796
2797 // Invoke: Link this frame into the handler chain. There's only one
2798 // handler block in this code object, so its index is 0.
2799 __ bind(&invoke);
2800 __ PushTryHandler(StackHandler::JS_ENTRY, 0);
2801
2802 // Clear any pending exceptions.
2803 __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
2804 __ mov(Operand::StaticVariable(pending_exception), edx);
2805
2806 // Fake a receiver (NULL).
2807 __ push(Immediate(0)); // receiver
2808
2809 // Invoke the function by calling through JS entry trampoline builtin and
2810 // pop the faked function when we return. Notice that we cannot store a
2811 // reference to the trampoline code directly in this stub, because the
2812 // builtin stubs may not have been generated yet.
2813 if (is_construct) {
2814 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
2815 isolate());
2816 __ mov(edx, Immediate(construct_entry));
2817 } else {
2818 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
2819 __ mov(edx, Immediate(entry));
2820 }
2821 __ mov(edx, Operand(edx, 0)); // deref address
2822 __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
2823 __ call(edx);
2824
2825 // Unlink this frame from the handler chain.
2826 __ PopTryHandler();
2827
2828 __ bind(&exit);
2829 // Check if the current stack frame is marked as the outermost JS frame.
2830 __ pop(ebx);
2831 __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
2832 __ j(not_equal, &not_outermost_js_2);
2833 __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
2834 __ bind(&not_outermost_js_2);
2835
2836 // Restore the top frame descriptor from the stack.
2837 __ pop(Operand::StaticVariable(ExternalReference(
2838 Isolate::kCEntryFPAddress, isolate())));
2839
2840 // Restore callee-saved registers (C calling conventions).
2841 __ pop(ebx);
2842 __ pop(esi);
2843 __ pop(edi);
2844 __ add(esp, Immediate(2 * kPointerSize)); // remove markers
2845
2846 // Restore frame pointer and return.
2847 __ pop(ebp);
2848 __ ret(0);
2849 }
2850
2851
2852 // Generate stub code for instanceof.
2853 // This code can patch a call site inline cache of the instanceof check,
2854 // which looks like this.
2855 //
2856 // 81 ff XX XX XX XX cmp edi, <the hole, patched to a map>
2857 // 75 0a jne <some near label>
2858 // b8 XX XX XX XX mov eax, <the hole, patched to either true or false>
2859 //
2860 // If call site patching is requested the stack will have the delta from the
2861 // return address to the cmp instruction just below the return address. This
2862 // also means that call site patching can only take place with arguments in
2863 // registers. TOS looks like this when call site patching is requested
2864 //
2865 // esp[0] : return address
2866 // esp[4] : delta from return address to cmp instruction
2867 //
2868 void InstanceofStub::Generate(MacroAssembler* masm) {
2869 // Call site inlining and patching implies arguments in registers.
2870 ASSERT(HasArgsInRegisters() || !HasCallSiteInlineCheck());
2871
2872 // Fixed register usage throughout the stub.
2873 Register object = eax; // Object (lhs).
2874 Register map = ebx; // Map of the object.
2875 Register function = edx; // Function (rhs).
2876 Register prototype = edi; // Prototype of the function.
2877 Register scratch = ecx;
2878
2879 // Constants describing the call site code to patch.
2880 static const int kDeltaToCmpImmediate = 2;
2881 static const int kDeltaToMov = 8;
2882 static const int kDeltaToMovImmediate = 9;
2883 static const int8_t kCmpEdiOperandByte1 = BitCast<int8_t, uint8_t>(0x3b);
2884 static const int8_t kCmpEdiOperandByte2 = BitCast<int8_t, uint8_t>(0x3d);
2885 static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);
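  // Measured from the start of the cmp instruction, these deltas map onto
  // the patched sequence shown above: bytes 0-1 are the cmp opcode (checked
  // against kCmpEdiOperandByte1/2), byte 2 starts its 32-bit operand
  // (kDeltaToCmpImmediate), byte 8 is the mov eax opcode (kDeltaToMov) and
  // byte 9 starts its 32-bit immediate (kDeltaToMovImmediate).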
2886
2887 ASSERT_EQ(object.code(), InstanceofStub::left().code());
2888 ASSERT_EQ(function.code(), InstanceofStub::right().code());
2889
2890 // Get the object and function - they are always both needed.
2891 Label slow, not_js_object;
2892 if (!HasArgsInRegisters()) {
2893 __ mov(object, Operand(esp, 2 * kPointerSize));
2894 __ mov(function, Operand(esp, 1 * kPointerSize));
2895 }
2896
2897   // Check that the left hand side is a JS object.
2898   __ JumpIfSmi(object, &not_js_object);
2899   __ IsObjectJSObjectType(object, map, scratch, &not_js_object);
2900
2901   // If there is a call site cache, don't look in the global cache, but do
2902   // the real lookup and update the call site cache.
2903 if (!HasCallSiteInlineCheck()) {
2904 // Look up the function and the map in the instanceof cache.
2905 Label miss;
2906 __ CompareRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
2907 __ j(not_equal, &miss, Label::kNear);
2908 __ CompareRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex);
2909 __ j(not_equal, &miss, Label::kNear);
2910 __ LoadRoot(eax, Heap::kInstanceofCacheAnswerRootIndex);
2911 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2912 __ bind(&miss);
2913 }
2914
2915 // Get the prototype of the function.
2916 __ TryGetFunctionPrototype(function, prototype, scratch, &slow, true);
2917
2918 // Check that the function prototype is a JS object.
2919 __ JumpIfSmi(prototype, &slow);
2920 __ IsObjectJSObjectType(prototype, scratch, scratch, &slow);
2921
2922 // Update the global instanceof or call site inlined cache with the current
2923 // map and function. The cached answer will be set when it is known below.
2924 if (!HasCallSiteInlineCheck()) {
2925 __ StoreRoot(map, scratch, Heap::kInstanceofCacheMapRootIndex);
2926 __ StoreRoot(function, scratch, Heap::kInstanceofCacheFunctionRootIndex);
2927 } else {
2928 // The constants for the code patching are based on no push instructions
2929 // at the call site.
2930 ASSERT(HasArgsInRegisters());
2931 // Get return address and delta to inlined map check.
2932 __ mov(scratch, Operand(esp, 0 * kPointerSize));
2933 __ sub(scratch, Operand(esp, 1 * kPointerSize));
2934 if (FLAG_debug_code) {
2935 __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1);
2936 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp1);
2937 __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2);
2938 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCmp2);
2939 }
2940 __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
2941 __ mov(Operand(scratch, 0), map);
2942 }
2943
2944 // Loop through the prototype chain of the object looking for the function
2945 // prototype.
2946 __ mov(scratch, FieldOperand(map, Map::kPrototypeOffset));
2947 Label loop, is_instance, is_not_instance;
2948 __ bind(&loop);
2949 __ cmp(scratch, prototype);
2950 __ j(equal, &is_instance, Label::kNear);
2951 Factory* factory = isolate()->factory();
2952 __ cmp(scratch, Immediate(factory->null_value()));
2953 __ j(equal, &is_not_instance, Label::kNear);
2954 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
2955 __ mov(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
2956 __ jmp(&loop);
2957
2958 __ bind(&is_instance);
2959 if (!HasCallSiteInlineCheck()) {
2960 __ mov(eax, Immediate(0));
2961 __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
2962 } else {
2963 // Get return address and delta to inlined map check.
2964 __ mov(eax, factory->true_value());
2965 __ mov(scratch, Operand(esp, 0 * kPointerSize));
2966 __ sub(scratch, Operand(esp, 1 * kPointerSize));
2967 if (FLAG_debug_code) {
2968 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
2969 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
2970 }
2971 __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
2972 if (!ReturnTrueFalseObject()) {
2973 __ Move(eax, Immediate(0));
2974 }
2975 }
2976 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2977
2978 __ bind(&is_not_instance);
2979 if (!HasCallSiteInlineCheck()) {
2980 __ mov(eax, Immediate(Smi::FromInt(1)));
2981 __ StoreRoot(eax, scratch, Heap::kInstanceofCacheAnswerRootIndex);
2982 } else {
2983 // Get return address and delta to inlined map check.
2984 __ mov(eax, factory->false_value());
2985 __ mov(scratch, Operand(esp, 0 * kPointerSize));
2986 __ sub(scratch, Operand(esp, 1 * kPointerSize));
2987 if (FLAG_debug_code) {
2988 __ cmpb(Operand(scratch, kDeltaToMov), kMovEaxImmediateByte);
2989 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
2990 }
2991 __ mov(Operand(scratch, kDeltaToMovImmediate), eax);
2992 if (!ReturnTrueFalseObject()) {
2993 __ Move(eax, Immediate(Smi::FromInt(1)));
2994 }
2995 }
2996 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2997
2998 Label object_not_null, object_not_null_or_smi;
2999   __ bind(&not_js_object);
3000   // Before the null, smi and string value checks, check that the rhs is a
3001   // function; for a non-function rhs an exception needs to be thrown.
3002 __ JumpIfSmi(function, &slow, Label::kNear);
3003 __ CmpObjectType(function, JS_FUNCTION_TYPE, scratch);
3004 __ j(not_equal, &slow, Label::kNear);
3005
3006   // Null is not an instance of anything.
3007 __ cmp(object, factory->null_value());
3008 __ j(not_equal, &object_not_null, Label::kNear);
3009 __ Move(eax, Immediate(Smi::FromInt(1)));
3010 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
3011
3012 __ bind(&object_not_null);
3013   // Smi values are not instances of anything.
3014 __ JumpIfNotSmi(object, &object_not_null_or_smi, Label::kNear);
3015 __ Move(eax, Immediate(Smi::FromInt(1)));
3016 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
3017
3018 __ bind(&object_not_null_or_smi);
3019   // String values are not instances of anything.
3020 Condition is_string = masm->IsObjectStringType(object, scratch, scratch);
3021 __ j(NegateCondition(is_string), &slow, Label::kNear);
3022 __ Move(eax, Immediate(Smi::FromInt(1)));
3023 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
3024
3025 // Slow-case: Go through the JavaScript implementation.
3026 __ bind(&slow);
3027 if (!ReturnTrueFalseObject()) {
3028 // Tail call the builtin which returns 0 or 1.
3029 if (HasArgsInRegisters()) {
3030 // Push arguments below return address.
3031 __ pop(scratch);
3032 __ push(object);
3033 __ push(function);
3034 __ push(scratch);
3035 }
3036 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
3037 } else {
3038 // Call the builtin and convert 0/1 to true/false.
3039 {
3040 FrameScope scope(masm, StackFrame::INTERNAL);
3041 __ push(object);
3042 __ push(function);
3043 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
3044 }
3045 Label true_value, done;
3046 __ test(eax, eax);
3047 __ j(zero, &true_value, Label::kNear);
3048 __ mov(eax, factory->false_value());
3049 __ jmp(&done, Label::kNear);
3050 __ bind(&true_value);
3051 __ mov(eax, factory->true_value());
3052 __ bind(&done);
3053 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
3054 }
3055 }
3056
3057
3058 Register InstanceofStub::left() { return eax; }
3059
3060
3061 Register InstanceofStub::right() { return edx; }
3062
3063
3064 // -------------------------------------------------------------------------
3065 // StringCharCodeAtGenerator
3066
3067 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
3068 // If the receiver is a smi trigger the non-string case.
3069 STATIC_ASSERT(kSmiTag == 0);
3070 __ JumpIfSmi(object_, receiver_not_string_);
3071
3072 // Fetch the instance type of the receiver into result register.
3073 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
3074 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
3075 // If the receiver is not a string trigger the non-string case.
3076 __ test(result_, Immediate(kIsNotStringMask));
3077 __ j(not_zero, receiver_not_string_);
3078
3079 // If the index is non-smi trigger the non-smi case.
3080 STATIC_ASSERT(kSmiTag == 0);
3081 __ JumpIfNotSmi(index_, &index_not_smi_);
3082 __ bind(&got_smi_index_);
3083
3084 // Check for index out of range.
3085 __ cmp(index_, FieldOperand(object_, String::kLengthOffset));
3086 __ j(above_equal, index_out_of_range_);
3087
3088 __ SmiUntag(index_);
3089
3090 Factory* factory = masm->isolate()->factory();
3091 StringCharLoadGenerator::Generate(
3092 masm, factory, object_, index_, result_, &call_runtime_);
3093
3094 __ SmiTag(result_);
3095 __ bind(&exit_);
3096 }
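
// At the JavaScript level this fast path backs expressions such as
// "abc".charCodeAt(1); the slow cases below handle non-smi indices and
// strings whose characters cannot be loaded directly (e.g. unflattened
// cons strings).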
3097
3098
3099 void StringCharCodeAtGenerator::GenerateSlow(
3100 MacroAssembler* masm,
3101 const RuntimeCallHelper& call_helper) {
3102 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
3103
3104 // Index is not a smi.
3105 __ bind(&index_not_smi_);
3106 // If index is a heap number, try converting it to an integer.
3107 __ CheckMap(index_,
3108 masm->isolate()->factory()->heap_number_map(),
3109 index_not_number_,
3110 DONT_DO_SMI_CHECK);
3111 call_helper.BeforeCall(masm);
3112 __ push(object_);
3113 __ push(index_); // Consumed by runtime conversion function.
3114 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
3115 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
3116 } else {
3117 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
3118 // NumberToSmi discards numbers that are not exact integers.
3119 __ CallRuntime(Runtime::kHiddenNumberToSmi, 1);
3120 }
3121 if (!index_.is(eax)) {
3122 // Save the conversion result before the pop instructions below
3123 // have a chance to overwrite it.
3124 __ mov(index_, eax);
3125 }
3126 __ pop(object_);
3127 // Reload the instance type.
3128 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
3129 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
3130 call_helper.AfterCall(masm);
3131 // If index is still not a smi, it must be out of range.
3132 STATIC_ASSERT(kSmiTag == 0);
3133 __ JumpIfNotSmi(index_, index_out_of_range_);
3134 // Otherwise, return to the fast path.
3135 __ jmp(&got_smi_index_);
3136
3137   // Call runtime. We get here when the receiver is a string and the
3138   // index is a number, but the code for getting the actual character
3139   // is too complex (e.g., when the string needs to be flattened).
3140 __ bind(&call_runtime_);
3141 call_helper.BeforeCall(masm);
3142 __ push(object_);
3143 __ SmiTag(index_);
3144 __ push(index_);
3145 __ CallRuntime(Runtime::kHiddenStringCharCodeAt, 2);
3146 if (!result_.is(eax)) {
3147 __ mov(result_, eax);
3148 }
3149 call_helper.AfterCall(masm);
3150 __ jmp(&exit_);
3151
3152 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
3153 }
3154
3155
3156 // -------------------------------------------------------------------------
3157 // StringCharFromCodeGenerator
3158
3159 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
3160 // Fast case of Heap::LookupSingleCharacterStringFromCode.
3161 STATIC_ASSERT(kSmiTag == 0);
3162 STATIC_ASSERT(kSmiShiftSize == 0);
3163 ASSERT(IsPowerOf2(String::kMaxOneByteCharCode + 1));
3164 __ test(code_,
3165 Immediate(kSmiTagMask |
3166 ((~String::kMaxOneByteCharCode) << kSmiTagSize)));
3167 __ j(not_zero, &slow_case_);
3168
3169 Factory* factory = masm->isolate()->factory();
3170 __ Move(result_, Immediate(factory->single_character_string_cache()));
3171 STATIC_ASSERT(kSmiTag == 0);
3172 STATIC_ASSERT(kSmiTagSize == 1);
3173 STATIC_ASSERT(kSmiShiftSize == 0);
3174   // At this point the code register contains a smi-tagged ASCII char code.
3175 __ mov(result_, FieldOperand(result_,
3176 code_, times_half_pointer_size,
3177 FixedArray::kHeaderSize));
3178 __ cmp(result_, factory->undefined_value());
3179 __ j(equal, &slow_case_);
3180 __ bind(&exit_);
3181 }
3182
3183
3184 void StringCharFromCodeGenerator::GenerateSlow(
3185 MacroAssembler* masm,
3186 const RuntimeCallHelper& call_helper) {
3187 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
3188
3189 __ bind(&slow_case_);
3190 call_helper.BeforeCall(masm);
3191 __ push(code_);
3192 __ CallRuntime(Runtime::kCharFromCode, 1);
3193 if (!result_.is(eax)) {
3194 __ mov(result_, eax);
3195 }
3196 call_helper.AfterCall(masm);
3197 __ jmp(&exit_);
3198
3199 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
3200 }
3201
3202
3203 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
3204 Register dest,
3205 Register src,
3206 Register count,
3207 Register scratch,
3208 String::Encoding encoding) {
3209 ASSERT(!scratch.is(dest));
3210 ASSERT(!scratch.is(src));
3211 ASSERT(!scratch.is(count));
3212
3213 // Nothing to do for zero characters.
3214 Label done;
3215 __ test(count, count);
3216 __ j(zero, &done);
3217
3218 // Make count the number of bytes to copy.
3219 if (encoding == String::TWO_BYTE_ENCODING) {
3220 __ shl(count, 1);
3221 }
3222
3223 Label loop;
3224 __ bind(&loop);
3225 __ mov_b(scratch, Operand(src, 0));
3226 __ mov_b(Operand(dest, 0), scratch);
3227 __ inc(src);
3228 __ inc(dest);
3229 __ dec(count);
3230 __ j(not_zero, &loop);
3231
3232 __ bind(&done);
3233 }
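
// A plain C++ sketch of the loop above (illustrative; the stub copies raw
// string payload bytes):
//
//   void CopyBytes(uint8_t* dest, const uint8_t* src, size_t count) {
//     while (count-- != 0) *dest++ = *src++;  // one byte per iteration
//   }
//
// For TWO_BYTE_ENCODING the character count is doubled first (shl count, 1),
// so the same byte-wise loop serves both encodings.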
3234
3235
3236 void StringHelper::GenerateHashInit(MacroAssembler* masm,
3237 Register hash,
3238 Register character,
3239 Register scratch) {
3240 // hash = (seed + character) + ((seed + character) << 10);
3241 if (masm->serializer_enabled()) {
3242 __ LoadRoot(scratch, Heap::kHashSeedRootIndex);
3243 __ SmiUntag(scratch);
3244 __ add(scratch, character);
3245 __ mov(hash, scratch);
3246 __ shl(scratch, 10);
3247 __ add(hash, scratch);
3248 } else {
3249 int32_t seed = masm->isolate()->heap()->HashSeed();
3250 __ lea(scratch, Operand(character, seed));
3251 __ shl(scratch, 10);
3252 __ lea(hash, Operand(scratch, character, times_1, seed));
3253 }
3254 // hash ^= hash >> 6;
3255 __ mov(scratch, hash);
3256 __ shr(scratch, 6);
3257 __ xor_(hash, scratch);
3258 }
3259
3260
3261 void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
3262 Register hash,
3263 Register character,
3264 Register scratch) {
3265 // hash += character;
3266 __ add(hash, character);
3267 // hash += hash << 10;
3268 __ mov(scratch, hash);
3269 __ shl(scratch, 10);
3270 __ add(hash, scratch);
3271 // hash ^= hash >> 6;
3272 __ mov(scratch, hash);
3273 __ shr(scratch, 6);
3274 __ xor_(hash, scratch);
3275 }
3276
3277
3278 void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
3279 Register hash,
3280 Register scratch) {
3281 // hash += hash << 3;
3282 __ mov(scratch, hash);
3283 __ shl(scratch, 3);
3284 __ add(hash, scratch);
3285 // hash ^= hash >> 11;
3286 __ mov(scratch, hash);
3287 __ shr(scratch, 11);
3288 __ xor_(hash, scratch);
3289 // hash += hash << 15;
3290 __ mov(scratch, hash);
3291 __ shl(scratch, 15);
3292 __ add(hash, scratch);
3293
3294 __ and_(hash, String::kHashBitMask);
3295
3296 // if (hash == 0) hash = 27;
3297 Label hash_not_zero;
3298 __ j(not_zero, &hash_not_zero, Label::kNear);
3299 __ mov(hash, Immediate(StringHasher::kZeroHash));
3300 __ bind(&hash_not_zero);
3301 }
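
// Taken together, the three hash helpers above compute the string hash. In
// plain C++ the pipeline is roughly (a sketch; the runtime's StringHasher is
// the authoritative implementation):
//
//   uint32_t hash = seed + c0;
//   hash += hash << 10;
//   hash ^= hash >> 6;
//   // ... then per additional character c:
//   //   hash += c; hash += hash << 10; hash ^= hash >> 6;
//   hash += hash << 3;
//   hash ^= hash >> 11;
//   hash += hash << 15;
//   hash &= String::kHashBitMask;
//   if (hash == 0) hash = StringHasher::kZeroHash;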
3302
3303
3304 void SubStringStub::Generate(MacroAssembler* masm) {
3305 Label runtime;
3306
3307 // Stack frame on entry.
3308 // esp[0]: return address
3309 // esp[4]: to
3310 // esp[8]: from
3311 // esp[12]: string
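  // For example, "hello".substring(1, 3) arrives here with to = 3, from = 1
  // and string = "hello"; ecx below becomes the smi-tagged length 2.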
3312
3313 // Make sure first argument is a string.
3314 __ mov(eax, Operand(esp, 3 * kPointerSize));
3315 STATIC_ASSERT(kSmiTag == 0);
3316 __ JumpIfSmi(eax, &runtime);
3317 Condition is_string = masm->IsObjectStringType(eax, ebx, ebx);
3318 __ j(NegateCondition(is_string), &runtime);
3319
3320 // eax: string
3321 // ebx: instance type
3322
3323 // Calculate length of sub string using the smi values.
3324 __ mov(ecx, Operand(esp, 1 * kPointerSize)); // To index.
3325 __ JumpIfNotSmi(ecx, &runtime);
3326 __ mov(edx, Operand(esp, 2 * kPointerSize)); // From index.
3327 __ JumpIfNotSmi(edx, &runtime);
3328 __ sub(ecx, edx);
3329 __ cmp(ecx, FieldOperand(eax, String::kLengthOffset));
3330 Label not_original_string;
3331 // Shorter than original string's length: an actual substring.
3332   __ j(below, &not_original_string, Label::kNear);
3333 // Longer than original string's length or negative: unsafe arguments.
3334 __ j(above, &runtime);
3335 // Return original string.
3336 Counters* counters = isolate()->counters();
3337 __ IncrementCounter(counters->sub_string_native(), 1);
3338 __ ret(3 * kPointerSize);
3339   __ bind(&not_original_string);
3340
3341 Label single_char;
3342 __ cmp(ecx, Immediate(Smi::FromInt(1)));
3343 __ j(equal, &single_char);
3344
3345 // eax: string
3346 // ebx: instance type
3347 // ecx: sub string length (smi)
3348 // edx: from index (smi)
3349 // Deal with different string types: update the index if necessary
3350 // and put the underlying string into edi.
3351 Label underlying_unpacked, sliced_string, seq_or_external_string;
3352 // If the string is not indirect, it can only be sequential or external.
3353 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
3354 STATIC_ASSERT(kIsIndirectStringMask != 0);
3355 __ test(ebx, Immediate(kIsIndirectStringMask));
3356 __ j(zero, &seq_or_external_string, Label::kNear);
3357
3358 Factory* factory = isolate()->factory();
3359 __ test(ebx, Immediate(kSlicedNotConsMask));
3360 __ j(not_zero, &sliced_string, Label::kNear);
3361 // Cons string. Check whether it is flat, then fetch first part.
3362 // Flat cons strings have an empty second part.
3363 __ cmp(FieldOperand(eax, ConsString::kSecondOffset),
3364 factory->empty_string());
3365 __ j(not_equal, &runtime);
3366 __ mov(edi, FieldOperand(eax, ConsString::kFirstOffset));
3367 // Update instance type.
3368 __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
3369 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
3370 __ jmp(&underlying_unpacked, Label::kNear);
3371
3372 __ bind(&sliced_string);
3373 // Sliced string. Fetch parent and adjust start index by offset.
3374 __ add(edx, FieldOperand(eax, SlicedString::kOffsetOffset));
3375 __ mov(edi, FieldOperand(eax, SlicedString::kParentOffset));
3376 // Update instance type.
3377 __ mov(ebx, FieldOperand(edi, HeapObject::kMapOffset));
3378 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
3379 __ jmp(&underlying_unpacked, Label::kNear);
3380
3381 __ bind(&seq_or_external_string);
3382 // Sequential or external string. Just move string to the expected register.
3383 __ mov(edi, eax);
3384
3385 __ bind(&underlying_unpacked);
3386
3387 if (FLAG_string_slices) {
3388 Label copy_routine;
3389 // edi: underlying subject string
3390 // ebx: instance type of underlying subject string
3391 // edx: adjusted start index (smi)
3392 // ecx: length (smi)
3393 __ cmp(ecx, Immediate(Smi::FromInt(SlicedString::kMinLength)));
3394 // Short slice. Copy instead of slicing.
3395     __ j(less, &copy_routine);
3396 // Allocate new sliced string. At this point we do not reload the instance
3397 // type including the string encoding because we simply rely on the info
3398 // provided by the original string. It does not matter if the original
3399 // string's encoding is wrong because we always have to recheck encoding of
3400     // the newly created string's parent anyway due to externalized strings.
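    // Conceptually the sliced string allocated here is just a
    // (parent, offset, length) view of the original, roughly:
    //   struct SlicedString { String* parent; int offset; int length; };
    // so no characters are copied for substrings of kMinLength or longer.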
3401 Label two_byte_slice, set_slice_header;
3402 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
3403 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
3404 __ test(ebx, Immediate(kStringEncodingMask));
3405 __ j(zero, &two_byte_slice, Label::kNear);
3406 __ AllocateAsciiSlicedString(eax, ebx, no_reg, &runtime);
3407 __ jmp(&set_slice_header, Label::kNear);
3408 __ bind(&two_byte_slice);
3409 __ AllocateTwoByteSlicedString(eax, ebx, no_reg, &runtime);
3410 __ bind(&set_slice_header);
3411 __ mov(FieldOperand(eax, SlicedString::kLengthOffset), ecx);
3412 __ mov(FieldOperand(eax, SlicedString::kHashFieldOffset),
3413 Immediate(String::kEmptyHashField));
3414 __ mov(FieldOperand(eax, SlicedString::kParentOffset), edi);
3415 __ mov(FieldOperand(eax, SlicedString::kOffsetOffset), edx);
3416 __ IncrementCounter(counters->sub_string_native(), 1);
3417 __ ret(3 * kPointerSize);
3418
3419     __ bind(&copy_routine);
3420 }
3421
3422 // edi: underlying subject string
3423 // ebx: instance type of underlying subject string
3424 // edx: adjusted start index (smi)
3425 // ecx: length (smi)
3426   // The subject string can only be an external or sequential string of
3427   // either encoding at this point.
3428 Label two_byte_sequential, runtime_drop_two, sequential_string;
3429 STATIC_ASSERT(kExternalStringTag != 0);
3430 STATIC_ASSERT(kSeqStringTag == 0);
3431 __ test_b(ebx, kExternalStringTag);
3432 __ j(zero, &sequential_string);
3433
3434 // Handle external string.
3435 // Rule out short external strings.
3436 STATIC_ASSERT(kShortExternalStringTag != 0);
3437 __ test_b(ebx, kShortExternalStringMask);
3438 __ j(not_zero, &runtime);
3439 __ mov(edi, FieldOperand(edi, ExternalString::kResourceDataOffset));
3440 // Move the pointer so that offset-wise, it looks like a sequential string.
3441 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
3442 __ sub(edi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3443
3444 __ bind(&sequential_string);
3445 // Stash away (adjusted) index and (underlying) string.
3446 __ push(edx);
3447 __ push(edi);
3448 __ SmiUntag(ecx);
3449 STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
3450 __ test_b(ebx, kStringEncodingMask);
3451 __ j(zero, &two_byte_sequential);
3452
3453 // Sequential ASCII string. Allocate the result.
3454 __ AllocateAsciiString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
3455
3456 // eax: result string
3457 // ecx: result string length
3458 // Locate first character of result.
3459 __ mov(edi, eax);
3460 __ add(edi, Immediate(SeqOneByteString::kHeaderSize - kHeapObjectTag));
3461 // Load string argument and locate character of sub string start.
3462 __ pop(edx);
3463 __ pop(ebx);
3464 __ SmiUntag(ebx);
3465 __ lea(edx, FieldOperand(edx, ebx, times_1, SeqOneByteString::kHeaderSize));
3466
3467 // eax: result string
3468 // ecx: result length
3469 // edi: first character of result
3470 // edx: character of sub string start
3471 StringHelper::GenerateCopyCharacters(
3472 masm, edi, edx, ecx, ebx, String::ONE_BYTE_ENCODING);
3473 __ IncrementCounter(counters->sub_string_native(), 1);
3474 __ ret(3 * kPointerSize);
3475
3476 __ bind(&two_byte_sequential);
3477 // Sequential two-byte string. Allocate the result.
3478 __ AllocateTwoByteString(eax, ecx, ebx, edx, edi, &runtime_drop_two);
3479
3480 // eax: result string
3481 // ecx: result string length
3482 // Locate first character of result.
3483 __ mov(edi, eax);
3484 __ add(edi,
3485 Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3486 // Load string argument and locate character of sub string start.
3487 __ pop(edx);
3488 __ pop(ebx);
3489   // As the from index is a smi it is already twice the untagged value,
3490   // which matches the size of a two-byte character.
3491 STATIC_ASSERT(kSmiTag == 0);
3492 STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
3493 __ lea(edx, FieldOperand(edx, ebx, times_1, SeqTwoByteString::kHeaderSize));
3494
3495 // eax: result string
3496 // ecx: result length
3497 // edi: first character of result
3498 // edx: character of sub string start
3499 StringHelper::GenerateCopyCharacters(
3500 masm, edi, edx, ecx, ebx, String::TWO_BYTE_ENCODING);
3501 __ IncrementCounter(counters->sub_string_native(), 1);
3502 __ ret(3 * kPointerSize);
3503
3504 // Drop pushed values on the stack before tail call.
3505 __ bind(&runtime_drop_two);
3506 __ Drop(2);
3507
3508 // Just jump to runtime to create the sub string.
3509 __ bind(&runtime);
3510 __ TailCallRuntime(Runtime::kHiddenSubString, 3, 1);
3511
3512 __ bind(&single_char);
3513 // eax: string
3514 // ebx: instance type
3515 // ecx: sub string length (smi)
3516 // edx: from index (smi)
3517 StringCharAtGenerator generator(
3518 eax, edx, ecx, eax, &runtime, &runtime, &runtime, STRING_INDEX_IS_NUMBER);
3519 generator.GenerateFast(masm);
3520 __ ret(3 * kPointerSize);
3521 generator.SkipSlow(masm, &runtime);
3522 }
3523
3524
3525 void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
3526 Register left,
3527 Register right,
3528 Register scratch1,
3529 Register scratch2) {
3530 Register length = scratch1;
3531
3532 // Compare lengths.
3533 Label strings_not_equal, check_zero_length;
3534 __ mov(length, FieldOperand(left, String::kLengthOffset));
3535 __ cmp(length, FieldOperand(right, String::kLengthOffset));
3536 __ j(equal, &check_zero_length, Label::kNear);
3537 __ bind(&strings_not_equal);
3538 __ Move(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
3539 __ ret(0);
3540
3541 // Check if the length is zero.
3542 Label compare_chars;
3543 __ bind(&check_zero_length);
3544 STATIC_ASSERT(kSmiTag == 0);
3545 __ test(length, length);
3546 __ j(not_zero, &compare_chars, Label::kNear);
3547 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3548 __ ret(0);
3549
3550 // Compare characters.
3551 __ bind(&compare_chars);
3552 GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2,
3553 &strings_not_equal, Label::kNear);
3554
3555 // Characters are equal.
3556 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3557 __ ret(0);
3558 }
3559
3560
3561 void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
3562 Register left,
3563 Register right,
3564 Register scratch1,
3565 Register scratch2,
3566 Register scratch3) {
3567 Counters* counters = masm->isolate()->counters();
3568 __ IncrementCounter(counters->string_compare_native(), 1);
3569
3570 // Find minimum length.
3571 Label left_shorter;
3572 __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
3573 __ mov(scratch3, scratch1);
3574 __ sub(scratch3, FieldOperand(right, String::kLengthOffset));
3575
3576 Register length_delta = scratch3;
3577
3578 __ j(less_equal, &left_shorter, Label::kNear);
3579 // Right string is shorter. Change scratch1 to be length of right string.
3580 __ sub(scratch1, length_delta);
3581 __ bind(&left_shorter);
3582
3583 Register min_length = scratch1;
3584
3585 // If either length is zero, just compare lengths.
3586 Label compare_lengths;
3587 __ test(min_length, min_length);
3588 __ j(zero, &compare_lengths, Label::kNear);
3589
3590 // Compare characters.
3591 Label result_not_equal;
3592 GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2,
3593 &result_not_equal, Label::kNear);
3594
3595 // Compare lengths - strings up to min-length are equal.
3596 __ bind(&compare_lengths);
3597 __ test(length_delta, length_delta);
3598 Label length_not_equal;
3599 __ j(not_zero, &length_not_equal, Label::kNear);
3600
3601 // Result is EQUAL.
3602 STATIC_ASSERT(EQUAL == 0);
3603 STATIC_ASSERT(kSmiTag == 0);
3604 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3605 __ ret(0);
3606
3607 Label result_greater;
3608 Label result_less;
3609 __ bind(&length_not_equal);
3610 __ j(greater, &result_greater, Label::kNear);
3611 __ jmp(&result_less, Label::kNear);
3612 __ bind(&result_not_equal);
3613 __ j(above, &result_greater, Label::kNear);
3614 __ bind(&result_less);
3615
3616 // Result is LESS.
3617 __ Move(eax, Immediate(Smi::FromInt(LESS)));
3618 __ ret(0);
3619
3620 // Result is GREATER.
3621 __ bind(&result_greater);
3622 __ Move(eax, Immediate(Smi::FromInt(GREATER)));
3623 __ ret(0);
3624 }
3625
3626
3627 void StringCompareStub::GenerateAsciiCharsCompareLoop(
3628 MacroAssembler* masm,
3629 Register left,
3630 Register right,
3631 Register length,
3632 Register scratch,
3633 Label* chars_not_equal,
3634 Label::Distance chars_not_equal_near) {
3635   // Change index to run from -length to -1 by adding length to the string
3636   // start. This means that the loop ends when the index reaches zero, which
3637   // doesn't need an additional compare.
3638 __ SmiUntag(length);
3639 __ lea(left,
3640 FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
3641 __ lea(right,
3642 FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
3643 __ neg(length);
3644 Register index = length; // index = -length;
3645
3646 // Compare loop.
3647 Label loop;
3648 __ bind(&loop);
3649 __ mov_b(scratch, Operand(left, index, times_1, 0));
3650 __ cmpb(scratch, Operand(right, index, times_1, 0));
3651 __ j(not_equal, chars_not_equal, chars_not_equal_near);
3652 __ inc(index);
3653 __ j(not_zero, &loop);
3654 }
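
// A C++ sketch of the negative-index trick above (names illustrative):
//
//   const uint8_t* left_end = left_chars + length;    // one past last char
//   const uint8_t* right_end = right_chars + length;
//   for (intptr_t i = -static_cast<intptr_t>(length); i != 0; i++) {
//     if (left_end[i] != right_end[i]) goto chars_not_equal;
//   }
//
// Incrementing the index doubles as the loop test, so no extra compare
// against the length is needed.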
3655
3656
3657 void StringCompareStub::Generate(MacroAssembler* masm) {
3658 Label runtime;
3659
3660 // Stack frame on entry.
3661 // esp[0]: return address
3662 // esp[4]: right string
3663 // esp[8]: left string
3664
3665 __ mov(edx, Operand(esp, 2 * kPointerSize)); // left
3666 __ mov(eax, Operand(esp, 1 * kPointerSize)); // right
3667
3668 Label not_same;
3669 __ cmp(edx, eax);
3670   __ j(not_equal, &not_same, Label::kNear);
3671 STATIC_ASSERT(EQUAL == 0);
3672 STATIC_ASSERT(kSmiTag == 0);
3673 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3674 __ IncrementCounter(isolate()->counters()->string_compare_native(), 1);
3675 __ ret(2 * kPointerSize);
3676
3677   __ bind(&not_same);
3678
3679 // Check that both objects are sequential ASCII strings.
3680 __ JumpIfNotBothSequentialAsciiStrings(edx, eax, ecx, ebx, &runtime);
3681
3682 // Compare flat ASCII strings.
3683 // Drop arguments from the stack.
3684 __ pop(ecx);
3685 __ add(esp, Immediate(2 * kPointerSize));
3686 __ push(ecx);
3687 GenerateCompareFlatAsciiStrings(masm, edx, eax, ecx, ebx, edi);
3688
3689 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
3690 // tagged as a small integer.
3691 __ bind(&runtime);
3692 __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1);
3693 }
3694
3695
3696 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
3697 // ----------- S t a t e -------------
3698 // -- edx : left
3699 // -- eax : right
3700 // -- esp[0] : return address
3701 // -----------------------------------
3702
3703 // Load ecx with the allocation site. We stick an undefined dummy value here
3704 // and replace it with the real allocation site later when we instantiate this
3705 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
3706 __ mov(ecx, handle(isolate()->heap()->undefined_value()));
3707
3708 // Make sure that we actually patched the allocation site.
3709 if (FLAG_debug_code) {
3710 __ test(ecx, Immediate(kSmiTagMask));
3711 __ Assert(not_equal, kExpectedAllocationSite);
3712 __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
3713 isolate()->factory()->allocation_site_map());
3714 __ Assert(equal, kExpectedAllocationSite);
3715 }
3716
3717 // Tail call into the stub that handles binary operations with allocation
3718 // sites.
3719 BinaryOpWithAllocationSiteStub stub(isolate(), state_);
3720 __ TailCallStub(&stub);
3721 }
3722
3723
3724 void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
3725 ASSERT(state_ == CompareIC::SMI);
3726 Label miss;
3727 __ mov(ecx, edx);
3728 __ or_(ecx, eax);
3729 __ JumpIfNotSmi(ecx, &miss, Label::kNear);
3730
3731 if (GetCondition() == equal) {
3732 // For equality we do not care about the sign of the result.
3733 __ sub(eax, edx);
3734 } else {
3735 Label done;
3736 __ sub(edx, eax);
3737 __ j(no_overflow, &done, Label::kNear);
3738 // Correct sign of result in case of overflow.
3739 __ not_(edx);
3740 __ bind(&done);
3741 __ mov(eax, edx);
3742 }
3743 __ ret(0);
3744
3745 __ bind(&miss);
3746 GenerateMiss(masm);
3747 }
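
// The overflow fix-up above relies on the fact that when edx - eax overflows,
// the sign of the result is exactly inverted, so ~result (i.e. -result - 1)
// has the correct sign for the comparison; the magnitude is wrong, but only
// the sign and zeroness of eax matter to the caller.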
3748
3749
3750 void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
3751 ASSERT(state_ == CompareIC::NUMBER);
3752
3753 Label generic_stub;
3754 Label unordered, maybe_undefined1, maybe_undefined2;
3755 Label miss;
3756
3757 if (left_ == CompareIC::SMI) {
3758 __ JumpIfNotSmi(edx, &miss);
3759 }
3760 if (right_ == CompareIC::SMI) {
3761 __ JumpIfNotSmi(eax, &miss);
3762 }
3763
3764 // Load left and right operand.
3765 Label done, left, left_smi, right_smi;
3766 __ JumpIfSmi(eax, &right_smi, Label::kNear);
3767 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
3768 isolate()->factory()->heap_number_map());
3769 __ j(not_equal, &maybe_undefined1, Label::kNear);
3770 __ movsd(xmm1, FieldOperand(eax, HeapNumber::kValueOffset));
3771 __ jmp(&left, Label::kNear);
3772 __ bind(&right_smi);
3773 __ mov(ecx, eax); // Can't clobber eax because we can still jump away.
3774 __ SmiUntag(ecx);
3775 __ Cvtsi2sd(xmm1, ecx);
3776
3777 __ bind(&left);
3778 __ JumpIfSmi(edx, &left_smi, Label::kNear);
3779 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
3780 isolate()->factory()->heap_number_map());
3781 __ j(not_equal, &maybe_undefined2, Label::kNear);
3782 __ movsd(xmm0, FieldOperand(edx, HeapNumber::kValueOffset));
3783 __ jmp(&done);
3784 __ bind(&left_smi);
3785 __ mov(ecx, edx); // Can't clobber edx because we can still jump away.
3786 __ SmiUntag(ecx);
3787 __ Cvtsi2sd(xmm0, ecx);
3788
3789 __ bind(&done);
3790 // Compare operands.
3791 __ ucomisd(xmm0, xmm1);
3792
3793 // Don't base result on EFLAGS when a NaN is involved.
3794 __ j(parity_even, &unordered, Label::kNear);
3795
3796 // Return a result of -1, 0, or 1, based on EFLAGS.
3797   // Use mov rather than xor to zero eax because xor would clobber EFLAGS.
3798 __ mov(eax, 0); // equal
3799 __ mov(ecx, Immediate(Smi::FromInt(1)));
3800 __ cmov(above, eax, ecx);
3801 __ mov(ecx, Immediate(Smi::FromInt(-1)));
3802 __ cmov(below, eax, ecx);
3803 __ ret(0);
3804
3805 __ bind(&unordered);
3806 __ bind(&generic_stub);
3807 ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC,
3808 CompareIC::GENERIC);
3809 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
3810
3811 __ bind(&maybe_undefined1);
3812 if (Token::IsOrderedRelationalCompareOp(op_)) {
3813 __ cmp(eax, Immediate(isolate()->factory()->undefined_value()));
3814 __ j(not_equal, &miss);
3815 __ JumpIfSmi(edx, &unordered);
3816 __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
3817 __ j(not_equal, &maybe_undefined2, Label::kNear);
3818 __ jmp(&unordered);
3819 }
3820
3821 __ bind(&maybe_undefined2);
3822 if (Token::IsOrderedRelationalCompareOp(op_)) {
3823 __ cmp(edx, Immediate(isolate()->factory()->undefined_value()));
3824 __ j(equal, &unordered);
3825 }
3826
3827 __ bind(&miss);
3828 GenerateMiss(masm);
3829 }
3830
3831
3832 void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {
3833 ASSERT(state_ == CompareIC::INTERNALIZED_STRING);
3834 ASSERT(GetCondition() == equal);
3835
3836 // Registers containing left and right operands respectively.
3837 Register left = edx;
3838 Register right = eax;
3839 Register tmp1 = ecx;
3840 Register tmp2 = ebx;
3841
3842 // Check that both operands are heap objects.
3843 Label miss;
3844 __ mov(tmp1, left);
3845 STATIC_ASSERT(kSmiTag == 0);
3846 __ and_(tmp1, right);
3847 __ JumpIfSmi(tmp1, &miss, Label::kNear);
3848
3849 // Check that both operands are internalized strings.
3850 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
3851 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
3852 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
3853 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
3854 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
3855 __ or_(tmp1, tmp2);
3856 __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
3857 __ j(not_zero, &miss, Label::kNear);
3858
3859 // Internalized strings are compared by identity.
3860 Label done;
3861 __ cmp(left, right);
3862 // Make sure eax is non-zero. At this point input operands are
3863 // guaranteed to be non-zero.
3864 ASSERT(right.is(eax));
3865 __ j(not_equal, &done, Label::kNear);
3866 STATIC_ASSERT(EQUAL == 0);
3867 STATIC_ASSERT(kSmiTag == 0);
3868 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3869 __ bind(&done);
3870 __ ret(0);
3871
3872 __ bind(&miss);
3873 GenerateMiss(masm);
3874 }
3875
3876
3877 void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) {
3878 ASSERT(state_ == CompareIC::UNIQUE_NAME);
3879 ASSERT(GetCondition() == equal);
3880
3881 // Registers containing left and right operands respectively.
3882 Register left = edx;
3883 Register right = eax;
3884 Register tmp1 = ecx;
3885 Register tmp2 = ebx;
3886
3887 // Check that both operands are heap objects.
3888 Label miss;
3889 __ mov(tmp1, left);
3890 STATIC_ASSERT(kSmiTag == 0);
3891 __ and_(tmp1, right);
3892 __ JumpIfSmi(tmp1, &miss, Label::kNear);
3893
3894 // Check that both operands are unique names. This leaves the instance
3895 // types loaded in tmp1 and tmp2.
3896 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
3897 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
3898 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
3899 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
3900
3901 __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear);
3902 __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear);
3903
3904 // Unique names are compared by identity.
3905 Label done;
3906 __ cmp(left, right);
3907 // Make sure eax is non-zero. At this point input operands are
3908 // guaranteed to be non-zero.
3909 ASSERT(right.is(eax));
3910 __ j(not_equal, &done, Label::kNear);
3911 STATIC_ASSERT(EQUAL == 0);
3912 STATIC_ASSERT(kSmiTag == 0);
3913 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3914 __ bind(&done);
3915 __ ret(0);
3916
3917 __ bind(&miss);
3918 GenerateMiss(masm);
3919 }
3920
3921
3922 void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
3923 ASSERT(state_ == CompareIC::STRING);
3924 Label miss;
3925
3926 bool equality = Token::IsEqualityOp(op_);
3927
3928 // Registers containing left and right operands respectively.
3929 Register left = edx;
3930 Register right = eax;
3931 Register tmp1 = ecx;
3932 Register tmp2 = ebx;
3933 Register tmp3 = edi;
3934
3935 // Check that both operands are heap objects.
3936 __ mov(tmp1, left);
3937 STATIC_ASSERT(kSmiTag == 0);
3938 __ and_(tmp1, right);
3939 __ JumpIfSmi(tmp1, &miss);
3940
3941 // Check that both operands are strings. This leaves the instance
3942 // types loaded in tmp1 and tmp2.
3943 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
3944 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
3945 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
3946 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
3947 __ mov(tmp3, tmp1);
3948 STATIC_ASSERT(kNotStringTag != 0);
3949 __ or_(tmp3, tmp2);
3950 __ test(tmp3, Immediate(kIsNotStringMask));
3951 __ j(not_zero, &miss);
3952
3953 // Fast check for identical strings.
3954 Label not_same;
3955 __ cmp(left, right);
3956   __ j(not_equal, &not_same, Label::kNear);
3957 STATIC_ASSERT(EQUAL == 0);
3958 STATIC_ASSERT(kSmiTag == 0);
3959 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
3960 __ ret(0);
3961
3962 // Handle not identical strings.
3963   __ bind(&not_same);
3964
3965 // Check that both strings are internalized. If they are, we're done
3966 // because we already know they are not identical. But in the case of
3967 // non-equality compare, we still need to determine the order. We
3968 // also know they are both strings.
3969 if (equality) {
3970 Label do_compare;
3971 STATIC_ASSERT(kInternalizedTag == 0);
3972 __ or_(tmp1, tmp2);
3973 __ test(tmp1, Immediate(kIsNotInternalizedMask));
3974 __ j(not_zero, &do_compare, Label::kNear);
3975 // Make sure eax is non-zero. At this point input operands are
3976 // guaranteed to be non-zero.
3977 ASSERT(right.is(eax));
3978 __ ret(0);
3979 __ bind(&do_compare);
3980 }
3981
3982 // Check that both strings are sequential ASCII.
3983 Label runtime;
3984 __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);
3985
3986 // Compare flat ASCII strings. Returns when done.
3987 if (equality) {
3988 StringCompareStub::GenerateFlatAsciiStringEquals(
3989 masm, left, right, tmp1, tmp2);
3990 } else {
3991 StringCompareStub::GenerateCompareFlatAsciiStrings(
3992 masm, left, right, tmp1, tmp2, tmp3);
3993 }
3994
3995 // Handle more complex cases in runtime.
3996 __ bind(&runtime);
3997 __ pop(tmp1); // Return address.
3998 __ push(left);
3999 __ push(right);
4000 __ push(tmp1);
4001 if (equality) {
4002 __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
4003 } else {
4004 __ TailCallRuntime(Runtime::kHiddenStringCompare, 2, 1);
4005 }
4006
4007 __ bind(&miss);
4008 GenerateMiss(masm);
4009 }
4010
4011
4012 void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
4013 ASSERT(state_ == CompareIC::OBJECT);
4014 Label miss;
4015 __ mov(ecx, edx);
4016 __ and_(ecx, eax);
4017 __ JumpIfSmi(ecx, &miss, Label::kNear);
4018
4019 __ CmpObjectType(eax, JS_OBJECT_TYPE, ecx);
4020 __ j(not_equal, &miss, Label::kNear);
4021 __ CmpObjectType(edx, JS_OBJECT_TYPE, ecx);
4022 __ j(not_equal, &miss, Label::kNear);
4023
4024 ASSERT(GetCondition() == equal);
4025 __ sub(eax, edx);
4026 __ ret(0);
4027
4028 __ bind(&miss);
4029 GenerateMiss(masm);
4030 }
4031
4032
4033 void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
4034 Label miss;
4035 __ mov(ecx, edx);
4036 __ and_(ecx, eax);
4037 __ JumpIfSmi(ecx, &miss, Label::kNear);
4038
4039 __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
4040 __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
4041 __ cmp(ecx, known_map_);
4042 __ j(not_equal, &miss, Label::kNear);
4043 __ cmp(ebx, known_map_);
4044 __ j(not_equal, &miss, Label::kNear);
4045
4046 __ sub(eax, edx);
4047 __ ret(0);
4048
4049 __ bind(&miss);
4050 GenerateMiss(masm);
4051 }
4052
4053
4054 void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
4055 {
4056 // Call the runtime system in a fresh internal frame.
4057 ExternalReference miss = ExternalReference(IC_Utility(IC::kCompareIC_Miss),
4058 isolate());
4059 FrameScope scope(masm, StackFrame::INTERNAL);
4060 __ push(edx); // Preserve edx and eax.
4061 __ push(eax);
4062 __ push(edx); // And also use them as the arguments.
4063 __ push(eax);
4064 __ push(Immediate(Smi::FromInt(op_)));
4065 __ CallExternalReference(miss, 3);
4066 // Compute the entry point of the rewritten stub.
4067 __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
4068 __ pop(eax);
4069 __ pop(edx);
4070 }
4071
4072 // Do a tail call to the rewritten stub.
4073 __ jmp(edi);
4074 }
4075
4076
4077 // Helper function used to check that the dictionary doesn't contain
4078 // the property. This function may return false negatives, so miss_label
4079 // must always call a backup property check that is complete.
4080 // This function is safe to call if the receiver has fast properties.
4081 // Name must be a unique name and receiver must be a heap object.
4082 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
4083 Label* miss,
4084 Label* done,
4085 Register properties,
4086 Handle<Name> name,
4087 Register r0) {
4088 ASSERT(name->IsUniqueName());
4089
4090   // If the names of the slots in the range 1 to kProbes - 1 for the hash
4091   // value are not equal to the name, and the kProbes-th slot is not used
4092   // (its name is the undefined value), the hash table is guaranteed not to
4093   // contain the property. This holds even if some slots represent deleted
4094   // properties (their names are the hole value).
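  // A C++ sketch of one probe (illustrative; mirrors the masked-index
  // computation below):
  //   int index = (hash + NameDictionary::GetProbeOffset(i)) & (capacity - 1);
  //   // an undefined key at index means the name is absent; a key equal to
  //   // the name means it was found.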
4095 for (int i = 0; i < kInlinedProbes; i++) {
4096 // Compute the masked index: (hash + i + i * i) & mask.
4097 Register index = r0;
4098 // Capacity is smi 2^n.
4099 __ mov(index, FieldOperand(properties, kCapacityOffset));
4100 __ dec(index);
4101 __ and_(index,
4102 Immediate(Smi::FromInt(name->Hash() +
4103 NameDictionary::GetProbeOffset(i))));
4104
4105 // Scale the index by multiplying by the entry size.
4106 ASSERT(NameDictionary::kEntrySize == 3);
4107 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3.
4108 Register entity_name = r0;
4109 // Having undefined at this place means the name is not contained.
4110 ASSERT_EQ(kSmiTagSize, 1);
4111 __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
4112 kElementsStartOffset - kHeapObjectTag));
4113 __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
4114 __ j(equal, done);
4115
4116 // Stop if found the property.
4117 __ cmp(entity_name, Handle<Name>(name));
4118 __ j(equal, miss);
4119
4120 Label good;
4121 // Check for the hole and skip.
4122 __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
4123 __ j(equal, &good, Label::kNear);
4124
4125 // Check if the entry name is not a unique name.
4126 __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
4127 __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset),
4128 miss);
4129 __ bind(&good);
4130 }
4131
4132 NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
4133 NEGATIVE_LOOKUP);
4134 __ push(Immediate(Handle<Object>(name)));
4135 __ push(Immediate(name->Hash()));
4136 __ CallStub(&stub);
4137 __ test(r0, r0);
4138 __ j(not_zero, miss);
4139 __ jmp(done);
4140 }
4141
4142
4143 // Probe the name dictionary in the |elements| register. Jump to the
4144 // |done| label if a property with the given name is found, leaving the
4145 // index into the dictionary in |r0|. Jump to the |miss| label
4146 // otherwise.
4147 void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
4148 Label* miss,
4149 Label* done,
4150 Register elements,
4151 Register name,
4152 Register r0,
4153 Register r1) {
4154 ASSERT(!elements.is(r0));
4155 ASSERT(!elements.is(r1));
4156 ASSERT(!name.is(r0));
4157 ASSERT(!name.is(r1));
4158
4159 __ AssertName(name);
4160
4161 __ mov(r1, FieldOperand(elements, kCapacityOffset));
4162 __ shr(r1, kSmiTagSize); // convert smi to int
4163 __ dec(r1);
4164
4165 // Generate an unrolled loop that performs a few probes before
4166 // giving up. Measurements done on Gmail indicate that 2 probes
4167 // cover ~93% of loads from dictionaries.
4168 for (int i = 0; i < kInlinedProbes; i++) {
4169 // Compute the masked index: (hash + i + i * i) & mask.
4170 __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
4171 __ shr(r0, Name::kHashShift);
4172 if (i > 0) {
4173 __ add(r0, Immediate(NameDictionary::GetProbeOffset(i)));
4174 }
4175 __ and_(r0, r1);
4176
4177 // Scale the index by multiplying by the entry size.
4178 ASSERT(NameDictionary::kEntrySize == 3);
4179 __ lea(r0, Operand(r0, r0, times_2, 0)); // r0 = r0 * 3
4180
4181 // Check if the key is identical to the name.
4182 __ cmp(name, Operand(elements,
4183 r0,
4184 times_4,
4185 kElementsStartOffset - kHeapObjectTag));
4186 __ j(equal, done);
4187 }
4188
4189 NameDictionaryLookupStub stub(masm->isolate(), elements, r1, r0,
4190 POSITIVE_LOOKUP);
4191 __ push(name);
4192 __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
4193 __ shr(r0, Name::kHashShift);
4194 __ push(r0);
4195 __ CallStub(&stub);
4196
4197 __ test(r1, r1);
4198 __ j(zero, miss);
4199 __ jmp(done);
4200 }
4201
4202
4203 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
4204 // This stub overrides SometimesSetsUpAFrame() to return false. That means
4205 // we cannot call anything that could cause a GC from this stub.
4206 // Stack frame on entry:
4207 // esp[0 * kPointerSize]: return address.
4208 // esp[1 * kPointerSize]: key's hash.
4209 // esp[2 * kPointerSize]: key.
4210 // Registers:
4211 // dictionary_: NameDictionary to probe.
4212 // result_: used as scratch.
4213 // index_: will hold an index of entry if lookup is successful.
4214 // might alias with result_.
4215 // Returns:
4216 // result_ is zero if lookup failed, non zero otherwise.
4217
4218 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
4219
4220 Register scratch = result_;
4221
4222 __ mov(scratch, FieldOperand(dictionary_, kCapacityOffset));
4223 __ dec(scratch);
4224 __ SmiUntag(scratch);
4225 __ push(scratch);
4226
4227   // If the names of the slots in the range 1 to kProbes - 1 for the hash
4228   // value are not equal to the name, and the kProbes-th slot is not used
4229   // (its name is the undefined value), the hash table is guaranteed not to
4230   // contain the property. This holds even if some slots represent deleted
4231   // properties (their names are the hole value).
4232 for (int i = kInlinedProbes; i < kTotalProbes; i++) {
4233 // Compute the masked index: (hash + i + i * i) & mask.
4234 __ mov(scratch, Operand(esp, 2 * kPointerSize));
4235 if (i > 0) {
4236 __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
4237 }
4238 __ and_(scratch, Operand(esp, 0));
4239
4240 // Scale the index by multiplying by the entry size.
4241 ASSERT(NameDictionary::kEntrySize == 3);
4242 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3.
4243
4244 // Having undefined at this place means the name is not contained.
4245 ASSERT_EQ(kSmiTagSize, 1);
4246 __ mov(scratch, Operand(dictionary_,
4247 index_,
4248 times_pointer_size,
4249 kElementsStartOffset - kHeapObjectTag));
4250 __ cmp(scratch, isolate()->factory()->undefined_value());
4251     __ j(equal, &not_in_dictionary);
4252
4253 // Stop if found the property.
4254 __ cmp(scratch, Operand(esp, 3 * kPointerSize));
4255 __ j(equal, &in_dictionary);
4256
4257 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
4258 // If we hit a key that is not a unique name during negative
4259 // lookup we have to bailout as this key might be equal to the
4260 // key we are looking for.
4261
4262 // Check if the entry name is not a unique name.
4263 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
4264 __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset),
4265 &maybe_in_dictionary);
4266 }
4267 }
4268
4269 __ bind(&maybe_in_dictionary);
4270 // If we are doing negative lookup then probing failure should be
4271 // treated as a lookup success. For positive lookup probing failure
4272 // should be treated as lookup failure.
4273 if (mode_ == POSITIVE_LOOKUP) {
4274 __ mov(result_, Immediate(0));
4275 __ Drop(1);
4276 __ ret(2 * kPointerSize);
4277 }
4278
4279 __ bind(&in_dictionary);
4280 __ mov(result_, Immediate(1));
4281 __ Drop(1);
4282 __ ret(2 * kPointerSize);
4283
4284   __ bind(&not_in_dictionary);
4285 __ mov(result_, Immediate(0));
4286 __ Drop(1);
4287 __ ret(2 * kPointerSize);
4288 }
4289
4290
4291 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
4292 Isolate* isolate) {
4293 StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs);
4294 stub.GetCode();
4295 StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
4296 stub2.GetCode();
4297 }
4298
4299
4300 // Takes the input in 3 registers: address_, value_ and object_. A pointer
4301 // to the value has just been written into the object; this stub now makes
4302 // sure we keep the GC informed. The word in the object where the value has
4303 // been written is in the address register.
4304 void RecordWriteStub::Generate(MacroAssembler* masm) {
4305 Label skip_to_incremental_noncompacting;
4306 Label skip_to_incremental_compacting;
4307
4308   // The first two instructions are generated with labels so as to get the
4309   // offset fixed up correctly by the bind(Label*) call. We patch them back
4310   // and forth between compare instructions (nops in this position) and the
4311   // real branches when we start and stop incremental heap marking.
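  // The same byte positions are toggled between a jump and a compare whose
  // immediate operand swallows the unused jump offset (a sketch; on ia32 the
  // "nops" are cmpb al / cmpl eax):
  //   2 bytes:  jmp short <off8>    <->  cmpb al, <imm8>
  //   5 bytes:  jmp near <off32>    <->  cmpl eax, <imm32>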
4312 __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
4313 __ jmp(&skip_to_incremental_compacting, Label::kFar);
4314
4315 if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
4316 __ RememberedSetHelper(object_,
4317 address_,
4318 value_,
4319 save_fp_regs_mode_,
4320 MacroAssembler::kReturnAtEnd);
4321 } else {
4322 __ ret(0);
4323 }
4324
4325 __ bind(&skip_to_incremental_noncompacting);
4326 GenerateIncremental(masm, INCREMENTAL);
4327
4328 __ bind(&skip_to_incremental_compacting);
4329 GenerateIncremental(masm, INCREMENTAL_COMPACTION);
4330
4331 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
4332 // Will be checked in IncrementalMarking::ActivateGeneratedStub.
4333 masm->set_byte_at(0, kTwoByteNopInstruction);
4334 masm->set_byte_at(2, kFiveByteNopInstruction);
4335 }
4336
4337
4338 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
4339 regs_.Save(masm);
4340
4341 if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
4342 Label dont_need_remembered_set;
4343
4344 __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
4345 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value.
4346 regs_.scratch0(),
4347 &dont_need_remembered_set);
4348
4349 __ CheckPageFlag(regs_.object(),
4350 regs_.scratch0(),
4351 1 << MemoryChunk::SCAN_ON_SCAVENGE,
4352 not_zero,
4353 &dont_need_remembered_set);
4354
4355 // First notify the incremental marker if necessary, then update the
4356 // remembered set.
4357 CheckNeedsToInformIncrementalMarker(
4358 masm,
4359 kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
4360 mode);
4361 InformIncrementalMarker(masm);
4362 regs_.Restore(masm);
4363 __ RememberedSetHelper(object_,
4364 address_,
4365 value_,
4366 save_fp_regs_mode_,
4367 MacroAssembler::kReturnAtEnd);
4368
4369 __ bind(&dont_need_remembered_set);
4370 }
4371
4372 CheckNeedsToInformIncrementalMarker(
4373 masm,
4374 kReturnOnNoNeedToInformIncrementalMarker,
4375 mode);
4376 InformIncrementalMarker(masm);
4377 regs_.Restore(masm);
4378 __ ret(0);
4379 }
4380
4381
4382 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
4383 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
4384 int argument_count = 3;
4385 __ PrepareCallCFunction(argument_count, regs_.scratch0());
4386 __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
4387 __ mov(Operand(esp, 1 * kPointerSize), regs_.address()); // Slot.
4388 __ mov(Operand(esp, 2 * kPointerSize),
4389 Immediate(ExternalReference::isolate_address(isolate())));
4390
4391 AllowExternalCallThatCantCauseGC scope(masm);
4392 __ CallCFunction(
4393 ExternalReference::incremental_marking_record_write_function(isolate()),
4394 argument_count);
4395
4396 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
4397 }
4398
4399
4400 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
4401 MacroAssembler* masm,
4402 OnNoNeedToInformIncrementalMarker on_no_need,
4403 Mode mode) {
4404 Label object_is_black, need_incremental, need_incremental_pop_object;
4405
4406 __ mov(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
4407 __ and_(regs_.scratch0(), regs_.object());
4408 __ mov(regs_.scratch1(),
4409 Operand(regs_.scratch0(),
4410 MemoryChunk::kWriteBarrierCounterOffset));
4411 __ sub(regs_.scratch1(), Immediate(1));
4412 __ mov(Operand(regs_.scratch0(),
4413 MemoryChunk::kWriteBarrierCounterOffset),
4414 regs_.scratch1());
4415 __ j(negative, &need_incremental);

  // Let's look at the color of the object: if it is not black, we don't
  // have to inform the incremental marker.
  __ JumpIfBlack(regs_.object(),
                 regs_.scratch0(),
                 regs_.scratch1(),
                 &object_is_black,
                 Label::kNear);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&object_is_black);

  // Get the value from the slot.
  __ mov(regs_.scratch0(), Operand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     zero,
                     &ensure_not_white,
                     Label::kNear);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     not_zero,
                     &ensure_not_white,
                     Label::kNear);

    __ jmp(&need_incremental);

    __ bind(&ensure_not_white);
  }
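
  // Illustrative reading of the two page-flag checks above (not emitted
  // code): during incremental compaction the marker is only needed when the
  // value sits on an evacuation candidate page *and* slots on the object's
  // page are still being recorded, roughly:
  //
  //   if (Page::FromAddress(value)->IsEvacuationCandidate() &&
  //       !Page::FromAddress(object)->ShouldSkipEvacuationSlotRecording()) {
  //     goto need_incremental;
  //   }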

  // We need an extra register for this, so we push the object register
  // temporarily.
  __ push(regs_.object());
  __ EnsureNotWhite(regs_.scratch0(),  // The value.
                    regs_.scratch1(),  // Scratch.
                    regs_.object(),  // Scratch.
                    &need_incremental_pop_object,
                    Label::kNear);
  __ pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&need_incremental_pop_object);
  __ pop(regs_.object());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}


void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : element value to store
  //  -- ecx    : element index as smi
  //  -- esp[0] : return address
  //  -- esp[4] : array literal index in function
  //  -- esp[8] : array literal
  // clobbers ebx, edx, edi
  // -----------------------------------
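
  // Conceptually (an illustrative JS-level sketch, not emitted code), this
  // stub performs the store
  //
  //   literal[index] = value;
  //
  // for one array literal element, picking a fast path based on the
  // literal's current elements kind and falling back to the runtime when an
  // elements transition would be required.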

  Label element_done;
  Label double_elements;
  Label smi_element;
  Label slow_elements;
  Label slow_elements_from_double;
  Label fast_elements;

  // Get array literal index, array literal and its map.
  __ mov(edx, Operand(esp, 1 * kPointerSize));
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(edi, FieldOperand(ebx, JSObject::kMapOffset));

  __ CheckFastElements(edi, &double_elements);

  // Check for FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS elements.
  __ JumpIfSmi(eax, &smi_element);
  __ CheckFastSmiElements(edi, &fast_elements, Label::kNear);

  // Storing into the array literal requires an elements transition. Call
  // into the runtime.

  __ bind(&slow_elements);
  // Pop the return address; we will push it back just before the tail call.
  __ pop(edi);
  __ push(ebx);
  __ push(ecx);
  __ push(eax);
  __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
  __ push(edx);
  // Push the return address back so that the tail call returns to the
  // right place.
  __ push(edi);
  __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);

  __ bind(&slow_elements_from_double);
  __ pop(edx);
  __ jmp(&slow_elements);

  // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
  __ bind(&fast_elements);
  __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
  __ lea(ecx, FieldOperand(ebx, ecx, times_half_pointer_size,
                           FixedArrayBase::kHeaderSize));
  __ mov(Operand(ecx, 0), eax);
  // Update the write barrier for the array store.
  __ RecordWrite(ebx, ecx, eax,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 OMIT_SMI_CHECK);
  __ ret(0);

  // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS,
  // and value is Smi.
  __ bind(&smi_element);
  __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
  __ mov(FieldOperand(ebx, ecx, times_half_pointer_size,
                      FixedArrayBase::kHeaderSize), eax);
  __ ret(0);

  // Array literal has ElementsKind of FAST_*_DOUBLE_ELEMENTS.
  __ bind(&double_elements);

  __ push(edx);
  __ mov(edx, FieldOperand(ebx, JSObject::kElementsOffset));
  __ StoreNumberToDoubleElements(eax,
                                 edx,
                                 ecx,
                                 edi,
                                 xmm0,
                                 &slow_elements_from_double);
  __ pop(edx);
  __ ret(0);
}


void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(isolate(), 1, kSaveFPRegs);
  __ call(ces.GetCode(), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ mov(ebx, MemOperand(ebp, parameter_count_offset));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ pop(ecx);
  int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE
      ? kPointerSize
      : 0;
  __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset));
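  // The lea above drops the caller's stack parameters: ebx holds the
  // parameter count, so esp advances by ebx * kPointerSize, plus one extra
  // slot for the receiver when the stub was called in JS-function mode.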
  __ jmp(ecx);  // Return to IC Miss stub, continuation still on stack.
}


void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    ProfileEntryHookStub stub(masm->isolate());
    masm->CallStub(&stub);
  }
}


void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // Save volatile registers.
  const int kNumSavedRegisters = 3;
  __ push(eax);
  __ push(ecx);
  __ push(edx);

  // Calculate and push the original stack pointer.
  __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
  __ push(eax);

  // Retrieve our return address and use it to calculate the calling
  // function's address.
  __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
  __ sub(eax, Immediate(Assembler::kCallInstructionLength));
  __ push(eax);

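  // The two words just pushed match the entry hook's signature as declared
  // in include/v8.h (listed here for reference):
  //
  //   typedef void (*FunctionEntryHook)(uintptr_t function,
  //                                     uintptr_t return_addr_location);
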
  // Call the entry hook.
  ASSERT(isolate()->function_entry_hook() != NULL);
  __ call(FUNCTION_ADDR(isolate()->function_entry_hook()),
          RelocInfo::RUNTIME_ENTRY);
  __ add(esp, Immediate(2 * kPointerSize));

  // Restore the volatile registers.
  __ pop(edx);
  __ pop(ecx);
  __ pop(eax);

  __ ret(0);
}


template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    T stub(masm->isolate(),
           GetInitialFastElementsKind(),
           mode);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmp(edx, kind);
      __ j(not_equal, &next);
      T stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}


static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
  // edx - kind (if mode != DISABLE_ALLOCATION_SITES)
  // eax - number of arguments
  // edi - constructor?
  // esp[0] - return address
  // esp[4] - last argument
  Label normal_sequence;
  if (mode == DONT_OVERRIDE) {
    ASSERT(FAST_SMI_ELEMENTS == 0);
    ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    ASSERT(FAST_ELEMENTS == 2);
    ASSERT(FAST_HOLEY_ELEMENTS == 3);
    ASSERT(FAST_DOUBLE_ELEMENTS == 4);
    ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);

    // Is the low bit set? If so, we are holey and that is good.
    __ test_b(edx, 1);
    __ j(not_zero, &normal_sequence);
  }
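
  // Why the low-bit test works (illustrative, relying on the ASSERTs above):
  // fast elements kinds alternate packed/holey, with the holey variant at
  // packed_kind + 1, e.g.
  //
  //   FAST_SMI_ELEMENTS (0) -> FAST_HOLEY_SMI_ELEMENTS (1)
  //   FAST_ELEMENTS     (2) -> FAST_HOLEY_ELEMENTS     (3)
  //
  // so (kind & 1) != 0 already means holey, and a packed kind can be turned
  // into its holey counterpart by incrementing it (see __ inc(edx) below).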

  // Look at the first argument.
  __ mov(ecx, Operand(esp, kPointerSize));
  __ test(ecx, ecx);
  __ j(zero, &normal_sequence);

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
                                                  holey_initial,
                                                  DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);

    __ bind(&normal_sequence);
    ArraySingleArgumentConstructorStub stub(masm->isolate(),
                                            initial,
                                            DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    // We are going to create a holey array, but our kind is non-holey.
    // Fix kind and retry.
    __ inc(edx);

    if (FLAG_debug_code) {
      Handle<Map> allocation_site_map =
          masm->isolate()->factory()->allocation_site_map();
      __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
      __ Assert(equal, kExpectedAllocationSite);
    }

    // Save the resulting elements kind in type info. We can't just store the
    // new kind in the AllocationSite::transition_info field directly, because
    // elements kind is restricted to a portion of the field; the upper bits
    // need to be left alone.
    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
    __ add(FieldOperand(ebx, AllocationSite::kTransitionInfoOffset),
           Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));
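
    // Illustrative arithmetic: kFastElementsKindPackedToHoley is the
    // packed->holey kind delta (1, per the ASSERTs above), so adding
    // Smi::FromInt(1) to the tagged transition_info bumps only the
    // ElementsKindBits at shift 0 while leaving the upper bits intact,
    // assuming the kind increment does not carry out of the bit range.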

    __ bind(&normal_sequence);
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmp(edx, kind);
      __ j(not_equal, &next);
      ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}


template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
  int to_index = GetSequenceIndexFromFastElementsKind(
      TERMINAL_FAST_ELEMENTS_KIND);
  for (int i = 0; i <= to_index; ++i) {
    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    T stub(isolate, kind);
    stub.GetCode();
    if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
      T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
      stub1.GetCode();
    }
  }
}


void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
      isolate);
}


void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
    Isolate* isolate) {
  ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
  for (int i = 0; i < 2; i++) {
    // For internal arrays we only need a few variants.
    InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
    stubh1.GetCode();
    InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
    stubh2.GetCode();
    InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
    stubh3.GetCode();
  }
}


void ArrayConstructorStub::GenerateDispatchToArrayStub(
    MacroAssembler* masm,
    AllocationSiteOverrideMode mode) {
  if (argument_count_ == ANY) {
    Label not_zero_case, not_one_case;
    __ test(eax, eax);
    __ j(not_zero, &not_zero_case);
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);

    __ bind(&not_zero_case);
    __ cmp(eax, 1);
    __ j(greater, &not_one_case);
    CreateArrayDispatchOneArgument(masm, mode);

    __ bind(&not_one_case);
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else if (argument_count_ == NONE) {
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
  } else if (argument_count_ == ONE) {
    CreateArrayDispatchOneArgument(masm, mode);
  } else if (argument_count_ == MORE_THAN_ONE) {
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else {
    UNREACHABLE();
  }
}


void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : argc (only if argument_count_ == ANY)
  //  -- ebx    : AllocationSite or undefined
  //  -- edi    : constructor
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------
  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions, which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // This test will catch both a NULL and a Smi.
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ecx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);

    // We should either have undefined in ebx or a valid AllocationSite.
    __ AssertUndefinedOrAllocationSite(ebx);
  }

  Label no_info;
  // If the feedback vector is undefined, call an array constructor that
  // doesn't use AllocationSites.
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(equal, &no_info);

  // Only look at the lower 16 bits of the transition info.
  __ mov(edx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset));
  __ SmiUntag(edx);
  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
  __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
}


void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ test(eax, eax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);
  __ cmp(eax, 1);
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array; look at the first argument.
    __ mov(ecx, Operand(esp, kPointerSize));
    __ test(ecx, ecx);
    __ j(zero, &normal_sequence);

    InternalArraySingleArgumentConstructorStub
        stub1_holey(isolate(), GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey);
  }

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
  __ TailCallStub(&stubN);
}


void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax    : argc
  //  -- edi    : constructor
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions, which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // This test will catch both a NULL and a Smi.
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ecx, MAP_TYPE, ecx);
    __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind.
  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into ecx. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(ecx);

  if (FLAG_debug_code) {
    Label done;
    __ cmp(ecx, Immediate(FAST_ELEMENTS));
    __ j(equal, &done);
    __ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS));
    __ Assert(equal,
              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmp(ecx, Immediate(FAST_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}


void CallApiFunctionStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax                 : callee
  //  -- ebx                 : call_data
  //  -- ecx                 : holder
  //  -- edx                 : api_function_address
  //  -- esi                 : context
  //  --
  //  -- esp[0]              : return address
  //  -- esp[4]              : last argument
  //  -- ...
  //  -- esp[argc * 4]       : first argument
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  Register callee = eax;
  Register call_data = ebx;
  Register holder = ecx;
  Register api_function_address = edx;
  Register return_address = edi;
  Register context = esi;

  int argc = ArgumentBits::decode(bit_field_);
  bool is_store = IsStoreBits::decode(bit_field_);
  bool call_data_undefined = CallDataUndefinedBits::decode(bit_field_);

  typedef FunctionCallbackArguments FCA;

  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
  STATIC_ASSERT(FCA::kCalleeIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);
  STATIC_ASSERT(FCA::kArgsLength == 7);
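
  // The pushes below build the FunctionCallbackArguments block on the stack.
  // A sketch of the layout just before the return address is pushed back
  // (indices per the STATIC_ASSERTs above; illustrative only):
  //
  //   esp[0 * 4] : holder                (FCA::kHolderIndex == 0)
  //   esp[1 * 4] : isolate               (FCA::kIsolateIndex == 1)
  //   esp[2 * 4] : return value default  (FCA::kReturnValueDefaultValueIndex)
  //   esp[3 * 4] : return value          (FCA::kReturnValueOffset)
  //   esp[4 * 4] : call data             (FCA::kDataIndex)
  //   esp[5 * 4] : callee                (FCA::kCalleeIndex)
  //   esp[6 * 4] : saved context         (FCA::kContextSaveIndex)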

  __ pop(return_address);

  // context save
  __ push(context);
  // load context from callee
  __ mov(context, FieldOperand(callee, JSFunction::kContextOffset));

  // callee
  __ push(callee);

  // call data
  __ push(call_data);

  Register scratch = call_data;
  if (!call_data_undefined) {
    // return value
    __ push(Immediate(isolate()->factory()->undefined_value()));
    // return value default
    __ push(Immediate(isolate()->factory()->undefined_value()));
  } else {
    // return value
    __ push(scratch);
    // return value default
    __ push(scratch);
  }
  // isolate
  __ push(Immediate(reinterpret_cast<int>(isolate())));
  // holder
  __ push(holder);

  __ mov(scratch, esp);

  // return address
  __ push(return_address);

  // The API function gets a reference to the v8::Arguments. If the CPU
  // profiler is enabled, a wrapper function will be called instead, and we
  // need to pass the address of the callback as an additional parameter, so
  // always allocate space for it.
  const int kApiArgc = 1 + 1;

  // Allocate the v8::Arguments structure in the arguments' space, since
  // it's not controlled by GC.
  const int kApiStackSpace = 4;

  __ PrepareCallApiFunction(kApiArgc + kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  __ mov(ApiParameterOperand(2), scratch);
  __ add(scratch, Immediate((argc + FCA::kArgsLength - 1) * kPointerSize));
  // FunctionCallbackInfo::values_.
  __ mov(ApiParameterOperand(3), scratch);
  // FunctionCallbackInfo::length_.
  __ Move(ApiParameterOperand(4), Immediate(argc));
  // FunctionCallbackInfo::is_construct_call_.
  __ Move(ApiParameterOperand(5), Immediate(0));
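
  // A sketch of the structure being filled in (field names as used in the
  // comments above; the exact declaration lives in the API internals):
  //
  //   struct FunctionCallbackInfo {
  //     Object** implicit_args_;      // -> the FCA block built earlier.
  //     Object** values_;             // -> the first JS argument.
  //     int      length_;             // argc.
  //     int      is_construct_call_;  // 0 for a normal call.
  //   };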

  // v8::InvocationCallback's argument.
  __ lea(scratch, ApiParameterOperand(2));
  __ mov(ApiParameterOperand(0), scratch);

  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(isolate());

  Operand context_restore_operand(ebp,
                                  (2 + FCA::kContextSaveIndex) * kPointerSize);
  // Stores return the first JS argument.
  int return_value_offset = 0;
  if (is_store) {
    return_value_offset = 2 + FCA::kArgsLength;
  } else {
    return_value_offset = 2 + FCA::kReturnValueOffset;
  }
  Operand return_value_operand(ebp, return_value_offset * kPointerSize);
  __ CallApiFunctionAndReturn(api_function_address,
                              thunk_ref,
                              ApiParameterOperand(1),
                              argc + FCA::kArgsLength + 1,
                              return_value_operand,
                              &context_restore_operand);
}


void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- esp[0]                  : return address
  //  -- esp[4]                  : name
  //  -- esp[8 - kArgsLength*4]  : PropertyCallbackArguments object
  //  -- ...
  //  -- edx                     : api_function_address
  // -----------------------------------

  // Stack space for the array backing v8::Arguments::values_, a handle for
  // the name, and a pointer to the values (the pointer is treated as a smi
  // by the GC).
  const int kStackSpace = PropertyCallbackArguments::kArgsLength + 2;
  // Allocate space for an optional callback address parameter in case the
  // CPU profiler is active.
  const int kApiArgc = 2 + 1;

  Register api_function_address = edx;
  Register scratch = ebx;

  // Load the address of the name.
  __ lea(scratch, Operand(esp, 1 * kPointerSize));

  __ PrepareCallApiFunction(kApiArgc);
  __ mov(ApiParameterOperand(0), scratch);  // name.
  __ add(scratch, Immediate(kPointerSize));
  __ mov(ApiParameterOperand(1), scratch);  // arguments pointer.

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  __ CallApiFunctionAndReturn(api_function_address,
                              thunk_ref,
                              ApiParameterOperand(2),
                              kStackSpace,
                              Operand(ebp, 7 * kPointerSize),
                              NULL);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32
