// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "ic-inl.h"
#include "codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

ProbeTable(Isolate * isolate,MacroAssembler * masm,Code::Flags flags,StubCache::Table table,Register name,Register offset,Register extra)42 static void ProbeTable(Isolate* isolate,
43 MacroAssembler* masm,
44 Code::Flags flags,
45 StubCache::Table table,
46 Register name,
47 Register offset,
48 Register extra) {
49 ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
50 ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
51
52 Label miss;
53
54 if (extra.is_valid()) {
55 // Get the code entry from the cache.
56 __ mov(extra, Operand::StaticArray(offset, times_2, value_offset));
57
58 // Check that the key in the entry matches the name.
59 __ cmp(name, Operand::StaticArray(offset, times_2, key_offset));
60 __ j(not_equal, &miss, not_taken);
61
62 // Check that the flags match what we're looking for.
63 __ mov(offset, FieldOperand(extra, Code::kFlagsOffset));
64 __ and_(offset, ~Code::kFlagsNotUsedInLookup);
65 __ cmp(offset, flags);
66 __ j(not_equal, &miss);
67
68 // Jump to the first instruction in the code stub.
69 __ add(Operand(extra), Immediate(Code::kHeaderSize - kHeapObjectTag));
70 __ jmp(Operand(extra));
71
72 __ bind(&miss);
73 } else {
74 // Save the offset on the stack.
75 __ push(offset);
76
77 // Check that the key in the entry matches the name.
78 __ cmp(name, Operand::StaticArray(offset, times_2, key_offset));
79 __ j(not_equal, &miss, not_taken);
80
81 // Get the code entry from the cache.
82 __ mov(offset, Operand::StaticArray(offset, times_2, value_offset));
83
84 // Check that the flags match what we're looking for.
85 __ mov(offset, FieldOperand(offset, Code::kFlagsOffset));
86 __ and_(offset, ~Code::kFlagsNotUsedInLookup);
87 __ cmp(offset, flags);
88 __ j(not_equal, &miss);
89
90 // Restore offset and re-load code entry from cache.
91 __ pop(offset);
92 __ mov(offset, Operand::StaticArray(offset, times_2, value_offset));
93
94 // Jump to the first instruction in the code stub.
95 __ add(Operand(offset), Immediate(Code::kHeaderSize - kHeapObjectTag));
96 __ jmp(Operand(offset));
97
98 // Pop at miss.
99 __ bind(&miss);
100 __ pop(offset);
101 }
102 }
103
104
105 // Helper function used to check that the dictionary doesn't contain
106 // the property. This function may return false negatives, so miss_label
107 // must always call a backup property check that is complete.
108 // This function is safe to call if the receiver has fast properties.
109 // Name must be a symbol and receiver must be a heap object.
GenerateDictionaryNegativeLookup(MacroAssembler * masm,Label * miss_label,Register receiver,String * name,Register r0,Register r1)110 static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
111 Label* miss_label,
112 Register receiver,
113 String* name,
114 Register r0,
115 Register r1) {
116 ASSERT(name->IsSymbol());
117 Counters* counters = masm->isolate()->counters();
118 __ IncrementCounter(counters->negative_lookups(), 1);
119 __ IncrementCounter(counters->negative_lookups_miss(), 1);
120
121 Label done;
122 __ mov(r0, FieldOperand(receiver, HeapObject::kMapOffset));
123
124 const int kInterceptorOrAccessCheckNeededMask =
125 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
126
127 // Bail out if the receiver has a named interceptor or requires access checks.
128 __ test_b(FieldOperand(r0, Map::kBitFieldOffset),
129 kInterceptorOrAccessCheckNeededMask);
130 __ j(not_zero, miss_label, not_taken);
131
132 // Check that receiver is a JSObject.
133 __ CmpInstanceType(r0, FIRST_JS_OBJECT_TYPE);
134 __ j(below, miss_label, not_taken);
135
136 // Load properties array.
137 Register properties = r0;
138 __ mov(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));
139
140 // Check that the properties array is a dictionary.
141 __ cmp(FieldOperand(properties, HeapObject::kMapOffset),
142 Immediate(masm->isolate()->factory()->hash_table_map()));
143 __ j(not_equal, miss_label);
144
145 // Compute the capacity mask.
146 const int kCapacityOffset =
147 StringDictionary::kHeaderSize +
148 StringDictionary::kCapacityIndex * kPointerSize;
149
150 // Generate an unrolled loop that performs a few probes before
151 // giving up.
152 static const int kProbes = 4;
153 const int kElementsStartOffset =
154 StringDictionary::kHeaderSize +
155 StringDictionary::kElementsStartIndex * kPointerSize;
156
157 // If names of slots in range from 1 to kProbes - 1 for the hash value are
158 // not equal to the name and kProbes-th slot is not used (its name is the
159 // undefined value), it guarantees the hash table doesn't contain the
160 // property. It's true even if some slots represent deleted properties
161 // (their names are the null value).
162 for (int i = 0; i < kProbes; i++) {
163 // r0 points to properties hash.
164 // Compute the masked index: (hash + i + i * i) & mask.
165 Register index = r1;
166 // Capacity is smi 2^n.
167 __ mov(index, FieldOperand(properties, kCapacityOffset));
168 __ dec(index);
169 __ and_(Operand(index),
170 Immediate(Smi::FromInt(name->Hash() +
171 StringDictionary::GetProbeOffset(i))));
172
173 // Scale the index by multiplying by the entry size.
174 ASSERT(StringDictionary::kEntrySize == 3);
175 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3.
176
177 Register entity_name = r1;
178 // Having undefined at this place means the name is not contained.
179 ASSERT_EQ(kSmiTagSize, 1);
180 __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
181 kElementsStartOffset - kHeapObjectTag));
182 __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
183 if (i != kProbes - 1) {
184 __ j(equal, &done, taken);
185
186 // Stop if found the property.
187 __ cmp(entity_name, Handle<String>(name));
188 __ j(equal, miss_label, not_taken);
189
190 // Check if the entry name is not a symbol.
191 __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
192 __ test_b(FieldOperand(entity_name, Map::kInstanceTypeOffset),
193 kIsSymbolMask);
194 __ j(zero, miss_label, not_taken);
195 } else {
196 // Give up probing if still not found the undefined value.
197 __ j(not_equal, miss_label, not_taken);
198 }
199 }
200
201 __ bind(&done);
202 __ DecrementCounter(counters->negative_lookups_miss(), 1);
203 }
204
205
GenerateProbe(MacroAssembler * masm,Code::Flags flags,Register receiver,Register name,Register scratch,Register extra,Register extra2)206 void StubCache::GenerateProbe(MacroAssembler* masm,
207 Code::Flags flags,
208 Register receiver,
209 Register name,
210 Register scratch,
211 Register extra,
212 Register extra2) {
213 Isolate* isolate = Isolate::Current();
214 Label miss;
215 USE(extra2); // The register extra2 is not used on the ia32 platform.
216
217 // Make sure that code is valid. The shifting code relies on the
218 // entry size being 8.
219 ASSERT(sizeof(Entry) == 8);
220
221 // Make sure the flags does not name a specific type.
222 ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
223
224 // Make sure that there are no register conflicts.
225 ASSERT(!scratch.is(receiver));
226 ASSERT(!scratch.is(name));
227 ASSERT(!extra.is(receiver));
228 ASSERT(!extra.is(name));
229 ASSERT(!extra.is(scratch));
230
231 // Check scratch and extra registers are valid, and extra2 is unused.
232 ASSERT(!scratch.is(no_reg));
233 ASSERT(extra2.is(no_reg));
234
235 // Check that the receiver isn't a smi.
236 __ test(receiver, Immediate(kSmiTagMask));
237 __ j(zero, &miss, not_taken);
238
239 // Get the map of the receiver and compute the hash.
240 __ mov(scratch, FieldOperand(name, String::kHashFieldOffset));
241 __ add(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
242 __ xor_(scratch, flags);
243 __ and_(scratch, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
244
245 // Probe the primary table.
246 ProbeTable(isolate, masm, flags, kPrimary, name, scratch, extra);
247
248 // Primary miss: Compute hash for secondary probe.
249 __ mov(scratch, FieldOperand(name, String::kHashFieldOffset));
250 __ add(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
251 __ xor_(scratch, flags);
252 __ and_(scratch, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
253 __ sub(scratch, Operand(name));
254 __ add(Operand(scratch), Immediate(flags));
255 __ and_(scratch, (kSecondaryTableSize - 1) << kHeapObjectTagSize);
256
257 // Probe the secondary table.
258 ProbeTable(isolate, masm, flags, kSecondary, name, scratch, extra);
259
260 // Cache miss: Fall-through and let caller handle the miss by
261 // entering the runtime system.
262 __ bind(&miss);
263 }
264
265
GenerateLoadGlobalFunctionPrototype(MacroAssembler * masm,int index,Register prototype)266 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
267 int index,
268 Register prototype) {
269 __ LoadGlobalFunction(index, prototype);
270 __ LoadGlobalFunctionInitialMap(prototype, prototype);
271 // Load the prototype from the initial map.
272 __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
273 }
274
275
GenerateDirectLoadGlobalFunctionPrototype(MacroAssembler * masm,int index,Register prototype,Label * miss)276 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
277 MacroAssembler* masm, int index, Register prototype, Label* miss) {
278 // Check we're still in the same context.
279 __ cmp(Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)),
280 masm->isolate()->global());
281 __ j(not_equal, miss);
282 // Get the global function with the given index.
283 JSFunction* function =
284 JSFunction::cast(masm->isolate()->global_context()->get(index));
285 // Load its initial map. The global functions all have initial maps.
286 __ Set(prototype, Immediate(Handle<Map>(function->initial_map())));
287 // Load the prototype from the initial map.
288 __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
289 }
290
291
GenerateLoadArrayLength(MacroAssembler * masm,Register receiver,Register scratch,Label * miss_label)292 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
293 Register receiver,
294 Register scratch,
295 Label* miss_label) {
296 // Check that the receiver isn't a smi.
297 __ test(receiver, Immediate(kSmiTagMask));
298 __ j(zero, miss_label, not_taken);
299
300 // Check that the object is a JS array.
301 __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
302 __ j(not_equal, miss_label, not_taken);
303
304 // Load length directly from the JS array.
305 __ mov(eax, FieldOperand(receiver, JSArray::kLengthOffset));
306 __ ret(0);
307 }
308
309
310 // Generate code to check if an object is a string. If the object is
311 // a string, the map's instance type is left in the scratch register.
GenerateStringCheck(MacroAssembler * masm,Register receiver,Register scratch,Label * smi,Label * non_string_object)312 static void GenerateStringCheck(MacroAssembler* masm,
313 Register receiver,
314 Register scratch,
315 Label* smi,
316 Label* non_string_object) {
317 // Check that the object isn't a smi.
318 __ test(receiver, Immediate(kSmiTagMask));
319 __ j(zero, smi, not_taken);
320
321 // Check that the object is a string.
322 __ mov(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
323 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
324 ASSERT(kNotStringTag != 0);
325 __ test(scratch, Immediate(kNotStringTag));
326 __ j(not_zero, non_string_object, not_taken);
327 }
328
329
GenerateLoadStringLength(MacroAssembler * masm,Register receiver,Register scratch1,Register scratch2,Label * miss,bool support_wrappers)330 void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
331 Register receiver,
332 Register scratch1,
333 Register scratch2,
334 Label* miss,
335 bool support_wrappers) {
336 Label check_wrapper;
337
338 // Check if the object is a string leaving the instance type in the
339 // scratch register.
340 GenerateStringCheck(masm, receiver, scratch1, miss,
341 support_wrappers ? &check_wrapper : miss);
342
343 // Load length from the string and convert to a smi.
344 __ mov(eax, FieldOperand(receiver, String::kLengthOffset));
345 __ ret(0);
346
347 if (support_wrappers) {
348 // Check if the object is a JSValue wrapper.
349 __ bind(&check_wrapper);
350 __ cmp(scratch1, JS_VALUE_TYPE);
351 __ j(not_equal, miss, not_taken);
352
353 // Check if the wrapped value is a string and load the length
354 // directly if it is.
355 __ mov(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
356 GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
357 __ mov(eax, FieldOperand(scratch2, String::kLengthOffset));
358 __ ret(0);
359 }
360 }
361
362
GenerateLoadFunctionPrototype(MacroAssembler * masm,Register receiver,Register scratch1,Register scratch2,Label * miss_label)363 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
364 Register receiver,
365 Register scratch1,
366 Register scratch2,
367 Label* miss_label) {
368 __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
369 __ mov(eax, Operand(scratch1));
370 __ ret(0);
371 }
372
373
374 // Load a fast property out of a holder object (src). In-object properties
375 // are loaded directly otherwise the property is loaded from the properties
376 // fixed array.
GenerateFastPropertyLoad(MacroAssembler * masm,Register dst,Register src,JSObject * holder,int index)377 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
378 Register dst, Register src,
379 JSObject* holder, int index) {
380 // Adjust for the number of properties stored in the holder.
381 index -= holder->map()->inobject_properties();
382 if (index < 0) {
383 // Get the property straight out of the holder.
384 int offset = holder->map()->instance_size() + (index * kPointerSize);
385 __ mov(dst, FieldOperand(src, offset));
386 } else {
387 // Calculate the offset into the properties array.
388 int offset = index * kPointerSize + FixedArray::kHeaderSize;
389 __ mov(dst, FieldOperand(src, JSObject::kPropertiesOffset));
390 __ mov(dst, FieldOperand(dst, offset));
391 }
392 }
393
394
PushInterceptorArguments(MacroAssembler * masm,Register receiver,Register holder,Register name,JSObject * holder_obj)395 static void PushInterceptorArguments(MacroAssembler* masm,
396 Register receiver,
397 Register holder,
398 Register name,
399 JSObject* holder_obj) {
400 __ push(name);
401 InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
402 ASSERT(!masm->isolate()->heap()->InNewSpace(interceptor));
403 Register scratch = name;
404 __ mov(scratch, Immediate(Handle<Object>(interceptor)));
405 __ push(scratch);
406 __ push(receiver);
407 __ push(holder);
408 __ push(FieldOperand(scratch, InterceptorInfo::kDataOffset));
409 }
410
411
CompileCallLoadPropertyWithInterceptor(MacroAssembler * masm,Register receiver,Register holder,Register name,JSObject * holder_obj)412 static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
413 Register receiver,
414 Register holder,
415 Register name,
416 JSObject* holder_obj) {
417 PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
418 __ CallExternalReference(
419 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
420 masm->isolate()),
421 5);
422 }


// Number of pointers to be reserved on stack for fast API call.
static const int kFastApiCallArguments = 3;
427
428
429 // Reserves space for the extra arguments to API function in the
430 // caller's frame.
431 //
432 // These arguments are set by CheckPrototypes and GenerateFastApiCall.
ReserveSpaceForFastApiCall(MacroAssembler * masm,Register scratch)433 static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
434 // ----------- S t a t e -------------
435 // -- esp[0] : return address
436 // -- esp[4] : last argument in the internal frame of the caller
437 // -----------------------------------
438 __ pop(scratch);
439 for (int i = 0; i < kFastApiCallArguments; i++) {
440 __ push(Immediate(Smi::FromInt(0)));
441 }
442 __ push(scratch);
443 }
444
445
446 // Undoes the effects of ReserveSpaceForFastApiCall.
FreeSpaceForFastApiCall(MacroAssembler * masm,Register scratch)447 static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
448 // ----------- S t a t e -------------
449 // -- esp[0] : return address.
450 // -- esp[4] : last fast api call extra argument.
451 // -- ...
452 // -- esp[kFastApiCallArguments * 4] : first fast api call extra argument.
453 // -- esp[kFastApiCallArguments * 4 + 4] : last argument in the internal
454 // frame.
455 // -----------------------------------
456 __ pop(scratch);
457 __ add(Operand(esp), Immediate(kPointerSize * kFastApiCallArguments));
458 __ push(scratch);
459 }
460
461
462 // Generates call to API function.
GenerateFastApiCall(MacroAssembler * masm,const CallOptimization & optimization,int argc)463 static MaybeObject* GenerateFastApiCall(MacroAssembler* masm,
464 const CallOptimization& optimization,
465 int argc) {
466 // ----------- S t a t e -------------
467 // -- esp[0] : return address
468 // -- esp[4] : object passing the type check
469 // (last fast api call extra argument,
470 // set by CheckPrototypes)
471 // -- esp[8] : api function
472 // (first fast api call extra argument)
473 // -- esp[12] : api call data
474 // -- esp[16] : last argument
475 // -- ...
476 // -- esp[(argc + 3) * 4] : first argument
477 // -- esp[(argc + 4) * 4] : receiver
478 // -----------------------------------
479 // Get the function and setup the context.
480 JSFunction* function = optimization.constant_function();
481 __ mov(edi, Immediate(Handle<JSFunction>(function)));
482 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
483
484 // Pass the additional arguments.
485 __ mov(Operand(esp, 2 * kPointerSize), edi);
486 Object* call_data = optimization.api_call_info()->data();
487 Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
488 if (masm->isolate()->heap()->InNewSpace(call_data)) {
489 __ mov(ecx, api_call_info_handle);
490 __ mov(ebx, FieldOperand(ecx, CallHandlerInfo::kDataOffset));
491 __ mov(Operand(esp, 3 * kPointerSize), ebx);
492 } else {
493 __ mov(Operand(esp, 3 * kPointerSize),
494 Immediate(Handle<Object>(call_data)));
495 }
496
497 // Prepare arguments.
498 __ lea(eax, Operand(esp, 3 * kPointerSize));
499
500 Object* callback = optimization.api_call_info()->callback();
501 Address api_function_address = v8::ToCData<Address>(callback);
502 ApiFunction fun(api_function_address);
503
504 const int kApiArgc = 1; // API function gets reference to the v8::Arguments.
505
506 // Allocate the v8::Arguments structure in the arguments' space since
507 // it's not controlled by GC.
508 const int kApiStackSpace = 4;
509
510 __ PrepareCallApiFunction(kApiArgc + kApiStackSpace, ebx);
511
512 __ mov(ApiParameterOperand(1), eax); // v8::Arguments::implicit_args_.
513 __ add(Operand(eax), Immediate(argc * kPointerSize));
514 __ mov(ApiParameterOperand(2), eax); // v8::Arguments::values_.
515 __ Set(ApiParameterOperand(3), Immediate(argc)); // v8::Arguments::length_.
516 // v8::Arguments::is_construct_call_.
517 __ Set(ApiParameterOperand(4), Immediate(0));
518
519 // v8::InvocationCallback's argument.
520 __ lea(eax, ApiParameterOperand(1));
521 __ mov(ApiParameterOperand(0), eax);
522
523 // Emitting a stub call may try to allocate (if the code is not
524 // already generated). Do not allow the assembler to perform a
525 // garbage collection but instead return the allocation failure
526 // object.
527 return masm->TryCallApiFunctionAndReturn(&fun,
528 argc + kFastApiCallArguments + 1);
529 }
530
531
532 class CallInterceptorCompiler BASE_EMBEDDED {
533 public:
CallInterceptorCompiler(StubCompiler * stub_compiler,const ParameterCount & arguments,Register name)534 CallInterceptorCompiler(StubCompiler* stub_compiler,
535 const ParameterCount& arguments,
536 Register name)
537 : stub_compiler_(stub_compiler),
538 arguments_(arguments),
539 name_(name) {}
540
Compile(MacroAssembler * masm,JSObject * object,JSObject * holder,String * name,LookupResult * lookup,Register receiver,Register scratch1,Register scratch2,Register scratch3,Label * miss)541 MaybeObject* Compile(MacroAssembler* masm,
542 JSObject* object,
543 JSObject* holder,
544 String* name,
545 LookupResult* lookup,
546 Register receiver,
547 Register scratch1,
548 Register scratch2,
549 Register scratch3,
550 Label* miss) {
551 ASSERT(holder->HasNamedInterceptor());
552 ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
553
554 // Check that the receiver isn't a smi.
555 __ test(receiver, Immediate(kSmiTagMask));
556 __ j(zero, miss, not_taken);
557
558 CallOptimization optimization(lookup);
559
560 if (optimization.is_constant_call()) {
561 return CompileCacheable(masm,
562 object,
563 receiver,
564 scratch1,
565 scratch2,
566 scratch3,
567 holder,
568 lookup,
569 name,
570 optimization,
571 miss);
572 } else {
573 CompileRegular(masm,
574 object,
575 receiver,
576 scratch1,
577 scratch2,
578 scratch3,
579 name,
580 holder,
581 miss);
582 return masm->isolate()->heap()->undefined_value(); // Success.
583 }
584 }
585
586 private:
CompileCacheable(MacroAssembler * masm,JSObject * object,Register receiver,Register scratch1,Register scratch2,Register scratch3,JSObject * interceptor_holder,LookupResult * lookup,String * name,const CallOptimization & optimization,Label * miss_label)587 MaybeObject* CompileCacheable(MacroAssembler* masm,
588 JSObject* object,
589 Register receiver,
590 Register scratch1,
591 Register scratch2,
592 Register scratch3,
593 JSObject* interceptor_holder,
594 LookupResult* lookup,
595 String* name,
596 const CallOptimization& optimization,
597 Label* miss_label) {
598 ASSERT(optimization.is_constant_call());
599 ASSERT(!lookup->holder()->IsGlobalObject());
600
601 int depth1 = kInvalidProtoDepth;
602 int depth2 = kInvalidProtoDepth;
603 bool can_do_fast_api_call = false;
604 if (optimization.is_simple_api_call() &&
605 !lookup->holder()->IsGlobalObject()) {
606 depth1 =
607 optimization.GetPrototypeDepthOfExpectedType(object,
608 interceptor_holder);
609 if (depth1 == kInvalidProtoDepth) {
610 depth2 =
611 optimization.GetPrototypeDepthOfExpectedType(interceptor_holder,
612 lookup->holder());
613 }
614 can_do_fast_api_call = (depth1 != kInvalidProtoDepth) ||
615 (depth2 != kInvalidProtoDepth);
616 }
617
618 Counters* counters = masm->isolate()->counters();
619 __ IncrementCounter(counters->call_const_interceptor(), 1);
620
621 if (can_do_fast_api_call) {
622 __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1);
623 ReserveSpaceForFastApiCall(masm, scratch1);
624 }
625
626 // Check that the maps from receiver to interceptor's holder
627 // haven't changed and thus we can invoke interceptor.
628 Label miss_cleanup;
629 Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
630 Register holder =
631 stub_compiler_->CheckPrototypes(object, receiver,
632 interceptor_holder, scratch1,
633 scratch2, scratch3, name, depth1, miss);
634
635 // Invoke an interceptor and if it provides a value,
636 // branch to |regular_invoke|.
637 Label regular_invoke;
638 LoadWithInterceptor(masm, receiver, holder, interceptor_holder,
639 ®ular_invoke);
640
641 // Interceptor returned nothing for this property. Try to use cached
642 // constant function.
643
644 // Check that the maps from interceptor's holder to constant function's
645 // holder haven't changed and thus we can use cached constant function.
646 if (interceptor_holder != lookup->holder()) {
647 stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
648 lookup->holder(), scratch1,
649 scratch2, scratch3, name, depth2, miss);
650 } else {
651 // CheckPrototypes has a side effect of fetching a 'holder'
652 // for API (object which is instanceof for the signature). It's
653 // safe to omit it here, as if present, it should be fetched
654 // by the previous CheckPrototypes.
655 ASSERT(depth2 == kInvalidProtoDepth);
656 }
657
658 // Invoke function.
659 if (can_do_fast_api_call) {
660 MaybeObject* result =
661 GenerateFastApiCall(masm, optimization, arguments_.immediate());
662 if (result->IsFailure()) return result;
663 } else {
664 __ InvokeFunction(optimization.constant_function(), arguments_,
665 JUMP_FUNCTION);
666 }
667
668 // Deferred code for fast API call case---clean preallocated space.
669 if (can_do_fast_api_call) {
670 __ bind(&miss_cleanup);
671 FreeSpaceForFastApiCall(masm, scratch1);
672 __ jmp(miss_label);
673 }
674
675 // Invoke a regular function.
676 __ bind(®ular_invoke);
677 if (can_do_fast_api_call) {
678 FreeSpaceForFastApiCall(masm, scratch1);
679 }
680
681 return masm->isolate()->heap()->undefined_value(); // Success.
682 }
683
CompileRegular(MacroAssembler * masm,JSObject * object,Register receiver,Register scratch1,Register scratch2,Register scratch3,String * name,JSObject * interceptor_holder,Label * miss_label)684 void CompileRegular(MacroAssembler* masm,
685 JSObject* object,
686 Register receiver,
687 Register scratch1,
688 Register scratch2,
689 Register scratch3,
690 String* name,
691 JSObject* interceptor_holder,
692 Label* miss_label) {
693 Register holder =
694 stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
695 scratch1, scratch2, scratch3, name,
696 miss_label);
697
698 __ EnterInternalFrame();
699 // Save the name_ register across the call.
700 __ push(name_);
701
702 PushInterceptorArguments(masm,
703 receiver,
704 holder,
705 name_,
706 interceptor_holder);
707
708 __ CallExternalReference(
709 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
710 masm->isolate()),
711 5);
712
713 // Restore the name_ register.
714 __ pop(name_);
715 __ LeaveInternalFrame();
716 }
717
LoadWithInterceptor(MacroAssembler * masm,Register receiver,Register holder,JSObject * holder_obj,Label * interceptor_succeeded)718 void LoadWithInterceptor(MacroAssembler* masm,
719 Register receiver,
720 Register holder,
721 JSObject* holder_obj,
722 Label* interceptor_succeeded) {
723 __ EnterInternalFrame();
724 __ push(holder); // Save the holder.
725 __ push(name_); // Save the name.
726
727 CompileCallLoadPropertyWithInterceptor(masm,
728 receiver,
729 holder,
730 name_,
731 holder_obj);
732
733 __ pop(name_); // Restore the name.
734 __ pop(receiver); // Restore the holder.
735 __ LeaveInternalFrame();
736
737 __ cmp(eax, masm->isolate()->factory()->no_interceptor_result_sentinel());
738 __ j(not_equal, interceptor_succeeded);
739 }
740
741 StubCompiler* stub_compiler_;
742 const ParameterCount& arguments_;
743 Register name_;
744 };
745
746
GenerateLoadMiss(MacroAssembler * masm,Code::Kind kind)747 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
748 ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
749 Code* code = NULL;
750 if (kind == Code::LOAD_IC) {
751 code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss);
752 } else {
753 code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss);
754 }
755
756 Handle<Code> ic(code);
757 __ jmp(ic, RelocInfo::CODE_TARGET);
758 }
759
760
761 // Both name_reg and receiver_reg are preserved on jumps to miss_label,
762 // but may be destroyed if store is successful.
GenerateStoreField(MacroAssembler * masm,JSObject * object,int index,Map * transition,Register receiver_reg,Register name_reg,Register scratch,Label * miss_label)763 void StubCompiler::GenerateStoreField(MacroAssembler* masm,
764 JSObject* object,
765 int index,
766 Map* transition,
767 Register receiver_reg,
768 Register name_reg,
769 Register scratch,
770 Label* miss_label) {
771 // Check that the object isn't a smi.
772 __ test(receiver_reg, Immediate(kSmiTagMask));
773 __ j(zero, miss_label, not_taken);
774
775 // Check that the map of the object hasn't changed.
776 __ cmp(FieldOperand(receiver_reg, HeapObject::kMapOffset),
777 Immediate(Handle<Map>(object->map())));
778 __ j(not_equal, miss_label, not_taken);
779
780 // Perform global security token check if needed.
781 if (object->IsJSGlobalProxy()) {
782 __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
783 }
784
785 // Stub never generated for non-global objects that require access
786 // checks.
787 ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
788
789 // Perform map transition for the receiver if necessary.
790 if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
791 // The properties must be extended before we can store the value.
792 // We jump to a runtime call that extends the properties array.
793 __ pop(scratch); // Return address.
794 __ push(receiver_reg);
795 __ push(Immediate(Handle<Map>(transition)));
796 __ push(eax);
797 __ push(scratch);
798 __ TailCallExternalReference(
799 ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
800 masm->isolate()),
801 3,
802 1);
803 return;
804 }
805
806 if (transition != NULL) {
807 // Update the map of the object; no write barrier updating is
808 // needed because the map is never in new space.
809 __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset),
810 Immediate(Handle<Map>(transition)));
811 }
812
813 // Adjust for the number of properties stored in the object. Even in the
814 // face of a transition we can use the old map here because the size of the
815 // object and the number of in-object properties is not going to change.
816 index -= object->map()->inobject_properties();
817
818 if (index < 0) {
819 // Set the property straight into the object.
820 int offset = object->map()->instance_size() + (index * kPointerSize);
821 __ mov(FieldOperand(receiver_reg, offset), eax);
822
823 // Update the write barrier for the array address.
824 // Pass the value being stored in the now unused name_reg.
825 __ mov(name_reg, Operand(eax));
826 __ RecordWrite(receiver_reg, offset, name_reg, scratch);
827 } else {
828 // Write to the properties array.
829 int offset = index * kPointerSize + FixedArray::kHeaderSize;
830 // Get the properties array (optimistically).
831 __ mov(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
832 __ mov(FieldOperand(scratch, offset), eax);
833
834 // Update the write barrier for the array address.
835 // Pass the value being stored in the now unused name_reg.
836 __ mov(name_reg, Operand(eax));
837 __ RecordWrite(scratch, offset, name_reg, receiver_reg);
838 }
839
840 // Return the value (register eax).
841 __ ret(0);
842 }
843
844
// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property. Returns the cell on success, or a Failure if allocating the
// cell failed. The emitted code jumps to |miss| when the cell's value is
// no longer the hole, i.e. the property has since been added.
MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
    MacroAssembler* masm,
    GlobalObject* global,
    String* name,
    Register scratch,
    Label* miss) {
  Object* probe;
  { MaybeObject* maybe_probe = global->EnsurePropertyCell(name);
    if (!maybe_probe->ToObject(&probe)) return maybe_probe;
  }
  JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
  // The stub being compiled is only valid while the property is absent.
  ASSERT(cell->value()->IsTheHole());
  if (Serializer::enabled()) {
    // With the serializer enabled we cannot embed the cell's address in
    // the code, so load the cell through a handle and compare its value
    // slot via a field operand.
    __ mov(scratch, Immediate(Handle<Object>(cell)));
    __ cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
           Immediate(masm->isolate()->factory()->the_hole_value()));
  } else {
    // Otherwise compare directly against the cell's value slot.
    __ cmp(Operand::Cell(Handle<JSGlobalPropertyCell>(cell)),
           Immediate(masm->isolate()->factory()->the_hole_value()));
  }
  __ j(not_equal, miss, not_taken);
  return cell;
}
871
872
873 // Calls GenerateCheckPropertyCell for each global object in the prototype chain
874 // from object to (but not including) holder.
GenerateCheckPropertyCells(MacroAssembler * masm,JSObject * object,JSObject * holder,String * name,Register scratch,Label * miss)875 MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCells(
876 MacroAssembler* masm,
877 JSObject* object,
878 JSObject* holder,
879 String* name,
880 Register scratch,
881 Label* miss) {
882 JSObject* current = object;
883 while (current != holder) {
884 if (current->IsGlobalObject()) {
885 // Returns a cell or a failure.
886 MaybeObject* result = GenerateCheckPropertyCell(
887 masm,
888 GlobalObject::cast(current),
889 name,
890 scratch,
891 miss);
892 if (result->IsFailure()) return result;
893 }
894 ASSERT(current->IsJSObject());
895 current = JSObject::cast(current->GetPrototype());
896 }
897 return NULL;
898 }
899
900
901 #undef __
902 #define __ ACCESS_MASM(masm())
903
904
CheckPrototypes(JSObject * object,Register object_reg,JSObject * holder,Register holder_reg,Register scratch1,Register scratch2,String * name,int save_at_depth,Label * miss)905 Register StubCompiler::CheckPrototypes(JSObject* object,
906 Register object_reg,
907 JSObject* holder,
908 Register holder_reg,
909 Register scratch1,
910 Register scratch2,
911 String* name,
912 int save_at_depth,
913 Label* miss) {
914 // Make sure there's no overlap between holder and object registers.
915 ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
916 ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
917 && !scratch2.is(scratch1));
918
919 // Keep track of the current object in register reg.
920 Register reg = object_reg;
921 JSObject* current = object;
922 int depth = 0;
923
924 if (save_at_depth == depth) {
925 __ mov(Operand(esp, kPointerSize), reg);
926 }
927
928 // Traverse the prototype chain and check the maps in the prototype chain for
929 // fast and global objects or do negative lookup for normal objects.
930 while (current != holder) {
931 depth++;
932
933 // Only global objects and objects that do not require access
934 // checks are allowed in stubs.
935 ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
936
937 ASSERT(current->GetPrototype()->IsJSObject());
938 JSObject* prototype = JSObject::cast(current->GetPrototype());
939 if (!current->HasFastProperties() &&
940 !current->IsJSGlobalObject() &&
941 !current->IsJSGlobalProxy()) {
942 if (!name->IsSymbol()) {
943 MaybeObject* maybe_lookup_result = heap()->LookupSymbol(name);
944 Object* lookup_result = NULL; // Initialization to please compiler.
945 if (!maybe_lookup_result->ToObject(&lookup_result)) {
946 set_failure(Failure::cast(maybe_lookup_result));
947 return reg;
948 }
949 name = String::cast(lookup_result);
950 }
951 ASSERT(current->property_dictionary()->FindEntry(name) ==
952 StringDictionary::kNotFound);
953
954 GenerateDictionaryNegativeLookup(masm(),
955 miss,
956 reg,
957 name,
958 scratch1,
959 scratch2);
960 __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
961 reg = holder_reg; // from now the object is in holder_reg
962 __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
963 } else if (heap()->InNewSpace(prototype)) {
964 // Get the map of the current object.
965 __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
966 __ cmp(Operand(scratch1), Immediate(Handle<Map>(current->map())));
967 // Branch on the result of the map check.
968 __ j(not_equal, miss, not_taken);
969 // Check access rights to the global object. This has to happen
970 // after the map check so that we know that the object is
971 // actually a global object.
972 if (current->IsJSGlobalProxy()) {
973 __ CheckAccessGlobalProxy(reg, scratch1, miss);
974
975 // Restore scratch register to be the map of the object.
976 // We load the prototype from the map in the scratch register.
977 __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
978 }
979 // The prototype is in new space; we cannot store a reference
980 // to it in the code. Load it from the map.
981 reg = holder_reg; // from now the object is in holder_reg
982 __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
983 } else {
984 // Check the map of the current object.
985 __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
986 Immediate(Handle<Map>(current->map())));
987 // Branch on the result of the map check.
988 __ j(not_equal, miss, not_taken);
989 // Check access rights to the global object. This has to happen
990 // after the map check so that we know that the object is
991 // actually a global object.
992 if (current->IsJSGlobalProxy()) {
993 __ CheckAccessGlobalProxy(reg, scratch1, miss);
994 }
995 // The prototype is in old space; load it directly.
996 reg = holder_reg; // from now the object is in holder_reg
997 __ mov(reg, Handle<JSObject>(prototype));
998 }
999
1000 if (save_at_depth == depth) {
1001 __ mov(Operand(esp, kPointerSize), reg);
1002 }
1003
1004 // Go to the next object in the prototype chain.
1005 current = prototype;
1006 }
1007 ASSERT(current == holder);
1008
1009 // Log the check depth.
1010 LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
1011
1012 // Check the holder map.
1013 __ cmp(FieldOperand(reg, HeapObject::kMapOffset),
1014 Immediate(Handle<Map>(holder->map())));
1015 __ j(not_equal, miss, not_taken);
1016
1017 // Perform security check for access to the global object.
1018 ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
1019 if (holder->IsJSGlobalProxy()) {
1020 __ CheckAccessGlobalProxy(reg, scratch1, miss);
1021 };
1022
1023 // If we've skipped any global objects, it's not enough to verify
1024 // that their maps haven't changed. We also need to check that the
1025 // property cell for the property is still empty.
1026 MaybeObject* result = GenerateCheckPropertyCells(masm(),
1027 object,
1028 holder,
1029 name,
1030 scratch1,
1031 miss);
1032 if (result->IsFailure()) set_failure(Failure::cast(result));
1033
1034 // Return the register containing the holder.
1035 return reg;
1036 }
1037
1038
GenerateLoadField(JSObject * object,JSObject * holder,Register receiver,Register scratch1,Register scratch2,Register scratch3,int index,String * name,Label * miss)1039 void StubCompiler::GenerateLoadField(JSObject* object,
1040 JSObject* holder,
1041 Register receiver,
1042 Register scratch1,
1043 Register scratch2,
1044 Register scratch3,
1045 int index,
1046 String* name,
1047 Label* miss) {
1048 // Check that the receiver isn't a smi.
1049 __ test(receiver, Immediate(kSmiTagMask));
1050 __ j(zero, miss, not_taken);
1051
1052 // Check the prototype chain.
1053 Register reg =
1054 CheckPrototypes(object, receiver, holder,
1055 scratch1, scratch2, scratch3, name, miss);
1056
1057 // Get the value from the properties.
1058 GenerateFastPropertyLoad(masm(), eax, reg, holder, index);
1059 __ ret(0);
1060 }
1061
1062
GenerateLoadCallback(JSObject * object,JSObject * holder,Register receiver,Register name_reg,Register scratch1,Register scratch2,Register scratch3,AccessorInfo * callback,String * name,Label * miss)1063 MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
1064 JSObject* holder,
1065 Register receiver,
1066 Register name_reg,
1067 Register scratch1,
1068 Register scratch2,
1069 Register scratch3,
1070 AccessorInfo* callback,
1071 String* name,
1072 Label* miss) {
1073 // Check that the receiver isn't a smi.
1074 __ test(receiver, Immediate(kSmiTagMask));
1075 __ j(zero, miss, not_taken);
1076
1077 // Check that the maps haven't changed.
1078 Register reg =
1079 CheckPrototypes(object, receiver, holder, scratch1,
1080 scratch2, scratch3, name, miss);
1081
1082 Handle<AccessorInfo> callback_handle(callback);
1083
1084 // Insert additional parameters into the stack frame above return address.
1085 ASSERT(!scratch3.is(reg));
1086 __ pop(scratch3); // Get return address to place it below.
1087
1088 __ push(receiver); // receiver
1089 __ mov(scratch2, Operand(esp));
1090 ASSERT(!scratch2.is(reg));
1091 __ push(reg); // holder
1092 // Push data from AccessorInfo.
1093 if (isolate()->heap()->InNewSpace(callback_handle->data())) {
1094 __ mov(scratch1, Immediate(callback_handle));
1095 __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset));
1096 } else {
1097 __ push(Immediate(Handle<Object>(callback_handle->data())));
1098 }
1099
1100 // Save a pointer to where we pushed the arguments pointer.
1101 // This will be passed as the const AccessorInfo& to the C++ callback.
1102 __ push(scratch2);
1103
1104 __ push(name_reg); // name
1105 __ mov(ebx, esp); // esp points to reference to name (handler).
1106
1107 __ push(scratch3); // Restore return address.
1108
1109 // Do call through the api.
1110 Address getter_address = v8::ToCData<Address>(callback->getter());
1111 ApiFunction fun(getter_address);
1112
1113 // 3 elements array for v8::Agruments::values_, handler for name and pointer
1114 // to the values (it considered as smi in GC).
1115 const int kStackSpace = 5;
1116 const int kApiArgc = 2;
1117
1118 __ PrepareCallApiFunction(kApiArgc, eax);
1119 __ mov(ApiParameterOperand(0), ebx); // name.
1120 __ add(Operand(ebx), Immediate(kPointerSize));
1121 __ mov(ApiParameterOperand(1), ebx); // arguments pointer.
1122
1123 // Emitting a stub call may try to allocate (if the code is not
1124 // already generated). Do not allow the assembler to perform a
1125 // garbage collection but instead return the allocation failure
1126 // object.
1127 return masm()->TryCallApiFunctionAndReturn(&fun, kStackSpace);
1128 }
1129
1130
GenerateLoadConstant(JSObject * object,JSObject * holder,Register receiver,Register scratch1,Register scratch2,Register scratch3,Object * value,String * name,Label * miss)1131 void StubCompiler::GenerateLoadConstant(JSObject* object,
1132 JSObject* holder,
1133 Register receiver,
1134 Register scratch1,
1135 Register scratch2,
1136 Register scratch3,
1137 Object* value,
1138 String* name,
1139 Label* miss) {
1140 // Check that the receiver isn't a smi.
1141 __ test(receiver, Immediate(kSmiTagMask));
1142 __ j(zero, miss, not_taken);
1143
1144 // Check that the maps haven't changed.
1145 CheckPrototypes(object, receiver, holder,
1146 scratch1, scratch2, scratch3, name, miss);
1147
1148 // Return the constant value.
1149 __ mov(eax, Handle<Object>(value));
1150 __ ret(0);
1151 }
1152
1153
GenerateLoadInterceptor(JSObject * object,JSObject * interceptor_holder,LookupResult * lookup,Register receiver,Register name_reg,Register scratch1,Register scratch2,Register scratch3,String * name,Label * miss)1154 void StubCompiler::GenerateLoadInterceptor(JSObject* object,
1155 JSObject* interceptor_holder,
1156 LookupResult* lookup,
1157 Register receiver,
1158 Register name_reg,
1159 Register scratch1,
1160 Register scratch2,
1161 Register scratch3,
1162 String* name,
1163 Label* miss) {
1164 ASSERT(interceptor_holder->HasNamedInterceptor());
1165 ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
1166
1167 // Check that the receiver isn't a smi.
1168 __ test(receiver, Immediate(kSmiTagMask));
1169 __ j(zero, miss, not_taken);
1170
1171 // So far the most popular follow ups for interceptor loads are FIELD
1172 // and CALLBACKS, so inline only them, other cases may be added
1173 // later.
1174 bool compile_followup_inline = false;
1175 if (lookup->IsProperty() && lookup->IsCacheable()) {
1176 if (lookup->type() == FIELD) {
1177 compile_followup_inline = true;
1178 } else if (lookup->type() == CALLBACKS &&
1179 lookup->GetCallbackObject()->IsAccessorInfo() &&
1180 AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
1181 compile_followup_inline = true;
1182 }
1183 }
1184
1185 if (compile_followup_inline) {
1186 // Compile the interceptor call, followed by inline code to load the
1187 // property from further up the prototype chain if the call fails.
1188 // Check that the maps haven't changed.
1189 Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
1190 scratch1, scratch2, scratch3,
1191 name, miss);
1192 ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));
1193
1194 // Save necessary data before invoking an interceptor.
1195 // Requires a frame to make GC aware of pushed pointers.
1196 __ EnterInternalFrame();
1197
1198 if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
1199 // CALLBACKS case needs a receiver to be passed into C++ callback.
1200 __ push(receiver);
1201 }
1202 __ push(holder_reg);
1203 __ push(name_reg);
1204
1205 // Invoke an interceptor. Note: map checks from receiver to
1206 // interceptor's holder has been compiled before (see a caller
1207 // of this method.)
1208 CompileCallLoadPropertyWithInterceptor(masm(),
1209 receiver,
1210 holder_reg,
1211 name_reg,
1212 interceptor_holder);
1213
1214 // Check if interceptor provided a value for property. If it's
1215 // the case, return immediately.
1216 Label interceptor_failed;
1217 __ cmp(eax, factory()->no_interceptor_result_sentinel());
1218 __ j(equal, &interceptor_failed);
1219 __ LeaveInternalFrame();
1220 __ ret(0);
1221
1222 __ bind(&interceptor_failed);
1223 __ pop(name_reg);
1224 __ pop(holder_reg);
1225 if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
1226 __ pop(receiver);
1227 }
1228
1229 __ LeaveInternalFrame();
1230
1231 // Check that the maps from interceptor's holder to lookup's holder
1232 // haven't changed. And load lookup's holder into holder_reg.
1233 if (interceptor_holder != lookup->holder()) {
1234 holder_reg = CheckPrototypes(interceptor_holder,
1235 holder_reg,
1236 lookup->holder(),
1237 scratch1,
1238 scratch2,
1239 scratch3,
1240 name,
1241 miss);
1242 }
1243
1244 if (lookup->type() == FIELD) {
1245 // We found FIELD property in prototype chain of interceptor's holder.
1246 // Retrieve a field from field's holder.
1247 GenerateFastPropertyLoad(masm(), eax, holder_reg,
1248 lookup->holder(), lookup->GetFieldIndex());
1249 __ ret(0);
1250 } else {
1251 // We found CALLBACKS property in prototype chain of interceptor's
1252 // holder.
1253 ASSERT(lookup->type() == CALLBACKS);
1254 ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
1255 AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
1256 ASSERT(callback != NULL);
1257 ASSERT(callback->getter() != NULL);
1258
1259 // Tail call to runtime.
1260 // Important invariant in CALLBACKS case: the code above must be
1261 // structured to never clobber |receiver| register.
1262 __ pop(scratch2); // return address
1263 __ push(receiver);
1264 __ push(holder_reg);
1265 __ mov(holder_reg, Immediate(Handle<AccessorInfo>(callback)));
1266 __ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset));
1267 __ push(holder_reg);
1268 __ push(name_reg);
1269 __ push(scratch2); // restore return address
1270
1271 ExternalReference ref =
1272 ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
1273 masm()->isolate());
1274 __ TailCallExternalReference(ref, 5, 1);
1275 }
1276 } else { // !compile_followup_inline
1277 // Call the runtime system to load the interceptor.
1278 // Check that the maps haven't changed.
1279 Register holder_reg =
1280 CheckPrototypes(object, receiver, interceptor_holder,
1281 scratch1, scratch2, scratch3, name, miss);
1282 __ pop(scratch2); // save old return address
1283 PushInterceptorArguments(masm(), receiver, holder_reg,
1284 name_reg, interceptor_holder);
1285 __ push(scratch2); // restore old return address
1286
1287 ExternalReference ref =
1288 ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad),
1289 isolate());
1290 __ TailCallExternalReference(ref, 5, 1);
1291 }
1292 }
1293
1294
GenerateNameCheck(String * name,Label * miss)1295 void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
1296 if (kind_ == Code::KEYED_CALL_IC) {
1297 __ cmp(Operand(ecx), Immediate(Handle<String>(name)));
1298 __ j(not_equal, miss, not_taken);
1299 }
1300 }
1301
1302
GenerateGlobalReceiverCheck(JSObject * object,JSObject * holder,String * name,Label * miss)1303 void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
1304 JSObject* holder,
1305 String* name,
1306 Label* miss) {
1307 ASSERT(holder->IsGlobalObject());
1308
1309 // Get the number of arguments.
1310 const int argc = arguments().immediate();
1311
1312 // Get the receiver from the stack.
1313 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
1314
1315 // If the object is the holder then we know that it's a global
1316 // object which can only happen for contextual calls. In this case,
1317 // the receiver cannot be a smi.
1318 if (object != holder) {
1319 __ test(edx, Immediate(kSmiTagMask));
1320 __ j(zero, miss, not_taken);
1321 }
1322
1323 // Check that the maps haven't changed.
1324 CheckPrototypes(object, edx, holder, ebx, eax, edi, name, miss);
1325 }
1326
1327
GenerateLoadFunctionFromCell(JSGlobalPropertyCell * cell,JSFunction * function,Label * miss)1328 void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
1329 JSFunction* function,
1330 Label* miss) {
1331 // Get the value from the cell.
1332 if (Serializer::enabled()) {
1333 __ mov(edi, Immediate(Handle<JSGlobalPropertyCell>(cell)));
1334 __ mov(edi, FieldOperand(edi, JSGlobalPropertyCell::kValueOffset));
1335 } else {
1336 __ mov(edi, Operand::Cell(Handle<JSGlobalPropertyCell>(cell)));
1337 }
1338
1339 // Check that the cell contains the same function.
1340 if (isolate()->heap()->InNewSpace(function)) {
1341 // We can't embed a pointer to a function in new space so we have
1342 // to verify that the shared function info is unchanged. This has
1343 // the nice side effect that multiple closures based on the same
1344 // function can all use this call IC. Before we load through the
1345 // function, we have to verify that it still is a function.
1346 __ test(edi, Immediate(kSmiTagMask));
1347 __ j(zero, miss, not_taken);
1348 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx);
1349 __ j(not_equal, miss, not_taken);
1350
1351 // Check the shared function info. Make sure it hasn't changed.
1352 __ cmp(FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset),
1353 Immediate(Handle<SharedFunctionInfo>(function->shared())));
1354 __ j(not_equal, miss, not_taken);
1355 } else {
1356 __ cmp(Operand(edi), Immediate(Handle<JSFunction>(function)));
1357 __ j(not_equal, miss, not_taken);
1358 }
1359 }
1360
1361
GenerateMissBranch()1362 MaybeObject* CallStubCompiler::GenerateMissBranch() {
1363 MaybeObject* maybe_obj =
1364 isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
1365 kind_);
1366 Object* obj;
1367 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1368 __ jmp(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
1369 return obj;
1370 }
1371
1372
CompileCallField(JSObject * object,JSObject * holder,int index,String * name)1373 MUST_USE_RESULT MaybeObject* CallStubCompiler::CompileCallField(
1374 JSObject* object,
1375 JSObject* holder,
1376 int index,
1377 String* name) {
1378 // ----------- S t a t e -------------
1379 // -- ecx : name
1380 // -- esp[0] : return address
1381 // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1382 // -- ...
1383 // -- esp[(argc + 1) * 4] : receiver
1384 // -----------------------------------
1385 Label miss;
1386
1387 GenerateNameCheck(name, &miss);
1388
1389 // Get the receiver from the stack.
1390 const int argc = arguments().immediate();
1391 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
1392
1393 // Check that the receiver isn't a smi.
1394 __ test(edx, Immediate(kSmiTagMask));
1395 __ j(zero, &miss, not_taken);
1396
1397 // Do the right check and compute the holder register.
1398 Register reg = CheckPrototypes(object, edx, holder, ebx, eax, edi,
1399 name, &miss);
1400
1401 GenerateFastPropertyLoad(masm(), edi, reg, holder, index);
1402
1403 // Check that the function really is a function.
1404 __ test(edi, Immediate(kSmiTagMask));
1405 __ j(zero, &miss, not_taken);
1406 __ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx);
1407 __ j(not_equal, &miss, not_taken);
1408
1409 // Patch the receiver on the stack with the global proxy if
1410 // necessary.
1411 if (object->IsGlobalObject()) {
1412 __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
1413 __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
1414 }
1415
1416 // Invoke the function.
1417 __ InvokeFunction(edi, arguments(), JUMP_FUNCTION);
1418
1419 // Handle call cache miss.
1420 __ bind(&miss);
1421 MaybeObject* maybe_result = GenerateMissBranch();
1422 if (maybe_result->IsFailure()) return maybe_result;
1423
1424 // Return the generated code.
1425 return GetCode(FIELD, name);
1426 }
1427
1428
CompileArrayPushCall(Object * object,JSObject * holder,JSGlobalPropertyCell * cell,JSFunction * function,String * name)1429 MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
1430 JSObject* holder,
1431 JSGlobalPropertyCell* cell,
1432 JSFunction* function,
1433 String* name) {
1434 // ----------- S t a t e -------------
1435 // -- ecx : name
1436 // -- esp[0] : return address
1437 // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1438 // -- ...
1439 // -- esp[(argc + 1) * 4] : receiver
1440 // -----------------------------------
1441
1442 // If object is not an array, bail out to regular call.
1443 if (!object->IsJSArray() || cell != NULL) {
1444 return isolate()->heap()->undefined_value();
1445 }
1446
1447 Label miss;
1448
1449 GenerateNameCheck(name, &miss);
1450
1451 // Get the receiver from the stack.
1452 const int argc = arguments().immediate();
1453 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
1454
1455 // Check that the receiver isn't a smi.
1456 __ test(edx, Immediate(kSmiTagMask));
1457 __ j(zero, &miss);
1458
1459 CheckPrototypes(JSObject::cast(object), edx,
1460 holder, ebx,
1461 eax, edi, name, &miss);
1462
1463 if (argc == 0) {
1464 // Noop, return the length.
1465 __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
1466 __ ret((argc + 1) * kPointerSize);
1467 } else {
1468 Label call_builtin;
1469
1470 // Get the elements array of the object.
1471 __ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset));
1472
1473 // Check that the elements are in fast mode and writable.
1474 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
1475 Immediate(factory()->fixed_array_map()));
1476 __ j(not_equal, &call_builtin);
1477
1478 if (argc == 1) { // Otherwise fall through to call builtin.
1479 Label exit, with_write_barrier, attempt_to_grow_elements;
1480
1481 // Get the array's length into eax and calculate new length.
1482 __ mov(eax, FieldOperand(edx, JSArray::kLengthOffset));
1483 STATIC_ASSERT(kSmiTagSize == 1);
1484 STATIC_ASSERT(kSmiTag == 0);
1485 __ add(Operand(eax), Immediate(Smi::FromInt(argc)));
1486
1487 // Get the element's length into ecx.
1488 __ mov(ecx, FieldOperand(ebx, FixedArray::kLengthOffset));
1489
1490 // Check if we could survive without allocation.
1491 __ cmp(eax, Operand(ecx));
1492 __ j(greater, &attempt_to_grow_elements);
1493
1494 // Save new length.
1495 __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
1496
1497 // Push the element.
1498 __ lea(edx, FieldOperand(ebx,
1499 eax, times_half_pointer_size,
1500 FixedArray::kHeaderSize - argc * kPointerSize));
1501 __ mov(ecx, Operand(esp, argc * kPointerSize));
1502 __ mov(Operand(edx, 0), ecx);
1503
1504 // Check if value is a smi.
1505 __ test(ecx, Immediate(kSmiTagMask));
1506 __ j(not_zero, &with_write_barrier);
1507
1508 __ bind(&exit);
1509 __ ret((argc + 1) * kPointerSize);
1510
1511 __ bind(&with_write_barrier);
1512
1513 __ InNewSpace(ebx, ecx, equal, &exit);
1514
1515 __ RecordWriteHelper(ebx, edx, ecx);
1516 __ ret((argc + 1) * kPointerSize);
1517
1518 __ bind(&attempt_to_grow_elements);
1519 if (!FLAG_inline_new) {
1520 __ jmp(&call_builtin);
1521 }
1522
1523 ExternalReference new_space_allocation_top =
1524 ExternalReference::new_space_allocation_top_address(isolate());
1525 ExternalReference new_space_allocation_limit =
1526 ExternalReference::new_space_allocation_limit_address(isolate());
1527
1528 const int kAllocationDelta = 4;
1529 // Load top.
1530 __ mov(ecx, Operand::StaticVariable(new_space_allocation_top));
1531
1532 // Check if it's the end of elements.
1533 __ lea(edx, FieldOperand(ebx,
1534 eax, times_half_pointer_size,
1535 FixedArray::kHeaderSize - argc * kPointerSize));
1536 __ cmp(edx, Operand(ecx));
1537 __ j(not_equal, &call_builtin);
1538 __ add(Operand(ecx), Immediate(kAllocationDelta * kPointerSize));
1539 __ cmp(ecx, Operand::StaticVariable(new_space_allocation_limit));
1540 __ j(above, &call_builtin);
1541
1542 // We fit and could grow elements.
1543 __ mov(Operand::StaticVariable(new_space_allocation_top), ecx);
1544 __ mov(ecx, Operand(esp, argc * kPointerSize));
1545
1546 // Push the argument...
1547 __ mov(Operand(edx, 0), ecx);
1548 // ... and fill the rest with holes.
1549 for (int i = 1; i < kAllocationDelta; i++) {
1550 __ mov(Operand(edx, i * kPointerSize),
1551 Immediate(factory()->the_hole_value()));
1552 }
1553
1554 // Restore receiver to edx as finish sequence assumes it's here.
1555 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
1556
1557 // Increment element's and array's sizes.
1558 __ add(FieldOperand(ebx, FixedArray::kLengthOffset),
1559 Immediate(Smi::FromInt(kAllocationDelta)));
1560 __ mov(FieldOperand(edx, JSArray::kLengthOffset), eax);
1561
1562 // Elements are in new space, so write barrier is not required.
1563 __ ret((argc + 1) * kPointerSize);
1564 }
1565
1566 __ bind(&call_builtin);
1567 __ TailCallExternalReference(
1568 ExternalReference(Builtins::c_ArrayPush, isolate()),
1569 argc + 1,
1570 1);
1571 }
1572
1573 __ bind(&miss);
1574 MaybeObject* maybe_result = GenerateMissBranch();
1575 if (maybe_result->IsFailure()) return maybe_result;
1576
1577 // Return the generated code.
1578 return GetCode(function);
1579 }
1580
1581
CompileArrayPopCall(Object * object,JSObject * holder,JSGlobalPropertyCell * cell,JSFunction * function,String * name)1582 MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
1583 JSObject* holder,
1584 JSGlobalPropertyCell* cell,
1585 JSFunction* function,
1586 String* name) {
1587 // ----------- S t a t e -------------
1588 // -- ecx : name
1589 // -- esp[0] : return address
1590 // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1591 // -- ...
1592 // -- esp[(argc + 1) * 4] : receiver
1593 // -----------------------------------
1594
1595 // If object is not an array, bail out to regular call.
1596 if (!object->IsJSArray() || cell != NULL) {
1597 return heap()->undefined_value();
1598 }
1599
1600 Label miss, return_undefined, call_builtin;
1601
1602 GenerateNameCheck(name, &miss);
1603
1604 // Get the receiver from the stack.
1605 const int argc = arguments().immediate();
1606 __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
1607
1608 // Check that the receiver isn't a smi.
1609 __ test(edx, Immediate(kSmiTagMask));
1610 __ j(zero, &miss);
1611 CheckPrototypes(JSObject::cast(object), edx,
1612 holder, ebx,
1613 eax, edi, name, &miss);
1614
1615 // Get the elements array of the object.
1616 __ mov(ebx, FieldOperand(edx, JSArray::kElementsOffset));
1617
1618 // Check that the elements are in fast mode and writable.
1619 __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
1620 Immediate(factory()->fixed_array_map()));
1621 __ j(not_equal, &call_builtin);
1622
1623 // Get the array's length into ecx and calculate new length.
1624 __ mov(ecx, FieldOperand(edx, JSArray::kLengthOffset));
1625 __ sub(Operand(ecx), Immediate(Smi::FromInt(1)));
1626 __ j(negative, &return_undefined);
1627
1628 // Get the last element.
1629 STATIC_ASSERT(kSmiTagSize == 1);
1630 STATIC_ASSERT(kSmiTag == 0);
1631 __ mov(eax, FieldOperand(ebx,
1632 ecx, times_half_pointer_size,
1633 FixedArray::kHeaderSize));
1634 __ cmp(Operand(eax), Immediate(factory()->the_hole_value()));
1635 __ j(equal, &call_builtin);
1636
1637 // Set the array's length.
1638 __ mov(FieldOperand(edx, JSArray::kLengthOffset), ecx);
1639
1640 // Fill with the hole.
1641 __ mov(FieldOperand(ebx,
1642 ecx, times_half_pointer_size,
1643 FixedArray::kHeaderSize),
1644 Immediate(factory()->the_hole_value()));
1645 __ ret((argc + 1) * kPointerSize);
1646
1647 __ bind(&return_undefined);
1648 __ mov(eax, Immediate(factory()->undefined_value()));
1649 __ ret((argc + 1) * kPointerSize);
1650
1651 __ bind(&call_builtin);
1652 __ TailCallExternalReference(
1653 ExternalReference(Builtins::c_ArrayPop, isolate()),
1654 argc + 1,
1655 1);
1656
1657 __ bind(&miss);
1658 MaybeObject* maybe_result = GenerateMissBranch();
1659 if (maybe_result->IsFailure()) return maybe_result;
1660
1661 // Return the generated code.
1662 return GetCode(function);
1663 }
1664
1665
CompileStringCharCodeAtCall(Object * object,JSObject * holder,JSGlobalPropertyCell * cell,JSFunction * function,String * name)1666 MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
1667 Object* object,
1668 JSObject* holder,
1669 JSGlobalPropertyCell* cell,
1670 JSFunction* function,
1671 String* name) {
1672 // ----------- S t a t e -------------
1673 // -- ecx : function name
1674 // -- esp[0] : return address
1675 // -- esp[(argc - n) * 4] : arg[n] (zero-based)
1676 // -- ...
1677 // -- esp[(argc + 1) * 4] : receiver
1678 // -----------------------------------
1679
1680 // If object is not a string, bail out to regular call.
1681 if (!object->IsString() || cell != NULL) {
1682 return isolate()->heap()->undefined_value();
1683 }
1684
1685 const int argc = arguments().immediate();
1686
1687 Label miss;
1688 Label name_miss;
1689 Label index_out_of_range;
1690 Label* index_out_of_range_label = &index_out_of_range;
1691
1692 if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
1693 index_out_of_range_label = &miss;
1694 }
1695
1696 GenerateNameCheck(name, &name_miss);
1697
1698 // Check that the maps starting from the prototype haven't changed.
1699 GenerateDirectLoadGlobalFunctionPrototype(masm(),
1700 Context::STRING_FUNCTION_INDEX,
1701 eax,
1702 &miss);
1703 ASSERT(object != holder);
1704 CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
1705 ebx, edx, edi, name, &miss);
1706
1707 Register receiver = ebx;
1708 Register index = edi;
1709 Register scratch = edx;
1710 Register result = eax;
1711 __ mov(receiver, Operand(esp, (argc + 1) * kPointerSize));
1712 if (argc > 0) {
1713 __ mov(index, Operand(esp, (argc - 0) * kPointerSize));
1714 } else {
1715 __ Set(index, Immediate(factory()->undefined_value()));
1716 }
1717
1718 StringCharCodeAtGenerator char_code_at_generator(receiver,
1719 index,
1720 scratch,
1721 result,
1722 &miss, // When not a string.
1723 &miss, // When not a number.
1724 index_out_of_range_label,
1725 STRING_INDEX_IS_NUMBER);
1726 char_code_at_generator.GenerateFast(masm());
1727 __ ret((argc + 1) * kPointerSize);
1728
1729 StubRuntimeCallHelper call_helper;
1730 char_code_at_generator.GenerateSlow(masm(), call_helper);
1731
1732 if (index_out_of_range.is_linked()) {
1733 __ bind(&index_out_of_range);
1734 __ Set(eax, Immediate(factory()->nan_value()));
1735 __ ret((argc + 1) * kPointerSize);
1736 }
1737
1738 __ bind(&miss);
1739 // Restore function name in ecx.
1740 __ Set(ecx, Immediate(Handle<String>(name)));
1741 __ bind(&name_miss);
1742 MaybeObject* maybe_result = GenerateMissBranch();
1743 if (maybe_result->IsFailure()) return maybe_result;
1744
1745 // Return the generated code.
1746 return GetCode(function);
1747 }
1748
1749
// Compiles a call stub specialized for String.prototype.charAt.  Same
// structure as CompileStringCharCodeAtCall above, but produces a
// one-character string and returns the empty string (instead of NaN)
// when the index is out of range.
MaybeObject* CallStubCompiler::CompileStringCharAtCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : function name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || cell != NULL) {
    return heap()->undefined_value();
  }

  const int argc = arguments().immediate();

  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

  // The default string stub lets the generic miss handler deal with the
  // out-of-range case instead of materializing the empty string here.
  if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
    index_out_of_range_label = &miss;
  }

  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            eax,
                                            &miss);
  ASSERT(object != holder);
  CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
                  ebx, edx, edi, name, &miss);

  Register receiver = eax;
  Register index = edi;
  Register scratch1 = ebx;
  Register scratch2 = edx;
  Register result = eax;
  __ mov(receiver, Operand(esp, (argc + 1) * kPointerSize));
  if (argc > 0) {
    // Load the index argument (the first and only expected argument).
    __ mov(index, Operand(esp, (argc - 0) * kPointerSize));
  } else {
    // No argument passed: behave like charAt(undefined).
    __ Set(index, Immediate(factory()->undefined_value()));
  }

  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch1,
                                          scratch2,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          index_out_of_range_label,
                                          STRING_INDEX_IS_NUMBER);
  char_at_generator.GenerateFast(masm());
  // Fast path done: pop arguments plus receiver and return result in eax.
  __ ret((argc + 1) * kPointerSize);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ Set(eax, Immediate(factory()->empty_string()));
    __ ret((argc + 1) * kPointerSize);
  }

  __ bind(&miss);
  // Restore function name in ecx.
  __ Set(ecx, Immediate(Handle<String>(name)));
  __ bind(&name_miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
1834
1835
// Compiles a call stub specialized for String.fromCharCode with exactly
// one argument.  Handles both a normal monomorphic receiver
// (cell == NULL) and a call through a global property cell.  A non-smi
// char code falls through to a full invocation of the builtin.
MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : function name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) {
    return isolate()->heap()->undefined_value();
  }

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    // Non-global case: load and smi-check the receiver, then verify the
    // prototype chain.
    __ mov(edx, Operand(esp, 2 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ test(edx, Immediate(kSmiTagMask));
    __ j(zero, &miss);

    CheckPrototypes(JSObject::cast(object), edx, holder, ebx, eax, edi, name,
                    &miss);
  } else {
    // Global case: check the global receiver and that the cell still
    // holds the expected function.
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the char code argument.
  Register code = ebx;
  __ mov(code, Operand(esp, 1 * kPointerSize));

  // Check the code is a smi.
  Label slow;
  STATIC_ASSERT(kSmiTag == 0);
  __ test(code, Immediate(kSmiTagMask));
  __ j(not_zero, &slow);

  // Convert the smi code to uint16.
  __ and_(code, Immediate(Smi::FromInt(0xffff)));

  StringCharFromCodeGenerator char_from_code_generator(code, eax);
  char_from_code_generator.GenerateFast(masm());
  // Pop the argument and the receiver; result string is in eax.
  __ ret(2 * kPointerSize);

  StubRuntimeCallHelper call_helper;
  char_from_code_generator.GenerateSlow(masm(), call_helper);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // ecx: function name.
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}
1909
1910
// Compiles a call stub specialized for Math.floor with exactly one
// argument.  Requires SSE2 (bails out otherwise).  Smis are returned
// unchanged; heap numbers are floored with SSE2 arithmetic; anything
// else (including NaN, non-positive values, and values that do not fit
// in a smi/int32) falls back to the slow path or the full function.
MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
                                                    JSObject* holder,
                                                    JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  if (!CpuFeatures::IsSupported(SSE2)) {
    return isolate()->heap()->undefined_value();
  }

  CpuFeatures::Scope use_sse2(SSE2);

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) {
    return isolate()->heap()->undefined_value();
  }

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    // Non-global case: load and smi-check the receiver, then verify the
    // prototype chain.
    __ mov(edx, Operand(esp, 2 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ test(edx, Immediate(kSmiTagMask));
    __ j(zero, &miss);

    CheckPrototypes(JSObject::cast(object), edx, holder, ebx, eax, edi, name,
                    &miss);
  } else {
    // Global case: check the global receiver and that the cell still
    // holds the expected function.
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into eax.
  __ mov(eax, Operand(esp, 1 * kPointerSize));

  // Check if the argument is a smi.
  Label smi;
  STATIC_ASSERT(kSmiTag == 0);
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &smi);

  // Check if the argument is a heap number and load its value into xmm0.
  Label slow;
  __ CheckMap(eax, factory()->heap_number_map(), &slow, true);
  __ movdbl(xmm0, FieldOperand(eax, HeapNumber::kValueOffset));

  // Check if the argument is strictly positive. Note this also
  // discards NaN.
  __ xorpd(xmm1, xmm1);
  __ ucomisd(xmm0, xmm1);
  __ j(below_equal, &slow);

  // Do a truncating conversion.
  __ cvttsd2si(eax, Operand(xmm0));

  // Check if the result fits into a smi. Note this also checks for
  // 0x80000000 which signals a failed conversion.
  Label wont_fit_into_smi;
  __ test(eax, Immediate(0xc0000000));
  __ j(not_zero, &wont_fit_into_smi);

  // Smi tag and return.
  __ SmiTag(eax);
  __ bind(&smi);
  // Pop the argument and the receiver.
  __ ret(2 * kPointerSize);

  // Check if the argument is < 2^kMantissaBits.
  Label already_round;
  __ bind(&wont_fit_into_smi);
  __ LoadPowerOf2(xmm1, ebx, HeapNumber::kMantissaBits);
  __ ucomisd(xmm0, xmm1);
  __ j(above_equal, &already_round);

  // Save a copy of the argument.
  __ movaps(xmm2, xmm0);

  // Compute (argument + 2^kMantissaBits) - 2^kMantissaBits.
  // This rounds to nearest by flushing the fractional bits out of the
  // mantissa; the correction below turns it into floor.
  __ addsd(xmm0, xmm1);
  __ subsd(xmm0, xmm1);

  // Compare the argument and the tentative result to get the right mask:
  //   if xmm2 < xmm0:
  //     xmm2 = 1...1
  //   else:
  //     xmm2 = 0...0
  __ cmpltsd(xmm2, xmm0);

  // Subtract 1 if the argument was less than the tentative result.
  __ LoadPowerOf2(xmm1, ebx, 0);
  __ andpd(xmm1, xmm2);
  __ subsd(xmm0, xmm1);

  // Return a new heap number.
  __ AllocateHeapNumber(eax, ebx, edx, &slow);
  __ movdbl(FieldOperand(eax, HeapNumber::kValueOffset), xmm0);
  __ ret(2 * kPointerSize);

  // Return the argument (when it's an already round heap number).
  __ bind(&already_round);
  __ mov(eax, Operand(esp, 1 * kPointerSize));
  __ ret(2 * kPointerSize);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // ecx: function name.
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}
2039
2040
// Compiles a call stub specialized for Math.abs with exactly one
// argument.  Smis are negated branchlessly via the sign-extension
// trick; heap numbers get their sign bit cleared into a freshly
// allocated heap number.  The most negative smi and any non-number
// argument fall back to the full function.
MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
                                                  JSObject* holder,
                                                  JSGlobalPropertyCell* cell,
                                                  JSFunction* function,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) {
    return isolate()->heap()->undefined_value();
  }

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    // Non-global case: load and smi-check the receiver, then verify the
    // prototype chain.
    __ mov(edx, Operand(esp, 2 * kPointerSize));

    STATIC_ASSERT(kSmiTag == 0);
    __ test(edx, Immediate(kSmiTagMask));
    __ j(zero, &miss);

    CheckPrototypes(JSObject::cast(object), edx, holder, ebx, eax, edi, name,
                    &miss);
  } else {
    // Global case: check the global receiver and that the cell still
    // holds the expected function.
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into eax.
  __ mov(eax, Operand(esp, 1 * kPointerSize));

  // Check if the argument is a smi.
  Label not_smi;
  STATIC_ASSERT(kSmiTag == 0);
  __ test(eax, Immediate(kSmiTagMask));
  __ j(not_zero, &not_smi);

  // Set ebx to 1...1 (== -1) if the argument is negative, or to 0...0
  // otherwise.
  __ mov(ebx, eax);
  __ sar(ebx, kBitsPerInt - 1);

  // Do bitwise not or do nothing depending on ebx.
  __ xor_(eax, Operand(ebx));

  // Add 1 or do nothing depending on ebx.
  __ sub(eax, Operand(ebx));

  // If the result is still negative, go to the slow case.
  // This only happens for the most negative smi.
  Label slow;
  __ j(negative, &slow);

  // Smi case done.  Pop the argument and the receiver.
  __ ret(2 * kPointerSize);

  // Check if the argument is a heap number and load its exponent and
  // sign into ebx.
  __ bind(&not_smi);
  __ CheckMap(eax, factory()->heap_number_map(), &slow, true);
  __ mov(ebx, FieldOperand(eax, HeapNumber::kExponentOffset));

  // Check the sign of the argument. If the argument is positive,
  // just return it.
  Label negative_sign;
  __ test(ebx, Immediate(HeapNumber::kSignMask));
  __ j(not_zero, &negative_sign);
  __ ret(2 * kPointerSize);

  // If the argument is negative, clear the sign, and return a new
  // number.
  __ bind(&negative_sign);
  __ and_(ebx, ~HeapNumber::kSignMask);
  __ mov(ecx, FieldOperand(eax, HeapNumber::kMantissaOffset));
  __ AllocateHeapNumber(eax, edi, edx, &slow);
  __ mov(FieldOperand(eax, HeapNumber::kExponentOffset), ebx);
  __ mov(FieldOperand(eax, HeapNumber::kMantissaOffset), ecx);
  __ ret(2 * kPointerSize);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // ecx: function name.
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}
2144
2145
// Compiles a stub that calls a simple API function directly, without
// going through a full JS-to-C++ call.  Bails out (returns undefined)
// for global receivers, calls through a property cell, or when the
// expected receiver type is not found on the prototype chain.
MaybeObject* CallStubCompiler::CompileFastApiCall(
    const CallOptimization& optimization,
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  ASSERT(optimization.is_simple_api_call());
  // Bail out if object is a global object as we don't want to
  // repatch it to global receiver.
  if (object->IsGlobalObject()) return heap()->undefined_value();
  if (cell != NULL) return heap()->undefined_value();
  int depth = optimization.GetPrototypeDepthOfExpectedType(
      JSObject::cast(object), holder);
  if (depth == kInvalidProtoDepth) return heap()->undefined_value();

  // Two miss labels: misses before the stack reservation must not
  // unwind the reserved v8::Arguments slots.
  Label miss, miss_before_stack_reserved;

  GenerateNameCheck(name, &miss_before_stack_reserved);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &miss_before_stack_reserved, not_taken);

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->call_const(), 1);
  __ IncrementCounter(counters->call_const_fast_api(), 1);

  // Allocate space for v8::Arguments implicit values. Must be initialized
  // before calling any runtime function.
  __ sub(Operand(esp), Immediate(kFastApiCallArguments * kPointerSize));

  // Check that the maps haven't changed and find a Holder as a side effect.
  CheckPrototypes(JSObject::cast(object), edx, holder,
                  ebx, eax, edi, name, depth, &miss);

  // Move the return address on top of the stack.
  __ mov(eax, Operand(esp, 3 * kPointerSize));
  __ mov(Operand(esp, 0 * kPointerSize), eax);

  // esp[2 * kPointerSize] is uninitialized, esp[3 * kPointerSize] contains
  // duplicate of return address and will be overwritten.
  MaybeObject* result = GenerateFastApiCall(masm(), optimization, argc);
  if (result->IsFailure()) return result;

  __ bind(&miss);
  // Drop the reserved v8::Arguments slots before taking the miss branch.
  __ add(Operand(esp), Immediate(kFastApiCallArguments * kPointerSize));

  __ bind(&miss_before_stack_reserved);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
2205
2206
// Compiles a CONSTANT_FUNCTION call stub: the callee is a known
// JSFunction, guarded by a receiver check whose kind depends on the
// receiver type (map check for JS objects, or a prototype-chain check
// through the String/Number/Boolean function for primitive receivers).
// Custom call generators (e.g. the Math/String stubs above) are tried
// first; an undefined result from them means "compile normally".
MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
                                                   JSObject* holder,
                                                   JSFunction* function,
                                                   String* name,
                                                   CheckType check) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  if (HasCustomCallGenerator(function)) {
    MaybeObject* maybe_result = CompileCustomCall(
        object, holder, NULL, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // undefined means bail out to regular compiler.
    if (!result->IsUndefined()) return result;
  }

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  // (For NUMBER_CHECK a smi receiver is valid and handled below.)
  if (check != NUMBER_CHECK) {
    __ test(edx, Immediate(kSmiTagMask));
    __ j(zero, &miss, not_taken);
  }

  // Make sure that it's okay not to patch the on stack receiver
  // unless we're doing a receiver map check.
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);

  SharedFunctionInfo* function_info = function->shared();
  switch (check) {
    case RECEIVER_MAP_CHECK:
      __ IncrementCounter(isolate()->counters()->call_const(), 1);

      // Check that the maps haven't changed.
      CheckPrototypes(JSObject::cast(object), edx, holder,
                      ebx, eax, edi, name, &miss);

      // Patch the receiver on the stack with the global proxy if
      // necessary.
      if (object->IsGlobalObject()) {
        __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
        __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
      }
      break;

    case STRING_CHECK:
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        // Check that the object is a string or a symbol.
        __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, eax);
        __ j(above_equal, &miss, not_taken);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::STRING_FUNCTION_INDEX, eax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
                        ebx, edx, edi, name, &miss);
      }
      break;

    case NUMBER_CHECK: {
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a smi or a heap number.
        __ test(edx, Immediate(kSmiTagMask));
        __ j(zero, &fast, taken);
        __ CmpObjectType(edx, HEAP_NUMBER_TYPE, eax);
        __ j(not_equal, &miss, not_taken);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::NUMBER_FUNCTION_INDEX, eax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
                        ebx, edx, edi, name, &miss);
      }
      break;
    }

    case BOOLEAN_CHECK: {
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a boolean.
        __ cmp(edx, factory()->true_value());
        __ j(equal, &fast, taken);
        __ cmp(edx, factory()->false_value());
        __ j(not_equal, &miss, not_taken);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::BOOLEAN_FUNCTION_INDEX, eax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
                        ebx, edx, edi, name, &miss);
      }
      break;
    }

    default:
      UNREACHABLE();
  }

  // All checks passed: tail call the known function.
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  // Handle call cache miss.
  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
2339
2340
// Compiles a call stub for a property that is loaded through an
// interceptor.  The interceptor (or the post-interceptor lookup) must
// produce a JSFunction in eax, which is then invoked.
MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
                                                      JSObject* holder,
                                                      String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------
  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();

  // Look the property up behind the interceptor so the compiler can
  // specialize for a known post-interceptor result.
  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);

  // Get the receiver from the stack.
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  CallInterceptorCompiler compiler(this, arguments(), ecx);
  MaybeObject* result = compiler.Compile(masm(),
                                         object,
                                         holder,
                                         name,
                                         &lookup,
                                         edx,
                                         ebx,
                                         edi,
                                         eax,
                                         &miss);
  if (result->IsFailure()) return result;

  // Restore receiver.
  __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));

  // Check that the function really is a function.
  __ test(eax, Immediate(kSmiTagMask));
  __ j(zero, &miss, not_taken);
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &miss, not_taken);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
    __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
  }

  // Invoke the function.
  __ mov(edi, eax);
  __ InvokeFunction(edi, arguments(), JUMP_FUNCTION);

  // Handle load cache miss.
  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
2405
2406
// Compiles a call stub for a function stored in a global property
// cell.  Custom call generators are tried first; otherwise the stub
// checks the global receiver, loads the function from the cell
// (into edi), and tail-calls its code.
MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
                                                 GlobalObject* holder,
                                                 JSGlobalPropertyCell* cell,
                                                 JSFunction* function,
                                                 String* name) {
  // ----------- S t a t e -------------
  //  -- ecx                 : name
  //  -- esp[0]              : return address
  //  -- esp[(argc - n) * 4] : arg[n] (zero-based)
  //  -- ...
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  if (HasCustomCallGenerator(function)) {
    MaybeObject* maybe_result = CompileCustomCall(
        object, holder, cell, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // undefined means bail out to regular compiler.
    if (!result->IsUndefined()) return result;
  }

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();

  GenerateGlobalReceiverCheck(object, holder, name, &miss);

  GenerateLoadFunctionFromCell(cell, function, &miss);

  // Patch the receiver on the stack with the global proxy.
  if (object->IsGlobalObject()) {
    __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
    __ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
  }

  // Setup the context (function already in edi).
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // Jump to the cached code (tail call).
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->call_global_inline(), 1);
  ASSERT(function->is_compiled());
  ParameterCount expected(function->shared()->formal_parameter_count());
  if (V8::UseCrankshaft()) {
    // TODO(kasperl): For now, we always call indirectly through the
    // code field in the function to allow recompilation to take effect
    // without changing any of the call sites.
    __ InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
                  expected, arguments(), JUMP_FUNCTION);
  } else {
    Handle<Code> code(function->code());
    __ InvokeCode(code, expected, arguments(),
                  RelocInfo::CODE_TARGET, JUMP_FUNCTION);
  }

  // Handle call cache miss.
  __ bind(&miss);
  __ IncrementCounter(counters->call_global_inline_miss(), 1);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(NORMAL, name);
}
2475
2476
// Compiles a named-store stub that writes to an in-object/backing-store
// field at the given index, optionally performing a map transition.
MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
                                                  int index,
                                                  Map* transition,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // Generate store field code. Trashes the name register.
  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     edx, ecx, ebx,
                     &miss);

  // Handle store cache miss.
  __ bind(&miss);
  __ mov(ecx, Immediate(Handle<String>(name)));  // restore name
  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}
2506
2507
// Compiles a named-store stub for a property backed by an AccessorInfo
// callback.  After map (and optional global-proxy security) checks, the
// actual store is delegated to the kStoreCallbackProperty runtime
// function via a tail call.
MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
                                                     AccessorInfo* callback,
                                                     String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &miss, not_taken);

  // Check that the map of the object hasn't changed.
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
         Immediate(Handle<Map>(object->map())));
  __ j(not_equal, &miss, not_taken);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(edx, ebx, &miss);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Build the runtime call frame: receiver, callback info, name, value.
  __ pop(ebx);  // remove the return address
  __ push(edx);  // receiver
  __ push(Immediate(Handle<AccessorInfo>(callback)));  // callback info
  __ push(ecx);  // name
  __ push(eax);  // value
  __ push(ebx);  // restore return address

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
2557
2558
// Compiles a named-store stub for a receiver with a store interceptor.
// After map (and optional global-proxy security) checks, the store is
// delegated to the kStoreInterceptorProperty runtime function, passing
// the current strict mode along as a smi.
MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
                                                        String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ test(edx, Immediate(kSmiTagMask));
  __ j(zero, &miss, not_taken);

  // Check that the map of the object hasn't changed.
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
         Immediate(Handle<Map>(receiver->map())));
  __ j(not_equal, &miss, not_taken);

  // Perform global security token check if needed.
  if (receiver->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(edx, ebx, &miss);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());

  // Build the runtime call frame: receiver, name, value, strict mode.
  __ pop(ebx);  // remove the return address
  __ push(edx);  // receiver
  __ push(ecx);  // name
  __ push(eax);  // value
  __ push(Immediate(Smi::FromInt(strict_mode_)));
  __ push(ebx);  // restore return address

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
2607
2608
// Compiles a store stub for a global property: after a map check on
// the global object, the value is written directly into the property
// cell.  Stores hitting a deleted property (the hole in the cell) miss
// so the runtime can update the property details.
MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
                                                   JSGlobalPropertyCell* cell,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : name
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the map of the global has not changed.
  __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
         Immediate(Handle<Map>(object->map())));
  __ j(not_equal, &miss, not_taken);


  // Compute the cell operand to use.
  // When serializing, the cell address cannot be embedded directly, so
  // go through a register instead.
  Operand cell_operand = Operand::Cell(Handle<JSGlobalPropertyCell>(cell));
  if (Serializer::enabled()) {
    __ mov(ebx, Immediate(Handle<JSGlobalPropertyCell>(cell)));
    cell_operand = FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset);
  }

  // Check that the value in the cell is not the hole. If it is, this
  // cell could have been deleted and reintroducing the global needs
  // to update the property details in the property dictionary of the
  // global object. We bail out to the runtime system to do that.
  __ cmp(cell_operand, factory()->the_hole_value());
  __ j(equal, &miss);

  // Store the value in the cell.
  __ mov(cell_operand, eax);

  // Return the value (register eax).
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_store_global_inline(), 1);
  __ ret(0);

  // Handle store cache miss.
  __ bind(&miss);
  __ IncrementCounter(counters->named_store_global_inline_miss(), 1);
  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
  __ jmp(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
2657
2658
CompileStoreField(JSObject * object,int index,Map * transition,String * name)2659 MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
2660 int index,
2661 Map* transition,
2662 String* name) {
  // Generates a keyed-store stub specialized for one fixed property name:
  // the key in ecx is compared against the expected name and, on match, the
  // shared GenerateStoreField helper emits the actual field store (possibly
  // with a map transition).  Mismatch or helper failure jumps to the
  // KeyedStoreIC_Miss builtin.
2663 // ----------- S t a t e -------------
2664 // -- eax : value
2665 // -- ecx : key
2666 // -- edx : receiver
2667 // -- esp[0] : return address
2668 // -----------------------------------
2669 Label miss;
2670
2671 Counters* counters = isolate()->counters();
2672 __ IncrementCounter(counters->keyed_store_field(), 1);
2673
2674 // Check that the name has not changed.
2675 __ cmp(Operand(ecx), Immediate(Handle<String>(name)));
2676 __ j(not_equal, &miss, not_taken);
2677
2678 // Generate store field code. Trashes the name register.
2679 GenerateStoreField(masm(),
2680 object,
2681 index,
2682 transition,
2683 edx, ecx, ebx,
2684 &miss);
2685
2686 // Handle store cache miss.
  // The counter was optimistically incremented above; undo it on the miss
  // path so it only counts successful fast-path stores.
2687 __ bind(&miss);
2688 __ DecrementCounter(counters->keyed_store_field(), 1);
2689 Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
2690 __ jmp(ic, RelocInfo::CODE_TARGET);
2691
2692 // Return the generated code.
2693 return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
2694 }
2695
2696
CompileStoreSpecialized(JSObject * receiver)2697 MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
2698 JSObject* receiver) {
  // Generates a keyed-store stub for a fast-elements receiver of a known map:
  // smi-check receiver and key, verify the receiver map, verify the elements
  // backing store is a plain FixedArray (not copy-on-write), bounds-check the
  // key, then store with a write barrier.  Any failed check falls back to
  // the KeyedStoreIC_Miss builtin.
2699 // ----------- S t a t e -------------
2700 // -- eax : value
2701 // -- ecx : key
2702 // -- edx : receiver
2703 // -- esp[0] : return address
2704 // -----------------------------------
2705 Label miss;
2706
2707 // Check that the receiver isn't a smi.
2708 __ test(edx, Immediate(kSmiTagMask));
2709 __ j(zero, &miss, not_taken);
2710
2711 // Check that the map matches.
2712 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
2713 Immediate(Handle<Map>(receiver->map())));
2714 __ j(not_equal, &miss, not_taken);
2715
2716 // Check that the key is a smi.
2717 __ test(ecx, Immediate(kSmiTagMask));
2718 __ j(not_zero, &miss, not_taken);
2719
2720 // Get the elements array and make sure it is a fast element array, not 'cow'.
  // A copy-on-write array has a different map than fixed_array_map, so this
  // single map compare rejects both non-fast and COW backing stores.
2721 __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
2722 __ cmp(FieldOperand(edi, HeapObject::kMapOffset),
2723 Immediate(factory()->fixed_array_map()));
2724 __ j(not_equal, &miss, not_taken);
2725
2726 // Check that the key is within bounds.
  // For JSArrays the array length (a smi) bounds the store; for other
  // objects the capacity of the elements array does.
2727 if (receiver->IsJSArray()) {
2728 __ cmp(ecx, FieldOperand(edx, JSArray::kLengthOffset)); // Compare smis.
2729 __ j(above_equal, &miss, not_taken);
2730 } else {
2731 __ cmp(ecx, FieldOperand(edi, FixedArray::kLengthOffset)); // Compare smis.
2732 __ j(above_equal, &miss, not_taken);
2733 }
2734
2735 // Do the store and update the write barrier. Make sure to preserve
2736 // the value in register eax.
  // ecx holds a smi key (index << 1), so times_2 scaling yields a byte
  // offset of index * kPointerSize into the elements array.
2737 __ mov(edx, Operand(eax));
2738 __ mov(FieldOperand(edi, ecx, times_2, FixedArray::kHeaderSize), eax);
2739 __ RecordWrite(edi, 0, edx, ecx);
2740
2741 // Done.
2742 __ ret(0);
2743
2744 // Handle store cache miss.
2745 __ bind(&miss);
2746 Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
2747 __ jmp(ic, RelocInfo::CODE_TARGET);
2748
2749 // Return the generated code.
2750 return GetCode(NORMAL, NULL);
2751 }
2752
2753
CompileLoadNonexistent(String * name,JSObject * object,JSObject * last)2754 MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
2755 JSObject* object,
2756 JSObject* last) {
  // Generates a load stub that returns undefined for a property known to be
  // absent: it validates the maps along the whole prototype chain (and, for a
  // global object at the end of the chain, that the property cell is empty)
  // and then loads undefined into eax.  Returns a failure (propagated to the
  // caller) if the property-cell check cannot be generated.
2757 // ----------- S t a t e -------------
2758 // -- eax : receiver
2759 // -- ecx : name
2760 // -- esp[0] : return address
2761 // -----------------------------------
2762 Label miss;
2763
2764 // Check that the receiver isn't a smi.
2765 __ test(eax, Immediate(kSmiTagMask));
2766 __ j(zero, &miss, not_taken);
2767
2768 ASSERT(last->IsGlobalObject() || last->HasFastProperties());
2769
2770 // Check the maps of the full prototype chain. Also check that
2771 // global property cells up to (but not including) the last object
2772 // in the prototype chain are empty.
2773 CheckPrototypes(object, eax, last, ebx, edx, edi, name, &miss);
2774
2775 // If the last object in the prototype chain is a global object,
2776 // check that the global property cell is empty.
2777 if (last->IsGlobalObject()) {
2778 MaybeObject* cell = GenerateCheckPropertyCell(masm(),
2779 GlobalObject::cast(last),
2780 name,
2781 edx,
2782 &miss);
2783 if (cell->IsFailure()) {
  // Bailing out without binding the label; Unuse() keeps the Label
  // destructor from asserting about an unbound, referenced label.
2784 miss.Unuse();
2785 return cell;
2786 }
2787 }
2788
2789 // Return undefined if maps of the full prototype chain are still the
2790 // same and no global property with this name contains a value.
2791 __ mov(eax, isolate()->factory()->undefined_value());
2792 __ ret(0);
2793
2794 __ bind(&miss);
2795 GenerateLoadMiss(masm(), Code::LOAD_IC);
2796
2797 // Return the generated code.
  // NONEXISTENT stubs are shared across names, hence the empty_string key.
2798 return GetCode(NONEXISTENT, isolate()->heap()->empty_string());
2799 }
2800
2801
CompileLoadField(JSObject * object,JSObject * holder,int index,String * name)2802 MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object,
2803 JSObject* holder,
2804 int index,
2805 String* name) {
  // Generates a monomorphic load stub for an in-object/backing-store field at
  // a fixed index; all the work is done by the shared GenerateLoadField
  // helper, with the miss path falling through to the generic LOAD_IC miss.
2806 // ----------- S t a t e -------------
2807 // -- eax : receiver
2808 // -- ecx : name
2809 // -- esp[0] : return address
2810 // -----------------------------------
2811 Label miss;
2812
2813 GenerateLoadField(object, holder, eax, ebx, edx, edi, index, name, &miss);
2814 __ bind(&miss);
2815 GenerateLoadMiss(masm(), Code::LOAD_IC);
2816
2817 // Return the generated code.
2818 return GetCode(FIELD, name);
2819 }
2820
2821
CompileLoadCallback(String * name,JSObject * object,JSObject * holder,AccessorInfo * callback)2822 MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name,
2823 JSObject* object,
2824 JSObject* holder,
2825 AccessorInfo* callback) {
  // Generates a load stub that invokes a native AccessorInfo getter.
  // GenerateLoadCallback can itself fail (it may need to allocate), in which
  // case the failure is propagated to the caller instead of returning code.
2826 // ----------- S t a t e -------------
2827 // -- eax : receiver
2828 // -- ecx : name
2829 // -- esp[0] : return address
2830 // -----------------------------------
2831 Label miss;
2832
2833 MaybeObject* result = GenerateLoadCallback(object, holder, eax, ecx, ebx, edx,
2834 edi, callback, name, &miss);
2835 if (result->IsFailure()) {
  // Unbind the label before early return to avoid the Label destructor
  // assert on a referenced-but-unbound label.
2836 miss.Unuse();
2837 return result;
2838 }
2839
2840 __ bind(&miss);
2841 GenerateLoadMiss(masm(), Code::LOAD_IC);
2842
2843 // Return the generated code.
2844 return GetCode(CALLBACKS, name);
2845 }
2846
2847
CompileLoadConstant(JSObject * object,JSObject * holder,Object * value,String * name)2848 MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
2849 JSObject* holder,
2850 Object* value,
2851 String* name) {
  // Generates a load stub that returns a constant (typically a constant
  // function) after validating the receiver's prototype chain; the shared
  // GenerateLoadConstant helper emits the checks and the load.
2852 // ----------- S t a t e -------------
2853 // -- eax : receiver
2854 // -- ecx : name
2855 // -- esp[0] : return address
2856 // -----------------------------------
2857 Label miss;
2858
2859 GenerateLoadConstant(object, holder, eax, ebx, edx, edi, value, name, &miss);
2860 __ bind(&miss);
2861 GenerateLoadMiss(masm(), Code::LOAD_IC);
2862
2863 // Return the generated code.
2864 return GetCode(CONSTANT_FUNCTION, name);
2865 }
2866
2867
CompileLoadInterceptor(JSObject * receiver,JSObject * holder,String * name)2868 MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
2869 JSObject* holder,
2870 String* name) {
  // Generates a load stub for a property guarded by a named interceptor.
  // LookupPostInterceptor discovers what the lookup would find behind the
  // interceptor so the generated code can take a faster path when possible.
2871 // ----------- S t a t e -------------
2872 // -- eax : receiver
2873 // -- ecx : name
2874 // -- esp[0] : return address
2875 // -----------------------------------
2876 Label miss;
2877
2878 LookupResult lookup;
2879 LookupPostInterceptor(holder, name, &lookup);
2880
2881 // TODO(368): Compile in the whole chain: all the interceptors in
2882 // prototypes and ultimate answer.
2883 GenerateLoadInterceptor(receiver,
2884 holder,
2885 &lookup,
2886 eax,
2887 ecx,
2888 edx,
2889 ebx,
2890 edi,
2891 name,
2892 &miss);
2893
2894 __ bind(&miss);
2895 GenerateLoadMiss(masm(), Code::LOAD_IC);
2896
2897 // Return the generated code.
2898 return GetCode(INTERCEPTOR, name);
2899 }
2900
2901
CompileLoadGlobal(JSObject * object,GlobalObject * holder,JSGlobalPropertyCell * cell,String * name,bool is_dont_delete)2902 MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
2903 GlobalObject* holder,
2904 JSGlobalPropertyCell* cell,
2905 String* name,
2906 bool is_dont_delete) {
  // Generates a load stub that reads a global property out of its
  // JSGlobalPropertyCell: validate the prototype chain maps, load the cell
  // value into ebx, optionally reject the hole (deleted property), and
  // return the value in eax.
2907 // ----------- S t a t e -------------
2908 // -- eax : receiver
2909 // -- ecx : name
2910 // -- esp[0] : return address
2911 // -----------------------------------
2912 Label miss;
2913
2914 // If the object is the holder then we know that it's a global
2915 // object which can only happen for contextual loads. In this case,
2916 // the receiver cannot be a smi.
2917 if (object != holder) {
2918 __ test(eax, Immediate(kSmiTagMask));
2919 __ j(zero, &miss, not_taken);
2920 }
2921
2922 // Check that the maps haven't changed.
2923 CheckPrototypes(object, eax, holder, ebx, edx, edi, name, &miss);
2924
2925 // Get the value from the cell.
  // With the serializer enabled the cell address cannot be embedded as an
  // absolute memory operand, so load the handle first and dereference.
2926 if (Serializer::enabled()) {
2927 __ mov(ebx, Immediate(Handle<JSGlobalPropertyCell>(cell)));
2928 __ mov(ebx, FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset));
2929 } else {
2930 __ mov(ebx, Operand::Cell(Handle<JSGlobalPropertyCell>(cell)));
2931 }
2932
2933 // Check for deleted property if property can actually be deleted.
2934 if (!is_dont_delete) {
2935 __ cmp(ebx, factory()->the_hole_value());
2936 __ j(equal, &miss, not_taken);
2937 } else if (FLAG_debug_code) {
  // DONT_DELETE properties can never be deleted, so in debug builds assert
  // that the cell cannot contain the hole.
2938 __ cmp(ebx, factory()->the_hole_value());
2939 __ Check(not_equal, "DontDelete cells can't contain the hole");
2940 }
2941
2942 Counters* counters = isolate()->counters();
2943 __ IncrementCounter(counters->named_load_global_stub(), 1);
2944 __ mov(eax, ebx);
2945 __ ret(0);
2946
2947 __ bind(&miss);
2948 __ IncrementCounter(counters->named_load_global_stub_miss(), 1);
2949 GenerateLoadMiss(masm(), Code::LOAD_IC);
2950
2951 // Return the generated code.
2952 return GetCode(NORMAL, name);
2953 }
2954
2955
CompileLoadField(String * name,JSObject * receiver,JSObject * holder,int index)2956 MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
2957 JSObject* receiver,
2958 JSObject* holder,
2959 int index) {
  // Keyed variant of the field load stub: the key register (eax) is first
  // compared against the one name this stub was compiled for, then the
  // shared GenerateLoadField helper emits the checks and the field load.
2960 // ----------- S t a t e -------------
2961 // -- eax : key
2962 // -- edx : receiver
2963 // -- esp[0] : return address
2964 // -----------------------------------
2965 Label miss;
2966
2967 Counters* counters = isolate()->counters();
2968 __ IncrementCounter(counters->keyed_load_field(), 1);
2969
2970 // Check that the name has not changed.
2971 __ cmp(Operand(eax), Immediate(Handle<String>(name)));
2972 __ j(not_equal, &miss, not_taken);
2973
2974 GenerateLoadField(receiver, holder, edx, ebx, ecx, edi, index, name, &miss);
2975
  // Undo the optimistic counter increment on the miss path.
2976 __ bind(&miss);
2977 __ DecrementCounter(counters->keyed_load_field(), 1);
2978 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
2979
2980 // Return the generated code.
2981 return GetCode(FIELD, name);
2982 }
2983
2984
CompileLoadCallback(String * name,JSObject * receiver,JSObject * holder,AccessorInfo * callback)2985 MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
2986 String* name,
2987 JSObject* receiver,
2988 JSObject* holder,
2989 AccessorInfo* callback) {
  // Keyed variant of the callback load stub: checks the key against the
  // expected name, then dispatches to the native AccessorInfo getter via
  // GenerateLoadCallback.  Propagates allocation failures to the caller.
2990 // ----------- S t a t e -------------
2991 // -- eax : key
2992 // -- edx : receiver
2993 // -- esp[0] : return address
2994 // -----------------------------------
2995 Label miss;
2996
2997 Counters* counters = isolate()->counters();
2998 __ IncrementCounter(counters->keyed_load_callback(), 1);
2999
3000 // Check that the name has not changed.
3001 __ cmp(Operand(eax), Immediate(Handle<String>(name)));
3002 __ j(not_equal, &miss, not_taken);
3003
3004 MaybeObject* result = GenerateLoadCallback(receiver, holder, edx, eax, ebx,
3005 ecx, edi, callback, name, &miss);
3006 if (result->IsFailure()) {
  // Unbind before the early return so the Label destructor does not assert.
3007 miss.Unuse();
3008 return result;
3009 }
3010
3011 __ bind(&miss);
3012
  // Undo the optimistic counter increment on the miss path.
3013 __ DecrementCounter(counters->keyed_load_callback(), 1);
3014 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3015
3016 // Return the generated code.
3017 return GetCode(CALLBACKS, name);
3018 }
3019
3020
CompileLoadConstant(String * name,JSObject * receiver,JSObject * holder,Object * value)3021 MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
3022 JSObject* receiver,
3023 JSObject* holder,
3024 Object* value) {
  // Keyed variant of the constant load stub: key in eax is checked against
  // the compiled-in name before GenerateLoadConstant emits the prototype
  // checks and loads the constant value.
3025 // ----------- S t a t e -------------
3026 // -- eax : key
3027 // -- edx : receiver
3028 // -- esp[0] : return address
3029 // -----------------------------------
3030 Label miss;
3031
3032 Counters* counters = isolate()->counters();
3033 __ IncrementCounter(counters->keyed_load_constant_function(), 1);
3034
3035 // Check that the name has not changed.
3036 __ cmp(Operand(eax), Immediate(Handle<String>(name)));
3037 __ j(not_equal, &miss, not_taken);
3038
3039 GenerateLoadConstant(receiver, holder, edx, ebx, ecx, edi,
3040 value, name, &miss);
  // Undo the optimistic counter increment on the miss path.
3041 __ bind(&miss);
3042 __ DecrementCounter(counters->keyed_load_constant_function(), 1);
3043 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3044
3045 // Return the generated code.
3046 return GetCode(CONSTANT_FUNCTION, name);
3047 }
3048
3049
CompileLoadInterceptor(JSObject * receiver,JSObject * holder,String * name)3050 MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
3051 JSObject* holder,
3052 String* name) {
  // Keyed variant of the interceptor load stub: checks the key against the
  // expected name, then emits the interceptor dispatch code via
  // GenerateLoadInterceptor (using the post-interceptor lookup result).
3053 // ----------- S t a t e -------------
3054 // -- eax : key
3055 // -- edx : receiver
3056 // -- esp[0] : return address
3057 // -----------------------------------
3058 Label miss;
3059
3060 Counters* counters = isolate()->counters();
3061 __ IncrementCounter(counters->keyed_load_interceptor(), 1);
3062
3063 // Check that the name has not changed.
3064 __ cmp(Operand(eax), Immediate(Handle<String>(name)));
3065 __ j(not_equal, &miss, not_taken);
3066
3067 LookupResult lookup;
3068 LookupPostInterceptor(holder, name, &lookup);
3069 GenerateLoadInterceptor(receiver,
3070 holder,
3071 &lookup,
3072 edx,
3073 eax,
3074 ecx,
3075 ebx,
3076 edi,
3077 name,
3078 &miss);
  // Undo the optimistic counter increment on the miss path.
3079 __ bind(&miss);
3080 __ DecrementCounter(counters->keyed_load_interceptor(), 1);
3081 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3082
3083 // Return the generated code.
3084 return GetCode(INTERCEPTOR, name);
3085 }
3086
3087
CompileLoadArrayLength(String * name)3088 MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name)
  // Keyed load stub specialized for the 'length' property of JSArrays:
  // checks the key against the expected name, then loads the array length
  // via the shared GenerateLoadArrayLength helper.
{
3089 // ----------- S t a t e -------------
3090 // -- eax : key
3091 // -- edx : receiver
3092 // -- esp[0] : return address
3093 // -----------------------------------
3094 Label miss;
3095
3096 Counters* counters = isolate()->counters();
3097 __ IncrementCounter(counters->keyed_load_array_length(), 1);
3098
3099 // Check that the name has not changed.
3100 __ cmp(Operand(eax), Immediate(Handle<String>(name)));
3101 __ j(not_equal, &miss, not_taken);
3102
3103 GenerateLoadArrayLength(masm(), edx, ecx, &miss);
  // Undo the optimistic counter increment on the miss path.
3104 __ bind(&miss);
3105 __ DecrementCounter(counters->keyed_load_array_length(), 1);
3106 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3107
3108 // Return the generated code.
3109 return GetCode(CALLBACKS, name);
3110 }
3111
3112
CompileLoadStringLength(String * name)3113 MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name)
  // Keyed load stub specialized for the 'length' property of strings:
  // checks the key against the expected name, then loads the string length
  // via the shared GenerateLoadStringLength helper (the trailing 'true'
  // argument is passed through unchanged from the original code).
{
3114 // ----------- S t a t e -------------
3115 // -- eax : key
3116 // -- edx : receiver
3117 // -- esp[0] : return address
3118 // -----------------------------------
3119 Label miss;
3120
3121 Counters* counters = isolate()->counters();
3122 __ IncrementCounter(counters->keyed_load_string_length(), 1);
3123
3124 // Check that the name has not changed.
3125 __ cmp(Operand(eax), Immediate(Handle<String>(name)));
3126 __ j(not_equal, &miss, not_taken);
3127
3128 GenerateLoadStringLength(masm(), edx, ecx, ebx, &miss, true);
  // Undo the optimistic counter increment on the miss path.
3129 __ bind(&miss);
3130 __ DecrementCounter(counters->keyed_load_string_length(), 1);
3131 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3132
3133 // Return the generated code.
3134 return GetCode(CALLBACKS, name);
3135 }
3136
3137
CompileLoadFunctionPrototype(String * name)3138 MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name)
  // Keyed load stub specialized for the 'prototype' property of functions:
  // checks the key against the expected name, then loads the prototype via
  // the shared GenerateLoadFunctionPrototype helper.
{
3139 // ----------- S t a t e -------------
3140 // -- eax : key
3141 // -- edx : receiver
3142 // -- esp[0] : return address
3143 // -----------------------------------
3144 Label miss;
3145
3146 Counters* counters = isolate()->counters();
3147 __ IncrementCounter(counters->keyed_load_function_prototype(), 1);
3148
3149 // Check that the name has not changed.
3150 __ cmp(Operand(eax), Immediate(Handle<String>(name)));
3151 __ j(not_equal, &miss, not_taken);
3152
3153 GenerateLoadFunctionPrototype(masm(), edx, ecx, ebx, &miss);
  // Undo the optimistic counter increment on the miss path.
3154 __ bind(&miss);
3155 __ DecrementCounter(counters->keyed_load_function_prototype(), 1);
3156 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3157
3158 // Return the generated code.
3159 return GetCode(CALLBACKS, name);
3160 }
3161
3162
CompileLoadSpecialized(JSObject * receiver)3163 MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver)
  // Generates a keyed-load stub for a fast-elements receiver of a known map:
  // smi-check receiver and key, verify the receiver map, bounds-check the
  // key against the elements array length, load the element, and miss if it
  // is the hole (so holes are handled by the generic IC).
{
3164 // ----------- S t a t e -------------
3165 // -- eax : key
3166 // -- edx : receiver
3167 // -- esp[0] : return address
3168 // -----------------------------------
3169 Label miss;
3170
3171 // Check that the receiver isn't a smi.
3172 __ test(edx, Immediate(kSmiTagMask));
3173 __ j(zero, &miss, not_taken);
3174
3175 // Check that the map matches.
3176 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
3177 Immediate(Handle<Map>(receiver->map())));
3178 __ j(not_equal, &miss, not_taken);
3179
3180 // Check that the key is a smi.
3181 __ test(eax, Immediate(kSmiTagMask));
3182 __ j(not_zero, &miss, not_taken);
3183
3184 // Get the elements array.
3185 __ mov(ecx, FieldOperand(edx, JSObject::kElementsOffset));
3186 __ AssertFastElements(ecx);
3187
3188 // Check that the key is within bounds.
  // Both key and length are smis, so an unsigned compare also rejects keys
  // that would be negative as signed values.
3189 __ cmp(eax, FieldOperand(ecx, FixedArray::kLengthOffset));
3190 __ j(above_equal, &miss, not_taken);
3191
3192 // Load the result and make sure it's not the hole.
  // eax holds a smi key (index << 1), so times_2 scaling gives the byte
  // offset of index * kPointerSize into the elements array.
3193 __ mov(ebx, Operand(ecx, eax, times_2,
3194 FixedArray::kHeaderSize - kHeapObjectTag));
3195 __ cmp(ebx, factory()->the_hole_value());
3196 __ j(equal, &miss, not_taken);
3197 __ mov(eax, ebx);
3198 __ ret(0);
3199
3200 __ bind(&miss);
3201 GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
3202
3203 // Return the generated code.
3204 return GetCode(NORMAL, NULL);
3205 }
3206
3207
3208 // Specialized stub for constructing objects from functions which only have only
3209 // simple assignments of the form this.x = ...; in their body.
CompileConstructStub(JSFunction * function)3210 MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function)
  // Generates a specialized construct stub for a function whose body consists
  // only of simple this.x = ... assignments: it allocates the JSObject
  // inline, fills the in-object properties directly from the constructor's
  // arguments/constants, and returns the tagged object -- skipping the full
  // generic construction path.  Bails out to the generic construct stub if
  // the function has break points, no valid initial map, or allocation fails.
{
3211 // ----------- S t a t e -------------
3212 // -- eax : argc
3213 // -- edi : constructor
3214 // -- esp[0] : return address
3215 // -- esp[4] : last argument
3216 // -----------------------------------
3217 Label generic_stub_call;
3218 #ifdef ENABLE_DEBUGGER_SUPPORT
3219 // Check to see whether there are any break points in the function code. If
3220 // there are jump to the generic constructor stub which calls the actual
3221 // code for the function thereby hitting the break points.
3222 __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
3223 __ mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kDebugInfoOffset));
3224 __ cmp(ebx, factory()->undefined_value());
3225 __ j(not_equal, &generic_stub_call, not_taken);
3226 #endif
3227
3228 // Load the initial map and verify that it is in fact a map.
3229 __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
3230 // Will both indicate a NULL and a Smi.
3231 __ test(ebx, Immediate(kSmiTagMask));
3232 __ j(zero, &generic_stub_call);
3233 __ CmpObjectType(ebx, MAP_TYPE, ecx);
3234 __ j(not_equal, &generic_stub_call);
3235
3236 #ifdef DEBUG
3237 // Cannot construct functions this way.
3238 // edi: constructor
3239 // ebx: initial map
3240 __ CmpInstanceType(ebx, JS_FUNCTION_TYPE);
3241 __ Assert(not_equal, "Function constructed by construct stub.");
3242 #endif
3243
3244 // Now allocate the JSObject on the heap by moving the new space allocation
3245 // top forward.
3246 // edi: constructor
3247 // ebx: initial map
  // Instance size is stored in words; shift converts it to bytes.
3248 __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceSizeOffset));
3249 __ shl(ecx, kPointerSizeLog2);
3250 __ AllocateInNewSpace(ecx,
3251 edx,
3252 ecx,
3253 no_reg,
3254 &generic_stub_call,
3255 NO_ALLOCATION_FLAGS);
3256
3257 // Allocated the JSObject, now initialize the fields and add the heap tag.
3258 // ebx: initial map
3259 // edx: JSObject (untagged)
3260 __ mov(Operand(edx, JSObject::kMapOffset), ebx);
3261 __ mov(ebx, factory()->empty_fixed_array());
3262 __ mov(Operand(edx, JSObject::kPropertiesOffset), ebx);
3263 __ mov(Operand(edx, JSObject::kElementsOffset), ebx);
3264
3265 // Push the allocated object to the stack. This is the object that will be
3266 // returned (after it is tagged).
3267 __ push(edx);
3268
3269 // eax: argc
3270 // edx: JSObject (untagged)
3271 // Load the address of the first in-object property into edx.
3272 __ lea(edx, Operand(edx, JSObject::kHeaderSize));
3273 // Calculate the location of the first argument. The stack contains the
3274 // allocated object and the return address on top of the argc arguments.
3275 __ lea(ecx, Operand(esp, eax, times_4, 1 * kPointerSize));
3276
3277 // Use edi for holding undefined which is used in several places below.
3278 __ mov(edi, factory()->undefined_value());
3279
3280 // eax: argc
3281 // ecx: first argument
3282 // edx: first in-object property of the JSObject
3283 // edi: undefined
3284 // Fill the initialized properties with a constant value or a passed argument
3285 // depending on the this.x = ...; assignment in the function.
3286 SharedFunctionInfo* shared = function->shared();
3287 for (int i = 0; i < shared->this_property_assignments_count(); i++) {
3288 if (shared->IsThisPropertyAssignmentArgument(i)) {
3289 // Check if the argument assigned to the property is actually passed.
3290 // If argument is not passed the property is set to undefined,
3291 // otherwise find it on the stack.
3292 int arg_number = shared->GetThisPropertyAssignmentArgument(i);
  // ebx starts out as undefined and is conditionally overwritten with the
  // argument; cmov is used when available to avoid a branch.  Arguments
  // sit below ecx on the stack, hence the negative offsets.
3293 __ mov(ebx, edi);
3294 __ cmp(eax, arg_number);
3295 if (CpuFeatures::IsSupported(CMOV)) {
3296 CpuFeatures::Scope use_cmov(CMOV);
3297 __ cmov(above, ebx, Operand(ecx, arg_number * -kPointerSize));
3298 } else {
3299 Label not_passed;
3300 __ j(below_equal, &not_passed);
3301 __ mov(ebx, Operand(ecx, arg_number * -kPointerSize));
3302 __ bind(&not_passed);
3303 }
3304 // Store value in the property.
3305 __ mov(Operand(edx, i * kPointerSize), ebx);
3306 } else {
3307 // Set the property to the constant value.
3308 Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
3309 __ mov(Operand(edx, i * kPointerSize), Immediate(constant));
3310 }
3311 }
3312
3313 // Fill the unused in-object property fields with undefined.
3314 ASSERT(function->has_initial_map());
3315 for (int i = shared->this_property_assignments_count();
3316 i < function->initial_map()->inobject_properties();
3317 i++) {
3318 __ mov(Operand(edx, i * kPointerSize), edi);
3319 }
3320
3321 // Move argc to ebx and retrieve and tag the JSObject to return.
3322 __ mov(ebx, eax);
3323 __ pop(eax);
3324 __ or_(Operand(eax), Immediate(kHeapObjectTag));
3325
3326 // Remove caller arguments and receiver from the stack and return.
3327 __ pop(ecx);
3328 __ lea(esp, Operand(esp, ebx, times_pointer_size, 1 * kPointerSize));
3329 __ push(ecx);
3330 Counters* counters = isolate()->counters();
3331 __ IncrementCounter(counters->constructed_objects(), 1);
3332 __ IncrementCounter(counters->constructed_objects_stub(), 1);
3333 __ ret(0);
3334
3335 // Jump to the generic stub in case the specialized code cannot handle the
3336 // construction.
3337 __ bind(&generic_stub_call);
3338 Handle<Code> generic_construct_stub =
3339 isolate()->builtins()->JSConstructStubGeneric();
3340 __ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);
3341
3342 // Return the generated code.
3343 return GetCode();
3344 }
3345
3346
CompileKeyedLoadStub(JSObject * receiver,ExternalArrayType array_type,Code::Flags flags)3347 MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
3348 JSObject*receiver, ExternalArrayType array_type, Code::Flags flags) {
  // Generates a keyed-load stub for external (typed) array elements of the
  // given element type: checks receiver/key/map, bounds-checks the untagged
  // index, loads the raw element, and boxes the result -- as a smi where the
  // value fits, otherwise as a freshly allocated HeapNumber (always a
  // HeapNumber for float arrays).  Falls back to Runtime::kKeyedGetProperty
  // on any failed check or failed allocation.
3349 // ----------- S t a t e -------------
3350 // -- eax : key
3351 // -- edx : receiver
3352 // -- esp[0] : return address
3353 // -----------------------------------
3354 Label slow, failed_allocation;
3355
3356 // Check that the object isn't a smi.
3357 __ test(edx, Immediate(kSmiTagMask));
3358 __ j(zero, &slow, not_taken);
3359
3360 // Check that the key is a smi.
3361 __ test(eax, Immediate(kSmiTagMask));
3362 __ j(not_zero, &slow, not_taken);
3363
3364 // Check that the map matches.
3365 __ CheckMap(edx, Handle<Map>(receiver->map()), &slow, false);
3366 __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
3367
3368 // eax: key, known to be a smi.
3369 // edx: receiver, known to be a JSObject.
3370 // ebx: elements object, known to be an external array.
3371 // Check that the index is in range.
3372 __ mov(ecx, eax);
3373 __ SmiUntag(ecx); // Untag the index.
3374 __ cmp(ecx, FieldOperand(ebx, ExternalArray::kLengthOffset));
3375 // Unsigned comparison catches both negative and too-large values.
3376 __ j(above_equal, &slow);
3377 __ mov(ebx, FieldOperand(ebx, ExternalArray::kExternalPointerOffset));
3378 // ebx: base pointer of external storage
  // Byte/short loads go straight into eax (smi-tagged below); 32-bit loads
  // land in ecx because they may need boxing into a HeapNumber; float loads
  // go onto the x87 FPU stack.
3379 switch (array_type) {
3380 case kExternalByteArray:
3381 __ movsx_b(eax, Operand(ebx, ecx, times_1, 0));
3382 break;
3383 case kExternalUnsignedByteArray:
3384 case kExternalPixelArray:
3385 __ movzx_b(eax, Operand(ebx, ecx, times_1, 0));
3386 break;
3387 case kExternalShortArray:
3388 __ movsx_w(eax, Operand(ebx, ecx, times_2, 0));
3389 break;
3390 case kExternalUnsignedShortArray:
3391 __ movzx_w(eax, Operand(ebx, ecx, times_2, 0));
3392 break;
3393 case kExternalIntArray:
3394 case kExternalUnsignedIntArray:
3395 __ mov(ecx, Operand(ebx, ecx, times_4, 0));
3396 break;
3397 case kExternalFloatArray:
3398 __ fld_s(Operand(ebx, ecx, times_4, 0));
3399 break;
3400 default:
3401 UNREACHABLE();
3402 break;
3403 }
3404
3405 // For integer array types:
3406 // ecx: value
3407 // For floating-point array type:
3408 // FP(0): value
3409
3410 if (array_type == kExternalIntArray ||
3411 array_type == kExternalUnsignedIntArray) {
3412 // For the Int and UnsignedInt array types, we need to see whether
3413 // the value can be represented in a Smi. If not, we need to convert
3414 // it to a HeapNumber.
3415 Label box_int;
3416 if (array_type == kExternalIntArray) {
  // A signed value fits in a smi iff the top two bits are equal; adding
  // 0x40000000 via cmp 0xC0000000 makes the sign bit flag that condition.
3417 __ cmp(ecx, 0xC0000000);
3418 __ j(sign, &box_int);
3419 } else {
3420 ASSERT_EQ(array_type, kExternalUnsignedIntArray);
3421 // The test is different for unsigned int values. Since we need
3422 // the value to be in the range of a positive smi, we can't
3423 // handle either of the top two bits being set in the value.
3424 __ test(ecx, Immediate(0xC0000000));
3425 __ j(not_zero, &box_int);
3426 }
3427
3428 __ mov(eax, ecx);
3429 __ SmiTag(eax);
3430 __ ret(0);
3431
3432 __ bind(&box_int);
3433
3434 // Allocate a HeapNumber for the int and perform int-to-double
3435 // conversion.
3436 if (array_type == kExternalIntArray) {
3437 __ push(ecx);
3438 __ fild_s(Operand(esp, 0));
3439 __ pop(ecx);
3440 } else {
3441 ASSERT(array_type == kExternalUnsignedIntArray);
3442 // Need to zero-extend the value.
3443 // There's no fild variant for unsigned values, so zero-extend
3444 // to a 64-bit int manually.
3445 __ push(Immediate(0));
3446 __ push(ecx);
3447 __ fild_d(Operand(esp, 0));
3448 __ pop(ecx);
3449 __ pop(ecx);
3450 }
3451 // FP(0): value
3452 __ AllocateHeapNumber(ecx, ebx, edi, &failed_allocation);
3453 // Set the value.
3454 __ mov(eax, ecx);
3455 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
3456 __ ret(0);
3457 } else if (array_type == kExternalFloatArray) {
3458 // For the floating-point array type, we need to always allocate a
3459 // HeapNumber.
3460 __ AllocateHeapNumber(ecx, ebx, edi, &failed_allocation);
3461 // Set the value.
3462 __ mov(eax, ecx);
3463 __ fstp_d(FieldOperand(eax, HeapNumber::kValueOffset));
3464 __ ret(0);
3465 } else {
  // Byte and short values always fit in a smi; eax already holds the value.
3466 __ SmiTag(eax);
3467 __ ret(0);
3468 }
3469
3470 // If we fail allocation of the HeapNumber, we still have a value on
3471 // top of the FPU stack. Remove it.
3472 __ bind(&failed_allocation);
3473 __ ffree();
3474 __ fincstp();
3475 // Fall through to slow case.
3476
3477 // Slow case: Jump to runtime.
3478 __ bind(&slow);
3479 Counters* counters = isolate()->counters();
3480 __ IncrementCounter(counters->keyed_load_external_array_slow(), 1);
3481 // ----------- S t a t e -------------
3482 // -- eax : key
3483 // -- edx : receiver
3484 // -- esp[0] : return address
3485 // -----------------------------------
3486
  // Rearrange the stack into the (receiver, name, return address) layout
  // that Runtime::kKeyedGetProperty expects, then tail-call it.
3487 __ pop(ebx);
3488 __ push(edx); // receiver
3489 __ push(eax); // name
3490 __ push(ebx); // return address
3491
3492 // Perform tail call to the entry.
3493 __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
3494
3495 // Return the generated code.
3496 return GetCode(flags);
3497 }
3498
3499
CompileKeyedStoreStub(JSObject * receiver,ExternalArrayType array_type,Code::Flags flags)3500 MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
3501 JSObject* receiver, ExternalArrayType array_type, Code::Flags flags) {
3502 // ----------- S t a t e -------------
3503 // -- eax : value
3504 // -- ecx : key
3505 // -- edx : receiver
3506 // -- esp[0] : return address
3507 // -----------------------------------
3508 Label slow, check_heap_number;
3509
3510 // Check that the object isn't a smi.
3511 __ test(edx, Immediate(kSmiTagMask));
3512 __ j(zero, &slow);
3513
3514 // Check that the map matches.
  // ---- Tail of a keyed store stub for external (typed) arrays ----
  // NOTE(review): this excerpt begins mid-function; the prologue (declaring
  // the &slow / &check_heap_number labels and the receiver / array_type /
  // flags parameters) is above this view.
  // Register state on entry to this section:
  //   eax: value, ecx: key, edx: receiver, esp[0]: return address.

  // Bail out to the slow path if the receiver's map is not the map this
  // stub was specialized for.
  __ CheckMap(edx, Handle<Map>(receiver->map()), &slow, false);

  // Check that the key is a smi.
  __ test(ecx, Immediate(kSmiTagMask));
  __ j(not_zero, &slow);

  // Check that the index is in range.
  __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
  __ mov(ebx, ecx);
  __ SmiUntag(ebx);
  __ cmp(ebx, FieldOperand(edi, ExternalArray::kLengthOffset));
  // Unsigned comparison catches both negative and too-large values.
  __ j(above_equal, &slow);

  // Handle both smis and HeapNumbers in the fast path. Go to the
  // runtime for all other kinds of values.
  // eax: value
  // edx: receiver
  // ecx: key
  // edi: elements array
  // ebx: untagged index
  __ test(eax, Immediate(kSmiTagMask));
  if (array_type == kExternalPixelArray)
    // Pixel arrays have no heap-number fast path yet (see the TODO below),
    // so any non-smi value goes straight to the runtime.
    __ j(not_equal, &slow);
  else
    __ j(not_equal, &check_heap_number);

  // smi case
  __ mov(ecx, eax);  // Preserve the value in eax.  Key is no longer needed.
  __ SmiUntag(ecx);
  __ mov(edi, FieldOperand(edi, ExternalArray::kExternalPointerOffset));
  // edi: base pointer of external storage; ecx: untagged integer value.
  // NOTE(review): an earlier comment here claimed ecx held the base pointer,
  // but the mov above clearly loads it into edi.
  switch (array_type) {
    case kExternalPixelArray:
      {  // Clamp the value to [0..255].
        NearLabel done;
        // In-range values have no bits set above bit 7.
        __ test(ecx, Immediate(0xFFFFFF00));
        __ j(zero, &done);
        __ setcc(negative, ecx);  // 1 if negative, 0 if positive.
        __ dec_b(ecx);  // 0 if negative, 255 if positive.
        __ bind(&done);
      }
      __ mov_b(Operand(edi, ebx, times_1, 0), ecx);
      break;
    case kExternalByteArray:
    case kExternalUnsignedByteArray:
      // Byte-sized stores implicitly truncate to the low 8 bits.
      __ mov_b(Operand(edi, ebx, times_1, 0), ecx);
      break;
    case kExternalShortArray:
    case kExternalUnsignedShortArray:
      // 16-bit store truncates to the low 16 bits.
      __ mov_w(Operand(edi, ebx, times_2, 0), ecx);
      break;
    case kExternalIntArray:
    case kExternalUnsignedIntArray:
      __ mov(Operand(edi, ebx, times_4, 0), ecx);
      break;
    case kExternalFloatArray:
      // Need to perform int-to-float conversion: push the untagged int,
      // load it onto the FPU stack, and store as single precision.
      __ push(ecx);
      __ fild_s(Operand(esp, 0));
      __ pop(ecx);
      __ fstp_s(Operand(edi, ebx, times_4, 0));
      break;
    default:
      UNREACHABLE();
      break;
  }
  __ ret(0);  // Return the original value.

  // TODO(danno): handle heap number -> pixel array conversion
  if (array_type != kExternalPixelArray) {
    __ bind(&check_heap_number);
    // eax: value
    // edx: receiver
    // ecx: key
    // edi: elements array
    // ebx: untagged index
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           Immediate(factory()->heap_number_map()));
    __ j(not_equal, &slow);

    // The WebGL specification leaves the behavior of storing NaN and
    // +/-Infinity into integer arrays basically undefined. For more
    // reproducible behavior, convert these to zero.
    __ mov(edi, FieldOperand(edi, ExternalArray::kExternalPointerOffset));
    // ebx: untagged index
    // edi: base pointer of external storage
    if (array_type == kExternalFloatArray) {
      // Double -> single conversion via the FPU; no range check needed.
      __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
      __ fstp_s(Operand(edi, ebx, times_4, 0));
      __ ret(0);
    } else {
      // Perform float-to-int conversion with truncation (round-to-zero)
      // behavior.

      // For the moment we make the slow call to the runtime on
      // processors that don't support SSE2. The code in IntegerConvert
      // (code-stubs-ia32.cc) is roughly what is needed here though the
      // conversion failure case does not need to be handled.
      if (CpuFeatures::IsSupported(SSE2)) {
        if (array_type != kExternalIntArray &&
            array_type != kExternalUnsignedIntArray) {
          ASSERT(CpuFeatures::IsSupported(SSE2));  // Redundant with the enclosing check.
          CpuFeatures::Scope scope(SSE2);
          // cvttsd2si truncates toward zero; NaN/out-of-range produce
          // 0x80000000, which the narrow stores below reduce to 0 mod 2^n.
          __ cvttsd2si(ecx, FieldOperand(eax, HeapNumber::kValueOffset));
          // ecx: untagged integer value
          switch (array_type) {
            case kExternalPixelArray:
              // NOTE(review): currently unreachable -- pixel arrays never
              // reach check_heap_number (guarded above; see TODO). Kept for
              // when heap-number->pixel stores are implemented.
              {  // Clamp the value to [0..255].
                NearLabel done;
                __ test(ecx, Immediate(0xFFFFFF00));
                __ j(zero, &done);
                __ setcc(negative, ecx);  // 1 if negative, 0 if positive.
                __ dec_b(ecx);  // 0 if negative, 255 if positive.
                __ bind(&done);
              }
              __ mov_b(Operand(edi, ebx, times_1, 0), ecx);
              break;
            case kExternalByteArray:
            case kExternalUnsignedByteArray:
              __ mov_b(Operand(edi, ebx, times_1, 0), ecx);
              break;
            case kExternalShortArray:
            case kExternalUnsignedShortArray:
              __ mov_w(Operand(edi, ebx, times_2, 0), ecx);
              break;
            default:
              // Int arrays are handled in the else-branch below.
              UNREACHABLE();
              break;
          }
        } else {
          if (CpuFeatures::IsSupported(SSE3)) {
            CpuFeatures::Scope scope(SSE3);
            // fisttp stores values as signed integers. To represent the
            // entire range of int and unsigned int arrays, store as a
            // 64-bit int and discard the high 32 bits.
            // If the value is NaN or +/-infinity, the result is 0x80000000,
            // which is automatically zero when taken mod 2^n, n < 32.
            __ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
            __ sub(Operand(esp), Immediate(2 * kPointerSize));
            __ fisttp_d(Operand(esp, 0));
            __ pop(ecx);  // Low 32 bits of the 64-bit result.
            __ add(Operand(esp), Immediate(kPointerSize));  // Discard high 32 bits.
          } else {
            ASSERT(CpuFeatures::IsSupported(SSE2));
            CpuFeatures::Scope scope(SSE2);
            // We can easily implement the correct rounding behavior for the
            // range [0, 2^31-1]. For the time being, to keep this code simple,
            // make the slow runtime call for values outside this range.
            // Note: we could do better for signed int arrays.
            // NOTE(review): movd loads only the low 32 bits of the 64-bit
            // HeapNumber double -- a double load (movdbl/movsd) looks
            // intended here; verify against upstream.
            __ movd(xmm0, FieldOperand(eax, HeapNumber::kValueOffset));
            // We will need the key if we have to make the slow runtime call,
            // so save/restore ecx around LoadPowerOf2's use of it as scratch.
            __ push(ecx);
            __ LoadPowerOf2(xmm1, ecx, 31);  // xmm1 = 2^31.
            __ pop(ecx);
            // NOTE(review): with ucomisd(xmm1, xmm0), above_equal means
            // 2^31 >= value, which jumps to the slow path for *in-range*
            // values -- the opposite of the stated intent. Verify whether
            // the operand order (or condition) is inverted.
            __ ucomisd(xmm1, xmm0);
            __ j(above_equal, &slow);
            __ cvttsd2si(ecx, Operand(xmm0));
          }
          // ecx: untagged integer value
          __ mov(Operand(edi, ebx, times_4, 0), ecx);
        }
        __ ret(0);  // Return original value.
      }
    }
  }

  // Slow case: call runtime.
  __ bind(&slow);
  // ----------- S t a t e -------------
  //  -- eax    : value
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------

  // Rebuild the stack into the Runtime::kSetProperty calling convention:
  // (receiver, key, value, attributes, strict_mode), return address on top.
  __ pop(ebx);  // Temporarily hold the return address.
  __ push(edx);  // receiver
  __ push(ecx);  // key
  __ push(eax);  // value
  __ push(Immediate(Smi::FromInt(NONE)));  // PropertyAttributes
  // Strict-mode bit recovered from the IC's extra state in the code flags.
  __ push(Immediate(Smi::FromInt(
      Code::ExtractExtraICStateFromFlags(flags) & kStrictMode)));
  __ push(ebx);  // return address

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);

  return GetCode(flags);
}
3705
3706
3707 #undef __
3708
3709 } } // namespace v8::internal
3710
3711 #endif // V8_TARGET_ARCH_IA32
3712