1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "v8.h"
29
30 #if defined(V8_TARGET_ARCH_X64)
31
32 #include "ic-inl.h"
33 #include "codegen.h"
34 #include "stub-cache.h"
35
36 namespace v8 {
37 namespace internal {
38
39 #define __ ACCESS_MASM(masm)
40
41
// Probe a single stub-cache table (primary or secondary). If the entry
// selected by |offset| matches both |name| and |flags|, tail-jumps into
// the cached code object; otherwise falls through after the miss label.
// Clobbers kScratchRegister and, on the flags-check path, |offset|.
static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register name,
                       Register offset) {
  // The layout assumptions below (entry = two 8-byte pointers) must hold
  // for the scaled addressing to be correct.
  ASSERT_EQ(8, kPointerSize);
  ASSERT_EQ(16, sizeof(StubCache::Entry));
  // The offset register holds the entry offset times four (due to masking
  // and shifting optimizations).
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  Label miss;

  __ LoadAddress(kScratchRegister, key_offset);
  // Check that the key in the entry matches the name.
  // Multiply entry offset by 16 to get the entry address. Since the
  // offset register already holds the entry offset times four, multiply
  // by a further four.
  __ cmpl(name, Operand(kScratchRegister, offset, times_4, 0));
  __ j(not_equal, &miss);
  // Get the code entry from the cache.
  // Use key_offset + kPointerSize, rather than loading value_offset.
  __ movq(kScratchRegister,
          Operand(kScratchRegister, offset, times_4, kPointerSize));
  // Check that the flags match what we're looking for.
  __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
  __ and_(offset, Immediate(~Code::kFlagsNotUsedInLookup));
  __ cmpl(offset, Immediate(flags));
  __ j(not_equal, &miss);

  // Jump to the first instruction in the code stub.
  __ addq(kScratchRegister, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ jmp(kScratchRegister);

  __ bind(&miss);
}
78
79
80 // Helper function used to check that the dictionary doesn't contain
81 // the property. This function may return false negatives, so miss_label
82 // must always call a backup property check that is complete.
83 // This function is safe to call if the receiver has fast properties.
84 // Name must be a symbol and receiver must be a heap object.
GenerateDictionaryNegativeLookup(MacroAssembler * masm,Label * miss_label,Register receiver,String * name,Register r0,Register r1)85 static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
86 Label* miss_label,
87 Register receiver,
88 String* name,
89 Register r0,
90 Register r1) {
91 ASSERT(name->IsSymbol());
92 Counters* counters = masm->isolate()->counters();
93 __ IncrementCounter(counters->negative_lookups(), 1);
94 __ IncrementCounter(counters->negative_lookups_miss(), 1);
95
96 Label done;
97 __ movq(r0, FieldOperand(receiver, HeapObject::kMapOffset));
98
99 const int kInterceptorOrAccessCheckNeededMask =
100 (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
101
102 // Bail out if the receiver has a named interceptor or requires access checks.
103 __ testb(FieldOperand(r0, Map::kBitFieldOffset),
104 Immediate(kInterceptorOrAccessCheckNeededMask));
105 __ j(not_zero, miss_label);
106
107 // Check that receiver is a JSObject.
108 __ CmpInstanceType(r0, FIRST_JS_OBJECT_TYPE);
109 __ j(below, miss_label);
110
111 // Load properties array.
112 Register properties = r0;
113 __ movq(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));
114
115 // Check that the properties array is a dictionary.
116 __ CompareRoot(FieldOperand(properties, HeapObject::kMapOffset),
117 Heap::kHashTableMapRootIndex);
118 __ j(not_equal, miss_label);
119
120 // Compute the capacity mask.
121 const int kCapacityOffset =
122 StringDictionary::kHeaderSize +
123 StringDictionary::kCapacityIndex * kPointerSize;
124
125 // Generate an unrolled loop that performs a few probes before
126 // giving up.
127 static const int kProbes = 4;
128 const int kElementsStartOffset =
129 StringDictionary::kHeaderSize +
130 StringDictionary::kElementsStartIndex * kPointerSize;
131
132 // If names of slots in range from 1 to kProbes - 1 for the hash value are
133 // not equal to the name and kProbes-th slot is not used (its name is the
134 // undefined value), it guarantees the hash table doesn't contain the
135 // property. It's true even if some slots represent deleted properties
136 // (their names are the null value).
137 for (int i = 0; i < kProbes; i++) {
138 // r0 points to properties hash.
139 // Compute the masked index: (hash + i + i * i) & mask.
140 Register index = r1;
141 // Capacity is smi 2^n.
142 __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
143 __ decl(index);
144 __ and_(index,
145 Immediate(name->Hash() + StringDictionary::GetProbeOffset(i)));
146
147 // Scale the index by multiplying by the entry size.
148 ASSERT(StringDictionary::kEntrySize == 3);
149 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3.
150
151 Register entity_name = r1;
152 // Having undefined at this place means the name is not contained.
153 ASSERT_EQ(kSmiTagSize, 1);
154 __ movq(entity_name, Operand(properties, index, times_pointer_size,
155 kElementsStartOffset - kHeapObjectTag));
156 __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
157 // __ jmp(miss_label);
158 if (i != kProbes - 1) {
159 __ j(equal, &done);
160
161 // Stop if found the property.
162 __ Cmp(entity_name, Handle<String>(name));
163 __ j(equal, miss_label);
164
165 // Check if the entry name is not a symbol.
166 __ movq(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
167 __ testb(FieldOperand(entity_name, Map::kInstanceTypeOffset),
168 Immediate(kIsSymbolMask));
169 __ j(zero, miss_label);
170 } else {
171 // Give up probing if still not found the undefined value.
172 __ j(not_equal, miss_label);
173 }
174 }
175
176 __ bind(&done);
177 __ DecrementCounter(counters->negative_lookups_miss(), 1);
178 }
179
180
// Probes the primary and then the secondary stub-cache table for a
// monomorphic code object matching (receiver map, name, flags). On a
// hit, control transfers into the cached stub (via ProbeTable); on a
// full miss, falls through so the caller can enter the runtime.
// |scratch| is clobbered; |extra|/|extra2| are unused on x64.
void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2) {
  Isolate* isolate = masm->isolate();
  Label miss;
  USE(extra);   // The register extra is not used on the X64 platform.
  USE(extra2);  // The register extra2 is not used on the X64 platform.
  // Make sure that code is valid. The shifting code relies on the
  // entry size being 16.
  ASSERT(sizeof(Entry) == 16);

  // Make sure the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Make sure that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));

  // Check scratch register is valid, extra and extra2 are unused.
  ASSERT(!scratch.is(no_reg));
  ASSERT(extra2.is(no_reg));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ movl(scratch, FieldOperand(name, String::kHashFieldOffset));
  // Use only the low 32 bits of the map pointer.
  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(scratch, Immediate(flags));
  // Mask to a table index (pre-scaled by the heap-object tag size).
  __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the primary table.
  ProbeTable(isolate, masm, flags, kPrimary, name, scratch);

  // Primary miss: Compute hash for secondary probe.
  // The hash must be recomputed since ProbeTable clobbers scratch.
  __ movl(scratch, FieldOperand(name, String::kHashFieldOffset));
  __ addl(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(scratch, Immediate(flags));
  __ and_(scratch, Immediate((kPrimaryTableSize - 1) << kHeapObjectTagSize));
  __ subl(scratch, name);
  __ addl(scratch, Immediate(flags));
  __ and_(scratch, Immediate((kSecondaryTableSize - 1) << kHeapObjectTagSize));

  // Probe the secondary table.
  ProbeTable(isolate, masm, flags, kSecondary, name, scratch);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
}
236
237
// Loads into |prototype| the initial-map prototype of the global
// function at context slot |index| (e.g. the String function's
// prototype). Reads the current context from rsi.
void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  // Load the global or builtins object from the current context.
  __ movq(prototype,
          Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  // Load the global context from the global or builtins object.
  __ movq(prototype,
          FieldOperand(prototype, GlobalObject::kGlobalContextOffset));
  // Load the function from the global context.
  __ movq(prototype, Operand(prototype, Context::SlotOffset(index)));
  // Load the initial map. The global functions all have initial maps.
  __ movq(prototype,
          FieldOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}
255
256
// Like GenerateLoadGlobalFunctionPrototype, but bakes the function's
// initial map in at compile time. Guards against a context change by
// comparing the current context's global against the compile-time
// global and jumping to |miss| when they differ.
void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register prototype, Label* miss) {
  Isolate* isolate = masm->isolate();
  // Check we're still in the same context.
  __ Move(prototype, isolate->global());
  __ cmpq(Operand(rsi, Context::SlotOffset(Context::GLOBAL_INDEX)),
          prototype);
  __ j(not_equal, miss);
  // Get the global function with the given index.
  JSFunction* function =
      JSFunction::cast(isolate->global_context()->get(index));
  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Handle<Map>(function->initial_map()));
  // Load the prototype from the initial map.
  __ movq(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}
273
274
// Emits a load of a JSArray's length into rax followed by a return.
// Jumps to |miss_label| if |receiver| is a smi or not a JS array.
void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, miss_label);

  // Load length directly from the JS array.
  __ movq(rax, FieldOperand(receiver, JSArray::kLengthOffset));
  __ ret(0);
}
290
291
292 // Generate code to check if an object is a string. If the object is
293 // a string, the map's instance type is left in the scratch register.
// Generate code to check if an object is a string. If the object is
// a string, the map's instance type is left in the scratch register.
// Jumps to |smi| for smi receivers, |non_string_object| otherwise.
static void GenerateStringCheck(MacroAssembler* masm,
                                Register receiver,
                                Register scratch,
                                Label* smi,
                                Label* non_string_object) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, smi);

  // Check that the object is a string.
  __ movq(scratch, FieldOperand(receiver, HeapObject::kMapOffset));
  __ movzxbq(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  // String types have the kNotStringTag bit clear in the instance type.
  ASSERT(kNotStringTag != 0);
  __ testl(scratch, Immediate(kNotStringTag));
  __ j(not_zero, non_string_object);
}
309
310
// Emits a load of a string's length into rax followed by a return.
// When |support_wrappers| is true, also handles JSValue wrappers whose
// wrapped value is a string. Jumps to |miss| in all other cases.
void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
                                            Register receiver,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss,
                                            bool support_wrappers) {
  Label check_wrapper;

  // Check if the object is a string leaving the instance type in the
  // scratch register.
  GenerateStringCheck(masm, receiver, scratch1, miss,
                      support_wrappers ? &check_wrapper : miss);

  // Load length directly from the string.
  __ movq(rax, FieldOperand(receiver, String::kLengthOffset));
  __ ret(0);

  if (support_wrappers) {
    // Check if the object is a JSValue wrapper.
    __ bind(&check_wrapper);
    // scratch1 still holds the instance type from GenerateStringCheck.
    __ cmpl(scratch1, Immediate(JS_VALUE_TYPE));
    __ j(not_equal, miss);

    // Check if the wrapped value is a string and load the length
    // directly if it is.
    __ movq(scratch2, FieldOperand(receiver, JSValue::kValueOffset));
    GenerateStringCheck(masm, scratch2, scratch1, miss, miss);
    __ movq(rax, FieldOperand(scratch2, String::kLengthOffset));
    __ ret(0);
  }
}
342
343
// Emits a load of a function's prototype into rax followed by a
// return. Jumps to |miss_label| when the prototype cannot be fetched
// directly (delegated to MacroAssembler::TryGetFunctionPrototype).
void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register result,
                                                 Register scratch,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, result, miss_label);
  // Move the result into the return register if it isn't there already.
  if (!result.is(rax)) __ movq(rax, result);
  __ ret(0);
}
353
354
355 // Load a fast property out of a holder object (src). In-object properties
356 // are loaded directly otherwise the property is loaded from the properties
357 // fixed array.
// Load a fast property out of a holder object (src). In-object properties
// are loaded directly otherwise the property is loaded from the properties
// fixed array.
void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst, Register src,
                                            JSObject* holder, int index) {
  // Adjust for the number of properties stored in the holder.
  // A negative result means the property lives in-object.
  index -= holder->map()->inobject_properties();
  if (index < 0) {
    // Get the property straight out of the holder.
    // In-object slots sit at the end of the instance, hence the
    // negative index offsets back from instance_size.
    int offset = holder->map()->instance_size() + (index * kPointerSize);
    __ movq(dst, FieldOperand(src, offset));
  } else {
    // Calculate the offset into the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    __ movq(dst, FieldOperand(src, JSObject::kPropertiesOffset));
    __ movq(dst, FieldOperand(dst, offset));
  }
}
374
375
// Pushes the five arguments expected by the interceptor IC utilities:
// name, interceptor info, receiver, holder, and the interceptor's data.
// Clobbers kScratchRegister.
static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     JSObject* holder_obj) {
  __ push(name);
  InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
  // The interceptor must be in old space so the embedded pointer
  // stays valid without a write barrier.
  ASSERT(!masm->isolate()->heap()->InNewSpace(interceptor));
  __ Move(kScratchRegister, Handle<Object>(interceptor));
  __ push(kScratchRegister);
  __ push(receiver);
  __ push(holder);
  __ push(FieldOperand(kScratchRegister, InterceptorInfo::kDataOffset));
}
390
391
// Emits a call into the runtime that invokes only the named
// interceptor's getter (kLoadPropertyWithInterceptorOnly) with the
// five arguments pushed by PushInterceptorArguments.
static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
                                                   Register receiver,
                                                   Register holder,
                                                   Register name,
                                                   JSObject* holder_obj) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly),
                        masm->isolate());
  // Five arguments were pushed above; rax carries the argument count
  // and rbx the C entry point for CEntryStub.
  __ Set(rax, 5);
  __ LoadAddress(rbx, ref);

  CEntryStub stub(1);
  __ CallStub(&stub);
}
408
409
// Number of pointers to be reserved on stack for fast API call.
// These are the type-check object, the api function and the call data
// (see the stack layout documented in GenerateFastApiCall).
static const int kFastApiCallArguments = 3;
412
413
414 // Reserves space for the extra arguments to API function in the
415 // caller's frame.
416 //
417 // These arguments are set by CheckPrototypes and GenerateFastApiCall.
// Reserves space for the extra arguments to API function in the
// caller's frame.
//
// These arguments are set by CheckPrototypes and GenerateFastApiCall.
static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
  // ----------- S t a t e -------------
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument in the internal frame of the caller
  // -----------------------------------
  // Move the return address down past the reserved slots.
  __ movq(scratch, Operand(rsp, 0));
  __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));
  __ movq(Operand(rsp, 0), scratch);
  // Fill the reserved slots with smi zero so the GC sees valid values.
  __ Move(scratch, Smi::FromInt(0));
  for (int i = 1; i <= kFastApiCallArguments; i++) {
    __ movq(Operand(rsp, i * kPointerSize), scratch);
  }
}
431
432
433 // Undoes the effects of ReserveSpaceForFastApiCall.
// Undoes the effects of ReserveSpaceForFastApiCall.
static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
  // ----------- S t a t e -------------
  //  -- rsp[0]  : return address.
  //  -- rsp[8]  : last fast api call extra argument.
  //  -- ...
  //  -- rsp[kFastApiCallArguments * 8] : first fast api call extra argument.
  //  -- rsp[kFastApiCallArguments * 8 + 8] : last argument in the internal
  //                                          frame.
  // -----------------------------------
  // Move the return address back up and drop the reserved slots.
  __ movq(scratch, Operand(rsp, 0));
  __ movq(Operand(rsp, kFastApiCallArguments * kPointerSize), scratch);
  __ addq(rsp, Immediate(kPointerSize * kFastApiCallArguments));
}
447
448
449 // Generates call to API function.
// Generates call to API function. Fills in the extra stack slots set
// up by ReserveSpaceForFastApiCall, builds a v8::Arguments structure
// on the stack and performs the API call. Returns a failure object if
// stub-call emission needed an allocation that could not be satisfied.
static MaybeObject* GenerateFastApiCall(MacroAssembler* masm,
                                        const CallOptimization& optimization,
                                        int argc) {
  // ----------- S t a t e -------------
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : object passing the type check
  //                           (last fast api call extra argument,
  //                            set by CheckPrototypes)
  //  -- rsp[16]             : api function
  //                           (first fast api call extra argument)
  //  -- rsp[24]             : api call data
  //  -- rsp[32]             : last argument
  //  -- ...
  //  -- rsp[(argc + 3) * 8] : first argument
  //  -- rsp[(argc + 4) * 8] : receiver
  // -----------------------------------
  // Get the function and setup the context.
  JSFunction* function = optimization.constant_function();
  __ Move(rdi, Handle<JSFunction>(function));
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Pass the additional arguments.
  __ movq(Operand(rsp, 2 * kPointerSize), rdi);
  Object* call_data = optimization.api_call_info()->data();
  Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
  if (masm->isolate()->heap()->InNewSpace(call_data)) {
    // The call data may move; load it via the call info at runtime.
    __ Move(rcx, api_call_info_handle);
    __ movq(rbx, FieldOperand(rcx, CallHandlerInfo::kDataOffset));
    __ movq(Operand(rsp, 3 * kPointerSize), rbx);
  } else {
    // Old-space call data can be embedded directly.
    __ Move(Operand(rsp, 3 * kPointerSize), Handle<Object>(call_data));
  }

  // Prepare arguments.
  __ lea(rbx, Operand(rsp, 3 * kPointerSize));

  Object* callback = optimization.api_call_info()->callback();
  Address api_function_address = v8::ToCData<Address>(callback);
  ApiFunction fun(api_function_address);

#ifdef _WIN64
  // Win64 uses first register--rcx--for returned value.
  Register arguments_arg = rdx;
#else
  Register arguments_arg = rdi;
#endif

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 4;

  __ PrepareCallApiFunction(kApiStackSpace);

  __ movq(StackSpaceOperand(0), rbx);  // v8::Arguments::implicit_args_.
  __ addq(rbx, Immediate(argc * kPointerSize));
  __ movq(StackSpaceOperand(1), rbx);  // v8::Arguments::values_.
  __ Set(StackSpaceOperand(2), argc);  // v8::Arguments::length_.
  // v8::Arguments::is_construct_call_.
  __ Set(StackSpaceOperand(3), 0);

  // v8::InvocationCallback's argument.
  __ lea(arguments_arg, StackSpaceOperand(0));
  // Emitting a stub call may try to allocate (if the code is not
  // already generated). Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  return masm->TryCallApiFunctionAndReturn(&fun,
                                           argc + kFastApiCallArguments + 1);
}
519
520
// Compiles the call path through a named interceptor: invokes the
// interceptor and, depending on its result, either calls a cached
// constant function (possibly via the fast API path) or dispatches
// to the value the interceptor produced.
class CallInterceptorCompiler BASE_EMBEDDED {
 public:
  CallInterceptorCompiler(StubCompiler* stub_compiler,
                          const ParameterCount& arguments,
                          Register name)
      : stub_compiler_(stub_compiler),
        arguments_(arguments),
        name_(name) {}

  // Entry point. Chooses the cacheable (constant-function) path when
  // the lookup resolves to a constant call, otherwise the regular
  // interceptor-call path. Returns a failure object on allocation
  // failure, undefined on success.
  MaybeObject* Compile(MacroAssembler* masm,
                       JSObject* object,
                       JSObject* holder,
                       String* name,
                       LookupResult* lookup,
                       Register receiver,
                       Register scratch1,
                       Register scratch2,
                       Register scratch3,
                       Label* miss) {
    ASSERT(holder->HasNamedInterceptor());
    ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());

    // Check that the receiver isn't a smi.
    __ JumpIfSmi(receiver, miss);

    CallOptimization optimization(lookup);

    if (optimization.is_constant_call()) {
      return CompileCacheable(masm,
                              object,
                              receiver,
                              scratch1,
                              scratch2,
                              scratch3,
                              holder,
                              lookup,
                              name,
                              optimization,
                              miss);
    } else {
      CompileRegular(masm,
                     object,
                     receiver,
                     scratch1,
                     scratch2,
                     scratch3,
                     name,
                     holder,
                     miss);
      return masm->isolate()->heap()->undefined_value();  // Success.
    }
  }

 private:
  // Constant-function path: probes the interceptor first; if it yields
  // nothing, invokes the cached constant function, optionally through
  // the fast API call mechanism when the prototype chain allows it.
  MaybeObject* CompileCacheable(MacroAssembler* masm,
                                JSObject* object,
                                Register receiver,
                                Register scratch1,
                                Register scratch2,
                                Register scratch3,
                                JSObject* interceptor_holder,
                                LookupResult* lookup,
                                String* name,
                                const CallOptimization& optimization,
                                Label* miss_label) {
    ASSERT(optimization.is_constant_call());
    ASSERT(!lookup->holder()->IsGlobalObject());

    // Determine whether the fast API path applies by checking the
    // expected receiver type's depth on either segment of the chain.
    int depth1 = kInvalidProtoDepth;
    int depth2 = kInvalidProtoDepth;
    bool can_do_fast_api_call = false;
    if (optimization.is_simple_api_call() &&
        !lookup->holder()->IsGlobalObject()) {
      depth1 =
          optimization.GetPrototypeDepthOfExpectedType(object,
                                                       interceptor_holder);
      if (depth1 == kInvalidProtoDepth) {
        depth2 =
            optimization.GetPrototypeDepthOfExpectedType(interceptor_holder,
                                                         lookup->holder());
      }
      can_do_fast_api_call = (depth1 != kInvalidProtoDepth) ||
                             (depth2 != kInvalidProtoDepth);
    }

    Counters* counters = masm->isolate()->counters();
    __ IncrementCounter(counters->call_const_interceptor(), 1);

    if (can_do_fast_api_call) {
      __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1);
      ReserveSpaceForFastApiCall(masm, scratch1);
    }

    // Check that the maps from receiver to interceptor's holder
    // haven't changed and thus we can invoke interceptor.
    Label miss_cleanup;
    // The fast API path needs its reserved stack slots freed on miss.
    Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver,
                                        interceptor_holder, scratch1,
                                        scratch2, scratch3, name, depth1, miss);

    // Invoke an interceptor and if it provides a value,
    // branch to |regular_invoke|.
    Label regular_invoke;
    LoadWithInterceptor(masm, receiver, holder, interceptor_holder,
                        &regular_invoke);

    // Interceptor returned nothing for this property.  Try to use cached
    // constant function.

    // Check that the maps from interceptor's holder to constant function's
    // holder haven't changed and thus we can use cached constant function.
    if (interceptor_holder != lookup->holder()) {
      stub_compiler_->CheckPrototypes(interceptor_holder, receiver,
                                      lookup->holder(), scratch1,
                                      scratch2, scratch3, name, depth2, miss);
    } else {
      // CheckPrototypes has a side effect of fetching a 'holder'
      // register from the stack frame of optimization.
      // (See comment above.) It's
      // safe to omit it here, as if present, it should be fetched
      // by the previous CheckPrototypes.
      ASSERT(depth2 == kInvalidProtoDepth);
    }

    // Invoke function.
    if (can_do_fast_api_call) {
      MaybeObject* result = GenerateFastApiCall(masm,
                                                optimization,
                                                arguments_.immediate());
      if (result->IsFailure()) return result;
    } else {
      __ InvokeFunction(optimization.constant_function(), arguments_,
                        JUMP_FUNCTION);
    }

    // Deferred code for fast API call case---clean preallocated space.
    if (can_do_fast_api_call) {
      __ bind(&miss_cleanup);
      FreeSpaceForFastApiCall(masm, scratch1);
      __ jmp(miss_label);
    }

    // Invoke a regular function.
    __ bind(&regular_invoke);
    if (can_do_fast_api_call) {
      FreeSpaceForFastApiCall(masm, scratch1);
    }

    return masm->isolate()->heap()->undefined_value();  // Success.
  }

  // Non-cacheable path: call the runtime interceptor-for-call utility
  // inside an internal frame, preserving the name register.
  void CompileRegular(MacroAssembler* masm,
                      JSObject* object,
                      Register receiver,
                      Register scratch1,
                      Register scratch2,
                      Register scratch3,
                      String* name,
                      JSObject* interceptor_holder,
                      Label* miss_label) {
    Register holder =
        stub_compiler_->CheckPrototypes(object, receiver, interceptor_holder,
                                        scratch1, scratch2, scratch3, name,
                                        miss_label);

    __ EnterInternalFrame();
    // Save the name_ register across the call.
    __ push(name_);

    PushInterceptorArguments(masm,
                             receiver,
                             holder,
                             name_,
                             interceptor_holder);

    __ CallExternalReference(
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForCall),
                          masm->isolate()),
        5);

    // Restore the name_ register.
    __ pop(name_);
    __ LeaveInternalFrame();
  }

  // Calls the interceptor-only runtime helper. Branches to
  // |interceptor_succeeded| when the interceptor produced a value
  // (i.e. rax is not the no-interceptor-result sentinel).
  void LoadWithInterceptor(MacroAssembler* masm,
                           Register receiver,
                           Register holder,
                           JSObject* holder_obj,
                           Label* interceptor_succeeded) {
    __ EnterInternalFrame();
    __ push(holder);  // Save the holder.
    __ push(name_);   // Save the name.

    CompileCallLoadPropertyWithInterceptor(masm,
                                           receiver,
                                           holder,
                                           name_,
                                           holder_obj);

    __ pop(name_);      // Restore the name.
    __ pop(receiver);   // Restore the holder.
    __ LeaveInternalFrame();

    __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
    __ j(not_equal, interceptor_succeeded);
  }

  StubCompiler* stub_compiler_;
  const ParameterCount& arguments_;
  Register name_;
};
734
735
GenerateLoadMiss(MacroAssembler * masm,Code::Kind kind)736 void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
737 ASSERT(kind == Code::LOAD_IC || kind == Code::KEYED_LOAD_IC);
738 Code* code = NULL;
739 if (kind == Code::LOAD_IC) {
740 code = masm->isolate()->builtins()->builtin(Builtins::kLoadIC_Miss);
741 } else {
742 code = masm->isolate()->builtins()->builtin(Builtins::kKeyedLoadIC_Miss);
743 }
744
745 Handle<Code> ic(code);
746 __ Jump(ic, RelocInfo::CODE_TARGET);
747 }
748
749
750 // Both name_reg and receiver_reg are preserved on jumps to miss_label,
751 // but may be destroyed if store is successful.
// Both name_reg and receiver_reg are preserved on jumps to miss_label,
// but may be destroyed if store is successful.
// Stores rax into the field at |index|, optionally transitioning the
// receiver's map to |transition| first; falls through to the runtime
// when the transition requires growing the properties backing store.
void StubCompiler::GenerateStoreField(MacroAssembler* masm,
                                      JSObject* object,
                                      int index,
                                      Map* transition,
                                      Register receiver_reg,
                                      Register name_reg,
                                      Register scratch,
                                      Label* miss_label) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver_reg, miss_label);

  // Check that the map of the object hasn't changed.
  __ Cmp(FieldOperand(receiver_reg, HeapObject::kMapOffset),
         Handle<Map>(object->map()));
  __ j(not_equal, miss_label);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver_reg, scratch, miss_label);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ pop(scratch);  // Return address.
    __ push(receiver_reg);
    __ Push(Handle<Map>(transition));
    __ push(rax);
    __ push(scratch);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  if (transition != NULL) {
    // Update the map of the object; no write barrier updating is
    // needed because the map is never in new space.
    __ Move(FieldOperand(receiver_reg, HeapObject::kMapOffset),
            Handle<Map>(transition));
  }

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    __ movq(FieldOperand(receiver_reg, offset), rax);

    // Update the write barrier for the array address.
    // Pass the value being stored in the now unused name_reg.
    __ movq(name_reg, rax);
    __ RecordWrite(receiver_reg, offset, name_reg, scratch);
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ movq(scratch, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ movq(FieldOperand(scratch, offset), rax);

    // Update the write barrier for the array address.
    // Pass the value being stored in the now unused name_reg.
    __ movq(name_reg, rax);
    __ RecordWrite(scratch, offset, name_reg, receiver_reg);
  }

  // Return the value (register rax).
  __ ret(0);
}
831
832
833 // Generate code to check that a global property cell is empty. Create
834 // the property cell at compilation time if no cell exists for the
835 // property.
MUST_USE_RESULT static MaybeObject* GenerateCheckPropertyCell(
    MacroAssembler* masm,
    GlobalObject* global,
    String* name,
    Register scratch,
    Label* miss) {
  // Ensure the global object has a property cell for |name|.  This may
  // allocate, so failures are propagated through the MaybeObject result.
  Object* probe;
  { MaybeObject* maybe_probe = global->EnsurePropertyCell(name);
    if (!maybe_probe->ToObject(&probe)) return maybe_probe;
  }
  JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe);
  // The stub being compiled assumes the property is absent, so the cell
  // must currently hold the hole.
  ASSERT(cell->value()->IsTheHole());
  __ Move(scratch, Handle<Object>(cell));
  // At run time, jump to |miss| if the cell has since been given a value
  // (i.e. the property was defined after this stub was compiled).
  __ Cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset),
         masm->isolate()->factory()->the_hole_value());
  __ j(not_equal, miss);
  return cell;
}
854
855
856 #undef __
857 #define __ ACCESS_MASM((masm()))
858
859
// Walk the prototype chain from |object| up to |holder|, emitting map
// checks (and access checks for global proxies) that jump to |miss| on
// any mismatch.  Returns the register holding the holder at the end of
// the emitted sequence.  May record a compile-time allocation failure
// via set_failure(); callers must check for that after calling.
Register StubCompiler::CheckPrototypes(JSObject* object,
                                       Register object_reg,
                                       JSObject* holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       String* name,
                                       int save_at_depth,
                                       Label* miss) {
  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg. On the first
  // iteration, reg is an alias for object_reg, on later iterations,
  // it is an alias for holder_reg.
  Register reg = object_reg;
  int depth = 0;

  if (save_at_depth == depth) {
    __ movq(Operand(rsp, kPointerSize), object_reg);
  }

  // Check the maps in the prototype chain.
  // Traverse the prototype chain from the object and do map checks.
  JSObject* current = object;
  while (current != holder) {
    depth++;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());

    JSObject* prototype = JSObject::cast(current->GetPrototype());
    if (!current->HasFastProperties() &&
        !current->IsJSGlobalObject() &&
        !current->IsJSGlobalProxy()) {
      // Slow-mode (dictionary) object: instead of a map check, prove at
      // run time that it does NOT own the named property.
      if (!name->IsSymbol()) {
        // The negative dictionary lookup needs a symbol; symbolizing
        // can fail under memory pressure.
        MaybeObject* lookup_result = heap()->LookupSymbol(name);
        if (lookup_result->IsFailure()) {
          set_failure(Failure::cast(lookup_result));
          return reg;
        } else {
          name = String::cast(lookup_result->ToObjectUnchecked());
        }
      }
      ASSERT(current->property_dictionary()->FindEntry(name) ==
             StringDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(),
                                       miss,
                                       reg,
                                       name,
                                       scratch1,
                                       scratch2);
      __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // from now the object is in holder_reg
      __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else if (heap()->InNewSpace(prototype)) {
      // Get the map of the current object.
      __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      __ Cmp(scratch1, Handle<Map>(current->map()));
      // Branch on the result of the map check.
      __ j(not_equal, miss);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);

        // Restore scratch register to be the map of the object.
        // We load the prototype from the map in the scratch register.
        __ movq(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      }
      // The prototype is in new space; we cannot store a reference
      // to it in the code. Load it from the map.
      reg = holder_reg;  // from now the object is in holder_reg
      __ movq(reg, FieldOperand(scratch1, Map::kPrototypeOffset));

    } else {
      // Check the map of the current object.
      __ Cmp(FieldOperand(reg, HeapObject::kMapOffset),
             Handle<Map>(current->map()));
      // Branch on the result of the map check.
      __ j(not_equal, miss);
      // Check access rights to the global object. This has to happen
      // after the map check so that we know that the object is
      // actually a global object.
      if (current->IsJSGlobalProxy()) {
        __ CheckAccessGlobalProxy(reg, scratch1, miss);
      }
      // The prototype is in old space; load it directly.
      reg = holder_reg;  // from now the object is in holder_reg
      __ Move(reg, Handle<JSObject>(prototype));
    }

    if (save_at_depth == depth) {
      __ movq(Operand(rsp, kPointerSize), reg);
    }

    // Go to the next object in the prototype chain.
    current = prototype;
  }

  // Check the holder map.
  __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), Handle<Map>(holder->map()));
  __ j(not_equal, miss);

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  // Perform security check for access to the global object and return
  // the holder register.
  ASSERT(current == holder);
  ASSERT(current->IsJSGlobalProxy() || !current->IsAccessCheckNeeded());
  if (current->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(reg, scratch1, miss);
  }

  // If we've skipped any global objects, it's not enough to verify
  // that their maps haven't changed. We also need to check that the
  // property cell for the property is still empty.
  current = object;
  while (current != holder) {
    if (current->IsGlobalObject()) {
      // Creating the property cell may allocate; treat allocation
      // failure as a compile-time failure of this stub.
      MaybeObject* cell = GenerateCheckPropertyCell(masm(),
                                                    GlobalObject::cast(current),
                                                    name,
                                                    scratch1,
                                                    miss);
      if (cell->IsFailure()) {
        set_failure(Failure::cast(cell));
        return reg;
      }
    }
    current = JSObject::cast(current->GetPrototype());
  }

  // Return the register containing the holder.
  return reg;
}
1002
1003
GenerateLoadField(JSObject * object,JSObject * holder,Register receiver,Register scratch1,Register scratch2,Register scratch3,int index,String * name,Label * miss)1004 void StubCompiler::GenerateLoadField(JSObject* object,
1005 JSObject* holder,
1006 Register receiver,
1007 Register scratch1,
1008 Register scratch2,
1009 Register scratch3,
1010 int index,
1011 String* name,
1012 Label* miss) {
1013 // Check that the receiver isn't a smi.
1014 __ JumpIfSmi(receiver, miss);
1015
1016 // Check the prototype chain.
1017 Register reg =
1018 CheckPrototypes(object, receiver, holder,
1019 scratch1, scratch2, scratch3, name, miss);
1020
1021 // Get the value from the properties.
1022 GenerateFastPropertyLoad(masm(), rax, reg, holder, index);
1023 __ ret(0);
1024 }
1025
1026
// Emit a load of property |name| through the AccessorInfo |callback|.
// Checks the receiver and prototype chain, builds the argument block
// for v8::AccessorInfo on the stack, and calls the C++ getter through
// the API.  The result of TryCallApiFunctionAndReturn may be an
// allocation failure, which the caller must propagate.
MaybeObject* StubCompiler::GenerateLoadCallback(JSObject* object,
                                                JSObject* holder,
                                                Register receiver,
                                                Register name_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                AccessorInfo* callback,
                                                String* name,
                                                Label* miss) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the maps haven't changed.
  Register reg =
      CheckPrototypes(object, receiver, holder, scratch1,
                      scratch2, scratch3, name, miss);

  Handle<AccessorInfo> callback_handle(callback);

  // Insert additional parameters into the stack frame above return address.
  ASSERT(!scratch2.is(reg));
  __ pop(scratch2);  // Get return address to place it below.

  __ push(receiver);  // receiver
  __ push(reg);  // holder
  if (heap()->InNewSpace(callback_handle->data())) {
    // The data object is in new space, so it cannot be embedded in the
    // code; load it out of the AccessorInfo at run time instead.
    __ Move(scratch1, callback_handle);
    __ push(FieldOperand(scratch1, AccessorInfo::kDataOffset));  // data
  } else {
    __ Push(Handle<Object>(callback_handle->data()));
  }
  __ push(name_reg);  // name
  // Save a pointer to where we pushed the arguments pointer.
  // This will be passed as the const AccessorInfo& to the C++ callback.

#ifdef _WIN64
  // Win64 uses first register--rcx--for returned value.
  Register accessor_info_arg = r8;
  Register name_arg = rdx;
#else
  Register accessor_info_arg = rsi;
  Register name_arg = rdi;
#endif

  ASSERT(!name_arg.is(scratch2));
  __ movq(name_arg, rsp);
  __ push(scratch2);  // Restore return address.

  // Do call through the api.
  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);

  // 3 elements array for v8::Arguments::values_ and handler for name.
  const int kStackSpace = 4;

  // Allocate v8::AccessorInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  __ PrepareCallApiFunction(kArgStackSpace);
  __ lea(rax, Operand(name_arg, 3 * kPointerSize));

  // v8::AccessorInfo::args_.
  __ movq(StackSpaceOperand(0), rax);

  // The context register (rsi) has been saved in PrepareCallApiFunction and
  // could be used to pass arguments.
  __ lea(accessor_info_arg, StackSpaceOperand(0));

  // Emitting a stub call may try to allocate (if the code is not
  // already generated). Do not allow the assembler to perform a
  // garbage collection but instead return the allocation failure
  // object.
  return masm()->TryCallApiFunctionAndReturn(&fun, kStackSpace);
}
1102
1103
GenerateLoadConstant(JSObject * object,JSObject * holder,Register receiver,Register scratch1,Register scratch2,Register scratch3,Object * value,String * name,Label * miss)1104 void StubCompiler::GenerateLoadConstant(JSObject* object,
1105 JSObject* holder,
1106 Register receiver,
1107 Register scratch1,
1108 Register scratch2,
1109 Register scratch3,
1110 Object* value,
1111 String* name,
1112 Label* miss) {
1113 // Check that the receiver isn't a smi.
1114 __ JumpIfSmi(receiver, miss);
1115
1116 // Check that the maps haven't changed.
1117 Register reg =
1118 CheckPrototypes(object, receiver, holder,
1119 scratch1, scratch2, scratch3, name, miss);
1120
1121 // Return the constant value.
1122 __ Move(rax, Handle<Object>(value));
1123 __ ret(0);
1124 }
1125
1126
// Emit a named-property load through an interceptor.  When the lookup
// behind the interceptor is a cacheable FIELD or AccessorInfo-based
// CALLBACKS property, the post-interceptor load is compiled inline;
// otherwise the whole load is delegated to the runtime.
void StubCompiler::GenerateLoadInterceptor(JSObject* object,
                                           JSObject* interceptor_holder,
                                           LookupResult* lookup,
                                           Register receiver,
                                           Register name_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           String* name,
                                           Label* miss) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // So far the most popular follow ups for interceptor loads are FIELD
  // and CALLBACKS, so inline only them, other cases may be added
  // later.
  bool compile_followup_inline = false;
  if (lookup->IsProperty() && lookup->IsCacheable()) {
    if (lookup->type() == FIELD) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
        lookup->GetCallbackObject()->IsAccessorInfo() &&
        AccessorInfo::cast(lookup->GetCallbackObject())->getter() != NULL) {
      compile_followup_inline = true;
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    ASSERT(holder_reg.is(receiver) || holder_reg.is(scratch1));

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    __ EnterInternalFrame();

    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      // CALLBACKS case needs a receiver to be passed into C++ callback.
      __ push(receiver);
    }
    __ push(holder_reg);
    __ push(name_reg);

    // Invoke an interceptor. Note: map checks from receiver to
    // interceptor's holder has been compiled before (see a caller
    // of this method.)
    CompileCallLoadPropertyWithInterceptor(masm(),
                                           receiver,
                                           holder_reg,
                                           name_reg,
                                           interceptor_holder);

    // Check if interceptor provided a value for property. If it's
    // the case, return immediately.
    Label interceptor_failed;
    __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
    __ j(equal, &interceptor_failed);
    __ LeaveInternalFrame();
    __ ret(0);

    // Interceptor had no result: restore the saved registers (in the
    // reverse order of the pushes above) and fall through to the
    // post-interceptor lookup.
    __ bind(&interceptor_failed);
    __ pop(name_reg);
    __ pop(holder_reg);
    if (lookup->type() == CALLBACKS && !receiver.is(holder_reg)) {
      __ pop(receiver);
    }

    __ LeaveInternalFrame();

    // Check that the maps from interceptor's holder to lookup's holder
    // haven't changed. And load lookup's holder into |holder| register.
    if (interceptor_holder != lookup->holder()) {
      holder_reg = CheckPrototypes(interceptor_holder,
                                   holder_reg,
                                   lookup->holder(),
                                   scratch1,
                                   scratch2,
                                   scratch3,
                                   name,
                                   miss);
    }

    if (lookup->type() == FIELD) {
      // We found FIELD property in prototype chain of interceptor's holder.
      // Retrieve a field from field's holder.
      GenerateFastPropertyLoad(masm(), rax, holder_reg,
                               lookup->holder(), lookup->GetFieldIndex());
      __ ret(0);
    } else {
      // We found CALLBACKS property in prototype chain of interceptor's
      // holder.
      ASSERT(lookup->type() == CALLBACKS);
      ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
      AccessorInfo* callback = AccessorInfo::cast(lookup->GetCallbackObject());
      ASSERT(callback != NULL);
      ASSERT(callback->getter() != NULL);

      // Tail call to runtime.
      // Important invariant in CALLBACKS case: the code above must be
      // structured to never clobber |receiver| register.
      __ pop(scratch2);  // return address
      __ push(receiver);
      __ push(holder_reg);
      __ Move(holder_reg, Handle<AccessorInfo>(callback));
      __ push(FieldOperand(holder_reg, AccessorInfo::kDataOffset));
      __ push(holder_reg);
      __ push(name_reg);
      __ push(scratch2);  // restore return address

      ExternalReference ref =
          ExternalReference(IC_Utility(IC::kLoadCallbackProperty),
                            isolate());
      __ TailCallExternalReference(ref, 5, 1);
    }
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    Register holder_reg = CheckPrototypes(object, receiver, interceptor_holder,
                                          scratch1, scratch2, scratch3,
                                          name, miss);
    __ pop(scratch2);  // save old return address
    PushInterceptorArguments(masm(), receiver, holder_reg,
                             name_reg, interceptor_holder);
    __ push(scratch2);  // restore old return address

    ExternalReference ref = ExternalReference(
        IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
    __ TailCallExternalReference(ref, 5, 1);
  }
}
1264
1265
GenerateNameCheck(String * name,Label * miss)1266 void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
1267 if (kind_ == Code::KEYED_CALL_IC) {
1268 __ Cmp(rcx, Handle<String>(name));
1269 __ j(not_equal, miss);
1270 }
1271 }
1272
1273
GenerateGlobalReceiverCheck(JSObject * object,JSObject * holder,String * name,Label * miss)1274 void CallStubCompiler::GenerateGlobalReceiverCheck(JSObject* object,
1275 JSObject* holder,
1276 String* name,
1277 Label* miss) {
1278 ASSERT(holder->IsGlobalObject());
1279
1280 // Get the number of arguments.
1281 const int argc = arguments().immediate();
1282
1283 // Get the receiver from the stack.
1284 __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));
1285
1286 // If the object is the holder then we know that it's a global
1287 // object which can only happen for contextual calls. In this case,
1288 // the receiver cannot be a smi.
1289 if (object != holder) {
1290 __ JumpIfSmi(rdx, miss);
1291 }
1292
1293 // Check that the maps haven't changed.
1294 CheckPrototypes(object, rdx, holder, rbx, rax, rdi, name, miss);
1295 }
1296
1297
// Load the current value of the global property cell into rdi and jump
// to |miss| unless it is still (a closure of) |function|.
void CallStubCompiler::GenerateLoadFunctionFromCell(JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    Label* miss) {
  // Get the value from the cell.
  __ Move(rdi, Handle<JSGlobalPropertyCell>(cell));
  __ movq(rdi, FieldOperand(rdi, JSGlobalPropertyCell::kValueOffset));

  // Check that the cell contains the same function.
  if (heap()->InNewSpace(function)) {
    // We can't embed a pointer to a function in new space so we have
    // to verify that the shared function info is unchanged. This has
    // the nice side effect that multiple closures based on the same
    // function can all use this call IC. Before we load through the
    // function, we have to verify that it still is a function.
    __ JumpIfSmi(rdi, miss);
    __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rax);
    __ j(not_equal, miss);

    // Check the shared function info. Make sure it hasn't changed.
    __ Move(rax, Handle<SharedFunctionInfo>(function->shared()));
    __ cmpq(FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset), rax);
    __ j(not_equal, miss);
  } else {
    // Old-space function: compare the cell value against the function
    // pointer embedded directly in the code.
    __ Cmp(rdi, Handle<JSFunction>(function));
    __ j(not_equal, miss);
  }
}
1325
1326
GenerateMissBranch()1327 MaybeObject* CallStubCompiler::GenerateMissBranch() {
1328 MaybeObject* maybe_obj = isolate()->stub_cache()->ComputeCallMiss(
1329 arguments().immediate(), kind_);
1330 Object* obj;
1331 if (!maybe_obj->ToObject(&obj)) return maybe_obj;
1332 __ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
1333 return obj;
1334 }
1335
1336
// Compile a call IC stub for a callee stored in a fast property of the
// receiver (or an object on its prototype chain).  Loads the property,
// verifies it is a function, and invokes it; any failed check jumps to
// the shared call-miss stub.
MaybeObject* CallStubCompiler::CompileCallField(JSObject* object,
                                                JSObject* holder,
                                                int index,
                                                String* name) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------
  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  // Do the right check and compute the holder register.
  Register reg = CheckPrototypes(object, rdx, holder, rbx, rax, rdi,
                                 name, &miss);

  // Load the callee out of the holder into rdi.
  GenerateFastPropertyLoad(masm(), rdi, reg, holder, index);

  // Check that the function really is a function.
  __ JumpIfSmi(rdi, &miss);
  __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rbx);
  __ j(not_equal, &miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
    __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
  }

  // Invoke the function.
  __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION);

  // Handle call cache miss.
  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(FIELD, name);
}
1390
1391
// Specialized stub for Array.prototype.push on a JSArray receiver with
// fast, writable elements.  Handles argc == 0 (return the length
// unchanged) and argc == 1 inline, including growing new-space element
// backing stores by kAllocationDelta slots; all other cases fall back
// to the C++ builtin.
MaybeObject* CallStubCompiler::CompileArrayPushCall(Object* object,
                                                    JSObject* holder,
                                                    JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    String* name) {
  // ----------- S t a t e -------------
  //  -- rcx                 : name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  CheckPrototypes(JSObject::cast(object),
                  rdx,
                  holder,
                  rbx,
                  rax,
                  rdi,
                  name,
                  &miss);

  if (argc == 0) {
    // Noop, return the length.
    __ movq(rax, FieldOperand(rdx, JSArray::kLengthOffset));
    __ ret((argc + 1) * kPointerSize);
  } else {
    Label call_builtin;

    // Get the elements array of the object.
    __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));

    // Check that the elements are in fast mode and writable.
    __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
           factory()->fixed_array_map());
    __ j(not_equal, &call_builtin);

    if (argc == 1) {  // Otherwise fall through to call builtin.
      Label exit, with_write_barrier, attempt_to_grow_elements;

      // Get the array's length into rax and calculate new length.
      __ SmiToInteger32(rax, FieldOperand(rdx, JSArray::kLengthOffset));
      STATIC_ASSERT(FixedArray::kMaxLength < Smi::kMaxValue);
      __ addl(rax, Immediate(argc));

      // Get the element's length into rcx.
      __ SmiToInteger32(rcx, FieldOperand(rbx, FixedArray::kLengthOffset));

      // Check if we could survive without allocation.
      __ cmpl(rax, rcx);
      __ j(greater, &attempt_to_grow_elements);

      // Save new length.
      __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rax);

      // Push the element.
      __ movq(rcx, Operand(rsp, argc * kPointerSize));
      __ lea(rdx, FieldOperand(rbx,
                               rax, times_pointer_size,
                               FixedArray::kHeaderSize - argc * kPointerSize));
      __ movq(Operand(rdx, 0), rcx);

      // Check if value is a smi.
      __ Integer32ToSmi(rax, rax);  // Return new length as smi.

      // Smis never need a write barrier.
      __ JumpIfNotSmi(rcx, &with_write_barrier);

      __ bind(&exit);
      __ ret((argc + 1) * kPointerSize);

      __ bind(&with_write_barrier);

      // No barrier needed if the elements array is in new space.
      __ InNewSpace(rbx, rcx, equal, &exit);

      __ RecordWriteHelper(rbx, rdx, rcx);

      __ ret((argc + 1) * kPointerSize);

      __ bind(&attempt_to_grow_elements);
      // Growing requires inline allocation support.
      if (!FLAG_inline_new) {
        __ jmp(&call_builtin);
      }

      ExternalReference new_space_allocation_top =
          ExternalReference::new_space_allocation_top_address(isolate());
      ExternalReference new_space_allocation_limit =
          ExternalReference::new_space_allocation_limit_address(isolate());

      const int kAllocationDelta = 4;
      // Load top.
      __ Load(rcx, new_space_allocation_top);

      // Check if it's the end of elements.
      __ lea(rdx, FieldOperand(rbx,
                               rax, times_pointer_size,
                               FixedArray::kHeaderSize - argc * kPointerSize));
      __ cmpq(rdx, rcx);
      __ j(not_equal, &call_builtin);
      __ addq(rcx, Immediate(kAllocationDelta * kPointerSize));
      Operand limit_operand =
          masm()->ExternalOperand(new_space_allocation_limit);
      __ cmpq(rcx, limit_operand);
      __ j(above, &call_builtin);

      // We fit and could grow elements.
      __ Store(new_space_allocation_top, rcx);
      __ movq(rcx, Operand(rsp, argc * kPointerSize));

      // Push the argument...
      __ movq(Operand(rdx, 0), rcx);
      // ... and fill the rest with holes.
      __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
      for (int i = 1; i < kAllocationDelta; i++) {
        __ movq(Operand(rdx, i * kPointerSize), kScratchRegister);
      }

      // Restore receiver to rdx as finish sequence assumes it's here.
      __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

      // Increment element's and array's sizes.
      __ SmiAddConstant(FieldOperand(rbx, FixedArray::kLengthOffset),
                        Smi::FromInt(kAllocationDelta));

      // Make new length a smi before returning it.
      __ Integer32ToSmi(rax, rax);
      __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rax);

      // Elements are in new space, so write barrier is not required.
      __ ret((argc + 1) * kPointerSize);
    }

    __ bind(&call_builtin);
    __ TailCallExternalReference(ExternalReference(Builtins::c_ArrayPush,
                                                   isolate()),
                                 argc + 1,
                                 1);
  }

  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
1551
1552
// Specialized stub for Array.prototype.pop on a JSArray receiver with
// fast, writable elements.  Removes and returns the last element
// inline; an empty array returns undefined, and a hole in the last
// slot falls back to the C++ builtin (which consults the prototype
// chain for the value).
MaybeObject* CallStubCompiler::CompileArrayPopCall(Object* object,
                                                   JSObject* holder,
                                                   JSGlobalPropertyCell* cell,
                                                   JSFunction* function,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- rcx                 : name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // If object is not an array, bail out to regular call.
  if (!object->IsJSArray() || cell != NULL) return heap()->undefined_value();

  Label miss, return_undefined, call_builtin;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  CheckPrototypes(JSObject::cast(object), rdx,
                  holder, rbx,
                  rax, rdi, name, &miss);

  // Get the elements array of the object.
  __ movq(rbx, FieldOperand(rdx, JSArray::kElementsOffset));

  // Check that the elements are in fast mode and writable.
  __ CompareRoot(FieldOperand(rbx, HeapObject::kMapOffset),
                 Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &call_builtin);

  // Get the array's length into rcx and calculate new length.
  __ SmiToInteger32(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
  __ subl(rcx, Immediate(1));
  __ j(negative, &return_undefined);

  // Get the last element.
  __ LoadRoot(r9, Heap::kTheHoleValueRootIndex);
  __ movq(rax, FieldOperand(rbx,
                            rcx, times_pointer_size,
                            FixedArray::kHeaderSize));
  // Check if element is already the hole.
  __ cmpq(rax, r9);
  // If so, call slow-case to also check prototypes for value.
  __ j(equal, &call_builtin);

  // Set the array's length.
  __ Integer32ToSmiField(FieldOperand(rdx, JSArray::kLengthOffset), rcx);

  // Fill with the hole and return original value.
  __ movq(FieldOperand(rbx,
                       rcx, times_pointer_size,
                       FixedArray::kHeaderSize),
          r9);
  __ ret((argc + 1) * kPointerSize);

  __ bind(&return_undefined);
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  __ ret((argc + 1) * kPointerSize);

  __ bind(&call_builtin);
  __ TailCallExternalReference(
      ExternalReference(Builtins::c_ArrayPop, isolate()),
      argc + 1,
      1);

  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
1634
1635
// Specialized stub for String.prototype.charCodeAt on a string
// receiver.  Uses StringCharCodeAtGenerator for the fast path; a
// missing argument is treated as undefined, and out-of-range indices
// return NaN (or miss, for the default string-function stub variant).
MaybeObject* CallStubCompiler::CompileStringCharCodeAtCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- rcx                 : function name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call.
  if (!object->IsString() || cell != NULL) return heap()->undefined_value();

  const int argc = arguments().immediate();

  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

  if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
    index_out_of_range_label = &miss;
  }

  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            rax,
                                            &miss);
  // A string receiver is not a JSObject, so the chain is checked
  // starting from the String function's prototype.
  ASSERT(object != holder);
  CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
                  rbx, rdx, rdi, name, &miss);

  Register receiver = rbx;
  Register index = rdi;
  Register scratch = rdx;
  Register result = rax;
  __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
  if (argc > 0) {
    __ movq(index, Operand(rsp, (argc - 0) * kPointerSize));
  } else {
    // No index argument: charCodeAt(undefined), which coerces to 0.
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharCodeAtGenerator char_code_at_generator(receiver,
                                                   index,
                                                   scratch,
                                                   result,
                                                   &miss,  // When not a string.
                                                   &miss,  // When not a number.
                                                   index_out_of_range_label,
                                                   STRING_INDEX_IS_NUMBER);
  char_code_at_generator.GenerateFast(masm());
  __ ret((argc + 1) * kPointerSize);

  StubRuntimeCallHelper call_helper;
  char_code_at_generator.GenerateSlow(masm(), call_helper);

  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ LoadRoot(rax, Heap::kNanValueRootIndex);
    __ ret((argc + 1) * kPointerSize);
  }

  __ bind(&miss);
  // Restore function name in rcx.
  __ Move(rcx, Handle<String>(name));
  __ bind(&name_miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
1716
1717
// Compiles a specialized call stub for String.prototype.charAt.
// Returns heap()->undefined_value() to signal "bail out to the regular
// call stub" when this optimization does not apply.
MaybeObject* CallStubCompiler::CompileStringCharAtCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- rcx                 : function name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  // If object is not a string, bail out to regular call. A non-NULL
  // cell means a global (cell-based) call, which is not supported here.
  if (!object->IsString() || cell != NULL) return heap()->undefined_value();

  const int argc = arguments().immediate();

  Label miss;
  Label name_miss;
  Label index_out_of_range;
  Label* index_out_of_range_label = &index_out_of_range;

  // For the default string stub of a CALL_IC, route out-of-range
  // indices to the miss handler instead of generating the empty-string
  // result inline.
  if (kind_ == Code::CALL_IC && extra_ic_state_ == DEFAULT_STRING_STUB) {
    index_out_of_range_label = &miss;
  }

  GenerateNameCheck(name, &name_miss);

  // Check that the maps starting from the prototype haven't changed.
  GenerateDirectLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            rax,
                                            &miss);
  ASSERT(object != holder);
  CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
                  rbx, rdx, rdi, name, &miss);

  Register receiver = rax;
  Register index = rdi;
  Register scratch1 = rbx;
  Register scratch2 = rdx;
  Register result = rax;
  __ movq(receiver, Operand(rsp, (argc + 1) * kPointerSize));
  // The index is argument 0, at rsp[(argc - 0) * 8]; default to
  // undefined when charAt is called with no arguments.
  if (argc > 0) {
    __ movq(index, Operand(rsp, (argc - 0) * kPointerSize));
  } else {
    __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
  }

  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch1,
                                          scratch2,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          index_out_of_range_label,
                                          STRING_INDEX_IS_NUMBER);
  char_at_generator.GenerateFast(masm());
  // Return, popping the arguments and the receiver.
  __ ret((argc + 1) * kPointerSize);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm(), call_helper);

  // charAt returns the empty string for an out-of-range index. Only
  // emitted when the label was actually used above (i.e. not redirected
  // to the miss handler).
  if (index_out_of_range.is_linked()) {
    __ bind(&index_out_of_range);
    __ LoadRoot(rax, Heap::kEmptyStringRootIndex);
    __ ret((argc + 1) * kPointerSize);
  }

  __ bind(&miss);
  // Restore function name in rcx.
  __ Move(rcx, Handle<String>(name));
  __ bind(&name_miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
1800
1801
// Compiles a specialized call stub for String.fromCharCode with exactly
// one argument. Returns heap()->undefined_value() to signal "bail out
// to the regular call stub" when this optimization does not apply.
MaybeObject* CallStubCompiler::CompileStringFromCharCodeCall(
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  // ----------- S t a t e -------------
  //  -- rcx                 : function name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    // Non-global call: load the receiver (below the single argument)
    // and verify it is a heap object on the expected prototype chain.
    __ movq(rdx, Operand(rsp, 2 * kPointerSize));

    __ JumpIfSmi(rdx, &miss);

    CheckPrototypes(JSObject::cast(object), rdx, holder, rbx, rax, rdi, name,
                    &miss);
  } else {
    // Global call: the function is looked up through a property cell.
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the char code argument.
  Register code = rbx;
  __ movq(code, Operand(rsp, 1 * kPointerSize));

  // Check the code is a smi.
  Label slow;
  __ JumpIfNotSmi(code, &slow);

  // Convert the smi code to uint16.
  __ SmiAndConstant(code, code, Smi::FromInt(0xffff));

  StringCharFromCodeGenerator char_from_code_generator(code, rax);
  char_from_code_generator.GenerateFast(masm());
  // Return, popping the argument and the receiver.
  __ ret(2 * kPointerSize);

  StubRuntimeCallHelper call_helper;
  char_from_code_generator.GenerateSlow(masm(), call_helper);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // rcx: function name.
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}
1869
1870
// Math.floor has no specialized x64 call stub yet. Returning undefined
// tells the caller to fall back to the regular call stub compiler.
MaybeObject* CallStubCompiler::CompileMathFloorCall(Object* object,
                                                    JSObject* holder,
                                                    JSGlobalPropertyCell* cell,
                                                    JSFunction* function,
                                                    String* name) {
  // TODO(872): implement this.
  return heap()->undefined_value();
}
1879
1880
// Compiles a specialized call stub for Math.abs with exactly one
// argument. Smis are handled with a branch-free two's-complement trick;
// heap numbers by clearing the IEEE sign bit of their raw 64-bit value.
// Any other argument falls through to the full function.
MaybeObject* CallStubCompiler::CompileMathAbsCall(Object* object,
                                                  JSObject* holder,
                                                  JSGlobalPropertyCell* cell,
                                                  JSFunction* function,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- rcx                 : function name
  //  -- rsp[0]              : return address
  //  -- rsp[(argc - n) * 8] : arg[n] (zero-based)
  //  -- ...
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  const int argc = arguments().immediate();

  // If the object is not a JSObject or we got an unexpected number of
  // arguments, bail out to the regular call.
  if (!object->IsJSObject() || argc != 1) return heap()->undefined_value();

  Label miss;
  GenerateNameCheck(name, &miss);

  if (cell == NULL) {
    // Non-global call: load the receiver (below the single argument)
    // and verify it is a heap object on the expected prototype chain.
    __ movq(rdx, Operand(rsp, 2 * kPointerSize));

    __ JumpIfSmi(rdx, &miss);

    CheckPrototypes(JSObject::cast(object), rdx, holder, rbx, rax, rdi, name,
                    &miss);
  } else {
    // Global call: the function is looked up through a property cell.
    ASSERT(cell->value() == function);
    GenerateGlobalReceiverCheck(JSObject::cast(object), holder, name, &miss);
    GenerateLoadFunctionFromCell(cell, function, &miss);
  }

  // Load the (only) argument into rax.
  __ movq(rax, Operand(rsp, 1 * kPointerSize));

  // Check if the argument is a smi.
  Label not_smi;
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfNotSmi(rax, &not_smi);
  __ SmiToInteger32(rax, rax);

  // Set ebx to 1...1 (== -1) if the argument is negative, or to 0...0
  // otherwise.
  __ movl(rbx, rax);
  __ sarl(rbx, Immediate(kBitsPerInt - 1));

  // Do bitwise not or do nothing depending on ebx.
  __ xorl(rax, rbx);

  // Add 1 or do nothing depending on ebx.
  // (x ^ mask) - mask == -x when mask is all ones, x when mask is zero.
  __ subl(rax, rbx);

  // If the result is still negative, go to the slow case.
  // This only happens for the most negative smi.
  Label slow;
  __ j(negative, &slow);

  // Smi case done.
  __ Integer32ToSmi(rax, rax);
  __ ret(2 * kPointerSize);

  // Check if the argument is a heap number and load its value.
  __ bind(&not_smi);
  __ CheckMap(rax, factory()->heap_number_map(), &slow, true);
  // rbx now holds the raw 64-bit IEEE representation of the number.
  __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset));

  // Check the sign of the argument. If the argument is positive,
  // just return it.
  Label negative_sign;
  // kSignMask is expressed relative to the exponent word; shift it into
  // position within the full 64-bit value.
  const int sign_mask_shift =
      (HeapNumber::kExponentOffset - HeapNumber::kValueOffset) * kBitsPerByte;
  __ movq(rdi, static_cast<int64_t>(HeapNumber::kSignMask) << sign_mask_shift,
          RelocInfo::NONE);
  __ testq(rbx, rdi);
  __ j(not_zero, &negative_sign);
  __ ret(2 * kPointerSize);

  // If the argument is negative, clear the sign, and return a new
  // number. We still have the sign mask in rdi.
  __ bind(&negative_sign);
  __ xor_(rbx, rdi);
  __ AllocateHeapNumber(rax, rdx, &slow);
  __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rbx);
  __ ret(2 * kPointerSize);

  // Tail call the full function. We do not have to patch the receiver
  // because the function makes no use of it.
  __ bind(&slow);
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  __ bind(&miss);
  // rcx: function name.
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return (cell == NULL) ? GetCode(function) : GetCode(NORMAL, name);
}
1982
1983
// Compiles a call stub that invokes a simple API callback directly,
// bypassing the normal JS call sequence. Bails out (returns undefined)
// for global receivers, cell-based calls, or when the expected receiver
// type is not on the prototype chain.
MaybeObject* CallStubCompiler::CompileFastApiCall(
    const CallOptimization& optimization,
    Object* object,
    JSObject* holder,
    JSGlobalPropertyCell* cell,
    JSFunction* function,
    String* name) {
  ASSERT(optimization.is_simple_api_call());
  // Bail out if object is a global object as we don't want to
  // repatch it to global receiver.
  if (object->IsGlobalObject()) return heap()->undefined_value();
  if (cell != NULL) return heap()->undefined_value();
  int depth = optimization.GetPrototypeDepthOfExpectedType(
      JSObject::cast(object), holder);
  if (depth == kInvalidProtoDepth) return heap()->undefined_value();

  // Two miss labels: one for misses that occur after stack space has
  // been reserved (needs to unwind the reservation) and one for before.
  Label miss, miss_before_stack_reserved;

  GenerateNameCheck(name, &miss_before_stack_reserved);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss_before_stack_reserved);

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->call_const(), 1);
  __ IncrementCounter(counters->call_const_fast_api(), 1);

  // Allocate space for v8::Arguments implicit values. Must be initialized
  // before calling any runtime function.
  __ subq(rsp, Immediate(kFastApiCallArguments * kPointerSize));

  // Check that the maps haven't changed and find a Holder as a side effect.
  CheckPrototypes(JSObject::cast(object), rdx, holder,
                  rbx, rax, rdi, name, depth, &miss);

  // Move the return address on top of the stack.
  // NOTE(review): the source slot 3 assumes kFastApiCallArguments == 3;
  // the return address sits just above the reserved slots.
  __ movq(rax, Operand(rsp, 3 * kPointerSize));
  __ movq(Operand(rsp, 0 * kPointerSize), rax);

  MaybeObject* result = GenerateFastApiCall(masm(), optimization, argc);
  if (result->IsFailure()) return result;

  // Miss after reservation: release the reserved slots before falling
  // through to the common miss branch.
  __ bind(&miss);
  __ addq(rsp, Immediate(kFastApiCallArguments * kPointerSize));

  __ bind(&miss_before_stack_reserved);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
2040
2041
// Compiles a monomorphic call stub for a known constant function. The
// check argument selects how the receiver is validated: a map check for
// ordinary JS objects, or a type check plus prototype-chain check for
// string, number and boolean primitive receivers.
MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
                                                   JSObject* holder,
                                                   JSFunction* function,
                                                   String* name,
                                                   CheckType check) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  // Give a custom call generator (if one is registered for this
  // function) a chance to produce a specialized stub first.
  if (HasCustomCallGenerator(function)) {
    MaybeObject* maybe_result = CompileCustomCall(
        object, holder, NULL, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // undefined means bail out to regular compiler.
    if (!result->IsUndefined()) return result;
  }

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the receiver from the stack.
  const int argc = arguments().immediate();
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi. A smi receiver is acceptable
  // only for the NUMBER_CHECK case, which handles smis explicitly.
  if (check != NUMBER_CHECK) {
    __ JumpIfSmi(rdx, &miss);
  }

  // Make sure that it's okay not to patch the on stack receiver
  // unless we're doing a receiver map check.
  ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);

  Counters* counters = isolate()->counters();
  SharedFunctionInfo* function_info = function->shared();
  switch (check) {
    case RECEIVER_MAP_CHECK:
      __ IncrementCounter(counters->call_const(), 1);

      // Check that the maps haven't changed.
      CheckPrototypes(JSObject::cast(object), rdx, holder,
                      rbx, rax, rdi, name, &miss);

      // Patch the receiver on the stack with the global proxy if
      // necessary.
      if (object->IsGlobalObject()) {
        __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
        __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
      }
      break;

    case STRING_CHECK:
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        // Check that the object is a two-byte string or a symbol.
        __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, rax);
        __ j(above_equal, &miss);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::STRING_FUNCTION_INDEX, rax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
                        rbx, rdx, rdi, name, &miss);
      }
      break;

    case NUMBER_CHECK: {
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a smi or a heap number.
        __ JumpIfSmi(rdx, &fast);
        __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rax);
        __ j(not_equal, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::NUMBER_FUNCTION_INDEX, rax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
                        rbx, rdx, rdi, name, &miss);
      }
      break;
    }

    case BOOLEAN_CHECK: {
      if (!function->IsBuiltin() && !function_info->strict_mode()) {
        // Calling non-strict non-builtins with a value as the receiver
        // requires boxing.
        __ jmp(&miss);
      } else {
        Label fast;
        // Check that the object is a boolean.
        __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
        __ j(equal, &fast);
        __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
        __ j(not_equal, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateDirectLoadGlobalFunctionPrototype(
            masm(), Context::BOOLEAN_FUNCTION_INDEX, rax, &miss);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), rax, holder,
                        rbx, rdx, rdi, name, &miss);
      }
      break;
    }

    default:
      UNREACHABLE();
  }

  // All checks passed: tail-call the constant function.
  __ InvokeFunction(function, arguments(), JUMP_FUNCTION);

  // Handle call cache miss.
  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(function);
}
2175
2176
// Compiles a call stub for a property that is loaded through an
// interceptor. The interceptor (plus any post-interceptor lookup) must
// yield a JS function, which is then invoked.
MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
                                                      JSObject* holder,
                                                      String* name) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------
  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();

  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);

  // Get the receiver from the stack.
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  CallInterceptorCompiler compiler(this, arguments(), rcx);
  MaybeObject* result = compiler.Compile(masm(),
                                         object,
                                         holder,
                                         name,
                                         &lookup,
                                         rdx,
                                         rbx,
                                         rdi,
                                         rax,
                                         &miss);
  if (result->IsFailure()) return result;

  // Restore receiver (rdx may have been clobbered by the interceptor).
  __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize));

  // Check that the function really is a function. The loaded property
  // value is in rax.
  __ JumpIfSmi(rax, &miss);
  __ CmpObjectType(rax, JS_FUNCTION_TYPE, rbx);
  __ j(not_equal, &miss);

  // Patch the receiver on the stack with the global proxy if
  // necessary.
  if (object->IsGlobalObject()) {
    __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
    __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
  }

  // Invoke the function.
  __ movq(rdi, rax);
  __ InvokeFunction(rdi, arguments(), JUMP_FUNCTION);

  // Handle load cache miss.
  __ bind(&miss);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
2242
2243
// Compiles a call stub for a function stored in a global property cell.
// The function is loaded from the cell and jumped to directly (a tail
// call), after verifying the receiver and patching in the global proxy.
MaybeObject* CallStubCompiler::CompileCallGlobal(JSObject* object,
                                                 GlobalObject* holder,
                                                 JSGlobalPropertyCell* cell,
                                                 JSFunction* function,
                                                 String* name) {
  // ----------- S t a t e -------------
  // rcx                 : function name
  // rsp[0]              : return address
  // rsp[8]              : argument argc
  // rsp[16]             : argument argc - 1
  // ...
  // rsp[argc * 8]       : argument 1
  // rsp[(argc + 1) * 8] : argument 0 = receiver
  // -----------------------------------

  // Give a custom call generator (if one is registered for this
  // function) a chance to produce a specialized stub first.
  if (HasCustomCallGenerator(function)) {
    MaybeObject* maybe_result = CompileCustomCall(
        object, holder, cell, function, name);
    Object* result;
    if (!maybe_result->ToObject(&result)) return maybe_result;
    // undefined means bail out to regular compiler.
    if (!result->IsUndefined()) return result;
  }

  Label miss;

  GenerateNameCheck(name, &miss);

  // Get the number of arguments.
  const int argc = arguments().immediate();

  GenerateGlobalReceiverCheck(object, holder, name, &miss);

  GenerateLoadFunctionFromCell(cell, function, &miss);

  // Patch the receiver on the stack with the global proxy.
  if (object->IsGlobalObject()) {
    __ movq(rdx, FieldOperand(rdx, GlobalObject::kGlobalReceiverOffset));
    __ movq(Operand(rsp, (argc + 1) * kPointerSize), rdx);
  }

  // Setup the context (function already in rdi).
  __ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));

  // Jump to the cached code (tail call).
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->call_global_inline(), 1);
  ASSERT(function->is_compiled());
  ParameterCount expected(function->shared()->formal_parameter_count());
  if (V8::UseCrankshaft()) {
    // TODO(kasperl): For now, we always call indirectly through the
    // code field in the function to allow recompilation to take effect
    // without changing any of the call sites.
    __ movq(rdx, FieldOperand(rdi, JSFunction::kCodeEntryOffset));
    __ InvokeCode(rdx, expected, arguments(), JUMP_FUNCTION);
  } else {
    Handle<Code> code(function->code());
    __ InvokeCode(code, expected, arguments(),
                  RelocInfo::CODE_TARGET, JUMP_FUNCTION);
  }
  // Handle call cache miss.
  __ bind(&miss);
  __ IncrementCounter(counters->call_global_inline_miss(), 1);
  MaybeObject* maybe_result = GenerateMissBranch();
  if (maybe_result->IsFailure()) return maybe_result;

  // Return the generated code.
  return GetCode(NORMAL, name);
}
2313
2314
// Compiles a named store stub that writes to an in-object or backing
// store field at the given index, optionally transitioning the
// receiver's map when transition is non-NULL.
MaybeObject* StoreStubCompiler::CompileStoreField(JSObject* object,
                                                  int index,
                                                  Map* transition,
                                                  String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Generate store field code. Preserves receiver and name on jump to miss.
  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     rdx, rcx, rbx,
                     &miss);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}
2343
2344
// Compiles a named store stub for a property backed by an AccessorInfo
// callback. After the receiver checks pass, the actual callback
// invocation is delegated to the kStoreCallbackProperty runtime entry.
MaybeObject* StoreStubCompiler::CompileStoreCallback(JSObject* object,
                                                     AccessorInfo* callback,
                                                     String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  // Check that the map of the object hasn't changed.
  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
         Handle<Map>(object->map()));
  __ j(not_equal, &miss);

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(rdx, rbx, &miss);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Set up the 4 runtime-call arguments under the return address.
  __ pop(rbx);                             // remove the return address
  __ push(rdx);                            // receiver
  __ Push(Handle<AccessorInfo>(callback));  // callback info
  __ push(rcx);                            // name
  __ push(rax);                            // value
  __ push(rbx);                            // restore return address

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
2393
2394
// Compiles a named store stub for a receiver with a store interceptor.
// After the receiver checks pass, the store is delegated to the
// kStoreInterceptorProperty runtime entry, passing the strict mode flag.
MaybeObject* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
                                                        String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the object isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  // Check that the map of the object hasn't changed.
  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
         Handle<Map>(receiver->map()));
  __ j(not_equal, &miss);

  // Perform global security token check if needed.
  if (receiver->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(rdx, rbx, &miss);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());

  // Set up the 4 runtime-call arguments under the return address.
  __ pop(rbx);   // remove the return address
  __ push(rdx);  // receiver
  __ push(rcx);  // name
  __ push(rax);  // value
  __ Push(Smi::FromInt(strict_mode_));
  __ push(rbx);  // restore return address

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 4, 1);

  // Handle store cache miss.
  __ bind(&miss);
  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
2442
2443
// Compiles a named store stub that writes directly into a global
// property cell, bypassing the runtime when the cell is still live.
MaybeObject* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
                                                   JSGlobalPropertyCell* cell,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : name
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the map of the global has not changed.
  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
         Handle<Map>(object->map()));
  __ j(not_equal, &miss);

  // Check that the value in the cell is not the hole. If it is, this
  // cell could have been deleted and reintroducing the global needs
  // to update the property details in the property dictionary of the
  // global object. We bail out to the runtime system to do that.
  __ Move(rbx, Handle<JSGlobalPropertyCell>(cell));
  __ CompareRoot(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset),
                 Heap::kTheHoleValueRootIndex);
  __ j(equal, &miss);

  // Store the value in the cell.
  __ movq(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset), rax);

  // Return the value (register rax).
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_store_global_inline(), 1);
  __ ret(0);

  // Handle store cache miss.
  __ bind(&miss);
  __ IncrementCounter(counters->named_store_global_inline_miss(), 1);
  Handle<Code> ic = isolate()->builtins()->StoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
2486
2487
// Compiles a keyed store stub specialized for a specific property name:
// the key is compared against the name, then the store is handled like
// a named field store (with optional map transition).
MaybeObject* KeyedStoreStubCompiler::CompileStoreField(JSObject* object,
                                                       int index,
                                                       Map* transition,
                                                       String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_store_field(), 1);

  // Check that the name has not changed.
  __ Cmp(rcx, Handle<String>(name));
  __ j(not_equal, &miss);

  // Generate store field code. Preserves receiver and name on jump to miss.
  GenerateStoreField(masm(),
                     object,
                     index,
                     transition,
                     rdx, rcx, rbx,
                     &miss);

  // Handle store cache miss. Undo the counter increment since the fast
  // path did not complete.
  __ bind(&miss);
  __ DecrementCounter(counters->keyed_store_field(), 1);
  Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
  __ Jump(ic, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode(transition == NULL ? FIELD : MAP_TRANSITION, name);
}
2524
2525
CompileStoreSpecialized(JSObject * receiver)2526 MaybeObject* KeyedStoreStubCompiler::CompileStoreSpecialized(
2527 JSObject* receiver) {
2528 // ----------- S t a t e -------------
2529 // -- rax : value
2530 // -- rcx : key
2531 // -- rdx : receiver
2532 // -- rsp[0] : return address
2533 // -----------------------------------
2534 Label miss;
2535
2536 // Check that the receiver isn't a smi.
2537 __ JumpIfSmi(rdx, &miss);
2538
2539 // Check that the map matches.
2540 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2541 Handle<Map>(receiver->map()));
2542 __ j(not_equal, &miss);
2543
2544 // Check that the key is a smi.
2545 __ JumpIfNotSmi(rcx, &miss);
2546
2547 // Get the elements array and make sure it is a fast element array, not 'cow'.
2548 __ movq(rdi, FieldOperand(rdx, JSObject::kElementsOffset));
2549 __ Cmp(FieldOperand(rdi, HeapObject::kMapOffset),
2550 factory()->fixed_array_map());
2551 __ j(not_equal, &miss);
2552
2553 // Check that the key is within bounds.
2554 if (receiver->IsJSArray()) {
2555 __ SmiCompare(rcx, FieldOperand(rdx, JSArray::kLengthOffset));
2556 __ j(above_equal, &miss);
2557 } else {
2558 __ SmiCompare(rcx, FieldOperand(rdi, FixedArray::kLengthOffset));
2559 __ j(above_equal, &miss);
2560 }
2561
2562 // Do the store and update the write barrier. Make sure to preserve
2563 // the value in register eax.
2564 __ movq(rdx, rax);
2565 __ SmiToInteger32(rcx, rcx);
2566 __ movq(FieldOperand(rdi, rcx, times_pointer_size, FixedArray::kHeaderSize),
2567 rax);
2568 __ RecordWrite(rdi, 0, rdx, rcx);
2569
2570 // Done.
2571 __ ret(0);
2572
2573 // Handle store cache miss.
2574 __ bind(&miss);
2575 Handle<Code> ic = isolate()->builtins()->KeyedStoreIC_Miss();
2576 __ jmp(ic, RelocInfo::CODE_TARGET);
2577
2578 // Return the generated code.
2579 return GetCode(NORMAL, NULL);
2580 }
2581
2582
// Compiles a load stub that returns undefined for a property known not
// to exist anywhere on the receiver's prototype chain. Validity is
// guarded by map checks along the chain and, for a global last object,
// by checking that its property cell stays empty.
MaybeObject* LoadStubCompiler::CompileLoadNonexistent(String* name,
                                                      JSObject* object,
                                                      JSObject* last) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that receiver is not a smi.
  __ JumpIfSmi(rax, &miss);

  // Check the maps of the full prototype chain. Also check that
  // global property cells up to (but not including) the last object
  // in the prototype chain are empty.
  CheckPrototypes(object, rax, last, rbx, rdx, rdi, name, &miss);

  // If the last object in the prototype chain is a global object,
  // check that the global property cell is empty.
  if (last->IsGlobalObject()) {
    MaybeObject* cell = GenerateCheckPropertyCell(masm(),
                                                  GlobalObject::cast(last),
                                                  name,
                                                  rdx,
                                                  &miss);
    // On failure the label was never bound; unuse it before bailing out.
    if (cell->IsFailure()) {
      miss.Unuse();
      return cell;
    }
  }

  // Return undefined if maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
  __ ret(0);

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(NONEXISTENT, heap()->empty_string());
}
2626
2627
// Compiles a named load stub for an in-object or backing-store field at
// the given index.
MaybeObject* LoadStubCompiler::CompileLoadField(JSObject* object,
                                                JSObject* holder,
                                                int index,
                                                String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  GenerateLoadField(object, holder, rax, rbx, rdx, rdi, index, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(FIELD, name);
}
2646
2647
// Compiles a named load stub for a property backed by an AccessorInfo
// callback. GenerateLoadCallback can itself fail (it may allocate), in
// which case the failure is propagated.
MaybeObject* LoadStubCompiler::CompileLoadCallback(String* name,
                                                   JSObject* object,
                                                   JSObject* holder,
                                                   AccessorInfo* callback) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  MaybeObject* result = GenerateLoadCallback(object, holder, rax, rcx, rdx, rbx,
                                             rdi, callback, name, &miss);
  // On failure the label may be unbound; unuse it before bailing out.
  if (result->IsFailure()) {
    miss.Unuse();
    return result;
  }

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
2672
2673
// Compiles a LOAD_IC stub that returns the constant |value| for property
// |name|, after validating the chain from |object| to |holder|.
MaybeObject* LoadStubCompiler::CompileLoadConstant(JSObject* object,
                                                   JSObject* holder,
                                                   Object* value,
                                                   String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  GenerateLoadConstant(object, holder, rax, rbx, rdx, rdi, value, name, &miss);
  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(CONSTANT_FUNCTION, name);
}
2692
2693
// Compiles a LOAD_IC stub for a receiver whose named loads go through an
// interceptor. The post-interceptor lookup result lets the generated code
// shortcut to the real property when the interceptor does not handle it.
MaybeObject* LoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
                                                      JSObject* holder,
                                                      String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);

  // TODO(368): Compile in the whole chain: all the interceptors in
  // prototypes and ultimate answer.
  GenerateLoadInterceptor(receiver,
                          holder,
                          &lookup,
                          rax,
                          rcx,
                          rdx,
                          rbx,
                          rdi,
                          name,
                          &miss);

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
2726
2727
// Compiles a LOAD_IC stub that reads |name| directly out of its global
// property cell. |is_dont_delete| controls whether a hole check (deleted
// property) must be emitted.
MaybeObject* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
                                                 GlobalObject* holder,
                                                 JSGlobalPropertyCell* cell,
                                                 String* name,
                                                 bool is_dont_delete) {
  // ----------- S t a t e -------------
  //  -- rax    : receiver
  //  -- rcx    : name
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // If the object is the holder then we know that it's a global
  // object which can only happen for contextual loads. In this case,
  // the receiver cannot be a smi.
  if (object != holder) {
    __ JumpIfSmi(rax, &miss);
  }

  // Check that the maps haven't changed.
  CheckPrototypes(object, rax, holder, rbx, rdx, rdi, name, &miss);

  // Get the value from the cell.
  __ Move(rbx, Handle<JSGlobalPropertyCell>(cell));
  __ movq(rbx, FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    // A the-hole value in the cell means the property was deleted.
    __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
    __ j(equal, &miss);
  } else if (FLAG_debug_code) {
    __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
    __ Check(not_equal, "DontDelete cells can't contain the hole");
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1);
  __ movq(rax, rbx);
  __ ret(0);

  __ bind(&miss);
  __ IncrementCounter(counters->named_load_global_stub_miss(), 1);
  GenerateLoadMiss(masm(), Code::LOAD_IC);

  // Return the generated code.
  return GetCode(NORMAL, name);
}
2775
2776
// Compiles a KEYED_LOAD_IC stub specialized to the single key |name|; loads
// the field at |index| from |holder|. Any other key (or map mismatch) goes
// to the generic keyed-load miss handler.
MaybeObject* KeyedLoadStubCompiler::CompileLoadField(String* name,
                                                     JSObject* receiver,
                                                     JSObject* holder,
                                                     int index) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_field(), 1);

  // Check that the name has not changed.
  __ Cmp(rax, Handle<String>(name));
  __ j(not_equal, &miss);

  GenerateLoadField(receiver, holder, rdx, rbx, rcx, rdi, index, name, &miss);

  __ bind(&miss);
  // Undo the optimistic increment above so the counter nets only hits.
  __ DecrementCounter(counters->keyed_load_field(), 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(FIELD, name);
}
2804
2805
// Compiles a KEYED_LOAD_IC stub specialized to the single key |name| that
// dispatches to the native getter described by |callback|.
MaybeObject* KeyedLoadStubCompiler::CompileLoadCallback(
    String* name,
    JSObject* receiver,
    JSObject* holder,
    AccessorInfo* callback) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_callback(), 1);

  // Check that the name has not changed.
  __ Cmp(rax, Handle<String>(name));
  __ j(not_equal, &miss);

  MaybeObject* result = GenerateLoadCallback(receiver, holder, rdx, rax, rbx,
                                             rcx, rdi, callback, name, &miss);
  if (result->IsFailure()) {
    // Generation failed; the miss label was never bound, so release it
    // before propagating the failure.
    miss.Unuse();
    return result;
  }

  __ bind(&miss);

  // Undo the optimistic increment above so the counter nets only hits.
  __ DecrementCounter(counters->keyed_load_callback(), 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
2840
2841
// Compiles a KEYED_LOAD_IC stub specialized to the single key |name| that
// returns the constant |value|.
MaybeObject* KeyedLoadStubCompiler::CompileLoadConstant(String* name,
                                                        JSObject* receiver,
                                                        JSObject* holder,
                                                        Object* value) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_constant_function(), 1);

  // Check that the name has not changed.
  __ Cmp(rax, Handle<String>(name));
  __ j(not_equal, &miss);

  GenerateLoadConstant(receiver, holder, rdx, rbx, rcx, rdi,
                       value, name, &miss);
  __ bind(&miss);
  // Undo the optimistic increment above so the counter nets only hits.
  __ DecrementCounter(counters->keyed_load_constant_function(), 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CONSTANT_FUNCTION, name);
}
2869
2870
// Compiles a KEYED_LOAD_IC stub specialized to the single key |name| for a
// receiver whose named loads go through an interceptor.
MaybeObject* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
                                                           JSObject* holder,
                                                           String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_interceptor(), 1);

  // Check that the name has not changed.
  __ Cmp(rax, Handle<String>(name));
  __ j(not_equal, &miss);

  LookupResult lookup;
  LookupPostInterceptor(holder, name, &lookup);
  GenerateLoadInterceptor(receiver,
                          holder,
                          &lookup,
                          rdx,
                          rax,
                          rcx,
                          rbx,
                          rdi,
                          name,
                          &miss);
  __ bind(&miss);
  // Undo the optimistic increment above so the counter nets only hits.
  __ DecrementCounter(counters->keyed_load_interceptor(), 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(INTERCEPTOR, name);
}
2907
2908
// Compiles a KEYED_LOAD_IC stub for the key |name| (expected to be
// "length") that loads the receiver's array length.
MaybeObject* KeyedLoadStubCompiler::CompileLoadArrayLength(String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_array_length(), 1);

  // Check that the name has not changed.
  __ Cmp(rax, Handle<String>(name));
  __ j(not_equal, &miss);

  GenerateLoadArrayLength(masm(), rdx, rcx, &miss);
  __ bind(&miss);
  // Undo the optimistic increment above so the counter nets only hits.
  __ DecrementCounter(counters->keyed_load_array_length(), 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
2932
2933
// Compiles a KEYED_LOAD_IC stub for the key |name| (expected to be
// "length") that loads a string receiver's length.
MaybeObject* KeyedLoadStubCompiler::CompileLoadStringLength(String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_string_length(), 1);

  // Check that the name has not changed.
  __ Cmp(rax, Handle<String>(name));
  __ j(not_equal, &miss);

  // NOTE(review): the trailing |true| flag is interpreted by
  // GenerateLoadStringLength; presumably it enables support for string
  // wrapper objects — confirm against its definition.
  GenerateLoadStringLength(masm(), rdx, rcx, rbx, &miss, true);
  __ bind(&miss);
  // Undo the optimistic increment above so the counter nets only hits.
  __ DecrementCounter(counters->keyed_load_string_length(), 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
2957
2958
// Compiles a KEYED_LOAD_IC stub for the key |name| (expected to be
// "prototype") that loads a function receiver's prototype.
MaybeObject* KeyedLoadStubCompiler::CompileLoadFunctionPrototype(String* name) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_function_prototype(), 1);

  // Check that the name has not changed.
  __ Cmp(rax, Handle<String>(name));
  __ j(not_equal, &miss);

  GenerateLoadFunctionPrototype(masm(), rdx, rcx, rbx, &miss);
  __ bind(&miss);
  // Undo the optimistic increment above so the counter nets only hits.
  __ DecrementCounter(counters->keyed_load_function_prototype(), 1);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(CALLBACKS, name);
}
2982
2983
// Compiles a KEYED_LOAD_IC stub specialized to |receiver|'s map for
// smi-keyed loads from a fast (FixedArray) elements backing store.
// Out-of-bounds keys, holes, and map mismatches all miss to the generic
// handler.
MaybeObject* KeyedLoadStubCompiler::CompileLoadSpecialized(JSObject* receiver) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label miss;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(rdx, &miss);

  // Check that the map matches.
  __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
         Handle<Map>(receiver->map()));
  __ j(not_equal, &miss);

  // Check that the key is a smi.
  __ JumpIfNotSmi(rax, &miss);

  // Get the elements array.
  __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
  __ AssertFastElements(rcx);

  // Check that the key is within bounds. Unsigned comparison also rejects
  // negative keys.
  __ SmiCompare(rax, FieldOperand(rcx, FixedArray::kLengthOffset));
  __ j(above_equal, &miss);

  // Load the result and make sure it's not the hole.
  SmiIndex index = masm()->SmiToIndex(rbx, rax, kPointerSizeLog2);
  __ movq(rbx, FieldOperand(rcx,
                            index.reg,
                            index.scale,
                            FixedArray::kHeaderSize));
  __ CompareRoot(rbx, Heap::kTheHoleValueRootIndex);
  __ j(equal, &miss);
  __ movq(rax, rbx);
  __ ret(0);

  __ bind(&miss);
  GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);

  // Return the generated code.
  return GetCode(NORMAL, NULL);
}
3028
3029
// Specialized stub for constructing objects from functions which only have
// simple assignments of the form this.x = ...; in their body.
MaybeObject* ConstructStubCompiler::CompileConstructStub(JSFunction* function) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Label generic_stub_call;

  // Use r8 for holding undefined which is used in several places below.
  __ Move(r8, factory()->undefined_value());

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Check to see whether there are any break points in the function code. If
  // there are jump to the generic constructor stub which calls the actual
  // code for the function thereby hitting the break points.
  __ movq(rbx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
  __ movq(rbx, FieldOperand(rbx, SharedFunctionInfo::kDebugInfoOffset));
  __ cmpq(rbx, r8);
  __ j(not_equal, &generic_stub_call);
#endif

  // Load the initial map and verify that it is in fact a map.
  __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
  // Will both indicate a NULL and a Smi.
  ASSERT(kSmiTag == 0);
  __ JumpIfSmi(rbx, &generic_stub_call);
  __ CmpObjectType(rbx, MAP_TYPE, rcx);
  __ j(not_equal, &generic_stub_call);

#ifdef DEBUG
  // Cannot construct functions this way.
  // rdi: constructor
  // rbx: initial map
  __ CmpInstanceType(rbx, JS_FUNCTION_TYPE);
  __ Assert(not_equal, "Function constructed by construct stub.");
#endif

  // Now allocate the JSObject in new space.
  // rdi: constructor
  // rbx: initial map
  // The instance size in the map is in pointer-sized words; shift to bytes.
  __ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
  __ shl(rcx, Immediate(kPointerSizeLog2));
  __ AllocateInNewSpace(rcx,
                        rdx,
                        rcx,
                        no_reg,
                        &generic_stub_call,
                        NO_ALLOCATION_FLAGS);

  // Allocated the JSObject, now initialize the fields and add the heap tag.
  // rbx: initial map
  // rdx: JSObject (untagged)
  __ movq(Operand(rdx, JSObject::kMapOffset), rbx);
  __ Move(rbx, factory()->empty_fixed_array());
  __ movq(Operand(rdx, JSObject::kPropertiesOffset), rbx);
  __ movq(Operand(rdx, JSObject::kElementsOffset), rbx);

  // rax: argc
  // rdx: JSObject (untagged)
  // Load the address of the first in-object property into r9.
  __ lea(r9, Operand(rdx, JSObject::kHeaderSize));
  // Calculate the location of the first argument. The stack contains only the
  // return address on top of the argc arguments.
  __ lea(rcx, Operand(rsp, rax, times_pointer_size, 0));

  // rax: argc
  // rcx: first argument
  // rdx: JSObject (untagged)
  // r8: undefined
  // r9: first in-object property of the JSObject
  // Fill the initialized properties with a constant value or a passed argument
  // depending on the this.x = ...; assignment in the function.
  SharedFunctionInfo* shared = function->shared();
  for (int i = 0; i < shared->this_property_assignments_count(); i++) {
    if (shared->IsThisPropertyAssignmentArgument(i)) {
      // Check if the argument assigned to the property is actually passed.
      // If argument is not passed the property is set to undefined,
      // otherwise find it on the stack.
      int arg_number = shared->GetThisPropertyAssignmentArgument(i);
      // Start with undefined; conditionally overwrite with the argument
      // when argc > arg_number (cmov avoids a branch).
      __ movq(rbx, r8);
      __ cmpq(rax, Immediate(arg_number));
      __ cmovq(above, rbx, Operand(rcx, arg_number * -kPointerSize));
      // Store value in the property.
      __ movq(Operand(r9, i * kPointerSize), rbx);
    } else {
      // Set the property to the constant value.
      Handle<Object> constant(shared->GetThisPropertyAssignmentConstant(i));
      __ Move(Operand(r9, i * kPointerSize), constant);
    }
  }

  // Fill the unused in-object property fields with undefined.
  ASSERT(function->has_initial_map());
  for (int i = shared->this_property_assignments_count();
       i < function->initial_map()->inobject_properties();
       i++) {
    __ movq(Operand(r9, i * kPointerSize), r8);
  }

  // rax: argc
  // rdx: JSObject (untagged)
  // Move argc to rbx and the JSObject to return to rax and tag it.
  __ movq(rbx, rax);
  __ movq(rax, rdx);
  __ or_(rax, Immediate(kHeapObjectTag));

  // rax: JSObject
  // rbx: argc
  // Remove caller arguments and receiver from the stack and return.
  __ pop(rcx);
  __ lea(rsp, Operand(rsp, rbx, times_pointer_size, 1 * kPointerSize));
  __ push(rcx);
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->constructed_objects(), 1);
  __ IncrementCounter(counters->constructed_objects_stub(), 1);
  __ ret(0);

  // Jump to the generic stub in case the specialized code cannot handle the
  // construction.
  __ bind(&generic_stub_call);
  Code* code =
      isolate()->builtins()->builtin(Builtins::kJSConstructStubGeneric);
  Handle<Code> generic_construct_stub(code);
  __ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);

  // Return the generated code.
  return GetCode();
}
3161
3162
// Compiles a keyed load stub for an external (typed) array of |array_type|,
// specialized to |receiver|'s map. Integer results are returned as smis
// where they fit; uint32 values outside the smi range and all float values
// are boxed in a freshly allocated HeapNumber.
MaybeObject* ExternalArrayStubCompiler::CompileKeyedLoadStub(
    JSObject* receiver, ExternalArrayType array_type, Code::Flags flags) {
  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow;

  // Check that the object isn't a smi.
  __ JumpIfSmi(rdx, &slow);

  // Check that the key is a smi.
  __ JumpIfNotSmi(rax, &slow);

  // Check that the map matches.
  __ CheckMap(rdx, Handle<Map>(receiver->map()), &slow, false);
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));

  // Check that the index is in range.
  __ SmiToInteger32(rcx, rax);
  __ cmpl(rcx, FieldOperand(rbx, ExternalArray::kLengthOffset));
  // Unsigned comparison catches both negative and too-large values.
  __ j(above_equal, &slow);

  // rax: index (as a smi)
  // rdx: receiver (JSObject)
  // rcx: untagged index
  // rbx: elements array
  __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
  // rbx: base pointer of external storage
  // Load the element with the sign/zero extension appropriate for the
  // element type; floats are widened to double in xmm0.
  switch (array_type) {
    case kExternalByteArray:
      __ movsxbq(rcx, Operand(rbx, rcx, times_1, 0));
      break;
    case kExternalPixelArray:
    case kExternalUnsignedByteArray:
      __ movzxbq(rcx, Operand(rbx, rcx, times_1, 0));
      break;
    case kExternalShortArray:
      __ movsxwq(rcx, Operand(rbx, rcx, times_2, 0));
      break;
    case kExternalUnsignedShortArray:
      __ movzxwq(rcx, Operand(rbx, rcx, times_2, 0));
      break;
    case kExternalIntArray:
      __ movsxlq(rcx, Operand(rbx, rcx, times_4, 0));
      break;
    case kExternalUnsignedIntArray:
      __ movl(rcx, Operand(rbx, rcx, times_4, 0));
      break;
    case kExternalFloatArray:
      __ cvtss2sd(xmm0, Operand(rbx, rcx, times_4, 0));
      break;
    default:
      UNREACHABLE();
      break;
  }

  // rax: index
  // rdx: receiver
  // For integer array types:
  // rcx: value
  // For floating-point array type:
  // xmm0: value as double.

  ASSERT(kSmiValueSize == 32);
  if (array_type == kExternalUnsignedIntArray) {
    // For the UnsignedInt array type, we need to see whether
    // the value can be represented in a Smi. If not, we need to convert
    // it to a HeapNumber.
    NearLabel box_int;

    __ JumpIfUIntNotValidSmiValue(rcx, &box_int);

    __ Integer32ToSmi(rax, rcx);
    __ ret(0);

    __ bind(&box_int);

    // Allocate a HeapNumber for the int and perform int-to-double
    // conversion.
    // The value is zero-extended since we loaded the value from memory
    // with movl.
    __ cvtqsi2sd(xmm0, rcx);

    __ AllocateHeapNumber(rcx, rbx, &slow);
    // Set the value.
    __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
    __ movq(rax, rcx);
    __ ret(0);
  } else if (array_type == kExternalFloatArray) {
    // For the floating-point array type, we need to always allocate a
    // HeapNumber.
    __ AllocateHeapNumber(rcx, rbx, &slow);
    // Set the value.
    __ movsd(FieldOperand(rcx, HeapNumber::kValueOffset), xmm0);
    __ movq(rax, rcx);
    __ ret(0);
  } else {
    // All remaining element types fit in a smi (kSmiValueSize == 32).
    __ Integer32ToSmi(rax, rcx);
    __ ret(0);
  }

  // Slow case: Jump to runtime.
  __ bind(&slow);
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->keyed_load_external_array_slow(), 1);

  // ----------- S t a t e -------------
  //  -- rax    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  // Rebuild the runtime call frame: receiver and key below the return
  // address.
  __ pop(rbx);
  __ push(rdx);  // receiver
  __ push(rax);  // name
  __ push(rbx);  // return address

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);

  // Return the generated code.
  return GetCode(flags);
}
3289
3290
// Compiles a keyed store stub for an external (typed) array of |array_type|,
// specialized to |receiver|'s map. Handles smi and HeapNumber values inline
// (pixel arrays: smis only); everything else goes to the runtime.
MaybeObject* ExternalArrayStubCompiler::CompileKeyedStoreStub(
    JSObject* receiver, ExternalArrayType array_type, Code::Flags flags) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  Label slow;

  // Check that the object isn't a smi.
  __ JumpIfSmi(rdx, &slow);

  // Check that the map matches.
  __ CheckMap(rdx, Handle<Map>(receiver->map()), &slow, false);
  __ movq(rbx, FieldOperand(rdx, JSObject::kElementsOffset));

  // Check that the key is a smi.
  __ JumpIfNotSmi(rcx, &slow);

  // Check that the index is in range.
  __ SmiToInteger32(rdi, rcx);  // Untag the index.
  __ cmpl(rdi, FieldOperand(rbx, ExternalArray::kLengthOffset));
  // Unsigned comparison catches both negative and too-large values.
  __ j(above_equal, &slow);

  // Handle both smis and HeapNumbers in the fast path. Go to the
  // runtime for all other kinds of values.
  // rax: value
  // rcx: key (a smi)
  // rdx: receiver (a JSObject)
  // rbx: elements array
  // rdi: untagged key
  NearLabel check_heap_number;
  if (array_type == kExternalPixelArray) {
    // Float to pixel conversion is only implemented in the runtime for now.
    __ JumpIfNotSmi(rax, &slow);
  } else {
    __ JumpIfNotSmi(rax, &check_heap_number);
  }
  // No more branches to slow case on this path. Key and receiver not needed.
  __ SmiToInteger32(rdx, rax);
  __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
  // rbx: base pointer of external storage
  switch (array_type) {
    case kExternalPixelArray:
      { // Clamp the value to [0..255].
        NearLabel done;
        __ testl(rdx, Immediate(0xFFFFFF00));
        __ j(zero, &done);
        __ setcc(negative, rdx);  // 1 if negative, 0 if positive.
        __ decb(rdx);  // 0 if negative, 255 if positive.
        __ bind(&done);
      }
      __ movb(Operand(rbx, rdi, times_1, 0), rdx);
      break;
    case kExternalByteArray:
    case kExternalUnsignedByteArray:
      __ movb(Operand(rbx, rdi, times_1, 0), rdx);
      break;
    case kExternalShortArray:
    case kExternalUnsignedShortArray:
      __ movw(Operand(rbx, rdi, times_2, 0), rdx);
      break;
    case kExternalIntArray:
    case kExternalUnsignedIntArray:
      __ movl(Operand(rbx, rdi, times_4, 0), rdx);
      break;
    case kExternalFloatArray:
      // Need to perform int-to-float conversion.
      __ cvtlsi2ss(xmm0, rdx);
      __ movss(Operand(rbx, rdi, times_4, 0), xmm0);
      break;
    default:
      UNREACHABLE();
      break;
  }
  __ ret(0);

  // TODO(danno): handle heap number -> pixel array conversion
  if (array_type != kExternalPixelArray) {
    __ bind(&check_heap_number);
    // rax: value
    // rcx: key (a smi)
    // rdx: receiver (a JSObject)
    // rbx: elements array
    // rdi: untagged key
    __ CmpObjectType(rax, HEAP_NUMBER_TYPE, kScratchRegister);
    __ j(not_equal, &slow);
    // No more branches to slow case on this path.

    // The WebGL specification leaves the behavior of storing NaN and
    // +/-Infinity into integer arrays basically undefined. For more
    // reproducible behavior, convert these to zero.
    __ movsd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
    __ movq(rbx, FieldOperand(rbx, ExternalArray::kExternalPointerOffset));
    // rdi: untagged index
    // rbx: base pointer of external storage
    // xmm0: value as double
    if (array_type == kExternalFloatArray) {
      __ cvtsd2ss(xmm0, xmm0);
      __ movss(Operand(rbx, rdi, times_4, 0), xmm0);
      __ ret(0);
    } else {
      // Perform float-to-int conversion with truncation (round-to-zero)
      // behavior.

      // Convert to int32 and store the low byte/word.
      // If the value is NaN or +/-infinity, the result is 0x80000000,
      // which is automatically zero when taken mod 2^n, n < 32.
      // rdx: value (converted to an untagged integer)
      // rdi: untagged index
      // rbx: base pointer of external storage
      switch (array_type) {
        case kExternalByteArray:
        case kExternalUnsignedByteArray:
          __ cvttsd2si(rdx, xmm0);
          __ movb(Operand(rbx, rdi, times_1, 0), rdx);
          break;
        case kExternalShortArray:
        case kExternalUnsignedShortArray:
          __ cvttsd2si(rdx, xmm0);
          __ movw(Operand(rbx, rdi, times_2, 0), rdx);
          break;
        case kExternalIntArray:
        case kExternalUnsignedIntArray: {
          // Convert to int64, so that NaN and infinities become
          // 0x8000000000000000, which is zero mod 2^32.
          __ cvttsd2siq(rdx, xmm0);
          __ movl(Operand(rbx, rdi, times_4, 0), rdx);
          break;
        }
        default:
          UNREACHABLE();
          break;
      }
      __ ret(0);
    }
  }

  // Slow case: call runtime.
  __ bind(&slow);

  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------

  // Rebuild the frame for Runtime::kSetProperty(receiver, key, value,
  // attributes, strict_mode) below the return address.
  __ pop(rbx);
  __ push(rdx);  // receiver
  __ push(rcx);  // key
  __ push(rax);  // value
  __ Push(Smi::FromInt(NONE));  // PropertyAttributes
  // Pass along the strict-mode bit recorded in the IC's extra state.
  __ Push(Smi::FromInt(
      Code::ExtractExtraICStateFromFlags(flags) & kStrictMode));
  __ push(rbx);  // return address

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);

  return GetCode(flags);
}
3455
3456 #undef __
3457
3458 } } // namespace v8::internal
3459
3460 #endif // V8_TARGET_ARCH_X64
3461