// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_IA32

#include "src/ic-inl.h"
#include "src/codegen.h"
#include "src/stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


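// Probes a single entry of the given stub cache table: checks that the
// entry's key matches |name| and its map matches the receiver's map,
// verifies the code flags, and on a hit jumps straight to the first
// instruction of the cached handler. On a miss it falls through (binding a
// local |miss| label). When no |extra| register is available, |offset| is
// temporarily saved on the stack instead.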
static void ProbeTable(Isolate* isolate,
                       MacroAssembler* masm,
                       Code::Flags flags,
                       StubCache::Table table,
                       Register name,
                       Register receiver,
                       // Number of the cache entry, pointer-size scaled.
                       Register offset,
                       Register extra) {
  ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
  ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
  ExternalReference map_offset(isolate->stub_cache()->map_reference(table));

  Label miss;

  // Multiply by 3 because there are 3 fields per entry (name, code, map).
  __ lea(offset, Operand(offset, offset, times_2, 0));

  if (extra.is_valid()) {
    // Get the code entry from the cache.
    __ mov(extra, Operand::StaticArray(offset, times_1, value_offset));

    // Check that the key in the entry matches the name.
    __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
    __ j(not_equal, &miss);

    // Check the map matches.
    __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
    __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
    __ j(not_equal, &miss);

    // Check that the flags match what we're looking for.
    __ mov(offset, FieldOperand(extra, Code::kFlagsOffset));
    __ and_(offset, ~Code::kFlagsNotUsedInLookup);
    __ cmp(offset, flags);
    __ j(not_equal, &miss);

#ifdef DEBUG
    if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
      __ jmp(&miss);
    } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
      __ jmp(&miss);
    }
#endif

    // Jump to the first instruction in the code stub.
    __ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(extra);

    __ bind(&miss);
  } else {
    // Save the offset on the stack.
    __ push(offset);

    // Check that the key in the entry matches the name.
    __ cmp(name, Operand::StaticArray(offset, times_1, key_offset));
    __ j(not_equal, &miss);

    // Check the map matches.
    __ mov(offset, Operand::StaticArray(offset, times_1, map_offset));
    __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset));
    __ j(not_equal, &miss);

    // Restore offset register.
    __ mov(offset, Operand(esp, 0));

    // Get the code entry from the cache.
    __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));

    // Check that the flags match what we're looking for.
    __ mov(offset, FieldOperand(offset, Code::kFlagsOffset));
    __ and_(offset, ~Code::kFlagsNotUsedInLookup);
    __ cmp(offset, flags);
    __ j(not_equal, &miss);

#ifdef DEBUG
    if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
      __ jmp(&miss);
    } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
      __ jmp(&miss);
    }
#endif

    // Restore offset and re-load code entry from cache.
    __ pop(offset);
    __ mov(offset, Operand::StaticArray(offset, times_1, value_offset));

    // Jump to the first instruction in the code stub.
    __ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag));
    __ jmp(offset);

    // Pop at miss.
    __ bind(&miss);
    __ pop(offset);
  }
}


void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
                                                    Label* miss_label,
                                                    Register receiver,
                                                    Handle<Name> name,
                                                    Register scratch0,
                                                    Register scratch1) {
  ASSERT(name->IsUniqueName());
  ASSERT(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1);

  __ mov(scratch0, FieldOperand(receiver, HeapObject::kMapOffset));

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  __ test_b(FieldOperand(scratch0, Map::kBitFieldOffset),
            kInterceptorOrAccessCheckNeededMask);
  __ j(not_zero, miss_label);

  // Check that receiver is a JSObject.
  __ CmpInstanceType(scratch0, FIRST_SPEC_OBJECT_TYPE);
  __ j(below, miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ mov(properties, FieldOperand(receiver, JSObject::kPropertiesOffset));

  // Check that the properties array is a dictionary.
  __ cmp(FieldOperand(properties, HeapObject::kMapOffset),
         Immediate(masm->isolate()->factory()->hash_table_map()));
  __ j(not_equal, miss_label);

  Label done;
  NameDictionaryLookupStub::GenerateNegativeLookup(masm,
                                                   miss_label,
                                                   &done,
                                                   properties,
                                                   name,
                                                   scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1);
}


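// A sketch of the hash scheme implemented below (pseudo-C; `hash` is the
// name's hash field and `map` the receiver's map word, both read from the
// object):
//   primary   = ((hash + map) ^ flags)
//               & ((kPrimaryTableSize - 1) << kHeapObjectTagSize)
//   secondary = (primary - name + flags)
//               & ((kSecondaryTableSize - 1) << kHeapObjectTagSize)
// Each result is a pointer-size-scaled entry offset fed to ProbeTable.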
void StubCache::GenerateProbe(MacroAssembler* masm,
                              Code::Flags flags,
                              Register receiver,
                              Register name,
                              Register scratch,
                              Register extra,
                              Register extra2,
                              Register extra3) {
  Label miss;

  // Assert that code is valid. The multiplying code relies on the entry size
  // being 12.
  ASSERT(sizeof(Entry) == 12);

  // Assert the flags do not name a specific type.
  ASSERT(Code::ExtractTypeFromFlags(flags) == 0);

  // Assert that there are no register conflicts.
  ASSERT(!scratch.is(receiver));
  ASSERT(!scratch.is(name));
  ASSERT(!extra.is(receiver));
  ASSERT(!extra.is(name));
  ASSERT(!extra.is(scratch));

  // Assert scratch and extra registers are valid, and extra2/3 are unused.
  ASSERT(!scratch.is(no_reg));
  ASSERT(extra2.is(no_reg));
  ASSERT(extra3.is(no_reg));

  Register offset = scratch;
  scratch = no_reg;

  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Get the map of the receiver and compute the hash.
  __ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
  __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(offset, flags);
  // We mask out the last two bits because they are not part of the hash and
  // they are always 01 for maps. Also in the two 'and' instructions below.
  __ and_(offset, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
  // ProbeTable expects the offset to be pointer scaled, which it is, because
  // the heap object tag size is 2 and the pointer size log 2 is also 2.
  ASSERT(kHeapObjectTagSize == kPointerSizeLog2);

  // Probe the primary table.
  ProbeTable(isolate(), masm, flags, kPrimary, name, receiver, offset, extra);

  // Primary miss: Compute hash for secondary probe.
  __ mov(offset, FieldOperand(name, Name::kHashFieldOffset));
  __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset));
  __ xor_(offset, flags);
  __ and_(offset, (kPrimaryTableSize - 1) << kHeapObjectTagSize);
  __ sub(offset, name);
  __ add(offset, Immediate(flags));
  __ and_(offset, (kSecondaryTableSize - 1) << kHeapObjectTagSize);

  // Probe the secondary table.
  ProbeTable(
      isolate(), masm, flags, kSecondary, name, receiver, offset, extra);

  // Cache miss: Fall-through and let caller handle the miss by
  // entering the runtime system.
  __ bind(&miss);
  __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
}


void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
                                                       int index,
                                                       Register prototype) {
  __ LoadGlobalFunction(index, prototype);
  __ LoadGlobalFunctionInitialMap(prototype, prototype);
  // Load the prototype from the initial map.
  __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm,
    int index,
    Register prototype,
    Label* miss) {
  // Get the global function with the given index.
  Handle<JSFunction> function(
      JSFunction::cast(masm->isolate()->native_context()->get(index)));
  // Check we're still in the same context.
  Register scratch = prototype;
  const int offset = Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX);
  __ mov(scratch, Operand(esi, offset));
  __ mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
  __ cmp(Operand(scratch, Context::SlotOffset(index)), function);
  __ j(not_equal, miss);

  // Load its initial map. The global functions all have initial maps.
  __ Move(prototype, Immediate(Handle<Map>(function->initial_map())));
  // Load the prototype from the initial map.
  __ mov(prototype, FieldOperand(prototype, Map::kPrototypeOffset));
}


void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
                                           Register receiver,
                                           Register scratch,
                                           Label* miss_label) {
  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss_label);

  // Check that the object is a JS array.
  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, miss_label);

  // Load length directly from the JS array.
  __ mov(eax, FieldOperand(receiver, JSArray::kLengthOffset));
  __ ret(0);
}


void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
                                                 Register receiver,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(eax, scratch1);
  __ ret(0);
}


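// Loads a tagged field either from the object itself (in-object slot) or,
// when |inobject| is false, from its out-of-line properties FixedArray;
// |index| is the field's index within whichever backing store applies.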
void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
                                            Register dst,
                                            Register src,
                                            bool inobject,
                                            int index,
                                            Representation representation) {
  ASSERT(!representation.IsDouble());
  int offset = index * kPointerSize;
  if (!inobject) {
    // Calculate the offset into the properties array.
    offset = offset + FixedArray::kHeaderSize;
    __ mov(dst, FieldOperand(src, JSObject::kPropertiesOffset));
    src = dst;
  }
  __ mov(dst, FieldOperand(src, offset));
}


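// Pushes the four interceptor-call arguments in the order asserted below
// (name, interceptor info, receiver, holder). Note that |name| doubles as
// the scratch register for the interceptor-info immediate, so its register
// is clobbered after its own push.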
static void PushInterceptorArguments(MacroAssembler* masm,
                                     Register receiver,
                                     Register holder,
                                     Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
  STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
  STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
  STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
  __ push(name);
  Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
  ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
  Register scratch = name;
  __ mov(scratch, Immediate(interceptor));
  __ push(scratch);
  __ push(receiver);
  __ push(holder);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm,
    Register receiver,
    Register holder,
    Register name,
    Handle<JSObject> holder_obj,
    IC::UtilityId id) {
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallExternalReference(
      ExternalReference(IC_Utility(id), masm->isolate()),
      StubCache::kInterceptorArgsLength);
}


// Generate call to api function.
// This function uses push() to generate smaller, faster code than
// the version above. It is an optimization that will be removed
// when api call ICs are generated in hydrogen.
void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
                                       const CallOptimization& optimization,
                                       Handle<Map> receiver_map,
                                       Register receiver,
                                       Register scratch_in,
                                       bool is_store,
                                       int argc,
                                       Register* values) {
  // Copy return value.
  __ pop(scratch_in);
  // receiver
  __ push(receiver);
  // Write the arguments to stack frame.
  for (int i = 0; i < argc; i++) {
    Register arg = values[argc - 1 - i];
    ASSERT(!receiver.is(arg));
    ASSERT(!scratch_in.is(arg));
    __ push(arg);
  }
  __ push(scratch_in);
  // Stack now matches JSFunction abi.
  ASSERT(optimization.is_simple_api_call());

  // Abi for CallApiFunctionStub.
  Register callee = eax;
  Register call_data = ebx;
  Register holder = ecx;
  Register api_function_address = edx;
  Register scratch = edi;  // scratch_in is no longer valid.

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  Handle<JSObject> api_holder = optimization.LookupHolderOfExpectedType(
      receiver_map,
      &holder_lookup);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ LoadHeapObject(holder, api_holder);
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<JSFunction> function = optimization.constant_function();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  Handle<Object> call_data_obj(api_call_info->data(), isolate);

  // Put callee in place.
  __ LoadHeapObject(callee, function);

  bool call_data_undefined = false;
  // Put call_data in place.
  if (isolate->heap()->InNewSpace(*call_data_obj)) {
    __ mov(scratch, api_call_info);
    __ mov(call_data, FieldOperand(scratch, CallHandlerInfo::kDataOffset));
  } else if (call_data_obj->IsUndefined()) {
    call_data_undefined = true;
    __ mov(call_data, Immediate(isolate->factory()->undefined_value()));
  } else {
    __ mov(call_data, call_data_obj);
  }

  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  __ mov(api_function_address, Immediate(function_address));

  // Jump to stub.
  CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
  __ TailCallStub(&stub);
}


void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
                                            Label* label,
                                            Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ mov(this->name(), Immediate(name));
  }
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
                                             Handle<JSGlobalObject> global,
                                             Handle<Name> name,
                                             Register scratch,
                                             Label* miss) {
  Handle<PropertyCell> cell =
      JSGlobalObject::EnsurePropertyCell(global, name);
  ASSERT(cell->value()->IsTheHole());
  Handle<Oddball> the_hole = masm->isolate()->factory()->the_hole_value();
  if (masm->serializer_enabled()) {
    __ mov(scratch, Immediate(cell));
    __ cmp(FieldOperand(scratch, PropertyCell::kValueOffset),
           Immediate(the_hole));
  } else {
    __ cmp(Operand::ForCell(cell), Immediate(the_hole));
  }
  __ j(not_equal, miss);
}


void StoreStubCompiler::GenerateNegativeHolderLookup(
    MacroAssembler* masm,
    Handle<JSObject> holder,
    Register holder_reg,
    Handle<Name> name,
    Label* miss) {
  if (holder->IsJSGlobalObject()) {
    GenerateCheckPropertyCell(
        masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
  } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
    GenerateDictionaryNegativeLookup(
        masm, miss, holder_reg, name, scratch1(), scratch2());
  }
}


// Receiver_reg is preserved on jumps to miss_label, but may be destroyed if
// store is successful.
void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
                                                Handle<JSObject> object,
                                                LookupResult* lookup,
                                                Handle<Map> transition,
                                                Handle<Name> name,
                                                Register receiver_reg,
                                                Register storage_reg,
                                                Register value_reg,
                                                Register scratch1,
                                                Register scratch2,
                                                Register unused,
                                                Label* miss_label,
                                                Label* slow) {
  int descriptor = transition->LastAdded();
  DescriptorArray* descriptors = transition->instance_descriptors();
  PropertyDetails details = descriptors->GetDetails(descriptor);
  Representation representation = details.representation();
  ASSERT(!representation.IsNone());

  if (details.type() == CONSTANT) {
    Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
    __ CmpObject(value_reg, constant);
    __ j(not_equal, miss_label);
  } else if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
    HeapType* field_type = descriptors->GetFieldType(descriptor);
    HeapType::Iterator<Map> it = field_type->Classes();
    if (!it.Done()) {
      Label do_store;
      while (true) {
        __ CompareMap(value_reg, it.Current());
        it.Advance();
        if (it.Done()) {
          __ j(not_equal, miss_label);
          break;
        }
        __ j(equal, &do_store, Label::kNear);
      }
      __ bind(&do_store);
    }
  } else if (representation.IsDouble()) {
    Label do_store, heap_number;
    __ AllocateHeapNumber(storage_reg, scratch1, scratch2, slow);

    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(value_reg);
    __ Cvtsi2sd(xmm0, value_reg);
    __ SmiTag(value_reg);
    __ jmp(&do_store);

    __ bind(&heap_number);
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));

    __ bind(&do_store);
    __ movsd(FieldOperand(storage_reg, HeapNumber::kValueOffset), xmm0);
  }

  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  // Perform map transition for the receiver if necessary.
  if (details.type() == FIELD &&
      object->map()->unused_property_fields() == 0) {
    // The properties must be extended before we can store the value.
    // We jump to a runtime call that extends the properties array.
    __ pop(scratch1);  // Return address.
    __ push(receiver_reg);
    __ push(Immediate(transition));
    __ push(value_reg);
    __ push(scratch1);
    __ TailCallExternalReference(
        ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
                          masm->isolate()),
        3,
        1);
    return;
  }

  // Update the map of the object.
  __ mov(scratch1, Immediate(transition));
  __ mov(FieldOperand(receiver_reg, HeapObject::kMapOffset), scratch1);

  // Update the write barrier for the map field.
  __ RecordWriteField(receiver_reg,
                      HeapObject::kMapOffset,
                      scratch1,
                      scratch2,
                      kDontSaveFPRegs,
                      OMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);

  if (details.type() == CONSTANT) {
    ASSERT(value_reg.is(eax));
    __ ret(0);
    return;
  }

  int index = transition->instance_descriptors()->GetFieldIndex(
      transition->LastAdded());

  // Adjust for the number of properties stored in the object. Even in the
  // face of a transition we can use the old map here because the size of the
  // object and the number of in-object properties is not going to change.
  index -= object->map()->inobject_properties();

  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  // TODO(verwaest): Share this code as a code stub.
  if (index < 0) {
    // Set the property straight into the object.
    int offset = object->map()->instance_size() + (index * kPointerSize);
    if (representation.IsDouble()) {
      __ mov(FieldOperand(receiver_reg, offset), storage_reg);
    } else {
      __ mov(FieldOperand(receiver_reg, offset), value_reg);
    }

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(receiver_reg,
                          offset,
                          storage_reg,
                          scratch1,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    int offset = index * kPointerSize + FixedArray::kHeaderSize;
    // Get the properties array (optimistically).
    __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    if (representation.IsDouble()) {
      __ mov(FieldOperand(scratch1, offset), storage_reg);
    } else {
      __ mov(FieldOperand(scratch1, offset), value_reg);
    }

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      if (!representation.IsDouble()) {
        __ mov(storage_reg, value_reg);
      }
      __ RecordWriteField(scratch1,
                          offset,
                          storage_reg,
                          receiver_reg,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register eax).
  ASSERT(value_reg.is(eax));
  __ ret(0);
}


// Both name_reg and receiver_reg are preserved on jumps to miss_label,
// but may be destroyed if store is successful.
void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
                                           Handle<JSObject> object,
                                           LookupResult* lookup,
                                           Register receiver_reg,
                                           Register name_reg,
                                           Register value_reg,
                                           Register scratch1,
                                           Register scratch2,
                                           Label* miss_label) {
  // Stub never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  FieldIndex index = lookup->GetFieldIndex();

  Representation representation = lookup->representation();
  ASSERT(!representation.IsNone());
  if (representation.IsSmi()) {
    __ JumpIfNotSmi(value_reg, miss_label);
  } else if (representation.IsHeapObject()) {
    __ JumpIfSmi(value_reg, miss_label);
    HeapType* field_type = lookup->GetFieldType();
    HeapType::Iterator<Map> it = field_type->Classes();
    if (!it.Done()) {
      Label do_store;
      while (true) {
        __ CompareMap(value_reg, it.Current());
        it.Advance();
        if (it.Done()) {
          __ j(not_equal, miss_label);
          break;
        }
        __ j(equal, &do_store, Label::kNear);
      }
      __ bind(&do_store);
    }
  } else if (representation.IsDouble()) {
    // Load the double storage.
    if (index.is_inobject()) {
      __ mov(scratch1, FieldOperand(receiver_reg, index.offset()));
    } else {
      __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
      __ mov(scratch1, FieldOperand(scratch1, index.offset()));
    }

    // Store the value into the storage.
    Label do_store, heap_number;
    __ JumpIfNotSmi(value_reg, &heap_number);
    __ SmiUntag(value_reg);
    __ Cvtsi2sd(xmm0, value_reg);
    __ SmiTag(value_reg);
    __ jmp(&do_store);
    __ bind(&heap_number);
    __ CheckMap(value_reg, masm->isolate()->factory()->heap_number_map(),
                miss_label, DONT_DO_SMI_CHECK);
    __ movsd(xmm0, FieldOperand(value_reg, HeapNumber::kValueOffset));
    __ bind(&do_store);
    __ movsd(FieldOperand(scratch1, HeapNumber::kValueOffset), xmm0);
    // Return the value (register eax).
    ASSERT(value_reg.is(eax));
    __ ret(0);
    return;
  }

  ASSERT(!representation.IsDouble());
  // TODO(verwaest): Share this code as a code stub.
  SmiCheck smi_check = representation.IsTagged()
      ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
  if (index.is_inobject()) {
    // Set the property straight into the object.
    __ mov(FieldOperand(receiver_reg, index.offset()), value_reg);

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      // Pass the value being stored in the now unused name_reg.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(receiver_reg,
                          index.offset(),
                          name_reg,
                          scratch1,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  } else {
    // Write to the properties array.
    // Get the properties array (optimistically).
    __ mov(scratch1, FieldOperand(receiver_reg, JSObject::kPropertiesOffset));
    __ mov(FieldOperand(scratch1, index.offset()), value_reg);

    if (!representation.IsSmi()) {
      // Update the write barrier for the array address.
      // Pass the value being stored in the now unused name_reg.
      __ mov(name_reg, value_reg);
      __ RecordWriteField(scratch1,
                          index.offset(),
                          name_reg,
                          receiver_reg,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          smi_check);
    }
  }

  // Return the value (register eax).
  ASSERT(value_reg.is(eax));
  __ ret(0);
}


void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
  __ jmp(code, RelocInfo::CODE_TARGET);
}


#undef __
#define __ ACCESS_MASM(masm())


Register StubCompiler::CheckPrototypes(Handle<HeapType> type,
                                       Register object_reg,
                                       Handle<JSObject> holder,
                                       Register holder_reg,
                                       Register scratch1,
                                       Register scratch2,
                                       Handle<Name> name,
                                       Label* miss,
                                       PrototypeCheckType check) {
  Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));

  // Make sure there's no overlap between holder and object registers.
  ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
         && !scratch2.is(scratch1));

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (type->IsConstant())
    current = Handle<JSObject>::cast(type->AsConstant()->Value());
  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    ASSERT(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap() &&
        !current_map->IsJSGlobalProxyMap()) {
      if (!name->IsUniqueName()) {
        ASSERT(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      ASSERT(current.is_null() ||
             current->property_dictionary()->FindEntry(name) ==
             NameDictionary::kNotFound);

      GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
                                       scratch1, scratch2);

      __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      reg = holder_reg;  // From now on the object will be in holder_reg.
      __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
    } else {
      bool in_new_space = heap()->InNewSpace(*prototype);
      if (depth != 1 || check == CHECK_ALL_MAPS) {
        __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
      }

      // Check access rights to the global object. This has to happen after
      // the map check so that we know that the object is actually a global
      // object.
      if (current_map->IsJSGlobalProxyMap()) {
        __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
      } else if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(
            masm(), Handle<JSGlobalObject>::cast(current), name,
            scratch2, miss);
      }

      if (in_new_space) {
        // Save the map in scratch1 for later.
        __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset));
      }

      reg = holder_reg;  // From now on the object will be in holder_reg.

      if (in_new_space) {
        // The prototype is in new space; we cannot store a reference to it
        // in the code. Load it from the map.
        __ mov(reg, FieldOperand(scratch1, Map::kPrototypeOffset));
      } else {
        // The prototype is in old space; load it directly.
        __ mov(reg, prototype);
      }
    }

    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (depth != 0 || check == CHECK_ALL_MAPS) {
    // Check the holder map.
    __ CheckMap(reg, current_map, miss, DONT_DO_SMI_CHECK);
  }

  // Perform security check for access to the global object.
  ASSERT(current_map->IsJSGlobalProxyMap() ||
         !current_map->is_access_check_needed());
  if (current_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch1, scratch2, miss);
  }

  // Return the register containing the holder.
  return reg;
}


void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ jmp(&success);
    __ bind(miss);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ jmp(&success);
    GenerateRestoreName(masm(), miss, name);
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


Register LoadStubCompiler::CallbackHandlerFrontend(
    Handle<HeapType> type,
    Register object_reg,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<Object> callback) {
  Label miss;

  Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);

  if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
    ASSERT(!reg.is(scratch2()));
    ASSERT(!reg.is(scratch3()));
    Register dictionary = scratch1();
    bool must_preserve_dictionary_reg = reg.is(dictionary);

    // Load the properties dictionary.
    if (must_preserve_dictionary_reg) {
      __ push(dictionary);
    }
    __ mov(dictionary, FieldOperand(reg, JSObject::kPropertiesOffset));

    // Probe the dictionary.
    Label probe_done, pop_and_miss;
    NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
                                                     &pop_and_miss,
                                                     &probe_done,
                                                     dictionary,
                                                     this->name(),
                                                     scratch2(),
                                                     scratch3());
    __ bind(&pop_and_miss);
    if (must_preserve_dictionary_reg) {
      __ pop(dictionary);
    }
    __ jmp(&miss);
    __ bind(&probe_done);

    // If probing finds an entry in the dictionary, scratch2 contains the
    // index into the dictionary. Check that the value is the callback.
    Register index = scratch2();
    const int kElementsStartOffset =
        NameDictionary::kHeaderSize +
        NameDictionary::kElementsStartIndex * kPointerSize;
    const int kValueOffset = kElementsStartOffset + kPointerSize;
    __ mov(scratch3(),
           Operand(dictionary, index, times_4, kValueOffset - kHeapObjectTag));
    if (must_preserve_dictionary_reg) {
      __ pop(dictionary);
    }
    __ cmp(scratch3(), callback);
    __ j(not_equal, &miss);
  }

  HandlerFrontendFooter(name, &miss);
  return reg;
}


void LoadStubCompiler::GenerateLoadField(Register reg,
                                         Handle<JSObject> holder,
                                         FieldIndex field,
                                         Representation representation) {
  if (!reg.is(receiver())) __ mov(receiver(), reg);
  if (kind() == Code::LOAD_IC) {
    LoadFieldStub stub(isolate(), field);
    GenerateTailCall(masm(), stub.GetCode());
  } else {
    KeyedLoadFieldStub stub(isolate(), field);
    GenerateTailCall(masm(), stub.GetCode());
  }
}


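// Builds the PropertyCallbackArguments frame on the stack below the saved
// return address, in the k*Index order asserted below, then tail-calls
// CallApiGetterStub. After the pushes, esp points at the holder slot; that
// pointer is itself pushed so the C++ callback receives it as the
// const PropertyAccessorInfo& argument.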
void LoadStubCompiler::GenerateLoadCallback(
    Register reg,
    Handle<ExecutableAccessorInfo> callback) {
  // Insert additional parameters into the stack frame above return address.
  ASSERT(!scratch3().is(reg));
  __ pop(scratch3());  // Get return address to place it below.

  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
  __ push(receiver());  // receiver
  // Push data from ExecutableAccessorInfo.
  if (isolate()->heap()->InNewSpace(callback->data())) {
    ASSERT(!scratch2().is(reg));
    __ mov(scratch2(), Immediate(callback));
    __ push(FieldOperand(scratch2(), ExecutableAccessorInfo::kDataOffset));
  } else {
    __ push(Immediate(Handle<Object>(callback->data(), isolate())));
  }
  __ push(Immediate(isolate()->factory()->undefined_value()));  // ReturnValue
  // ReturnValue default value
  __ push(Immediate(isolate()->factory()->undefined_value()));
  __ push(Immediate(reinterpret_cast<int>(isolate())));
  __ push(reg);  // holder

  // Save a pointer to where we pushed the arguments. This will be
  // passed as the const PropertyAccessorInfo& to the C++ callback.
  __ push(esp);

  __ push(name());  // name

  __ push(scratch3());  // Restore return address.

  // Abi for CallApiGetter
  Register getter_address = edx;
  Address function_address = v8::ToCData<Address>(callback->getter());
  __ mov(getter_address, Immediate(function_address));

  CallApiGetterStub stub(isolate());
  __ TailCallStub(&stub);
}


void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ LoadObject(eax, value);
  __ ret(0);
}


void LoadStubCompiler::GenerateLoadInterceptor(
    Register holder_reg,
    Handle<Object> object,
    Handle<JSObject> interceptor_holder,
    LookupResult* lookup,
    Handle<Name> name) {
  ASSERT(interceptor_holder->HasNamedInterceptor());
  ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());

  // So far the most popular follow ups for interceptor loads are FIELD
  // and CALLBACKS, so inline only them; other cases may be added
  // later.
  bool compile_followup_inline = false;
  if (lookup->IsFound() && lookup->IsCacheable()) {
    if (lookup->IsField()) {
      compile_followup_inline = true;
    } else if (lookup->type() == CALLBACKS &&
               lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
      ExecutableAccessorInfo* callback =
          ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
      compile_followup_inline = callback->getter() != NULL &&
          callback->IsCompatibleReceiver(*object);
    }
  }

  if (compile_followup_inline) {
    // Compile the interceptor call, followed by inline code to load the
    // property from further up the prototype chain if the call fails.
    // Check that the maps haven't changed.
    ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

    // Preserve the receiver register explicitly whenever it is different from
    // the holder and it is needed should the interceptor return without any
    // result. The CALLBACKS case needs the receiver to be passed into C++ code,
    // the FIELD case might cause a miss during the prototype check.
    bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
    bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
        (lookup->type() == CALLBACKS || must_perform_prototype_check);

    // Save necessary data before invoking an interceptor.
    // Requires a frame to make GC aware of pushed pointers.
    {
      FrameScope frame_scope(masm(), StackFrame::INTERNAL);

      if (must_preserve_receiver_reg) {
        __ push(receiver());
      }
      __ push(holder_reg);
      __ push(this->name());

      // Invoke an interceptor. Note: map checks from receiver to
      // interceptor's holder have been compiled before (see a caller
      // of this method.)
      CompileCallLoadPropertyWithInterceptor(
          masm(), receiver(), holder_reg, this->name(), interceptor_holder,
          IC::kLoadPropertyWithInterceptorOnly);

      // Check if interceptor provided a value for property. If it's
      // the case, return immediately.
      Label interceptor_failed;
      __ cmp(eax, factory()->no_interceptor_result_sentinel());
      __ j(equal, &interceptor_failed);
      frame_scope.GenerateLeaveFrame();
      __ ret(0);

      // Clobber registers when generating debug-code to provoke errors.
      __ bind(&interceptor_failed);
      if (FLAG_debug_code) {
        __ mov(receiver(), Immediate(BitCast<int32_t>(kZapValue)));
        __ mov(holder_reg, Immediate(BitCast<int32_t>(kZapValue)));
        __ mov(this->name(), Immediate(BitCast<int32_t>(kZapValue)));
      }

      __ pop(this->name());
      __ pop(holder_reg);
      if (must_preserve_receiver_reg) {
        __ pop(receiver());
      }

      // Leave the internal frame.
    }

    GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
  } else {  // !compile_followup_inline
    // Call the runtime system to load the interceptor.
    // Check that the maps haven't changed.
    __ pop(scratch2());  // save old return address
    PushInterceptorArguments(masm(), receiver(), holder_reg,
                             this->name(), interceptor_holder);
    __ push(scratch2());  // restore old return address

    ExternalReference ref =
        ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptor),
                          isolate());
    __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
  }
}


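// The five pushes below (receiver, holder, callback, name, value) match the
// argument count passed to TailCallExternalReference for the
// kStoreCallbackProperty runtime entry.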
Handle<Code> StoreStubCompiler::CompileStoreCallback(
    Handle<JSObject> object,
    Handle<JSObject> holder,
    Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  Register holder_reg = HandlerFrontend(
      IC::CurrentTypeOf(object, isolate()), receiver(), holder, name);

  __ pop(scratch1());  // remove the return address
  __ push(receiver());
  __ push(holder_reg);
  __ Push(callback);
  __ Push(name);
  __ push(value());
  __ push(scratch1());  // restore return address

  // Do tail-call to the runtime system.
  ExternalReference store_callback_property =
      ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
  __ TailCallExternalReference(store_callback_property, 5, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


#undef __
#define __ ACCESS_MASM(masm)


void StoreStubCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm,
    Handle<HeapType> type,
    Register receiver,
    Handle<JSFunction> setter) {
  // ----------- S t a t e -------------
  //  -- esp[0] : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save value register, so we can restore it later.
    __ push(value());

    if (!setter.is_null()) {
      // Call the JavaScript setter with receiver and value on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ mov(receiver,
               FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
      }
      __ push(receiver);
      __ push(value());
      ParameterCount actual(1);
      ParameterCount expected(setter);
      __ InvokeFunction(setter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ pop(eax);

    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  }
  __ ret(0);
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> object,
    Handle<Name> name) {
  __ pop(scratch1());  // remove the return address
  __ push(receiver());
  __ push(this->name());
  __ push(value());
  __ push(scratch1());  // restore return address

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 3, 1);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


void StoreStubCompiler::GenerateStoreArrayLength() {
  // Prepare tail call to StoreIC_ArrayLength.
  __ pop(scratch1());  // remove the return address
  __ push(receiver());
  __ push(value());
  __ push(scratch1());  // restore return address

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength),
                        masm()->isolate());
  __ TailCallExternalReference(ref, 2, 1);
}


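// Dispatches on the receiver's map: each known map either jumps directly to
// its handler stub or, when a transition map is supplied, loads that map
// into transition_map() before jumping. Unknown maps fall through to the
// miss builtin.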
Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  Label miss;
  __ JumpIfSmi(receiver(), &miss, Label::kNear);
  __ mov(scratch1(), FieldOperand(receiver(), HeapObject::kMapOffset));
  for (int i = 0; i < receiver_maps->length(); ++i) {
    __ cmp(scratch1(), receiver_maps->at(i));
    if (transitioned_maps->at(i).is_null()) {
      __ j(equal, handler_stubs->at(i));
    } else {
      Label next_map;
      __ j(not_equal, &next_map, Label::kNear);
      __ mov(transition_map(), Immediate(transitioned_maps->at(i)));
      __ jmp(handler_stubs->at(i), RelocInfo::CODE_TARGET);
      __ bind(&next_map);
    }
  }
  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetICCode(
      kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
}


Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<HeapType> type,
                                                      Handle<JSObject> last,
                                                      Handle<Name> name) {
  NonexistentHandlerFrontend(type, last, name);

  // Return undefined if maps of the full prototype chain are still the
  // same and no global property with this name contains a value.
  __ mov(eax, isolate()->factory()->undefined_value());
  __ ret(0);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Register* LoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { edx, ecx, ebx, eax, edi, no_reg };
  return registers;
}


Register* KeyedLoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { edx, ecx, ebx, eax, edi, no_reg };
  return registers;
}


Register StoreStubCompiler::value() {
  return eax;
}


Register* StoreStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3.
  static Register registers[] = { edx, ecx, ebx, edi, no_reg };
  return registers;
}


Register* KeyedStoreStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3.
  static Register registers[] = { edx, ecx, ebx, edi, no_reg };
  return registers;
}


#undef __
#define __ ACCESS_MASM(masm)


void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Handle<HeapType> type,
                                             Register receiver,
                                             Handle<JSFunction> getter) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      if (IC::TypeToMap(*type, masm->isolate())->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ mov(receiver,
               FieldOperand(receiver, JSGlobalObject::kGlobalReceiverOffset));
      }
      __ push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(getter);
      __ InvokeFunction(getter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  }
  __ ret(0);
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<HeapType> type,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    Handle<Name> name,
    bool is_dont_delete) {
  Label miss;

  HandlerFrontendHeader(type, receiver(), global, name, &miss);
  // Get the value from the cell.
  if (masm()->serializer_enabled()) {
    __ mov(eax, Immediate(cell));
    __ mov(eax, FieldOperand(eax, PropertyCell::kValueOffset));
  } else {
    __ mov(eax, Operand::ForCell(cell));
  }

  // Check for deleted property if property can actually be deleted.
  if (!is_dont_delete) {
    __ cmp(eax, factory()->the_hole_value());
    __ j(equal, &miss);
  } else if (FLAG_debug_code) {
    __ cmp(eax, factory()->the_hole_value());
    __ Check(not_equal, kDontDeleteCellsCannotContainTheHole);
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1);
  // The code above already loads the result into the return register.
  __ ret(0);

  HandlerFrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}


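// Compares the receiver's map against each non-deprecated map in |types| and
// jumps to the matching handler; when a Number type is present, smis are
// routed to that same handler via the number_case label. Deprecated maps are
// skipped, so they take the miss path.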
Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
    TypeHandleList* types,
    CodeHandleList* handlers,
    Handle<Name> name,
    Code::StubType type,
    IcCheckType check) {
  Label miss;

  if (check == PROPERTY &&
      (kind() == Code::KEYED_LOAD_IC || kind() == Code::KEYED_STORE_IC)) {
    __ cmp(this->name(), Immediate(name));
    __ j(not_equal, &miss);
  }

  Label number_case;
  Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
  __ JumpIfSmi(receiver(), smi_target);

  Register map_reg = scratch1();
  __ mov(map_reg, FieldOperand(receiver(), HeapObject::kMapOffset));
  int receiver_count = types->length();
  int number_of_handled_maps = 0;
  for (int current = 0; current < receiver_count; ++current) {
    Handle<HeapType> type = types->at(current);
    Handle<Map> map = IC::TypeToMap(*type, isolate());
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      __ cmp(map_reg, map);
      if (type->Is(HeapType::Number())) {
        ASSERT(!number_case.is_unused());
        __ bind(&number_case);
      }
      __ j(equal, handlers->at(current));
    }
  }
  ASSERT(number_of_handled_maps != 0);

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  InlineCacheState state =
      number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
  return GetICCode(kind(), type, name, state);
}


#undef __
#define __ ACCESS_MASM(masm)


void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  Label slow, miss;

  // This stub is meant to be tail-jumped to, the receiver must already
  // have been verified by the caller to not be a smi.
  __ JumpIfNotSmi(ecx, &miss);
  __ mov(ebx, ecx);
  __ SmiUntag(ebx);
  __ mov(eax, FieldOperand(edx, JSObject::kElementsOffset));

  // Push receiver on the stack to free up a register for the dictionary
  // probing.
  __ push(edx);
  __ LoadFromNumberDictionary(&slow, eax, ecx, ebx, edx, edi, eax);
  // Pop receiver before returning.
  __ pop(edx);
  __ ret(0);

  __ bind(&slow);
  __ pop(edx);

  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  __ bind(&miss);
  // ----------- S t a t e -------------
  //  -- ecx    : key
  //  -- edx    : receiver
  //  -- esp[0] : return address
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32