// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X87

#include "src/codegen.h"
#include "src/ic/ic.h"
#include "src/ic/ic-compiler.h"
#include "src/ic/stub-cache.h"

namespace v8 {
namespace internal {

// ----------------------------------------------------------------------------
// Static IC stub generators.
//

#define __ ACCESS_MASM(masm)


static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm, Register type,
                                            Label* global_object) {
  // Register usage:
  //   type: holds the receiver instance type on entry.
  __ cmp(type, JS_GLOBAL_OBJECT_TYPE);
  __ j(equal, global_object);
  __ cmp(type, JS_GLOBAL_PROXY_TYPE);
  __ j(equal, global_object);
}


// Helper function used to load a property from a dictionary backing
// storage. This function may fail to load a property even though it is
// in the dictionary, so code at miss_label must always call a backup
// property load that is complete. This function is safe to call if
// name is not internalized, and will jump to the miss_label in that
// case. The generated code assumes that the receiver has slow
// properties, is not a global object and does not have interceptors.
static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss_label,
                                   Register elements, Register name,
                                   Register r0, Register r1, Register result) {
  // Register use:
  //
  // elements - holds the property dictionary on entry and is unchanged.
  //
  // name - holds the name of the property on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0   - used for the index into the property dictionary
  //
  // r1   - used to hold the capacity of the property dictionary.
  //
  // result - holds the result on exit.

  Label done;

  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm, miss_label, &done,
                                                   elements, name, r0, r1);

  // If probing finds an entry in the dictionary, r0 contains the
  // index into the dictionary. Check that the value is a normal
  // property.
  __ bind(&done);
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
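  // The details word is stored as a smi, which is why the type-field mask is
  // shifted left by kSmiTagSize below; kHeapObjectTag is subtracted because
  // 'elements' is a tagged pointer rather than a raw address.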
  __ test(Operand(elements, r0, times_4, kDetailsOffset - kHeapObjectTag),
          Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  __ j(not_zero, miss_label);

  // Get the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ mov(result, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag));
}


// Helper function used to store a property to a dictionary backing
// storage. This function may fail to store a property even though it
// is in the dictionary, so code at miss_label must always call a
// backup property store that is complete. This function is safe to
// call if name is not internalized, and will jump to the miss_label in
// that case. The generated code assumes that the receiver has slow
// properties, is not a global object and does not have interceptors.
static void GenerateDictionaryStore(MacroAssembler* masm, Label* miss_label,
                                    Register elements, Register name,
                                    Register value, Register r0, Register r1) {
  // Register use:
  //
  // elements - holds the property dictionary on entry and is clobbered.
  //
  // name - holds the name of the property on entry and is unchanged.
  //
  // value - holds the value to store and is unchanged.
  //
  // r0 - used for index into the property dictionary and is clobbered.
  //
  // r1 - used to hold the capacity of the property dictionary and is clobbered.
  Label done;


  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm, miss_label, &done,
                                                   elements, name, r0, r1);

  // If probing finds an entry in the dictionary, r0 contains the
  // index into the dictionary. Check that the value is a normal
  // property that is not read only.
  __ bind(&done);
  const int kElementsStartOffset =
      NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  const int kTypeAndReadOnlyMask =
      (PropertyDetails::TypeField::kMask |
       PropertyDetails::AttributesField::encode(READ_ONLY))
      << kSmiTagSize;
  __ test(Operand(elements, r0, times_4, kDetailsOffset - kHeapObjectTag),
          Immediate(kTypeAndReadOnlyMask));
  __ j(not_zero, miss_label);

  // Store the value at the masked, scaled index.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ lea(r0, Operand(elements, r0, times_4, kValueOffset - kHeapObjectTag));
  __ mov(Operand(r0, 0), value);

  // Update write barrier. Make sure not to clobber the value.
  __ mov(r1, value);
  __ RecordWrite(elements, r0, r1, kDontSaveFPRegs);
}


// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS objects.
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver, Register map,
                                           int interceptor_bit, Label* slow) {
  // Register use:
  //   receiver - holds the receiver and is unchanged.
  // Scratch registers:
  //   map - used to hold the map of the receiver.

  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);

  // Get the map of the receiver.
  __ mov(map, FieldOperand(receiver, HeapObject::kMapOffset));

  // Check bit field.
  __ test_b(
      FieldOperand(map, Map::kBitFieldOffset),
      Immediate((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit)));
  __ j(not_zero, slow);
  // Check that the object is some kind of JS object EXCEPT JS Value type. In
  // the case that the object is a value-wrapper object, we enter the runtime
  // system to make sure that indexing into string objects works as intended.
  DCHECK(JS_OBJECT_TYPE > JS_VALUE_TYPE);

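  // Instance types below JS_OBJECT_TYPE (including JS_VALUE_TYPE) take the
  // slow path.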
  __ CmpInstanceType(map, JS_OBJECT_TYPE);
  __ j(below, slow);
}


// Loads an indexed element from a fast case array.
static void GenerateFastArrayLoad(MacroAssembler* masm, Register receiver,
                                  Register key, Register scratch,
                                  Register scratch2, Register result,
                                  Label* slow) {
  // Register use:
  //   receiver - holds the receiver and is unchanged.
  //   key - holds the key and is unchanged (must be a smi).
  // Scratch registers:
  //   scratch - used to hold elements of the receiver and the loaded value.
  //   scratch2 - holds maps and prototypes during prototype chain check.
  //   result - holds the result on exit if the load succeeds and
  //            we fall through.
  Label check_prototypes, check_next_prototype;
  Label done, in_bounds, absent;

  __ mov(scratch, FieldOperand(receiver, JSObject::kElementsOffset));
  __ AssertFastElements(scratch);

  // Check that the key (index) is within bounds.
  __ cmp(key, FieldOperand(scratch, FixedArray::kLengthOffset));
  __ j(below, &in_bounds);
  // Out-of-bounds. Check the prototype chain to see if we can just return
  // 'undefined'.
  __ cmp(key, 0);
  __ j(less, slow);  // Negative keys can't take the fast OOB path.
  __ bind(&check_prototypes);
  __ mov(scratch2, FieldOperand(receiver, HeapObject::kMapOffset));
  __ bind(&check_next_prototype);
  __ mov(scratch2, FieldOperand(scratch2, Map::kPrototypeOffset));
  // scratch2: current prototype
  __ cmp(scratch2, masm->isolate()->factory()->null_value());
  __ j(equal, &absent);
  __ mov(scratch, FieldOperand(scratch2, JSObject::kElementsOffset));
  __ mov(scratch2, FieldOperand(scratch2, HeapObject::kMapOffset));
  // scratch: elements of current prototype
  // scratch2: map of current prototype
  __ CmpInstanceType(scratch2, JS_OBJECT_TYPE);
  __ j(below, slow);
  __ test_b(FieldOperand(scratch2, Map::kBitFieldOffset),
            Immediate((1 << Map::kIsAccessCheckNeeded) |
                      (1 << Map::kHasIndexedInterceptor)));
  __ j(not_zero, slow);
  __ cmp(scratch, masm->isolate()->factory()->empty_fixed_array());
  __ j(not_equal, slow);
  __ jmp(&check_next_prototype);

  __ bind(&absent);
  __ mov(result, masm->isolate()->factory()->undefined_value());
  __ jmp(&done);

  __ bind(&in_bounds);
  // Fast case: Do the load.
  STATIC_ASSERT((kPointerSize == 4) && (kSmiTagSize == 1) && (kSmiTag == 0));
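  // The key is a smi (index << kSmiTagSize), so times_2 scaling yields
  // index * kPointerSize, as guaranteed by the STATIC_ASSERT above.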
  __ mov(scratch, FieldOperand(scratch, key, times_2, FixedArray::kHeaderSize));
  __ cmp(scratch, Immediate(masm->isolate()->factory()->the_hole_value()));
  // In case the loaded value is the_hole we have to check the prototype chain.
  __ j(equal, &check_prototypes);
  __ Move(result, scratch);
  __ bind(&done);
}


// Checks whether a key is an array index string or a unique name.
// Falls through if the key is a unique name.
static void GenerateKeyNameCheck(MacroAssembler* masm, Register key,
                                 Register map, Register hash,
                                 Label* index_string, Label* not_unique) {
  // Register use:
  //   key - holds the key and is unchanged. Assumed to be non-smi.
  // Scratch registers:
  //   map - used to hold the map of the key.
  //   hash - used to hold the hash of the key.
  Label unique;
  __ CmpObjectType(key, LAST_UNIQUE_NAME_TYPE, map);
  __ j(above, not_unique);
  STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
  __ j(equal, &unique);

  // Is the string an array index, with cached numeric value?
  __ mov(hash, FieldOperand(key, Name::kHashFieldOffset));
  __ test(hash, Immediate(Name::kContainsCachedArrayIndexMask));
  __ j(zero, index_string);

  // Is the string internalized? We already know it's a string so a single
  // bit test is enough.
  STATIC_ASSERT(kNotInternalizedTag != 0);
  __ test_b(FieldOperand(map, Map::kInstanceTypeOffset),
            Immediate(kIsNotInternalizedMask));
  __ j(not_zero, not_unique);

  __ bind(&unique);
}

void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // The return address is on the stack.
  Label slow, check_name, index_smi, index_name, property_array_property;
  Label probe_dictionary, check_number_dictionary;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  DCHECK(receiver.is(edx));
  DCHECK(key.is(ecx));

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &check_name);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(masm, receiver, eax,
                                 Map::kHasIndexedInterceptor, &slow);

  // Check the receiver's map to see if it has fast elements.
  __ CheckFastElements(eax, &check_number_dictionary);

  GenerateFastArrayLoad(masm, receiver, key, eax, ebx, eax, &slow);
  Isolate* isolate = masm->isolate();
  Counters* counters = isolate->counters();
  __ IncrementCounter(counters->ic_keyed_load_generic_smi(), 1);
  __ ret(0);

  __ bind(&check_number_dictionary);
  __ mov(ebx, key);
  __ SmiUntag(ebx);
  __ mov(eax, FieldOperand(receiver, JSObject::kElementsOffset));

  // Check whether the elements object is a number dictionary.
  // ebx: untagged index
  // eax: elements
  __ CheckMap(eax, isolate->factory()->hash_table_map(), &slow,
              DONT_DO_SMI_CHECK);
  Label slow_pop_receiver;
  // Push receiver on the stack to free up a register for the dictionary
  // probing.
  __ push(receiver);
  __ LoadFromNumberDictionary(&slow_pop_receiver, eax, key, ebx, edx, edi, eax);
  // Pop receiver before returning.
  __ pop(receiver);
  __ ret(0);

  __ bind(&slow_pop_receiver);
  // Pop the receiver from the stack and jump to runtime.
  __ pop(receiver);

  __ bind(&slow);
  // Slow case: jump to runtime.
  __ IncrementCounter(counters->ic_keyed_load_generic_slow(), 1);
  GenerateRuntimeGetProperty(masm);

  __ bind(&check_name);
  GenerateKeyNameCheck(masm, key, eax, ebx, &index_name, &slow);

  GenerateKeyedLoadReceiverCheck(masm, receiver, eax, Map::kHasNamedInterceptor,
                                 &slow);

  // If the receiver is a fast-case object, check the stub cache. Otherwise
  // probe the dictionary.
  __ mov(ebx, FieldOperand(receiver, JSObject::kPropertiesOffset));
  __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
         Immediate(isolate->factory()->hash_table_map()));
  __ j(equal, &probe_dictionary);

  // The handlers in the stub cache expect a vector and slot. Since we won't
  // change the IC from any downstream misses, a dummy vector can be used.
  Handle<TypeFeedbackVector> dummy_vector =
      TypeFeedbackVector::DummyVector(isolate);
  int slot = dummy_vector->GetIndex(
      FeedbackVectorSlot(TypeFeedbackVector::kDummyKeyedLoadICSlot));
  __ push(Immediate(Smi::FromInt(slot)));
  __ push(Immediate(dummy_vector));
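  // On a stub cache hit the probe tail-calls the handler; on fall-through
  // (miss) the dummy slot and vector pushed above are popped again below
  // before jumping to the generic miss handler.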

  Code::Flags flags =
      Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::LOAD_IC));
  masm->isolate()->stub_cache()->GenerateProbe(masm, Code::KEYED_LOAD_IC, flags,
                                               receiver, key, ebx, edi);

  __ pop(LoadWithVectorDescriptor::VectorRegister());
  __ pop(LoadDescriptor::SlotRegister());

  // Cache miss.
  GenerateMiss(masm);

  // Do a quick inline probe of the receiver's dictionary, if it
  // exists.
  __ bind(&probe_dictionary);

  __ mov(eax, FieldOperand(receiver, JSObject::kMapOffset));
  __ movzx_b(eax, FieldOperand(eax, Map::kInstanceTypeOffset));
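  // Global objects (and their proxies) keep PropertyCells in their property
  // dictionaries, so the generic dictionary load below does not apply to
  // them; send them to the slow path instead.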
  GenerateGlobalInstanceTypeCheck(masm, eax, &slow);

  GenerateDictionaryLoad(masm, &slow, ebx, key, eax, edi, eax);
  __ IncrementCounter(counters->ic_keyed_load_generic_symbol(), 1);
  __ ret(0);

  __ bind(&index_name);
  __ IndexFromHash(ebx, key);
  // Now jump to the place where smi keys are handled.
  __ jmp(&index_smi);
}


static void KeyedStoreGenerateMegamorphicHelper(
    MacroAssembler* masm, Label* fast_object, Label* fast_double, Label* slow,
    KeyedStoreCheckMap check_map, KeyedStoreIncrementLength increment_length) {
  Label transition_smi_elements;
  Label finish_object_store, non_double_value, transition_double_elements;
  Label fast_double_without_map_check;
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register key = StoreDescriptor::NameRegister();
  Register value = StoreDescriptor::ValueRegister();
  DCHECK(receiver.is(edx));
  DCHECK(key.is(ecx));
  DCHECK(value.is(eax));
  // key is a smi.
  // ebx: FixedArray receiver->elements
  // edi: receiver map
  // Fast case: Do the store, could be either Object or double.
  __ bind(fast_object);
  if (check_map == kCheckMap) {
    __ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
    __ cmp(edi, masm->isolate()->factory()->fixed_array_map());
    __ j(not_equal, fast_double);
  }

  // HOLECHECK: guards "A[i] = V"
  // We have to go to the runtime if the current value is the hole because
  // there may be a callback on the element.
  Label holecheck_passed1;
  __ cmp(FixedArrayElementOperand(ebx, key),
         masm->isolate()->factory()->the_hole_value());
  __ j(not_equal, &holecheck_passed1);
  __ JumpIfDictionaryInPrototypeChain(receiver, ebx, edi, slow);
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));

  __ bind(&holecheck_passed1);

  // Smi stores don't require further checks.
  Label non_smi_value;
  __ JumpIfNotSmi(value, &non_smi_value);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(FieldOperand(receiver, JSArray::kLengthOffset),
           Immediate(Smi::FromInt(1)));
  }
  // It's irrelevant whether array is smi-only or not when writing a smi.
  __ mov(FixedArrayElementOperand(ebx, key), value);
  __ ret(0);

  __ bind(&non_smi_value);
  // Escape to elements kind transition case.
  __ mov(edi, FieldOperand(receiver, HeapObject::kMapOffset));
  __ CheckFastObjectElements(edi, &transition_smi_elements);

  // Fast elements array, store the value to the elements backing store.
  __ bind(&finish_object_store);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(FieldOperand(receiver, JSArray::kLengthOffset),
           Immediate(Smi::FromInt(1)));
  }
  __ mov(FixedArrayElementOperand(ebx, key), value);
  // Update write barrier for the elements array address.
  __ mov(edx, value);  // Preserve the value which is returned.
  __ RecordWriteArray(ebx, edx, key, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  __ ret(0);

  __ bind(fast_double);
  if (check_map == kCheckMap) {
    // Check for fast double array case. If this fails, call through to the
    // runtime.
    __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map());
    __ j(not_equal, slow);
    // If the value is a number, store it as a double in the FastDoubleElements
    // array.
  }

  // HOLECHECK: guards "A[i] double hole?"
  // We have to see if the double version of the hole is present. If so,
  // go to the runtime.
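  // The offset skips the low 32 bits of each double, so the compare below
  // inspects only the upper word, which is enough to identify the hole NaN.
  // With a smi key (index << 1), times_4 scaling addresses index * 8 bytes.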
  uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
  __ cmp(FieldOperand(ebx, key, times_4, offset), Immediate(kHoleNanUpper32));
  __ j(not_equal, &fast_double_without_map_check);
  __ JumpIfDictionaryInPrototypeChain(receiver, ebx, edi, slow);
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));

  __ bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(value, ebx, key, edi,
                                 &transition_double_elements, false);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(FieldOperand(receiver, JSArray::kLengthOffset),
           Immediate(Smi::FromInt(1)));
  }
  __ ret(0);

  __ bind(&transition_smi_elements);
  __ mov(ebx, FieldOperand(receiver, HeapObject::kMapOffset));

  // Transition the array appropriately depending on the value type.
  __ CheckMap(value, masm->isolate()->factory()->heap_number_map(),
              &non_double_value, DONT_DO_SMI_CHECK);

  // Value is a double. Transition FAST_SMI_ELEMENTS -> FAST_DOUBLE_ELEMENTS
  // and complete the store.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_DOUBLE_ELEMENTS, ebx, edi, slow);
  AllocationSiteMode mode =
      AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_DOUBLE_ELEMENTS);
  ElementsTransitionGenerator::GenerateSmiToDouble(masm, receiver, key, value,
                                                   ebx, mode, slow);
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS, FAST_ELEMENTS, ebx,
                                         edi, slow);
  mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
      masm, receiver, key, value, ebx, mode, slow);
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);

  __ bind(&transition_double_elements);
  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
  // HeapNumber. Make sure that the receiver is an Array with FAST_ELEMENTS
  // and transition the array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS.
  __ mov(ebx, FieldOperand(receiver, HeapObject::kMapOffset));
  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS,
                                         ebx, edi, slow);
  mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateDoubleToObject(masm, receiver, key,
                                                      value, ebx, mode, slow);
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);
}


void KeyedStoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                       LanguageMode language_mode) {
  // Return address is on the stack.
  Label slow, fast_object, fast_object_grow;
  Label fast_double, fast_double_grow;
  Label array, extra, check_if_double_array, maybe_name_key, miss;
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register key = StoreDescriptor::NameRegister();
  DCHECK(receiver.is(edx));
  DCHECK(key.is(ecx));

  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, &slow);
  // Get the map from the receiver.
  __ mov(edi, FieldOperand(receiver, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks.
  // The generic stub does not perform map checks.
  __ test_b(FieldOperand(edi, Map::kBitFieldOffset),
            Immediate(1 << Map::kIsAccessCheckNeeded));
  __ j(not_zero, &slow);
  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &maybe_name_key);
  __ CmpInstanceType(edi, JS_ARRAY_TYPE);
  __ j(equal, &array);
  // Check that the object is some kind of JS object EXCEPT JS Value type. In
  // the case that the object is a value-wrapper object, we enter the runtime
  // system to make sure that indexing into string objects works as intended.
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  __ CmpInstanceType(edi, JS_OBJECT_TYPE);
  __ j(below, &slow);

  // Object case: Check key against length in the elements array.
  // Key is a smi.
  // edi: receiver map
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));
  // Check array bounds. Both the key and the length of FixedArray are smis.
  __ cmp(key, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ j(below, &fast_object);

  // Slow case: call runtime.
  __ bind(&slow);
  PropertyICCompiler::GenerateRuntimeSetProperty(masm, language_mode);
  // Never returns to here.

  __ bind(&maybe_name_key);
  __ mov(ebx, FieldOperand(key, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  __ JumpIfNotUniqueNameInstanceType(ebx, &slow);


  // The handlers in the stub cache expect a vector and slot. Since we won't
  // change the IC from any downstream misses, a dummy vector can be used.
  Handle<TypeFeedbackVector> dummy_vector =
      TypeFeedbackVector::DummyVector(masm->isolate());
  int slot = dummy_vector->GetIndex(
      FeedbackVectorSlot(TypeFeedbackVector::kDummyKeyedStoreICSlot));
  __ push(Immediate(Smi::FromInt(slot)));
  __ push(Immediate(dummy_vector));

  Code::Flags flags =
      Code::RemoveHolderFromFlags(Code::ComputeHandlerFlags(Code::STORE_IC));
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, Code::KEYED_STORE_IC, flags, receiver, key, edi, no_reg);

  __ pop(VectorStoreICDescriptor::VectorRegister());
  __ pop(VectorStoreICDescriptor::SlotRegister());

  // Cache miss.
  __ jmp(&miss);

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  __ bind(&extra);
  // receiver is a JSArray.
  // key is a smi.
  // ebx: receiver->elements, a FixedArray
  // edi: receiver map
  // flags: compare (key, receiver.length())
  // do not leave holes in the array:
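  // &extra is reached via j(above_equal) on the length compare in the array
  // case below, so the flags still describe (key, length); not_equal here
  // therefore means key > length, which would create a hole.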
  __ j(not_equal, &slow);
  __ cmp(key, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ j(above_equal, &slow);
  __ mov(edi, FieldOperand(ebx, HeapObject::kMapOffset));
  __ cmp(edi, masm->isolate()->factory()->fixed_array_map());
  __ j(not_equal, &check_if_double_array);
  __ jmp(&fast_object_grow);

  __ bind(&check_if_double_array);
  __ cmp(edi, masm->isolate()->factory()->fixed_double_array_map());
  __ j(not_equal, &slow);
  __ jmp(&fast_double_grow);

  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode (and writable); if it
  // is, the length is always a smi.
  __ bind(&array);
  // receiver is a JSArray.
  // key is a smi.
  // edi: receiver map
  __ mov(ebx, FieldOperand(receiver, JSObject::kElementsOffset));

  // Check the key against the length in the array and fall through to the
  // common store code.
  __ cmp(key, FieldOperand(receiver, JSArray::kLengthOffset));  // Compare smis.
  __ j(above_equal, &extra);

  KeyedStoreGenerateMegamorphicHelper(masm, &fast_object, &fast_double, &slow,
                                      kCheckMap, kDontIncrementLength);
  KeyedStoreGenerateMegamorphicHelper(masm, &fast_object_grow,
                                      &fast_double_grow, &slow, kDontCheckMap,
                                      kIncrementLength);

  __ bind(&miss);
  GenerateMiss(masm);
}

void LoadIC::GenerateNormal(MacroAssembler* masm) {
  Register dictionary = eax;
  DCHECK(!dictionary.is(LoadDescriptor::ReceiverRegister()));
  DCHECK(!dictionary.is(LoadDescriptor::NameRegister()));

  Label slow;

  __ mov(dictionary, FieldOperand(LoadDescriptor::ReceiverRegister(),
                                  JSObject::kPropertiesOffset));
  GenerateDictionaryLoad(masm, &slow, dictionary,
                         LoadDescriptor::NameRegister(), edi, ebx, eax);
  __ ret(0);

  // Dictionary load failed, go slow (but don't miss).
  __ bind(&slow);
  GenerateRuntimeGetProperty(masm);
}


static void LoadIC_PushArgs(MacroAssembler* masm) {
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register name = LoadDescriptor::NameRegister();

  Register slot = LoadDescriptor::SlotRegister();
  Register vector = LoadWithVectorDescriptor::VectorRegister();
  DCHECK(!edi.is(receiver) && !edi.is(name) && !edi.is(slot) &&
         !edi.is(vector));

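  // Temporarily pop the return address into edi so the four arguments can be
  // pushed beneath it, then push it back on top for the tail call.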
  __ pop(edi);
  __ push(receiver);
  __ push(name);
  __ push(slot);
  __ push(vector);
  __ push(edi);
}


void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  __ IncrementCounter(masm->isolate()->counters()->ic_load_miss(), 1);
  LoadIC_PushArgs(masm);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kLoadIC_Miss);
}

void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // Return address is on the stack.
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register name = LoadDescriptor::NameRegister();
  DCHECK(!ebx.is(receiver) && !ebx.is(name));

  __ pop(ebx);
  __ push(receiver);
  __ push(name);
  __ push(ebx);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kGetProperty);
}


void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  __ IncrementCounter(masm->isolate()->counters()->ic_keyed_load_miss(), 1);

  LoadIC_PushArgs(masm);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kKeyedLoadIC_Miss);
}

void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // Return address is on the stack.
  Register receiver = LoadDescriptor::ReceiverRegister();
  Register name = LoadDescriptor::NameRegister();
  DCHECK(!ebx.is(receiver) && !ebx.is(name));

  __ pop(ebx);
  __ push(receiver);
  __ push(name);
  __ push(ebx);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kKeyedGetProperty);
}

static void StoreIC_PushArgs(MacroAssembler* masm) {
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register name = StoreDescriptor::NameRegister();
  Register value = StoreDescriptor::ValueRegister();
  Register slot = VectorStoreICDescriptor::SlotRegister();
  Register vector = VectorStoreICDescriptor::VectorRegister();

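  // Swap the receiver with the return address on the stack; the receiver
  // register then carries the return address until it is pushed back last.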
  __ xchg(receiver, Operand(esp, 0));
  __ push(name);
  __ push(value);
  __ push(slot);
  __ push(vector);
  __ push(receiver);  // Contains the return address.
}


void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kStoreIC_Miss);
}


void StoreIC::GenerateNormal(MacroAssembler* masm) {
  Label restore_miss;
  Register receiver = StoreDescriptor::ReceiverRegister();
  Register name = StoreDescriptor::NameRegister();
  Register value = StoreDescriptor::ValueRegister();
  Register vector = VectorStoreICDescriptor::VectorRegister();
  Register slot = VectorStoreICDescriptor::SlotRegister();

  // A lot of registers are needed for storing to slow case
  // objects. Push and restore receiver but rely on
  // GenerateDictionaryStore preserving the value and name.
  __ push(receiver);
  __ push(vector);
  __ push(slot);

  Register dictionary = ebx;
  __ mov(dictionary, FieldOperand(receiver, JSObject::kPropertiesOffset));
  GenerateDictionaryStore(masm, &restore_miss, dictionary, name, value,
                          receiver, edi);
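  // The dictionary store succeeded: drop the receiver, vector and slot pushed
  // above before returning.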
  __ Drop(3);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->ic_store_normal_hit(), 1);
  __ ret(0);

  __ bind(&restore_miss);
  __ pop(slot);
  __ pop(vector);
  __ pop(receiver);
  __ IncrementCounter(counters->ic_store_normal_miss(), 1);
  GenerateMiss(masm);
}


void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
  // Return address is on the stack.
  StoreIC_PushArgs(masm);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kKeyedStoreIC_Miss);
}


#undef __


Condition CompareIC::ComputeCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return equal;
    case Token::LT:
      return less;
    case Token::GT:
      return greater;
    case Token::LTE:
      return less_equal;
    case Token::GTE:
      return greater_equal;
    default:
      UNREACHABLE();
      return no_condition;
  }
}


bool CompareIC::HasInlinedSmiCode(Address address) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a test al, nothing
  // was inlined.
  return *test_instruction_address == Assembler::kTestAlByte;
}


void PatchInlinedSmiCode(Isolate* isolate, Address address,
                         InlinedSmiCheck check) {
  // The address of the instruction following the call.
  Address test_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a test al, nothing
  // was inlined.
  if (*test_instruction_address != Assembler::kTestAlByte) {
    DCHECK(*test_instruction_address == Assembler::kNopByte);
    return;
  }

  Address delta_address = test_instruction_address + 1;
  // The delta to the start of the map check instruction and the
  // condition code used at the patched jump.
  uint8_t delta = *reinterpret_cast<uint8_t*>(delta_address);
  if (FLAG_trace_ic) {
    PrintF("[  patching ic at %p, test=%p, delta=%d\n",
           static_cast<void*>(address),
           static_cast<void*>(test_instruction_address), delta);
  }

  // Patch with a short conditional jump. Enabling means switching from a short
  // jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the
  // reverse operation of that.
  Address jmp_address = test_instruction_address - delta;
  DCHECK((check == ENABLE_INLINED_SMI_CHECK)
             ? (*jmp_address == Assembler::kJncShortOpcode ||
                *jmp_address == Assembler::kJcShortOpcode)
             : (*jmp_address == Assembler::kJnzShortOpcode ||
                *jmp_address == Assembler::kJzShortOpcode));
  Condition cc =
      (check == ENABLE_INLINED_SMI_CHECK)
          ? (*jmp_address == Assembler::kJncShortOpcode ? not_zero : zero)
          : (*jmp_address == Assembler::kJnzShortOpcode ? not_carry : carry);
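  // Only the opcode byte of the short jump is rewritten; the 8-bit
  // displacement that follows it is left untouched.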
  *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | cc);
}
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X87