// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_ARM

#include "assembler-arm.h"
#include "code-stubs.h"
#include "codegen.h"
#include "disasm.h"
#include "ic-inl.h"
#include "runtime.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


// ----------------------------------------------------------------------------
// Static IC stub generators.
//

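// The '__' shorthand below expands to ACCESS_MASM(masm), so every assembler
// mnemonic in this file emits code through the MacroAssembler passed to the
// generator functions.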
#define __ ACCESS_MASM(masm)


static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
                                            Register type,
                                            Label* global_object) {
  // Register usage:
  //   type: holds the receiver instance type on entry.
  __ cmp(type, Operand(JS_GLOBAL_OBJECT_TYPE));
  __ b(eq, global_object);
  __ cmp(type, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ b(eq, global_object);
  __ cmp(type, Operand(JS_GLOBAL_PROXY_TYPE));
  __ b(eq, global_object);
}


// Generated code falls through if the receiver is a regular non-global
// JS object with slow properties and no interceptors.
static void GenerateNameDictionaryReceiverCheck(MacroAssembler* masm,
                                                Register receiver,
                                                Register elements,
                                                Register t0,
                                                Register t1,
                                                Label* miss) {
  // Register usage:
  //   receiver: holds the receiver on entry and is unchanged.
  //   elements: holds the property dictionary on fall through.
  // Scratch registers:
  //   t0: used to hold the receiver map.
  //   t1: used to hold the receiver instance type, receiver bit mask and
  //       elements map.

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the receiver is a valid JS object.
  __ CompareObjectType(receiver, t0, t1, FIRST_SPEC_OBJECT_TYPE);
  __ b(lt, miss);

  // If this assert fails, we have to check upper bound too.
  STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);

  GenerateGlobalInstanceTypeCheck(masm, t1, miss);

  // Check that the receiver does not require access checks and does not
  // have a named interceptor.
  __ ldrb(t1, FieldMemOperand(t0, Map::kBitFieldOffset));
  __ tst(t1, Operand((1 << Map::kIsAccessCheckNeeded) |
                     (1 << Map::kHasNamedInterceptor)));
  __ b(ne, miss);

  __ ldr(elements, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  __ ldr(t1, FieldMemOperand(elements, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(t1, ip);
  __ b(ne, miss);
}


// Helper function used from LoadIC/CallIC GenerateNormal.
//
// elements: Property dictionary. It is not clobbered if a jump to the miss
//           label is done.
// name:     Property name. It is not clobbered if a jump to the miss label
//           is done.
// result:   Register for the result. It is only updated if a jump to the miss
//           label is not done. Can be the same as elements or name, in which
//           case that register is clobbered when the miss label is not taken.
// The two scratch registers need to be different from elements, name and
// result.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
static void GenerateDictionaryLoad(MacroAssembler* masm,
                                   Label* miss,
                                   Register elements,
                                   Register name,
                                   Register result,
                                   Register scratch1,
                                   Register scratch2) {
  // Main use of the scratch registers.
  // scratch1: Used as temporary and to hold the capacity of the property
  //           dictionary.
  // scratch2: Used as temporary.
  Label done;

  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                   miss,
                                                   &done,
                                                   elements,
                                                   name,
                                                   scratch1,
                                                   scratch2);

  // If probing finds an entry check that the value is a normal
  // property.
  __ bind(&done);  // scratch2 == elements + 4 * index
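  // NameDictionary entries are (key, value, details) triples, so relative to
  // the start of an entry the value lives one pointer in and the details
  // word two pointers in; that is where the offsets below come from.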
  const int kElementsStartOffset = NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  __ ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
  __ tst(scratch1, Operand(PropertyDetails::TypeField::kMask << kSmiTagSize));
  __ b(ne, miss);

  // Get the value at the masked, scaled index and return.
  __ ldr(result,
         FieldMemOperand(scratch2, kElementsStartOffset + 1 * kPointerSize));
}


// Helper function used from StoreIC::GenerateNormal.
//
// elements: Property dictionary. It is not clobbered if a jump to the miss
//           label is done.
// name:     Property name. It is not clobbered if a jump to the miss label
//           is done.
// value:    The value to store.
// The two scratch registers need to be different from elements, name and
// value.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
static void GenerateDictionaryStore(MacroAssembler* masm,
                                    Label* miss,
                                    Register elements,
                                    Register name,
                                    Register value,
                                    Register scratch1,
                                    Register scratch2) {
  // Main use of the scratch registers.
  // scratch1: Used as temporary and to hold the capacity of the property
  //           dictionary.
  // scratch2: Used as temporary.
  Label done;

  // Probe the dictionary.
  NameDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                   miss,
                                                   &done,
                                                   elements,
                                                   name,
                                                   scratch1,
                                                   scratch2);

  // If probing finds an entry in the dictionary check that the value
  // is a normal property that is not read only.
  __ bind(&done);  // scratch2 == elements + 4 * index
  const int kElementsStartOffset = NameDictionary::kHeaderSize +
      NameDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  const int kTypeAndReadOnlyMask =
      (PropertyDetails::TypeField::kMask |
       PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
  __ ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
  __ tst(scratch1, Operand(kTypeAndReadOnlyMask));
  __ b(ne, miss);

  // Store the value at the masked, scaled index and return.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ add(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag));
  __ str(value, MemOperand(scratch2));

  // Update the write barrier. Make sure not to clobber the value.
  __ mov(scratch1, value);
  __ RecordWrite(
      elements, scratch2, scratch1, kLRHasNotBeenSaved, kDontSaveFPRegs);
}


// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS objects.
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver,
                                           Register map,
                                           Register scratch,
                                           int interceptor_bit,
                                           Label* slow) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);
  // Get the map of the receiver.
  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Check bit field.
  __ ldrb(scratch, FieldMemOperand(map, Map::kBitFieldOffset));
  __ tst(scratch,
         Operand((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit)));
  __ b(ne, slow);
  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object,
  // we enter the runtime system to make sure that indexing into string
  // objects works as intended.
  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ ldrb(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ cmp(scratch, Operand(JS_OBJECT_TYPE));
  __ b(lt, slow);
}


// Loads an indexed element from a fast case array.
// If not_fast_array is NULL, doesn't perform the elements map check.
static void GenerateFastArrayLoad(MacroAssembler* masm,
                                  Register receiver,
                                  Register key,
                                  Register elements,
                                  Register scratch1,
                                  Register scratch2,
                                  Register result,
                                  Label* not_fast_array,
                                  Label* out_of_range) {
  // Register use:
  //
  // receiver - holds the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // elements - holds the elements of the receiver on exit.
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'receiver' or 'key'.
  //            Unchanged on bailout so 'receiver' and 'key' can be safely
  //            used by further computation.
  //
  // Scratch registers:
  //
  // scratch1 - used to hold elements map and elements length.
  //            Holds the elements map if not_fast_array branch is taken.
  //
  // scratch2 - used to hold the loaded value.

  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  if (not_fast_array != NULL) {
    // Check that the object is in fast mode and writable.
    __ ldr(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    __ cmp(scratch1, ip);
    __ b(ne, not_fast_array);
  } else {
    __ AssertFastElements(elements);
  }
  // Check that the key (index) is within bounds.
  __ ldr(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ cmp(key, Operand(scratch1));
  __ b(hs, out_of_range);
  // Fast case: Do the load.
  __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
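  // The smi key already carries the index shifted left by one (kSmiTagSize
  // is 1), so PointerAddressFromSmiKey only needs one more left shift to
  // scale it to a byte offset into the pointer-sized element store.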
  __ ldr(scratch2, MemOperand::PointerAddressFromSmiKey(scratch1, key));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch2, ip);
  // In case the loaded value is the_hole we have to consult GetProperty
  // to ensure the prototype chain is searched.
  __ b(eq, out_of_range);
  __ mov(result, scratch2);
}


// Checks whether a key is an array index string or a unique name.
// Falls through if the key is a unique name.
static void GenerateKeyNameCheck(MacroAssembler* masm,
                                 Register key,
                                 Register map,
                                 Register hash,
                                 Label* index_string,
                                 Label* not_unique) {
  // The key is not a smi.
  Label unique;
  // Is it a name?
  __ CompareObjectType(key, map, hash, LAST_UNIQUE_NAME_TYPE);
  __ b(hi, not_unique);
  STATIC_ASSERT(LAST_UNIQUE_NAME_TYPE == FIRST_NONSTRING_TYPE);
  __ b(eq, &unique);

  // Is the string an array index, with cached numeric value?
  __ ldr(hash, FieldMemOperand(key, Name::kHashFieldOffset));
  __ tst(hash, Operand(Name::kContainsCachedArrayIndexMask));
  __ b(eq, index_string);

  // Is the string internalized? We know it's a string, so a single
  // bit test is enough.
  // map: key map
  __ ldrb(hash, FieldMemOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0);
  __ tst(hash, Operand(kIsNotInternalizedMask));
  __ b(ne, not_unique);

  __ bind(&unique);
}


// Defined in ic.cc.
Object* CallIC_Miss(Arguments args);

// The generated code does not accept smi keys.
// The generated code falls through if both probes miss.
void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
                                               int argc,
                                               Code::Kind kind,
                                               ExtraICState extra_state) {
  // ----------- S t a t e -------------
  //  -- r1    : receiver
  //  -- r2    : name
  // -----------------------------------
  Label number, non_number, non_string, boolean, probe, miss;

  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(kind,
                                         MONOMORPHIC,
                                         extra_state,
                                         Code::NORMAL,
                                         argc);
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, flags, r1, r2, r3, r4, r5, r6);

  // If the stub cache probing failed, the receiver might be a value.
  // For value objects, we use the map of the prototype objects for
  // the corresponding JSValue for the cache and that is what we need
  // to probe.
  //
  // Check for number.
  __ JumpIfSmi(r1, &number);
  __ CompareObjectType(r1, r3, r3, HEAP_NUMBER_TYPE);
  __ b(ne, &non_number);
  __ bind(&number);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::NUMBER_FUNCTION_INDEX, r1);
  __ b(&probe);

  // Check for string.
  __ bind(&non_number);
  __ cmp(r3, Operand(FIRST_NONSTRING_TYPE));
  __ b(hs, &non_string);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::STRING_FUNCTION_INDEX, r1);
  __ b(&probe);

  // Check for boolean.
  __ bind(&non_string);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ cmp(r1, ip);
  __ b(eq, &boolean);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &miss);
  __ bind(&boolean);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::BOOLEAN_FUNCTION_INDEX, r1);

  // Probe the stub cache for the value object.
  __ bind(&probe);
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, flags, r1, r2, r3, r4, r5, r6);

  __ bind(&miss);
}


static void GenerateFunctionTailCall(MacroAssembler* masm,
                                     int argc,
                                     Label* miss,
                                     Register scratch) {
  // r1: function

  // Check that the value isn't a smi.
  __ JumpIfSmi(r1, miss);

  // Check that the value is a JSFunction.
  __ CompareObjectType(r1, scratch, scratch, JS_FUNCTION_TYPE);
  __ b(ne, miss);

  // Invoke the function.
  ParameterCount actual(argc);
  __ InvokeFunction(r1, actual, JUMP_FUNCTION,
                    NullCallWrapper(), CALL_AS_METHOD);
}


void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  GenerateNameDictionaryReceiverCheck(masm, r1, r0, r3, r4, &miss);

  // r0: elements
  // Search the dictionary - put result in register r1.
  GenerateDictionaryLoad(masm, &miss, r0, r2, r1, r3, r4);

  GenerateFunctionTailCall(masm, argc, &miss, r4);

  __ bind(&miss);
}


void CallICBase::GenerateMiss(MacroAssembler* masm,
                              int argc,
                              IC::UtilityId id,
                              ExtraICState extra_state) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  if (id == IC::kCallIC_Miss) {
    __ IncrementCounter(isolate->counters()->call_miss(), 1, r3, r4);
  } else {
    __ IncrementCounter(isolate->counters()->keyed_call_miss(), 1, r3, r4);
  }

  // Get the receiver of the function from the stack.
  __ ldr(r3, MemOperand(sp, argc * kPointerSize));

  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push the receiver and the name of the function.
    __ Push(r3, r2);

    // Call the entry.
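    // The C entry stub expects the number of stack arguments in r0 and the
    // external entry point in r1; CEntryStub(1) denotes a one-word result.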
    __ mov(r0, Operand(2));
    __ mov(r1, Operand(ExternalReference(IC_Utility(id), isolate)));

    CEntryStub stub(1);
    __ CallStub(&stub);

    // Move result to r1 and leave the internal frame.
    __ mov(r1, Operand(r0));
  }

  // Check if the receiver is a global object of some sort.
  // This can happen only for regular CallIC but not KeyedCallIC.
  if (id == IC::kCallIC_Miss) {
    Label invoke, global;
    __ ldr(r2, MemOperand(sp, argc * kPointerSize));  // receiver
    __ JumpIfSmi(r2, &invoke);
    __ CompareObjectType(r2, r3, r3, JS_GLOBAL_OBJECT_TYPE);
    __ b(eq, &global);
    __ cmp(r3, Operand(JS_BUILTINS_OBJECT_TYPE));
    __ b(ne, &invoke);

    // Patch the receiver on the stack.
    __ bind(&global);
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
    __ str(r2, MemOperand(sp, argc * kPointerSize));
    __ bind(&invoke);
  }

  // Invoke the function.
  CallKind call_kind = CallICBase::Contextual::decode(extra_state)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  ParameterCount actual(argc);
  __ InvokeFunction(r1,
                    actual,
                    JUMP_FUNCTION,
                    NullCallWrapper(),
                    call_kind);
}


void CallIC::GenerateMegamorphic(MacroAssembler* masm,
                                 int argc,
                                 ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
  GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
  GenerateMiss(masm, argc, extra_ic_state);
}


void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  Label do_call, slow_call, slow_load, slow_reload_receiver;
  Label check_number_dictionary, check_name, lookup_monomorphic_cache;
  Label index_smi, index_name;

  // Check that the key is a smi.
  __ JumpIfNotSmi(r2, &check_name);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, r1, r0, r3, Map::kHasIndexedInterceptor, &slow_call);

  GenerateFastArrayLoad(
      masm, r1, r2, r4, r3, r0, r1, &check_number_dictionary, &slow_load);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1, r0, r3);

  __ bind(&do_call);
  // receiver in r1 is not used after this point.
  // r2: key
  // r1: function
  GenerateFunctionTailCall(masm, argc, &slow_call, r0);

  __ bind(&check_number_dictionary);
  // r2: key
  // r3: elements map
  // r4: elements
  // Check whether the elements object is a number dictionary.
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &slow_load);
  __ SmiUntag(r0, r2);
  // r0: untagged index
  __ LoadFromNumberDictionary(&slow_load, r4, r2, r1, r0, r3, r5);
  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1, r0, r3);
  __ jmp(&do_call);

  __ bind(&slow_load);
  // This branch is taken when calling KeyedCallIC_Miss is neither required
  // nor beneficial.
  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1, r0, r3);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(r2, r1);  // save the key and the receiver
    __ push(r2);  // pass the receiver and the key
    __ CallRuntime(Runtime::kKeyedGetProperty, 2);
    __ pop(r2);  // restore the key
  }
  __ mov(r1, r0);
  __ jmp(&do_call);

  __ bind(&check_name);
  GenerateKeyNameCheck(masm, r2, r0, r3, &index_name, &slow_call);

  // The key is known to be a unique name.
  // If the receiver is a regular JS object with slow properties then do
  // a quick inline probe of the receiver's dictionary.
  // Otherwise do the monomorphic cache probe.
  GenerateKeyedLoadReceiverCheck(
      masm, r1, r0, r3, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);

  __ ldr(r0, FieldMemOperand(r1, JSObject::kPropertiesOffset));
  __ ldr(r3, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &lookup_monomorphic_cache);

  GenerateDictionaryLoad(masm, &slow_load, r0, r2, r1, r3, r4);
  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1, r0, r3);
  __ jmp(&do_call);

  __ bind(&lookup_monomorphic_cache);
  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1, r0, r3);
  GenerateMonomorphicCacheProbe(masm,
                                argc,
                                Code::KEYED_CALL_IC,
                                kNoExtraICState);
  // Fall through on miss.

  __ bind(&slow_call);
  // This branch is taken if:
  // - the receiver requires boxing or access check,
  // - the key is neither smi nor a unique name,
  // - the value loaded is not a function,
  // - there is hope that the runtime will create a monomorphic call stub
  //   that will get fetched next time.
  __ IncrementCounter(counters->keyed_call_generic_slow(), 1, r0, r3);
  GenerateMiss(masm, argc);

  __ bind(&index_name);
  __ IndexFromHash(r3, r2);
  // Now jump to the place where smi keys are handled.
  __ jmp(&index_smi);
}


void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Check if the name is really a name.
  Label miss;
  __ JumpIfSmi(r2, &miss);
  __ IsObjectNameType(r2, r0, &miss);

  CallICBase::GenerateNormal(masm, argc);
  __ bind(&miss);
  GenerateMiss(masm, argc);
}


void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  // -----------------------------------

  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(
      Code::HANDLER, MONOMORPHIC, kNoExtraICState,
      Code::NORMAL, Code::LOAD_IC);
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, flags, r0, r2, r3, r4, r5, r6);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}


void LoadIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  // -----------------------------------
  Label miss;

  GenerateNameDictionaryReceiverCheck(masm, r0, r1, r3, r4, &miss);

  // r1: elements
  GenerateDictionaryLoad(masm, &miss, r1, r2, r0, r3, r4);
  __ Ret();

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  GenerateMiss(masm);
}


void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  __ IncrementCounter(isolate->counters()->load_miss(), 1, r3, r4);

  __ mov(r3, r0);
  __ Push(r3, r2);

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kLoadIC_Miss), isolate);
  __ TailCallExternalReference(ref, 2, 1);
}


void LoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  // -----------------------------------

  __ mov(r3, r0);
  __ Push(r3, r2);

  __ TailCallRuntime(Runtime::kGetProperty, 2, 1);
}


static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
                                                Register object,
                                                Register key,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                Label* unmapped_case,
                                                Label* slow_case) {
  Heap* heap = masm->isolate()->heap();

  // Check that the receiver is a JSObject. Because of the map check
  // later, we do not need to check for interceptors or whether it
  // requires access checks.
  __ JumpIfSmi(object, slow_case);
  // Check that the object is some kind of JSObject.
  __ CompareObjectType(object, scratch1, scratch2, FIRST_JS_RECEIVER_TYPE);
  __ b(lt, slow_case);

  // Check that the key is a positive smi.
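  // A non-negative smi has both the sign bit (bit 31) and the smi tag
  // (bit 0) clear, so a single test against 0x80000001 covers both checks.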
  __ tst(key, Operand(0x80000001));
  __ b(ne, slow_case);

  // Load the elements into scratch1 and check its map.
  Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
  __ ldr(scratch1, FieldMemOperand(object, JSObject::kElementsOffset));
  __ CheckMap(scratch1, scratch2, arguments_map, slow_case, DONT_DO_SMI_CHECK);

  // Check if element is in the range of mapped arguments. If not, jump
  // to the unmapped lookup with the parameter map in scratch1.
  __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
  __ sub(scratch2, scratch2, Operand(Smi::FromInt(2)));
  __ cmp(key, Operand(scratch2));
  __ b(cs, unmapped_case);

  // Load element index and check whether it is the hole.
  const int kOffset =
      FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag;

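  // The key is a smi, i.e. the index shifted left by one, so multiplying by
  // kPointerSize / 2 scales it to a byte offset of index * kPointerSize.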
  __ mov(scratch3, Operand(kPointerSize >> 1));
  __ mul(scratch3, key, scratch3);
  __ add(scratch3, scratch3, Operand(kOffset));

  __ ldr(scratch2, MemOperand(scratch1, scratch3));
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch2, scratch3);
  __ b(eq, unmapped_case);

  // Load value from context and return it. We can reuse scratch1 because
  // we do not jump to the unmapped lookup (which requires the parameter
  // map in scratch1).
  __ ldr(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
  __ mov(scratch3, Operand(kPointerSize >> 1));
  __ mul(scratch3, scratch2, scratch3);
  __ add(scratch3, scratch3, Operand(Context::kHeaderSize - kHeapObjectTag));
  return MemOperand(scratch1, scratch3);
}


static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
                                                  Register key,
                                                  Register parameter_map,
                                                  Register scratch,
                                                  Label* slow_case) {
  // Element is in arguments backing store, which is referenced by the
  // second element of the parameter_map. The parameter_map register
  // must be loaded with the parameter map of the arguments object and is
  // overwritten.
  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
  Register backing_store = parameter_map;
  __ ldr(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset));
  Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
  __ CheckMap(backing_store, scratch, fixed_array_map, slow_case,
              DONT_DO_SMI_CHECK);
  __ ldr(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset));
  __ cmp(key, Operand(scratch));
  __ b(cs, slow_case);
  __ mov(scratch, Operand(kPointerSize >> 1));
  __ mul(scratch, key, scratch);
  __ add(scratch,
         scratch,
         Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  return MemOperand(backing_store, scratch);
}


void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  Label slow, notin;
  MemOperand mapped_location =
      GenerateMappedArgumentsLookup(masm, r1, r0, r2, r3, r4, &notin, &slow);
  __ ldr(r0, mapped_location);
  __ Ret();
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in r2.
  MemOperand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, r0, r2, r3, &slow);
  __ ldr(r2, unmapped_location);
  __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
  __ cmp(r2, r3);
  __ b(eq, &slow);
  __ mov(r0, r2);
  __ Ret();
  __ bind(&slow);
  GenerateMiss(masm);
}


void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------
  Label slow, notin;
  MemOperand mapped_location =
      GenerateMappedArgumentsLookup(masm, r2, r1, r3, r4, r5, &notin, &slow);
  __ str(r0, mapped_location);
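  // Recompute the absolute slot address (base plus offset of the MemOperand
  // above) and copy the value, because RecordWrite clobbers the address and
  // value registers it is handed.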
  __ add(r6, r3, r5);
  __ mov(r9, r0);
  __ RecordWrite(r3, r6, r9, kLRHasNotBeenSaved, kDontSaveFPRegs);
  __ Ret();
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in r3.
  MemOperand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, r1, r3, r4, &slow);
  __ str(r0, unmapped_location);
  __ add(r6, r3, r4);
  __ mov(r9, r0);
  __ RecordWrite(r3, r6, r9, kLRHasNotBeenSaved, kDontSaveFPRegs);
  __ Ret();
  __ bind(&slow);
  GenerateMiss(masm);
}


void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
                                             int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label slow, notin;
  // Load receiver.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
  MemOperand mapped_location =
      GenerateMappedArgumentsLookup(masm, r1, r2, r3, r4, r5, &notin, &slow);
  __ ldr(r1, mapped_location);
  GenerateFunctionTailCall(masm, argc, &slow, r3);
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in r3.
  MemOperand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, r2, r3, r4, &slow);
  __ ldr(r1, unmapped_location);
  __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
  __ cmp(r1, r3);
  __ b(eq, &slow);
  GenerateFunctionTailCall(masm, argc, &slow, r3);
  __ bind(&slow);
  GenerateMiss(masm, argc);
}


void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, r3, r4);

  __ Push(r1, r0);

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);

  __ TailCallExternalReference(ref, 2, 1);
}


void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------

  __ Push(r1, r0);

  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
}


void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  Label slow, check_name, index_smi, index_name, property_array_property;
  Label probe_dictionary, check_number_dictionary;

  Register key = r0;
  Register receiver = r1;

  Isolate* isolate = masm->isolate();

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &check_name);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, receiver, r2, r3, Map::kHasIndexedInterceptor, &slow);

  // Check the receiver's map to see if it has fast elements.
  __ CheckFastElements(r2, r3, &check_number_dictionary);

  GenerateFastArrayLoad(
      masm, receiver, key, r4, r3, r2, r0, NULL, &slow);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, r2, r3);
  __ Ret();

  __ bind(&check_number_dictionary);
  __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ ldr(r3, FieldMemOperand(r4, JSObject::kMapOffset));

  // Check whether the elements object is a number dictionary.
  // r0: key
  // r3: elements map
  // r4: elements
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &slow);
  __ SmiUntag(r2, r0);
  __ LoadFromNumberDictionary(&slow, r4, r0, r0, r2, r3, r5);
  __ Ret();

  // Slow case, key and receiver still in r0 and r1.
  __ bind(&slow);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(),
                      1, r2, r3);
  GenerateRuntimeGetProperty(masm);

  __ bind(&check_name);
  GenerateKeyNameCheck(masm, key, r2, r3, &index_name, &slow);

  GenerateKeyedLoadReceiverCheck(
      masm, receiver, r2, r3, Map::kHasNamedInterceptor, &slow);

  // If the receiver is a fast-case object, check the keyed lookup
  // cache. Otherwise probe the dictionary.
  __ ldr(r3, FieldMemOperand(r1, JSObject::kPropertiesOffset));
  __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r4, ip);
  __ b(eq, &probe_dictionary);

  // Load the map of the receiver, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the name hash.
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ mov(r3, Operand(r2, ASR, KeyedLookupCache::kMapHashShift));
  __ ldr(r4, FieldMemOperand(r0, Name::kHashFieldOffset));
  __ eor(r3, r3, Operand(r4, ASR, Name::kHashShift));
  int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
  __ And(r3, r3, Operand(mask));

  // Load the key (consisting of map and unique name) from the cache and
  // check for match.
  Label load_in_object_property;
  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
  Label hit_on_nth_entry[kEntriesPerBucket];
  ExternalReference cache_keys =
      ExternalReference::keyed_lookup_cache_keys(isolate);

  __ mov(r4, Operand(cache_keys));
  __ add(r4, r4, Operand(r3, LSL, kPointerSizeLog2 + 1));
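  // Each cache entry is a (map, name) pair, i.e. two pointers wide, which is
  // why the bucket index is scaled by an extra factor of two here.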

  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
    Label try_next_entry;
    // Load map and move r4 to next entry.
    __ ldr(r5, MemOperand(r4, kPointerSize * 2, PostIndex));
    __ cmp(r2, r5);
    __ b(ne, &try_next_entry);
    __ ldr(r5, MemOperand(r4, -kPointerSize));  // Load name
    __ cmp(r0, r5);
    __ b(eq, &hit_on_nth_entry[i]);
    __ bind(&try_next_entry);
  }

  // Last entry: Load map and move r4 to name.
  __ ldr(r5, MemOperand(r4, kPointerSize, PostIndex));
  __ cmp(r2, r5);
  __ b(ne, &slow);
  __ ldr(r5, MemOperand(r4));
  __ cmp(r0, r5);
  __ b(ne, &slow);

  // Get field offset.
  // r0     : key
  // r1     : receiver
  // r2     : receiver's map
  // r3     : lookup cache index
  ExternalReference cache_field_offsets =
      ExternalReference::keyed_lookup_cache_field_offsets(isolate);

  // Hit on nth entry.
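  // After subtracting the number of in-object properties from the cached
  // field offset, a negative result selects an in-object slot (indexed from
  // the end of the object via the instance size below), while a non-negative
  // result is an index into the out-of-line property array.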
  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
    __ bind(&hit_on_nth_entry[i]);
    __ mov(r4, Operand(cache_field_offsets));
    if (i != 0) {
      __ add(r3, r3, Operand(i));
    }
    __ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2));
    __ ldrb(r6, FieldMemOperand(r2, Map::kInObjectPropertiesOffset));
    __ sub(r5, r5, r6, SetCC);
    __ b(ge, &property_array_property);
    if (i != 0) {
      __ jmp(&load_in_object_property);
    }
  }

  // Load in-object property.
  __ bind(&load_in_object_property);
  __ ldrb(r6, FieldMemOperand(r2, Map::kInstanceSizeOffset));
  __ add(r6, r6, r5);  // Index from start of object.
  __ sub(r1, r1, Operand(kHeapObjectTag));  // Remove the heap tag.
  __ ldr(r0, MemOperand(r1, r6, LSL, kPointerSizeLog2));
  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
                      1, r2, r3);
  __ Ret();

  // Load property array property.
  __ bind(&property_array_property);
  __ ldr(r1, FieldMemOperand(r1, JSObject::kPropertiesOffset));
  __ add(r1, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r0, MemOperand(r1, r5, LSL, kPointerSizeLog2));
  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
                      1, r2, r3);
  __ Ret();

  // Do a quick inline probe of the receiver's dictionary, if it
  // exists.
  __ bind(&probe_dictionary);
  // r1: receiver
  // r0: key
  // r3: elements
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  GenerateGlobalInstanceTypeCheck(masm, r2, &slow);
  // Load the property to r0.
  GenerateDictionaryLoad(masm, &slow, r3, r0, r0, r2, r4);
  __ IncrementCounter(
      isolate->counters()->keyed_load_generic_symbol(), 1, r2, r3);
  __ Ret();

  __ bind(&index_name);
  __ IndexFromHash(r3, key);
  // Now jump to the place where smi keys are handled.
  __ jmp(&index_smi);
}


void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key (index)
  //  -- r1     : receiver
  // -----------------------------------
  Label miss;

  Register receiver = r1;
  Register index = r0;
  Register scratch = r3;
  Register result = r0;

  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX);
  char_at_generator.GenerateFast(masm);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);

  __ bind(&miss);
  GenerateMiss(masm);
}


void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- r0     : key
  //  -- r1     : receiver
  // -----------------------------------
  Label slow;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(r1, &slow);

  // Check that the key is an array index, that is, a non-negative smi.
  __ NonNegativeSmiTst(r0);
  __ b(ne, &slow);

  // Get the map of the receiver.
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));

  // Check that it has an indexed interceptor and that access checks
  // are not enabled for this object.
  __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ and_(r3, r3, Operand(kSlowCaseBitFieldMask));
  __ cmp(r3, Operand(1 << Map::kHasIndexedInterceptor));
  __ b(ne, &slow);

  // Everything is fine, call runtime.
  __ Push(r1, r0);  // Receiver, key.

  // Perform tail call to the entry.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
                        masm->isolate()),
      2,
      1);

  __ bind(&slow);
  GenerateMiss(masm);
}


void KeyedStoreIC::GenerateMiss(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}


void StoreIC::GenerateSlow(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r2     : key
  //  -- r1     : receiver
  //  -- lr     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r1, r2, r0);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}


void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}


void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                              StrictModeFlag strict_mode) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  __ mov(r1, Operand(Smi::FromInt(NONE)));          // PropertyAttributes
  __ mov(r0, Operand(Smi::FromInt(strict_mode)));   // Strict mode.
  __ Push(r1, r0);

  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}


static void KeyedStoreGenerateGenericHelper(
    MacroAssembler* masm,
    Label* fast_object,
    Label* fast_double,
    Label* slow,
    KeyedStoreCheckMap check_map,
    KeyedStoreIncrementLength increment_length,
    Register value,
    Register key,
    Register receiver,
    Register receiver_map,
    Register elements_map,
    Register elements) {
  Label transition_smi_elements;
  Label finish_object_store, non_double_value, transition_double_elements;
  Label fast_double_without_map_check;

  // Fast case: Do the store, could be either Object or double.
  __ bind(fast_object);
  Register scratch_value = r4;
  Register address = r5;
  if (check_map == kCheckMap) {
    __ ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
    __ cmp(elements_map,
           Operand(masm->isolate()->factory()->fixed_array_map()));
    __ b(ne, fast_double);
  }

  // HOLECHECK: guards "A[i] = V"
  // We have to go to the runtime if the current value is the hole because
  // there may be a callback on the element.
  Label holecheck_passed1;
  __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(scratch_value,
         MemOperand::PointerAddressFromSmiKey(address, key, PreIndex));
  __ cmp(scratch_value, Operand(masm->isolate()->factory()->the_hole_value()));
  __ b(ne, &holecheck_passed1);
  __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, scratch_value,
                                      slow);

  __ bind(&holecheck_passed1);

  // Smi stores don't require further checks.
  Label non_smi_value;
  __ JumpIfNotSmi(value, &non_smi_value);

  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(scratch_value, key, Operand(Smi::FromInt(1)));
    __ str(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
  }
  // It's irrelevant whether the array is smi-only or not when writing a smi.
  __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ str(value, MemOperand::PointerAddressFromSmiKey(address, key));
  __ Ret();

  __ bind(&non_smi_value);
  // Escape to elements kind transition case.
  __ CheckFastObjectElements(receiver_map, scratch_value,
                             &transition_smi_elements);

  // Fast elements array, store the value to the elements backing store.
  __ bind(&finish_object_store);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(scratch_value, key, Operand(Smi::FromInt(1)));
    __ str(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
  }
  __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(address, address, Operand::PointerOffsetFromSmiKey(key));
  __ str(value, MemOperand(address));
  // Update write barrier for the elements array address.
  __ mov(scratch_value, value);  // Preserve the value which is returned.
  __ RecordWrite(elements,
                 address,
                 scratch_value,
                 kLRHasNotBeenSaved,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 OMIT_SMI_CHECK);
  __ Ret();

  __ bind(fast_double);
  if (check_map == kCheckMap) {
    // Check for fast double array case. If this fails, call through to the
    // runtime.
    __ CompareRoot(elements_map, Heap::kFixedDoubleArrayMapRootIndex);
    __ b(ne, slow);
  }

  // HOLECHECK: guards "A[i] double hole?"
  // We have to see if the double version of the hole is present. If so,
  // go to the runtime.
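  // Doubles are stored little-endian here, so the word holding the sign,
  // exponent and upper mantissa bits sits sizeof(kHoleNanLower32) bytes into
  // each element; comparing it against kHoleNanUpper32 suffices to detect
  // the hole NaN.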
  __ add(address, elements,
         Operand((FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32))
                 - kHeapObjectTag));
  __ ldr(scratch_value,
         MemOperand(address, key, LSL, kPointerSizeLog2, PreIndex));
  __ cmp(scratch_value, Operand(kHoleNanUpper32));
  __ b(ne, &fast_double_without_map_check);
  __ JumpIfDictionaryInPrototypeChain(receiver, elements_map, scratch_value,
                                      slow);

  __ bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(value, key, elements, r3, d0,
                                 &transition_double_elements);
  if (increment_length == kIncrementLength) {
    // Add 1 to receiver->length.
    __ add(scratch_value, key, Operand(Smi::FromInt(1)));
    __ str(scratch_value, FieldMemOperand(receiver, JSArray::kLengthOffset));
  }
  __ Ret();

  __ bind(&transition_smi_elements);
  // Transition the array appropriately depending on the value type.
  __ ldr(r4, FieldMemOperand(value, HeapObject::kMapOffset));
  __ CompareRoot(r4, Heap::kHeapNumberMapRootIndex);
  __ b(ne, &non_double_value);

  // Value is a double. Transition FAST_SMI_ELEMENTS ->
  // FAST_DOUBLE_ELEMENTS and complete the store.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_DOUBLE_ELEMENTS,
                                         receiver_map,
                                         r4,
                                         slow);
  ASSERT(receiver_map.is(r3));  // Transition code expects map in r3
  AllocationSiteMode mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS,
                                                    FAST_DOUBLE_ELEMENTS);
  ElementsTransitionGenerator::GenerateSmiToDouble(masm, mode, slow);
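  // The transition may have allocated a new backing store, so reload the
  // elements pointer.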
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // Value is not a double, FAST_SMI_ELEMENTS -> FAST_ELEMENTS
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                         FAST_ELEMENTS,
                                         receiver_map,
                                         r4,
                                         slow);
  ASSERT(receiver_map.is(r3));  // Transition code expects map in r3
  mode = AllocationSite::GetMode(FAST_SMI_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateMapChangeElementsTransition(masm, mode,
                                                                   slow);
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);

  __ bind(&transition_double_elements);
  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
  // HeapNumber. Make sure that the receiver is an Array with FAST_ELEMENTS
  // and transition the array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS.
  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
                                         FAST_ELEMENTS,
                                         receiver_map,
                                         r4,
                                         slow);
  ASSERT(receiver_map.is(r3));  // Transition code expects map in r3
  mode = AllocationSite::GetMode(FAST_DOUBLE_ELEMENTS, FAST_ELEMENTS);
  ElementsTransitionGenerator::GenerateDoubleToObject(masm, mode, slow);
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);
}


void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
                                   StrictModeFlag strict_mode) {
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- r1     : key
  //  -- r2     : receiver
  //  -- lr     : return address
  // -----------------------------------
  Label slow, fast_object, fast_object_grow;
  Label fast_double, fast_double_grow;
  Label array, extra, check_if_double_array;

  // Register usage.
  Register value = r0;
  Register key = r1;
  Register receiver = r2;
  Register receiver_map = r3;
  Register elements_map = r6;
  Register elements = r9;  // Elements array of the receiver.
  // r4 and r5 are used as general scratch registers.

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &slow);
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, &slow);
  // Get the map of the object.
  __ ldr(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks and is not observed.
  // The generic stub does not perform map checks or handle observed objects.
  __ ldrb(ip, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
  __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved));
  __ b(ne, &slow);
  // Check if the object is a JS array or not.
  __ ldrb(r4, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));
  __ cmp(r4, Operand(JS_ARRAY_TYPE));
  __ b(eq, &array);
  // Check that the object is some kind of JSObject.
  __ cmp(r4, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, &slow);

  // Object case: Check key against length in the elements array.
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  // Check array bounds. Both the key and the length of FixedArray are smis.
  __ ldr(ip, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ cmp(key, Operand(ip));
  __ b(lo, &fast_object);

  // Slow case: jump to the runtime.
  __ bind(&slow);
  // Entry registers are intact.
  // r0: value.
  // r1: key.
  // r2: receiver.
  GenerateRuntimeSetProperty(masm, strict_mode);

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  __ bind(&extra);
  // Condition code from comparing key and array length is still available.
  __ b(ne, &slow);  // Only support writing to array[array.length].
  // Check for room in the elements backing store.
  // Both the key and the length of FixedArray are smis.
  __ ldr(ip, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ cmp(key, Operand(ip));
  __ b(hs, &slow);
  __ ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
  __ cmp(elements_map,
         Operand(masm->isolate()->factory()->fixed_array_map()));
  __ b(ne, &check_if_double_array);
  __ jmp(&fast_object_grow);

  __ bind(&check_if_double_array);
  __ cmp(elements_map,
         Operand(masm->isolate()->factory()->fixed_double_array_map()));
  __ b(ne, &slow);
  __ jmp(&fast_double_grow);

  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode (and writable); if it
  // is, the length is always a smi.
  __ bind(&array);
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));

  // Check the key against the length in the array.
  __ ldr(ip, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ cmp(key, Operand(ip));
  __ b(hs, &extra);

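  // The fast-case code is emitted twice: once for in-bounds stores, which
  // must still check the elements map, and once for the grow-by-one case,
  // where the elements map was checked above and the length is incremented.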
  KeyedStoreGenerateGenericHelper(masm, &fast_object, &fast_double,
                                  &slow, kCheckMap, kDontIncrementLength,
                                  value, key, receiver, receiver_map,
                                  elements_map, elements);
  KeyedStoreGenerateGenericHelper(masm, &fast_object_grow, &fast_double_grow,
                                  &slow, kDontCheckMap, kIncrementLength,
                                  value, key, receiver, receiver_map,
                                  elements_map, elements);
}


void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                  ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Probe the stub cache; the receiver and name are already in registers.
  Code::Flags flags = Code::ComputeFlags(
      Code::HANDLER, MONOMORPHIC, extra_ic_state,
      Code::NORMAL, Code::STORE_IC);

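  // On a hit the probe jumps to the cached handler; on a miss it falls
  // through to the miss handling below.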
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, flags, r1, r2, r3, r4, r5, r6);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}


void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  __ Push(r1, r2, r0);

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}


void StoreIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  GenerateNameDictionaryReceiverCheck(masm, r1, r3, r4, r5, &miss);

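  // On fall-through r3 holds the receiver's property dictionary; store the
  // value in r0 under the name in r2.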
  GenerateDictionaryStore(masm, &miss, r3, r2, r0, r4, r5);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(),
                      1, r4, r5);
  __ Ret();

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1, r4, r5);
  GenerateMiss(masm);
}


void StoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                         StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  __ Push(r1, r2, r0);

  __ mov(r1, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
  __ mov(r0, Operand(Smi::FromInt(strict_mode)));
  __ Push(r1, r0);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}


#undef __


Condition CompareIC::ComputeCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return eq;
    case Token::LT:
      return lt;
    case Token::GT:
      return gt;
    case Token::LTE:
      return le;
    case Token::GTE:
      return ge;
    default:
      UNREACHABLE();
      return kNoCondition;
  }
}


bool CompareIC::HasInlinedSmiCode(Address address) {
  // The address of the instruction following the call.
  Address cmp_instruction_address =
      Assembler::return_address_from_call_start(address);

  // If the instruction following the call is not a cmp rx, #yyy, nothing
  // was inlined.
  Instr instr = Assembler::instr_at(cmp_instruction_address);
  return Assembler::IsCmpImmediate(instr);
}


void PatchInlinedSmiCode(Address address, InlinedSmiCheck check) {
  Address cmp_instruction_address =
      Assembler::return_address_from_call_start(address);

  // If the instruction following the call is not a cmp rx, #yyy, nothing
  // was inlined.
  Instr instr = Assembler::instr_at(cmp_instruction_address);
  if (!Assembler::IsCmpImmediate(instr)) {
    return;
  }

  // The delta to the start of the map check instruction and the
  // condition code used at the patched jump.
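  // The inlining site encodes the delta split between the cmp's immediate
  // field and its register code; reassemble it here.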
  int delta = Assembler::GetCmpImmediateRawImmediate(instr);
  delta +=
      Assembler::GetCmpImmediateRegister(instr).code() * kOff12Mask;
  // If the delta is 0 the instruction is cmp r0, #0 which also signals that
  // nothing was inlined.
  if (delta == 0) {
    return;
  }

  if (FLAG_trace_ic) {
    PrintF("[  patching ic at %p, cmp=%p, delta=%d\n",
           address, cmp_instruction_address, delta);
  }

  Address patch_address =
      cmp_instruction_address - delta * Instruction::kInstrSize;
  Instr instr_at_patch = Assembler::instr_at(patch_address);
  Instr branch_instr =
      Assembler::instr_at(patch_address + Instruction::kInstrSize);
  // This is patching a conditional "jump if not smi/jump if smi" site.
  // Enabling by changing from
  //   cmp rx, rx
  //   b eq/ne, <target>
  // to
  //   tst rx, #kSmiTagMask
  //   b ne/eq, <target>
  // and vice-versa to be disabled again.
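  // The patcher rewrites two instructions: the cmp/tst at patch_address and
  // the condition field of the branch that follows it.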
  CodePatcher patcher(patch_address, 2);
  Register reg = Assembler::GetRn(instr_at_patch);
  if (check == ENABLE_INLINED_SMI_CHECK) {
    ASSERT(Assembler::IsCmpRegister(instr_at_patch));
    ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(),
              Assembler::GetRm(instr_at_patch).code());
    patcher.masm()->tst(reg, Operand(kSmiTagMask));
  } else {
    ASSERT(check == DISABLE_INLINED_SMI_CHECK);
    ASSERT(Assembler::IsTstImmediate(instr_at_patch));
    patcher.masm()->cmp(reg, reg);
  }
  ASSERT(Assembler::IsBranch(branch_instr));
  if (Assembler::GetCondition(branch_instr) == eq) {
    patcher.EmitCondition(ne);
  } else {
    ASSERT(Assembler::GetCondition(branch_instr) == ne);
    patcher.EmitCondition(eq);
  }
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM