// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "assembler-arm.h"
#include "code-stubs.h"
#include "codegen.h"
#include "disasm.h"
#include "ic-inl.h"
#include "runtime.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


// ----------------------------------------------------------------------------
// Static IC stub generators.
//

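// The "__" shorthand below expands to ACCESS_MASM(masm), so "__ op(...)"
// emits code through the MacroAssembler passed to each generator.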
#define __ ACCESS_MASM(masm)


static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
                                            Register type,
                                            Label* global_object) {
  // Register usage:
  //   type: holds the receiver instance type on entry.
  __ cmp(type, Operand(JS_GLOBAL_OBJECT_TYPE));
  __ b(eq, global_object);
  __ cmp(type, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ b(eq, global_object);
  __ cmp(type, Operand(JS_GLOBAL_PROXY_TYPE));
  __ b(eq, global_object);
}


// Generated code falls through if the receiver is a regular non-global
// JS object with slow properties and no interceptors.
static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
                                                  Register receiver,
                                                  Register elements,
                                                  Register t0,
                                                  Register t1,
                                                  Label* miss) {
  // Register usage:
  //   receiver: holds the receiver on entry and is unchanged.
  //   elements: holds the property dictionary on fall through.
  // Scratch registers:
  //   t0: used to hold the receiver map.
  //   t1: used to hold the receiver instance type, receiver bit mask and
  //       elements map.

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the receiver is a valid JS object.
  __ CompareObjectType(receiver, t0, t1, FIRST_SPEC_OBJECT_TYPE);
  __ b(lt, miss);

  // If this assert fails, we have to check upper bound too.
  STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);

  GenerateGlobalInstanceTypeCheck(masm, t1, miss);

  // Check that the receiver does not require access checks and has no
  // named interceptor.
  __ ldrb(t1, FieldMemOperand(t0, Map::kBitFieldOffset));
  __ tst(t1, Operand((1 << Map::kIsAccessCheckNeeded) |
                     (1 << Map::kHasNamedInterceptor)));
  __ b(ne, miss);

  __ ldr(elements, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  __ ldr(t1, FieldMemOperand(elements, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(t1, ip);
  __ b(ne, miss);
}


// Helper function used from LoadIC/CallIC GenerateNormal.
//
// elements: Property dictionary. It is not clobbered if a jump to the miss
//           label is done.
// name:     Property name. It is not clobbered if a jump to the miss label is
//           done.
// result:   Register for the result. It is only updated if a jump to the miss
//           label is not done. Can be the same as elements or name, clobbering
//           one of these in the case of not jumping to the miss label.
// The two scratch registers need to be different from elements, name and
// result.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
static void GenerateDictionaryLoad(MacroAssembler* masm,
                                   Label* miss,
                                   Register elements,
                                   Register name,
                                   Register result,
                                   Register scratch1,
                                   Register scratch2) {
  // Main use of the scratch registers.
  //   scratch1: Used as temporary and to hold the capacity of the property
  //             dictionary.
  //   scratch2: Used as temporary.
  Label done;

  // Probe the dictionary.
  StringDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                     miss,
                                                     &done,
                                                     elements,
                                                     name,
                                                     scratch1,
                                                     scratch2);

  // If probing finds an entry, check that the value is a normal
  // property.
  __ bind(&done);  // scratch2 == elements + 4 * index
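  // Each StringDictionary entry occupies three consecutive pointers: key,
  // value and details. scratch2 points at the key slot of the entry the
  // probe found, so the value and details slots follow at +1 and +2
  // pointer sizes (the offsets computed below).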
  const int kElementsStartOffset = StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  __ ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
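  // Property details are stored as a smi. NORMAL properties have
  // TypeField == 0, so any set bit under the (smi-shifted) type mask
  // means the property is not a plain data property and we miss.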
  __ tst(scratch1, Operand(PropertyDetails::TypeField::kMask << kSmiTagSize));
  __ b(ne, miss);

  // Get the value at the masked, scaled index and return.
  __ ldr(result,
         FieldMemOperand(scratch2, kElementsStartOffset + 1 * kPointerSize));
}


// Helper function used from StoreIC::GenerateNormal.
//
// elements: Property dictionary. It is not clobbered if a jump to the miss
//           label is done.
// name:     Property name. It is not clobbered if a jump to the miss label is
//           done.
// value:    The value to store.
// The two scratch registers need to be different from elements, name and
// value.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
static void GenerateDictionaryStore(MacroAssembler* masm,
                                    Label* miss,
                                    Register elements,
                                    Register name,
                                    Register value,
                                    Register scratch1,
                                    Register scratch2) {
  // Main use of the scratch registers.
  //   scratch1: Used as temporary and to hold the capacity of the property
  //             dictionary.
  //   scratch2: Used as temporary.
  Label done;

  // Probe the dictionary.
  StringDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                     miss,
                                                     &done,
                                                     elements,
                                                     name,
                                                     scratch1,
                                                     scratch2);

  // If probing finds an entry in the dictionary, check that the value
  // is a normal property that is not read only.
  __ bind(&done);  // scratch2 == elements + 4 * index
  const int kElementsStartOffset = StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  const int kTypeAndReadOnlyMask =
      (PropertyDetails::TypeField::kMask |
       PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
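  // A non-zero bit under this mask means the property either is not a
  // NORMAL property (TypeField != 0) or is marked READ_ONLY; either way
  // the store must miss.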
  __ ldr(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
  __ tst(scratch1, Operand(kTypeAndReadOnlyMask));
  __ b(ne, miss);

  // Store the value at the masked, scaled index and return.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ add(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag));
  __ str(value, MemOperand(scratch2));

  // Update the write barrier. Make sure not to clobber the value.
  __ mov(scratch1, value);
  __ RecordWrite(
      elements, scratch2, scratch1, kLRHasNotBeenSaved, kDontSaveFPRegs);
}


void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  StubCompiler::GenerateLoadArrayLength(masm, r0, r3, &miss);
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}


void LoadIC::GenerateStringLength(MacroAssembler* masm,
                                  bool support_wrappers) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  StubCompiler::GenerateLoadStringLength(masm, r0, r1, r3, &miss,
                                         support_wrappers);
  // Cache miss: Jump to runtime.
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}


void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  StubCompiler::GenerateLoadFunctionPrototype(masm, r0, r1, r3, &miss);
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}


// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS object.
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver,
                                           Register map,
                                           Register scratch,
                                           int interceptor_bit,
                                           Label* slow) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);
  // Get the map of the receiver.
  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Check bit field.
  __ ldrb(scratch, FieldMemOperand(map, Map::kBitFieldOffset));
  __ tst(scratch,
         Operand((1 << Map::kIsAccessCheckNeeded) | (1 << interceptor_bit)));
  __ b(ne, slow);
  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object,
  // we enter the runtime system to make sure that indexing into string
  // objects works as intended.
  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ ldrb(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ cmp(scratch, Operand(JS_OBJECT_TYPE));
  __ b(lt, slow);
}


// Loads an indexed element from a fast case array.
// If not_fast_array is NULL, doesn't perform the elements map check.
static void GenerateFastArrayLoad(MacroAssembler* masm,
                                  Register receiver,
                                  Register key,
                                  Register elements,
                                  Register scratch1,
                                  Register scratch2,
                                  Register result,
                                  Label* not_fast_array,
                                  Label* out_of_range) {
  // Register use:
  //
  // receiver - holds the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // elements - holds the elements of the receiver on exit.
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'receiver' or 'key'.
  //            Unchanged on bailout so 'receiver' and 'key' can be safely
  //            used by further computation.
  //
  // Scratch registers:
  //
  // scratch1 - used to hold elements map and elements length.
  //            Holds the elements map if not_fast_array branch is taken.
  //
  // scratch2 - used to hold the loaded value.

  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  if (not_fast_array != NULL) {
    // Check that the object is in fast mode and writable.
    __ ldr(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
    __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
    __ cmp(scratch1, ip);
    __ b(ne, not_fast_array);
  } else {
    __ AssertFastElements(elements);
  }
  // Check that the key (index) is within bounds.
  __ ldr(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ cmp(key, Operand(scratch1));
  __ b(hs, out_of_range);
  // Fast case: Do the load.
  __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // The key is a smi.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
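  // Shifting the smi key left by (kPointerSizeLog2 - kSmiTagSize) scales
  // the untagged index by kPointerSize in a single step, since a smi is
  // the integer value already shifted left by kSmiTagSize.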
  __ ldr(scratch2,
         MemOperand(scratch1, key, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch2, ip);
  // In case the loaded value is the_hole we have to consult GetProperty
  // to ensure the prototype chain is searched.
  __ b(eq, out_of_range);
  __ mov(result, scratch2);
}


// Checks whether a key is an array index string or a symbol string.
// Falls through if a key is a symbol.
static void GenerateKeyStringCheck(MacroAssembler* masm,
                                   Register key,
                                   Register map,
                                   Register hash,
                                   Label* index_string,
                                   Label* not_symbol) {
  // The key is not a smi.
  // Is it a string?
  __ CompareObjectType(key, map, hash, FIRST_NONSTRING_TYPE);
  __ b(ge, not_symbol);

  // Is the string an array index, with cached numeric value?
  __ ldr(hash, FieldMemOperand(key, String::kHashFieldOffset));
  __ tst(hash, Operand(String::kContainsCachedArrayIndexMask));
  __ b(eq, index_string);

  // Is the string a symbol?
  // map: key map
  __ ldrb(hash, FieldMemOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kSymbolTag != 0);
  __ tst(hash, Operand(kIsSymbolMask));
  __ b(eq, not_symbol);
}


// Defined in ic.cc.
Object* CallIC_Miss(Arguments args);

// The generated code does not accept smi keys.
// The generated code falls through if both probes miss.
void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
                                               int argc,
                                               Code::Kind kind,
                                               Code::ExtraICState extra_state) {
  // ----------- S t a t e -------------
  //  -- r1    : receiver
  //  -- r2    : name
  // -----------------------------------
  Label number, non_number, non_string, boolean, probe, miss;

  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(kind,
                                         MONOMORPHIC,
                                         extra_state,
                                         NORMAL,
                                         argc);
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, r1, r2, r3, r4, r5, r6);

  // If the stub cache probing failed, the receiver might be a value.
  // For value objects, we use the map of the prototype objects for
  // the corresponding JSValue for the cache and that is what we need
  // to probe.
  //
  // Check for number.
  __ JumpIfSmi(r1, &number);
  __ CompareObjectType(r1, r3, r3, HEAP_NUMBER_TYPE);
  __ b(ne, &non_number);
  __ bind(&number);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::NUMBER_FUNCTION_INDEX, r1);
  __ b(&probe);

  // Check for string.
  __ bind(&non_number);
  __ cmp(r3, Operand(FIRST_NONSTRING_TYPE));
  __ b(hs, &non_string);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::STRING_FUNCTION_INDEX, r1);
  __ b(&probe);

  // Check for boolean.
  __ bind(&non_string);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ cmp(r1, ip);
  __ b(eq, &boolean);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &miss);
  __ bind(&boolean);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::BOOLEAN_FUNCTION_INDEX, r1);

  // Probe the stub cache for the value object.
  __ bind(&probe);
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, r1, r2, r3, r4, r5, r6);

  __ bind(&miss);
}


static void GenerateFunctionTailCall(MacroAssembler* masm,
                                     int argc,
                                     Label* miss,
                                     Register scratch) {
  // r1: function

  // Check that the value isn't a smi.
  __ JumpIfSmi(r1, miss);

  // Check that the value is a JSFunction.
  __ CompareObjectType(r1, scratch, scratch, JS_FUNCTION_TYPE);
  __ b(ne, miss);

  // Invoke the function.
  ParameterCount actual(argc);
  __ InvokeFunction(r1, actual, JUMP_FUNCTION,
                    NullCallWrapper(), CALL_AS_METHOD);
}


void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  GenerateStringDictionaryReceiverCheck(masm, r1, r0, r3, r4, &miss);

  // r0: elements
  // Search the dictionary - put result in register r1.
  GenerateDictionaryLoad(masm, &miss, r0, r2, r1, r3, r4);

  GenerateFunctionTailCall(masm, argc, &miss, r4);

  __ bind(&miss);
}


void CallICBase::GenerateMiss(MacroAssembler* masm,
                              int argc,
                              IC::UtilityId id,
                              Code::ExtraICState extra_state) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  if (id == IC::kCallIC_Miss) {
    __ IncrementCounter(isolate->counters()->call_miss(), 1, r3, r4);
  } else {
    __ IncrementCounter(isolate->counters()->keyed_call_miss(), 1, r3, r4);
  }

  // Get the receiver of the function from the stack.
  __ ldr(r3, MemOperand(sp, argc * kPointerSize));

  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push the receiver and the name of the function.
    __ Push(r3, r2);

    // Call the entry.
    __ mov(r0, Operand(2));
    __ mov(r1, Operand(ExternalReference(IC_Utility(id), isolate)));
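    // r0 holds the argument count for the C entry (the receiver and name
    // pushed above); r1 holds the entry point of the IC miss handler.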

    CEntryStub stub(1);
    __ CallStub(&stub);

    // Move result to r1 and leave the internal frame.
    __ mov(r1, Operand(r0));
  }

  // Check if the receiver is a global object of some sort.
  // This can happen only for regular CallIC but not KeyedCallIC.
  if (id == IC::kCallIC_Miss) {
    Label invoke, global;
    __ ldr(r2, MemOperand(sp, argc * kPointerSize));  // receiver
    __ JumpIfSmi(r2, &invoke);
    __ CompareObjectType(r2, r3, r3, JS_GLOBAL_OBJECT_TYPE);
    __ b(eq, &global);
    __ cmp(r3, Operand(JS_BUILTINS_OBJECT_TYPE));
    __ b(ne, &invoke);

    // Patch the receiver on the stack.
    __ bind(&global);
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
    __ str(r2, MemOperand(sp, argc * kPointerSize));
    __ bind(&invoke);
  }

  // Invoke the function.
  CallKind call_kind = CallICBase::Contextual::decode(extra_state)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  ParameterCount actual(argc);
  __ InvokeFunction(r1,
                    actual,
                    JUMP_FUNCTION,
                    NullCallWrapper(),
                    call_kind);
}


void CallIC::GenerateMegamorphic(MacroAssembler* masm,
                                 int argc,
                                 Code::ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
  GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
  GenerateMiss(masm, argc, extra_ic_state);
}


void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));

  Label do_call, slow_call, slow_load, slow_reload_receiver;
  Label check_number_dictionary, check_string, lookup_monomorphic_cache;
  Label index_smi, index_string;

  // Check that the key is a smi.
  __ JumpIfNotSmi(r2, &check_string);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, r1, r0, r3, Map::kHasIndexedInterceptor, &slow_call);

  GenerateFastArrayLoad(
      masm, r1, r2, r4, r3, r0, r1, &check_number_dictionary, &slow_load);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1, r0, r3);

  __ bind(&do_call);
  // receiver in r1 is not used after this point.
  // r2: key
  // r1: function
  GenerateFunctionTailCall(masm, argc, &slow_call, r0);

  __ bind(&check_number_dictionary);
  // r2: key
  // r3: elements map
  // r4: elements
  // Check whether the elements object is a number dictionary.
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &slow_load);
  __ mov(r0, Operand(r2, ASR, kSmiTagSize));
  // r0: untagged index
  __ LoadFromNumberDictionary(&slow_load, r4, r2, r1, r0, r3, r5);
  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1, r0, r3);
  __ jmp(&do_call);

  __ bind(&slow_load);
  // This branch is taken when calling KeyedCallIC_Miss is neither required
  // nor beneficial.
  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1, r0, r3);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(r2);  // save the key
    __ Push(r1, r2);  // pass the receiver and the key
    __ CallRuntime(Runtime::kKeyedGetProperty, 2);
    __ pop(r2);  // restore the key
  }
  __ mov(r1, r0);
  __ jmp(&do_call);

  __ bind(&check_string);
  GenerateKeyStringCheck(masm, r2, r0, r3, &index_string, &slow_call);

  // The key is known to be a symbol.
  // If the receiver is a regular JS object with slow properties then do
  // a quick inline probe of the receiver's dictionary.
  // Otherwise do the monomorphic cache probe.
  GenerateKeyedLoadReceiverCheck(
      masm, r1, r0, r3, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);

  __ ldr(r0, FieldMemOperand(r1, JSObject::kPropertiesOffset));
  __ ldr(r3, FieldMemOperand(r0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &lookup_monomorphic_cache);

  GenerateDictionaryLoad(masm, &slow_load, r0, r2, r1, r3, r4);
  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1, r0, r3);
  __ jmp(&do_call);

  __ bind(&lookup_monomorphic_cache);
  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1, r0, r3);
  GenerateMonomorphicCacheProbe(masm,
                                argc,
                                Code::KEYED_CALL_IC,
                                Code::kNoExtraICState);
  // Fall through on miss.

  __ bind(&slow_call);
  // This branch is taken if:
  // - the receiver requires boxing or access check,
  // - the key is neither smi nor symbol,
  // - the value loaded is not a function,
  // - there is hope that the runtime will create a monomorphic call stub
  //   that will get fetched next time.
  __ IncrementCounter(counters->keyed_call_generic_slow(), 1, r0, r3);
  GenerateMiss(masm, argc);

  __ bind(&index_string);
  __ IndexFromHash(r3, r2);
  // Now jump to the place where smi keys are handled.
  __ jmp(&index_smi);
}


void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  // Check if the name is a string.
  Label miss;
  __ JumpIfSmi(r2, &miss);
  __ IsObjectJSStringType(r2, r0, &miss);

  CallICBase::GenerateNormal(masm, argc);
  __ bind(&miss);
  GenerateMiss(masm, argc);
}


// Defined in ic.cc.
Object* LoadIC_Miss(Arguments args);

void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------

  // Probe the stub cache.
  Code::Flags flags =
      Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC);
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, r0, r2, r3, r4, r5, r6);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}


void LoadIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  GenerateStringDictionaryReceiverCheck(masm, r0, r1, r3, r4, &miss);

  // r1: elements
  GenerateDictionaryLoad(masm, &miss, r1, r2, r0, r3, r4);
  __ Ret();

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  GenerateMiss(masm);
}


void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- r0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  __ IncrementCounter(isolate->counters()->load_miss(), 1, r3, r4);

  __ mov(r3, r0);
  __ Push(r3, r2);

  // Perform tail call to the entry.
  ExternalReference ref =
      ExternalReference(IC_Utility(kLoadIC_Miss), isolate);
  __ TailCallExternalReference(ref, 2, 1);
}


static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
                                                Register object,
                                                Register key,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                Label* unmapped_case,
                                                Label* slow_case) {
  Heap* heap = masm->isolate()->heap();

  // Check that the receiver is a JSObject. Because of the map check
  // later, we do not need to check for interceptors or whether it
  // requires access checks.
  __ JumpIfSmi(object, slow_case);
  // Check that the object is some kind of JSObject.
  __ CompareObjectType(object, scratch1, scratch2, FIRST_JS_RECEIVER_TYPE);
  __ b(lt, slow_case);

  // Check that the key is a non-negative smi.
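  // The mask 0x80000001 tests the smi tag bit (bit 0) and the sign bit
  // (bit 31) at once, so a single tst verifies both "is a smi" and
  // "is not negative".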
  __ tst(key, Operand(0x80000001));
  __ b(ne, slow_case);

  // Load the elements into scratch1 and check its map.
  Handle<Map> arguments_map(heap->non_strict_arguments_elements_map());
  __ ldr(scratch1, FieldMemOperand(object, JSObject::kElementsOffset));
  __ CheckMap(scratch1, scratch2, arguments_map, slow_case, DONT_DO_SMI_CHECK);

  // Check if element is in the range of mapped arguments. If not, jump
  // to the unmapped lookup with the parameter map in scratch1.
  __ ldr(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
  __ sub(scratch2, scratch2, Operand(Smi::FromInt(2)));
  __ cmp(key, Operand(scratch2));
  __ b(cs, unmapped_case);

  // Load element index and check whether it is the hole.
  const int kOffset =
      FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag;

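  // The key is a smi (the integer shifted left by kSmiTagSize), so
  // multiplying by kPointerSize >> 1 scales the untagged index by
  // kPointerSize. kOffset skips over the first two parameter map slots,
  // which hold the context and the arguments backing store (see
  // GenerateUnmappedArgumentsLookup below, which reads the backing store
  // from the second slot).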
  __ mov(scratch3, Operand(kPointerSize >> 1));
  __ mul(scratch3, key, scratch3);
  __ add(scratch3, scratch3, Operand(kOffset));

  __ ldr(scratch2, MemOperand(scratch1, scratch3));
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch2, scratch3);
  __ b(eq, unmapped_case);

  // Load value from context and return it. We can reuse scratch1 because
  // we do not jump to the unmapped lookup (which requires the parameter
  // map in scratch1).
  __ ldr(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
  __ mov(scratch3, Operand(kPointerSize >> 1));
  __ mul(scratch3, scratch2, scratch3);
  __ add(scratch3, scratch3, Operand(Context::kHeaderSize - kHeapObjectTag));
  return MemOperand(scratch1, scratch3);
}


static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
                                                  Register key,
                                                  Register parameter_map,
                                                  Register scratch,
                                                  Label* slow_case) {
  // Element is in arguments backing store, which is referenced by the
  // second element of the parameter_map. The parameter_map register
  // must be loaded with the parameter map of the arguments object and is
  // overwritten.
  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
  Register backing_store = parameter_map;
  __ ldr(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset));
  Handle<Map> fixed_array_map(masm->isolate()->heap()->fixed_array_map());
  __ CheckMap(backing_store, scratch, fixed_array_map, slow_case,
              DONT_DO_SMI_CHECK);
  __ ldr(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset));
  __ cmp(key, Operand(scratch));
  __ b(cs, slow_case);
  __ mov(scratch, Operand(kPointerSize >> 1));
  __ mul(scratch, key, scratch);
  __ add(scratch,
         scratch,
         Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  return MemOperand(backing_store, scratch);
}


void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label slow, notin;
  MemOperand mapped_location =
      GenerateMappedArgumentsLookup(masm, r1, r0, r2, r3, r4, &notin, &slow);
  __ ldr(r0, mapped_location);
  __ Ret();
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in r2.
  MemOperand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, r0, r2, r3, &slow);
  __ ldr(r2, unmapped_location);
  __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
  __ cmp(r2, r3);
  __ b(eq, &slow);
  __ mov(r0, r2);
  __ Ret();
  __ bind(&slow);
  GenerateMiss(masm, false);
}


void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r0    : value
  //  -- r1    : key
  //  -- r2    : receiver
  //  -- lr    : return address
  // -----------------------------------
  Label slow, notin;
  MemOperand mapped_location =
      GenerateMappedArgumentsLookup(masm, r2, r1, r3, r4, r5, &notin, &slow);
  __ str(r0, mapped_location);
  __ add(r6, r3, r5);
  __ mov(r9, r0);
  __ RecordWrite(r3, r6, r9, kLRHasNotBeenSaved, kDontSaveFPRegs);
  __ Ret();
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in r3.
  MemOperand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, r1, r3, r4, &slow);
  __ str(r0, unmapped_location);
  __ add(r6, r3, r4);
  __ mov(r9, r0);
  __ RecordWrite(r3, r6, r9, kLRHasNotBeenSaved, kDontSaveFPRegs);
  __ Ret();
  __ bind(&slow);
  GenerateMiss(masm, false);
}


void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
                                             int argc) {
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label slow, notin;
  // Load receiver.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
  MemOperand mapped_location =
      GenerateMappedArgumentsLookup(masm, r1, r2, r3, r4, r5, &notin, &slow);
  __ ldr(r1, mapped_location);
  GenerateFunctionTailCall(masm, argc, &slow, r3);
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in r3.
  MemOperand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, r2, r3, r4, &slow);
  __ ldr(r1, unmapped_location);
  __ LoadRoot(r3, Heap::kTheHoleValueRootIndex);
  __ cmp(r1, r3);
  __ b(eq, &slow);
  GenerateFunctionTailCall(masm, argc, &slow, r3);
  __ bind(&slow);
  GenerateMiss(masm, argc);
}


Object* KeyedLoadIC_Miss(Arguments args);


void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
  // ---------- S t a t e --------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, r3, r4);

  __ Push(r1, r0);

  // Perform tail call to the entry.
  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric), isolate)
      : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);

  __ TailCallExternalReference(ref, 2, 1);
}


void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------

  __ Push(r1, r0);

  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
}


void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label slow, check_string, index_smi, index_string, property_array_property;
  Label probe_dictionary, check_number_dictionary;

  Register key = r0;
  Register receiver = r1;

  Isolate* isolate = masm->isolate();

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &check_string);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, receiver, r2, r3, Map::kHasIndexedInterceptor, &slow);

  // Check the receiver's map to see if it has fast elements.
  __ CheckFastElements(r2, r3, &check_number_dictionary);

  GenerateFastArrayLoad(
      masm, receiver, key, r4, r3, r2, r0, NULL, &slow);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, r2, r3);
  __ Ret();

  __ bind(&check_number_dictionary);
  __ ldr(r4, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ ldr(r3, FieldMemOperand(r4, JSObject::kMapOffset));

  // Check whether the elements object is a number dictionary.
  // r0: key
  // r3: elements map
  // r4: elements
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &slow);
  __ mov(r2, Operand(r0, ASR, kSmiTagSize));
  __ LoadFromNumberDictionary(&slow, r4, r0, r0, r2, r3, r5);
  __ Ret();

  // Slow case, key and receiver still in r0 and r1.
  __ bind(&slow);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(),
                      1, r2, r3);
  GenerateRuntimeGetProperty(masm);

  __ bind(&check_string);
  GenerateKeyStringCheck(masm, key, r2, r3, &index_string, &slow);

  GenerateKeyedLoadReceiverCheck(
      masm, receiver, r2, r3, Map::kHasNamedInterceptor, &slow);

  // If the receiver is a fast-case object, check the keyed lookup
  // cache. Otherwise probe the dictionary.
  __ ldr(r3, FieldMemOperand(r1, JSObject::kPropertiesOffset));
  __ ldr(r4, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r4, ip);
  __ b(eq, &probe_dictionary);

  // Load the map of the receiver, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the string hash.
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ mov(r3, Operand(r2, ASR, KeyedLookupCache::kMapHashShift));
  __ ldr(r4, FieldMemOperand(r0, String::kHashFieldOffset));
  __ eor(r3, r3, Operand(r4, ASR, String::kHashShift));
  int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
  __ And(r3, r3, Operand(mask));

  // Load the key (consisting of map and symbol) from the cache and
  // check for match.
  Label load_in_object_property;
  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
  Label hit_on_nth_entry[kEntriesPerBucket];
  ExternalReference cache_keys =
      ExternalReference::keyed_lookup_cache_keys(isolate);

  __ mov(r4, Operand(cache_keys));
  __ add(r4, r4, Operand(r3, LSL, kPointerSizeLog2 + 1));
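  // Each cache entry is a (map, symbol) pair, so the bucket index is
  // scaled by 2 * kPointerSize (the LSL by kPointerSizeLog2 + 1 above).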

  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
    Label try_next_entry;
    // Load map and move r4 to next entry.
    __ ldr(r5, MemOperand(r4, kPointerSize * 2, PostIndex));
    __ cmp(r2, r5);
    __ b(ne, &try_next_entry);
    __ ldr(r5, MemOperand(r4, -kPointerSize));  // Load symbol
    __ cmp(r0, r5);
    __ b(eq, &hit_on_nth_entry[i]);
    __ bind(&try_next_entry);
  }

  // Last entry: Load map and move r4 to symbol.
  __ ldr(r5, MemOperand(r4, kPointerSize, PostIndex));
  __ cmp(r2, r5);
  __ b(ne, &slow);
  __ ldr(r5, MemOperand(r4));
  __ cmp(r0, r5);
  __ b(ne, &slow);

  // Get field offset.
  // r0     : key
  // r1     : receiver
  // r2     : receiver's map
  // r3     : lookup cache index
  ExternalReference cache_field_offsets =
      ExternalReference::keyed_lookup_cache_field_offsets(isolate);

  // Hit on nth entry.
  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
    __ bind(&hit_on_nth_entry[i]);
    __ mov(r4, Operand(cache_field_offsets));
    if (i != 0) {
      __ add(r3, r3, Operand(i));
    }
    __ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2));
    __ ldrb(r6, FieldMemOperand(r2, Map::kInObjectPropertiesOffset));
    __ sub(r5, r5, r6, SetCC);
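    // r5 now holds the cached field offset minus the number of in-object
    // properties: negative means the property lives inside the object,
    // non-negative indexes the out-of-object property array.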
    __ b(ge, &property_array_property);
    if (i != 0) {
      __ jmp(&load_in_object_property);
    }
  }

  // Load in-object property.
  __ bind(&load_in_object_property);
  __ ldrb(r6, FieldMemOperand(r2, Map::kInstanceSizeOffset));
  __ add(r6, r6, r5);  // Index from start of object.
  __ sub(r1, r1, Operand(kHeapObjectTag));  // Remove the heap tag.
  __ ldr(r0, MemOperand(r1, r6, LSL, kPointerSizeLog2));
  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
                      1, r2, r3);
  __ Ret();

  // Load property array property.
  __ bind(&property_array_property);
  __ ldr(r1, FieldMemOperand(r1, JSObject::kPropertiesOffset));
  __ add(r1, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r0, MemOperand(r1, r5, LSL, kPointerSizeLog2));
  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
                      1, r2, r3);
  __ Ret();

  // Do a quick inline probe of the receiver's dictionary, if it
  // exists.
  __ bind(&probe_dictionary);
  // r1: receiver
  // r0: key
  // r3: elements
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  GenerateGlobalInstanceTypeCheck(masm, r2, &slow);
  // Load the property to r0.
  GenerateDictionaryLoad(masm, &slow, r3, r0, r0, r2, r4);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_symbol(),
                      1, r2, r3);
  __ Ret();

  __ bind(&index_string);
  __ IndexFromHash(r3, key);
  // Now jump to the place where smi keys are handled.
  __ jmp(&index_smi);
}


void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr    : return address
  //  -- r0    : key (index)
  //  -- r1    : receiver
  // -----------------------------------
  Label miss;

  Register receiver = r1;
  Register index = r0;
  Register scratch = r3;
  Register result = r0;

  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX);
  char_at_generator.GenerateFast(masm);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);

  __ bind(&miss);
  GenerateMiss(masm, false);
}


void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- lr    : return address
  //  -- r0    : key
  //  -- r1    : receiver
  // -----------------------------------
  Label slow;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(r1, &slow);

  // Check that the key is an array index, that is Uint32.
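  // kSmiTagMask | kSmiSignMask plays the same trick as the 0x80000001 mask
  // in the mapped-arguments lookup above: one tst verifies both the smi tag
  // and that the value is non-negative.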
  __ tst(r0, Operand(kSmiTagMask | kSmiSignMask));
  __ b(ne, &slow);

  // Get the map of the receiver.
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));

  // Check that it has indexed interceptor and access checks
  // are not enabled for this object.
  __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ and_(r3, r3, Operand(kSlowCaseBitFieldMask));
  __ cmp(r3, Operand(1 << Map::kHasIndexedInterceptor));
  __ b(ne, &slow);

  // Everything is fine, call runtime.
  __ Push(r1, r0);  // Receiver, key.

  // Perform tail call to the entry.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(kKeyedLoadPropertyWithInterceptor),
                        masm->isolate()),
      2,
      1);

  __ bind(&slow);
  GenerateMiss(masm, false);
}


void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
  // ---------- S t a t e --------------
  //  -- r0    : value
  //  -- r1    : key
  //  -- r2    : receiver
  //  -- lr    : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
                          masm->isolate())
      : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}


void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r0    : value
  //  -- r1    : key
  //  -- r2    : receiver
  //  -- lr    : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}


void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r2    : receiver
  //  -- r3    : target map
  //  -- lr    : return address
  // -----------------------------------
  // Must return the modified receiver in r0.
  if (!FLAG_trace_elements_transitions) {
    Label fail;
    ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
    __ mov(r0, r2);
    __ Ret();
    __ bind(&fail);
  }

  __ push(r2);
  __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);
}


void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
    MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- r2    : receiver
  //  -- r3    : target map
  //  -- lr    : return address
  // -----------------------------------
  // Must return the modified receiver in r0.
  if (!FLAG_trace_elements_transitions) {
    Label fail;
    ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
    __ mov(r0, r2);
    __ Ret();
    __ bind(&fail);
  }

  __ push(r2);
  __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);
}


void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                              StrictModeFlag strict_mode) {
  // ---------- S t a t e --------------
  //  -- r0    : value
  //  -- r1    : key
  //  -- r2    : receiver
  //  -- lr    : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(r2, r1, r0);

  __ mov(r1, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
  __ mov(r0, Operand(Smi::FromInt(strict_mode)));  // Strict mode.
  __ Push(r1, r0);

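  // Five arguments are now on the stack for Runtime::kSetProperty:
  // receiver, key, value, PropertyAttributes and the strict mode flag.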
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}


void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
                                   StrictModeFlag strict_mode) {
  // ---------- S t a t e --------------
  //  -- r0    : value
  //  -- r1    : key
  //  -- r2    : receiver
  //  -- lr    : return address
  // -----------------------------------
  Label slow, array, extra, check_if_double_array;
  Label fast_object_with_map_check, fast_object_without_map_check;
  Label fast_double_with_map_check, fast_double_without_map_check;
  Label transition_smi_elements, finish_object_store, non_double_value;
  Label transition_double_elements;

  // Register usage.
  Register value = r0;
  Register key = r1;
  Register receiver = r2;
  Register receiver_map = r3;
  Register elements_map = r6;
  Register elements = r7;  // Elements array of the receiver.
  // r4 and r5 are used as general scratch registers.

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &slow);
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, &slow);
  // Get the map of the object.
  __ ldr(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks. We need
  // to do this because this generic stub does not perform map checks.
  __ ldrb(ip, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
  __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &slow);
  // Check if the object is a JS array or not.
  __ ldrb(r4, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));
  __ cmp(r4, Operand(JS_ARRAY_TYPE));
  __ b(eq, &array);
  // Check that the object is some kind of JSObject.
  __ cmp(r4, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, &slow);

  // Object case: Check key against length in the elements array.
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  // Check array bounds. Both the key and the length of FixedArray are smis.
  __ ldr(ip, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ cmp(key, Operand(ip));
  __ b(lo, &fast_object_with_map_check);

  // Slow case, handle jump to runtime.
  __ bind(&slow);
  // Entry registers are intact.
  // r0: value.
  // r1: key.
  // r2: receiver.
  GenerateRuntimeSetProperty(masm, strict_mode);

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  __ bind(&extra);
  // Condition code from comparing key and array length is still available.
  __ b(ne, &slow);  // Only support writing to array[array.length].
  // Check for room in the elements backing store.
  // Both the key and the length of FixedArray are smis.
  __ ldr(ip, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ cmp(key, Operand(ip));
  __ b(hs, &slow);
  __ ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
  __ cmp(elements_map,
         Operand(masm->isolate()->factory()->fixed_array_map()));
  __ b(ne, &check_if_double_array);
  // Calculate key + 1 as smi.
  STATIC_ASSERT(kSmiTag == 0);
  __ add(r4, key, Operand(Smi::FromInt(1)));
  __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ b(&fast_object_without_map_check);

  __ bind(&check_if_double_array);
  __ cmp(elements_map,
         Operand(masm->isolate()->factory()->fixed_double_array_map()));
  __ b(ne, &slow);
  // Add 1 to key, and go to common element store code for doubles.
  STATIC_ASSERT(kSmiTag == 0);
  __ add(r4, key, Operand(Smi::FromInt(1)));
  __ str(r4, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ jmp(&fast_double_without_map_check);

  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode (and writable); if it
  // is, the length is always a smi.
  __ bind(&array);
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));

  // Check the key against the length in the array.
  __ ldr(ip, FieldMemOperand(receiver, JSArray::kLengthOffset));
  __ cmp(key, Operand(ip));
  __ b(hs, &extra);
  // Fall through to fast case.

  __ bind(&fast_object_with_map_check);
  Register scratch_value = r4;
  Register address = r5;
  __ ldr(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
  __ cmp(elements_map,
         Operand(masm->isolate()->factory()->fixed_array_map()));
  __ b(ne, &fast_double_with_map_check);
  __ bind(&fast_object_without_map_check);
  // Smi stores don't require further checks.
  Label non_smi_value;
  __ JumpIfNotSmi(value, &non_smi_value);
  // It's irrelevant whether array is smi-only or not when writing a smi.
  __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(address, address, Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ str(value, MemOperand(address));
  __ Ret();

  __ bind(&non_smi_value);
  // Escape to elements kind transition case.
  __ CheckFastObjectElements(receiver_map, scratch_value,
                             &transition_smi_elements);
  // Fast elements array, store the value to the elements backing store.
  __ bind(&finish_object_store);
  __ add(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(address, address, Operand(key, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ str(value, MemOperand(address));
  // Update write barrier for the elements array address.
  __ mov(scratch_value, value);  // Preserve the value which is returned.
  __ RecordWrite(elements,
                 address,
                 scratch_value,
                 kLRHasNotBeenSaved,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 OMIT_SMI_CHECK);
  __ Ret();

  __ bind(&fast_double_with_map_check);
  // Check for fast double array case. If this fails, call through to the
  // runtime.
  __ cmp(elements_map,
         Operand(masm->isolate()->factory()->fixed_double_array_map()));
  __ b(ne, &slow);
  __ bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(value,
                                 key,
                                 receiver,
                                 elements,
                                 r3,
                                 r4,
                                 r5,
                                 r6,
                                 &transition_double_elements);
  __ Ret();

  __ bind(&transition_smi_elements);
  // Transition the array appropriately depending on the value type.
  __ ldr(r4, FieldMemOperand(value, HeapObject::kMapOffset));
  __ CompareRoot(r4, Heap::kHeapNumberMapRootIndex);
  __ b(ne, &non_double_value);

  // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
  // FAST_DOUBLE_ELEMENTS and complete the store.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
                                         FAST_DOUBLE_ELEMENTS,
                                         receiver_map,
                                         r4,
                                         &slow);
  ASSERT(receiver_map.is(r3));  // Transition code expects map in r3.
  ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // Value is not a double, FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
                                         FAST_ELEMENTS,
                                         receiver_map,
                                         r4,
                                         &slow);
  ASSERT(receiver_map.is(r3));  // Transition code expects map in r3.
  ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);

  __ bind(&transition_double_elements);
  // Elements are FAST_DOUBLE_ELEMENTS, but value is an Object that's not a
  // HeapNumber. Make sure that the receiver is an Array with FAST_ELEMENTS
  // and transition the array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS.
  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
                                         FAST_ELEMENTS,
                                         receiver_map,
                                         r4,
                                         &slow);
  ASSERT(receiver_map.is(r3));  // Transition code expects map in r3.
  ElementsTransitionGenerator::GenerateDoubleToObject(masm, &slow);
  __ ldr(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);
}
1503
1504
GenerateMegamorphic(MacroAssembler * masm,StrictModeFlag strict_mode)1505 void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
1506 StrictModeFlag strict_mode) {
1507 // ----------- S t a t e -------------
1508 // -- r0 : value
1509 // -- r1 : receiver
1510 // -- r2 : name
1511 // -- lr : return address
1512 // -----------------------------------
1513
1514 // Get the receiver from the stack and probe the stub cache.
1515 Code::Flags flags =
1516 Code::ComputeFlags(Code::STORE_IC, MONOMORPHIC, strict_mode);
1517
1518 Isolate::Current()->stub_cache()->GenerateProbe(
1519 masm, flags, r1, r2, r3, r4, r5, r6);
1520
1521 // Cache miss: Jump to runtime.
1522 GenerateMiss(masm);
1523 }
1524
1525
GenerateMiss(MacroAssembler * masm)1526 void StoreIC::GenerateMiss(MacroAssembler* masm) {
1527 // ----------- S t a t e -------------
1528 // -- r0 : value
1529 // -- r1 : receiver
1530 // -- r2 : name
1531 // -- lr : return address
1532 // -----------------------------------
1533
1534 __ Push(r1, r2, r0);
1535
1536 // Perform tail call to the entry.
1537 ExternalReference ref =
1538 ExternalReference(IC_Utility(kStoreIC_Miss), masm->isolate());
1539 __ TailCallExternalReference(ref, 3, 1);
1540 }
1541

void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  //
  // This accepts as a receiver anything JSArray::SetElementsLength accepts
  // (currently anything except external arrays, which means anything with
  // elements of FixedArray type). The value must be a number, but only smis
  // are accepted, as they are the most common case.

  Label miss;

  Register receiver = r1;
  Register value = r0;
  Register scratch = r3;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the object is a JS array.
  __ CompareObjectType(receiver, scratch, scratch, JS_ARRAY_TYPE);
  __ b(ne, &miss);

  // Check that elements are FixedArray.
  // We rely on StoreIC_ArrayLength below to deal with all types of
  // fast elements (including COW).
  __ ldr(scratch, FieldMemOperand(receiver, JSArray::kElementsOffset));
  __ CompareObjectType(scratch, scratch, scratch, FIXED_ARRAY_TYPE);
  __ b(ne, &miss);
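  // Note: copy-on-write arrays also have instance type FIXED_ARRAY_TYPE
  // (only their map differs), so they pass the check above and are handled
  // by the runtime entry below.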

  // Check that the array has fast properties, otherwise the length
  // property might have been redefined.
  __ ldr(scratch, FieldMemOperand(receiver, JSArray::kPropertiesOffset));
  __ ldr(scratch, FieldMemOperand(scratch, FixedArray::kMapOffset));
  __ CompareRoot(scratch, Heap::kHashTableMapRootIndex);
  __ b(eq, &miss);

  // Check that value is a smi.
  __ JumpIfNotSmi(value, &miss);

  // Prepare tail call to StoreIC_ArrayLength.
  __ Push(receiver, value);

  ExternalReference ref =
      ExternalReference(IC_Utility(kStoreIC_ArrayLength), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);

  __ bind(&miss);
  GenerateMiss(masm);
}


void StoreIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  Label miss;

  GenerateStringDictionaryReceiverCheck(masm, r1, r3, r4, r5, &miss);
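  // On fall-through r3 holds the property dictionary. GenerateDictionaryStore
  // updates the named property in it, using r4 and r5 as scratch, and jumps
  // to the miss label on failure.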

  GenerateDictionaryStore(masm, &miss, r3, r2, r0, r4, r5);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(), 1, r4, r5);
  __ Ret();

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1, r4, r5);
  GenerateMiss(masm);
}


void StoreIC::GenerateGlobalProxy(MacroAssembler* masm,
                                  StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r1    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------

  __ Push(r1, r2, r0);
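  // Complete the argument list for Runtime::kSetProperty: after receiver,
  // name and value come the property attributes and the strict mode flag,
  // five arguments in total, matching the tail call below.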

  __ mov(r1, Operand(Smi::FromInt(NONE)));  // PropertyAttributes
  __ mov(r0, Operand(Smi::FromInt(strict_mode)));
  __ Push(r1, r0);

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}


#undef __

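// Maps a comparison token to the ARM condition that must hold after the
// compare stub has set the flags. Strict and non-strict equality share the
// eq condition; the strictness handling lives in the stub itself.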
Condition CompareIC::ComputeCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return eq;
    case Token::LT:
      return lt;
    case Token::GT:
      return gt;
    case Token::LTE:
      return le;
    case Token::GTE:
      return ge;
    default:
      UNREACHABLE();
      return kNoCondition;
  }
}


void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
  HandleScope scope;
  Handle<Code> rewritten;
  State previous_state = GetState();
  State state = TargetState(previous_state, false, x, y);
  if (state == GENERIC) {
    CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, r1, r0);
    rewritten = stub.GetCode();
  } else {
    ICCompareStub stub(op_, state);
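    // KNOWN_OBJECTS comparisons are specialized to the map of the objects
    // observed here; the stub takes the miss path if a different map shows
    // up later.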
    if (state == KNOWN_OBJECTS) {
      stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
    }
    rewritten = stub.GetCode();
  }
  set_target(*rewritten);

#ifdef DEBUG
  if (FLAG_trace_ic) {
    PrintF("[CompareIC (%s->%s)#%s]\n",
           GetStateName(previous_state),
           GetStateName(state),
           Token::Name(op_));
  }
#endif

  // Activate inlined smi code.
  if (previous_state == UNINITIALIZED) {
    PatchInlinedSmiCode(address());
  }
}


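// Activates the inlined smi check at an IC call site by rewriting the
// marker cmp/branch pair emitted by the full code generator into a real
// smi test, as detailed in the comments below.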
void PatchInlinedSmiCode(Address address) {
  Address cmp_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not a cmp rx, #yyy, nothing
  // was inlined.
  Instr instr = Assembler::instr_at(cmp_instruction_address);
  if (!Assembler::IsCmpImmediate(instr)) {
    return;
  }

  // The delta to the start of the inlined smi check is encoded in the cmp
  // marker as the raw immediate plus the marker register's code scaled by
  // kOff12Mask.
  int delta = Assembler::GetCmpImmediateRawImmediate(instr);
  delta += Assembler::GetCmpImmediateRegister(instr).code() * kOff12Mask;
  // If the delta is 0 the instruction is cmp r0, #0 which also signals that
  // nothing was inlined.
  if (delta == 0) {
    return;
  }

#ifdef DEBUG
  if (FLAG_trace_ic) {
    PrintF("[ patching ic at %p, cmp=%p, delta=%d\n",
           address, cmp_instruction_address, delta);
  }
#endif

  Address patch_address =
      cmp_instruction_address - delta * Instruction::kInstrSize;
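  // patch_address points at the cmp rx, rx of the inlined smi check; the
  // instruction after it is the conditional branch to the deferred code,
  // as the asserts below verify.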
  Instr instr_at_patch = Assembler::instr_at(patch_address);
  Instr branch_instr =
      Assembler::instr_at(patch_address + Instruction::kInstrSize);
  ASSERT(Assembler::IsCmpRegister(instr_at_patch));
  ASSERT_EQ(Assembler::GetRn(instr_at_patch).code(),
            Assembler::GetRm(instr_at_patch).code());
  ASSERT(Assembler::IsBranch(branch_instr));
  if (Assembler::GetCondition(branch_instr) == eq) {
    // This is patching a "jump if not smi" site to be active.
    // Changing
    //   cmp rx, rx
    //   b eq, <target>
    // to
    //   tst rx, #kSmiTagMask
    //   b ne, <target>
    CodePatcher patcher(patch_address, 2);
    Register reg = Assembler::GetRn(instr_at_patch);
    patcher.masm()->tst(reg, Operand(kSmiTagMask));
    patcher.EmitCondition(ne);
  } else {
    ASSERT(Assembler::GetCondition(branch_instr) == ne);
    // This is patching a "jump if smi" site to be active.
    // Changing
    //   cmp rx, rx
    //   b ne, <target>
    // to
    //   tst rx, #kSmiTagMask
    //   b eq, <target>
    CodePatcher patcher(patch_address, 2);
    Register reg = Assembler::GetRn(instr_at_patch);
    patcher.masm()->tst(reg, Operand(kSmiTagMask));
    patcher.EmitCondition(eq);
  }
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM