1 // Copyright 2006-2008 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "v8.h"
29
30 #include "codegen-inl.h"
31 #include "ic-inl.h"
32 #include "runtime.h"
33 #include "stub-cache.h"
34
35 namespace v8 {
36 namespace internal {
37
38
39 // ----------------------------------------------------------------------------
40 // Static IC stub generators.
41 //
42
43 #define __ ACCESS_MASM(masm)
44
45
46 // Helper function used from LoadIC/CallIC GenerateNormal.
static void GenerateDictionaryLoad(MacroAssembler* masm,
                                   Label* miss,
                                   Register t0,
                                   Register t1) {
  // Probes the receiver's StringDictionary for the property named in r2
  // and loads the property value into t1.  Jumps to |miss| on any check
  // failure or if the property is not found.
  //
  // Register use:
  //
  // t0 - used to hold the property dictionary.
  //
  // t1 - initially the receiver
  //    - used for the index into the property dictionary
  //    - holds the result on exit.
  //
  // r3 - used as temporary and to hold the capacity of the property
  //      dictionary.
  //
  // r2 - holds the name of the property and is unchanged.

  Label done;

  // Check for the absence of an interceptor.
  // Load the map into t0.
  __ ldr(t0, FieldMemOperand(t1, JSObject::kMapOffset));
  // Test the has_named_interceptor bit in the map.  The bit sits in byte 3
  // of the instance-attributes word, hence the extra (3 * 8) shift.
  __ ldr(r3, FieldMemOperand(t0, Map::kInstanceAttributesOffset));
  __ tst(r3, Operand(1 << (Map::kHasNamedInterceptor + (3 * 8))));
  // Jump to miss if the interceptor bit is set.
  __ b(ne, miss);

  // Bail out if we have a JS global proxy object.
  __ ldrb(r3, FieldMemOperand(t0, Map::kInstanceTypeOffset));
  __ cmp(r3, Operand(JS_GLOBAL_PROXY_TYPE));
  __ b(eq, miss);

  // Possible work-around for http://crbug.com/16276.
  // See also: http://codereview.chromium.org/155418.
  __ cmp(r3, Operand(JS_GLOBAL_OBJECT_TYPE));
  __ b(eq, miss);
  __ cmp(r3, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ b(eq, miss);

  // Check that the properties array is a dictionary: its map must be the
  // hash-table map.
  __ ldr(t0, FieldMemOperand(t1, JSObject::kPropertiesOffset));
  __ ldr(r3, FieldMemOperand(t0, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, miss);

  // Compute the capacity mask.  Capacity is a power of two, so
  // capacity - 1 masks a hash down to a valid table index.
  const int kCapacityOffset = StringDictionary::kHeaderSize +
      StringDictionary::kCapacityIndex * kPointerSize;
  __ ldr(r3, FieldMemOperand(t0, kCapacityOffset));
  __ mov(r3, Operand(r3, ASR, kSmiTagSize));  // convert smi to int
  __ sub(r3, r3, Operand(1));

  const int kElementsStartOffset = StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;

  // Generate an unrolled loop that performs a few probes before
  // giving up. Measurements done on Gmail indicate that 2 probes
  // cover ~93% of loads from dictionaries.
  static const int kProbes = 4;
  for (int i = 0; i < kProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    // The hash is kept in the upper bits of the string's length/hash
    // field; LSR by String::kHashShift extracts it.
    __ ldr(t1, FieldMemOperand(r2, String::kLengthOffset));
    __ mov(t1, Operand(t1, LSR, String::kHashShift));
    if (i > 0) {
      // Add the quadratic probe offset (i + i * i) for this round.
      __ add(t1, t1, Operand(StringDictionary::GetProbeOffset(i)));
    }
    __ and_(t1, t1, Operand(r3));

    // Scale the index by multiplying by the element size.
    ASSERT(StringDictionary::kEntrySize == 3);
    __ add(t1, t1, Operand(t1, LSL, 1));  // t1 = t1 * 3

    // Check if the key is identical to the name.
    __ add(t1, t0, Operand(t1, LSL, 2));
    __ ldr(ip, FieldMemOperand(t1, kElementsStartOffset));
    __ cmp(r2, Operand(ip));
    if (i != kProbes - 1) {
      __ b(eq, &done);
    } else {
      // Last probe: a mismatch means the property was not found.
      __ b(ne, miss);
    }
  }

  // Check that the value is a normal property: the type bits of the
  // details word (third word of the entry) must all be zero.
  __ bind(&done);  // t1 == t0 + 4*index
  __ ldr(r3, FieldMemOperand(t1, kElementsStartOffset + 2 * kPointerSize));
  __ tst(r3, Operand(PropertyDetails::TypeField::mask() << kSmiTagSize));
  __ b(ne, miss);

  // Get the value at the masked, scaled index and return.
  __ ldr(t1, FieldMemOperand(t1, kElementsStartOffset + 1 * kPointerSize));
}
141
142
143 // Helper function used to check that a value is either not an object
144 // or is loaded if it is an object.
static void GenerateCheckNonObjectOrLoaded(MacroAssembler* masm,
                                           Label* miss,
                                           Register value,
                                           Register scratch) {
  // Jumps to |miss| when |value| is an object whose map still has the
  // needs-loading bit set; smis and loaded objects fall through.
  // |scratch| is clobbered; |value| is left untouched.
  Label done;
  // Check if the value is a Smi; a smi is never in the needs-loading state.
  __ tst(value, Operand(kSmiTagMask));
  __ b(eq, &done);
  // Check if the object has been loaded: test Map::kNeedsLoading in the
  // map's bit_field2 byte.
  __ ldr(scratch, FieldMemOperand(value, JSObject::kMapOffset));
  __ ldrb(scratch, FieldMemOperand(scratch, Map::kBitField2Offset));
  __ tst(scratch, Operand(1 << Map::kNeedsLoading));
  __ b(ne, miss);
  __ bind(&done);
}
160
161
void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
  // Specialized LoadIC stub for the 'length' property of JS arrays.
  //
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  Label miss;

  // Load the receiver from the stack.
  __ ldr(r0, MemOperand(sp, 0));

  // Emits the fast-case length load; falls through to miss on failure.
  StubCompiler::GenerateLoadArrayLength(masm, r0, r3, &miss);
  // Cache miss: jump to the generic LoadIC miss handler.
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
177
178
void LoadIC::GenerateStringLength(MacroAssembler* masm) {
  // Specialized LoadIC stub for the 'length' property of strings.
  //
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------
  Label miss;

  // Load the receiver from the stack.
  __ ldr(r0, MemOperand(sp, 0));

  // Emits the fast-case string length load; r1 and r3 are scratch.
  StubCompiler::GenerateLoadStringLength2(masm, r0, r1, r3, &miss);
  // Cache miss: Jump to runtime.
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
194
195
void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
  // Specialized LoadIC stub for the 'prototype' property of functions.
  //
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  Label miss;

  // Load receiver.
  __ ldr(r0, MemOperand(sp, 0));

  // Emits the fast-case prototype load; r1 and r3 are scratch.
  StubCompiler::GenerateLoadFunctionPrototype(masm, r0, r1, r3, &miss);
  // Cache miss: jump to the generic LoadIC miss handler.
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
212
213
214 // Defined in ic.cc.
215 Object* CallIC_Miss(Arguments args);
216
void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
  // Megamorphic call site: probe the stub cache for a call stub keyed on
  // the receiver and function name.  Value receivers (numbers, strings,
  // booleans) are re-probed using the prototype of the corresponding
  // wrapper function.  Falls back to the runtime on a miss.
  //
  // ----------- S t a t e -------------
  //  -- lr: return address
  // -----------------------------------
  Label number, non_number, non_string, boolean, probe, miss;

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
  // Get the name of the function from the stack; 1 ~ receiver.
  __ ldr(r2, MemOperand(sp, (argc + 1) * kPointerSize));

  // Probe the stub cache.
  Code::Flags flags =
      Code::ComputeFlags(Code::CALL_IC, NOT_IN_LOOP, MONOMORPHIC, NORMAL, argc);
  StubCache::GenerateProbe(masm, flags, r1, r2, r3, no_reg);

  // If the stub cache probing failed, the receiver might be a value.
  // For value objects, we use the map of the prototype objects for
  // the corresponding JSValue for the cache and that is what we need
  // to probe.
  //
  // Check for number (smi or heap number).
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &number);
  __ CompareObjectType(r1, r3, r3, HEAP_NUMBER_TYPE);
  __ b(ne, &non_number);
  __ bind(&number);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::NUMBER_FUNCTION_INDEX, r1);
  __ b(&probe);

  // Check for string (instance type below FIRST_NONSTRING_TYPE).
  __ bind(&non_number);
  __ cmp(r3, Operand(FIRST_NONSTRING_TYPE));
  __ b(hs, &non_string);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::STRING_FUNCTION_INDEX, r1);
  __ b(&probe);

  // Check for boolean (receiver is the true or false value).
  __ bind(&non_string);
  __ LoadRoot(ip, Heap::kTrueValueRootIndex);
  __ cmp(r1, ip);
  __ b(eq, &boolean);
  __ LoadRoot(ip, Heap::kFalseValueRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &miss);
  __ bind(&boolean);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::BOOLEAN_FUNCTION_INDEX, r1);

  // Probe the stub cache for the value object (r1 now holds the
  // wrapper prototype).
  __ bind(&probe);
  StubCache::GenerateProbe(masm, flags, r1, r2, r3, no_reg);

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  Generate(masm, argc, ExternalReference(IC_Utility(kCallIC_Miss)));
}
276
277
static void GenerateNormalHelper(MacroAssembler* masm,
                                 int argc,
                                 bool is_global_object,
                                 Label* miss) {
  // Tail portion of CallIC::GenerateNormal: look the callee up in the
  // receiver's property dictionary and invoke it.  Jumps to |miss| if the
  // property is absent, not a JSFunction, or not yet loaded.

  // Search dictionary - put result in register r1.
  GenerateDictionaryLoad(masm, miss, r0, r1);

  // Check that the value isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, miss);

  // Check that the value is a JSFunction.
  __ CompareObjectType(r1, r0, r0, JS_FUNCTION_TYPE);
  __ b(ne, miss);

  // Check that the function has been loaded (Map::kNeedsLoading clear in
  // the map's bit_field2).
  __ ldr(r0, FieldMemOperand(r1, JSObject::kMapOffset));
  __ ldrb(r0, FieldMemOperand(r0, Map::kBitField2Offset));
  __ tst(r0, Operand(1 << Map::kNeedsLoading));
  __ b(ne, miss);

  // Patch the receiver on the stack with the global proxy if necessary.
  if (is_global_object) {
    __ ldr(r2, MemOperand(sp, argc * kPointerSize));
    __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
    __ str(r2, MemOperand(sp, argc * kPointerSize));
  }

  // Invoke the function (tail call; does not return here).
  ParameterCount actual(argc);
  __ InvokeFunction(r1, actual, JUMP_FUNCTION);
}
310
311
void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
  // Call IC for receivers whose properties live in a dictionary: type-check
  // the receiver, then dispatch via GenerateNormalHelper, with special
  // handling for global objects and global proxies.
  //
  // ----------- S t a t e -------------
  //  -- lr: return address
  // -----------------------------------

  Label miss, global_object, non_global_object;

  // Get the receiver of the function from the stack into r1.
  __ ldr(r1, MemOperand(sp, argc * kPointerSize));
  // Get the name of the function from the stack; 1 ~ receiver.
  __ ldr(r2, MemOperand(sp, (argc + 1) * kPointerSize));

  // Check that the receiver isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the receiver is a valid JS object.  Put the map in r3 and
  // the instance type in r0.
  __ CompareObjectType(r1, r3, r0, FIRST_JS_OBJECT_TYPE);
  __ b(lt, &miss);

  // If this assert fails, we have to check upper bound too.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);

  // Check for access to global object.
  __ cmp(r0, Operand(JS_GLOBAL_OBJECT_TYPE));
  __ b(eq, &global_object);
  __ cmp(r0, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ b(ne, &non_global_object);

  // Accessing global object: Load and invoke.
  __ bind(&global_object);
  // Check that the global object does not require access checks.
  __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &miss);
  // Global receiver: patch the receiver with the global proxy.
  GenerateNormalHelper(masm, argc, true, &miss);

  // Accessing non-global object: Check for access to global proxy.
  Label global_proxy, invoke;
  __ bind(&non_global_object);
  __ cmp(r0, Operand(JS_GLOBAL_PROXY_TYPE));
  __ b(eq, &global_proxy);
  // Check that the non-global, non-global-proxy object does not
  // require access checks.
  __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &miss);
  __ bind(&invoke);
  GenerateNormalHelper(masm, argc, false, &miss);

  // Global object access: Check access rights.
  __ bind(&global_proxy);
  __ CheckAccessGlobalProxy(r1, r0, &miss);
  __ b(&invoke);

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  Generate(masm, argc, ExternalReference(IC_Utility(kCallIC_Miss)));
}
371
372
void CallIC::Generate(MacroAssembler* masm,
                      int argc,
                      const ExternalReference& f) {
  // Slow path for call ICs: call the runtime entry |f| (e.g. CallIC_Miss)
  // with (receiver, name) to resolve the callee, then invoke the function
  // the runtime returned.
  //
  // ----------- S t a t e -------------
  //  -- lr: return address
  // -----------------------------------

  // Get the receiver of the function from the stack.
  __ ldr(r2, MemOperand(sp, argc * kPointerSize));
  // Get the name of the function to call from the stack.
  __ ldr(r1, MemOperand(sp, (argc + 1) * kPointerSize));

  __ EnterInternalFrame();

  // Push the receiver and the name of the function.
  __ stm(db_w, sp, r1.bit() | r2.bit());

  // Call the entry: r0 holds the argument count (2), r1 the entry point.
  __ mov(r0, Operand(2));
  __ mov(r1, Operand(f));

  CEntryStub stub;
  __ CallStub(&stub);

  // Move result to r1 and leave the internal frame.
  __ mov(r1, Operand(r0));
  __ LeaveInternalFrame();

  // Check if the receiver is a global object of some sort.
  Label invoke, global;
  __ ldr(r2, MemOperand(sp, argc * kPointerSize));  // receiver
  __ tst(r2, Operand(kSmiTagMask));
  __ b(eq, &invoke);
  __ CompareObjectType(r2, r3, r3, JS_GLOBAL_OBJECT_TYPE);
  __ b(eq, &global);
  __ cmp(r3, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ b(ne, &invoke);

  // Patch the receiver on the stack with the global proxy.
  __ bind(&global);
  __ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset));
  __ str(r2, MemOperand(sp, argc * kPointerSize));

  // Invoke the function returned by the runtime (in r1).
  ParameterCount actual(argc);
  __ bind(&invoke);
  __ InvokeFunction(r1, actual, JUMP_FUNCTION);
}
421
422
423 // Defined in ic.cc.
424 Object* LoadIC_Miss(Arguments args);
425
void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // Megamorphic load site: probe the stub cache for a monomorphic load
  // stub matching the receiver's map and the property name; fall back to
  // the runtime miss handler on failure.
  //
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  // Load the receiver from the stack.
  __ ldr(r0, MemOperand(sp, 0));
  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC,
                                         NOT_IN_LOOP,
                                         MONOMORPHIC);
  StubCache::GenerateProbe(masm, flags, r0, r2, r3, no_reg);

  // Cache miss: Jump to runtime.
  Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
}
443
444
void LoadIC::GenerateNormal(MacroAssembler* masm) {
  // Load IC for receivers whose properties live in a dictionary: look the
  // property up directly in the receiver's StringDictionary instead of
  // going through the stub cache.
  //
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  Label miss, probe, global;

  // Load the receiver from the stack.
  __ ldr(r0, MemOperand(sp, 0));
  // Check that the receiver isn't a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &miss);

  // Check that the receiver is a valid JS object.  Put the map in r3 and
  // the instance type in r1.
  __ CompareObjectType(r0, r3, r1, FIRST_JS_OBJECT_TYPE);
  __ b(lt, &miss);
  // If this assert fails, we have to check upper bound too.
  ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);

  // Check for access to global object (unlikely).
  __ cmp(r1, Operand(JS_GLOBAL_PROXY_TYPE));
  __ b(eq, &global);

  // Check for non-global object that requires access check.
  __ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &miss);

  // Dictionary lookup: leaves the result in r0, then verify it is either
  // a non-object or a loaded object before returning.
  __ bind(&probe);
  GenerateDictionaryLoad(masm, &miss, r1, r0);
  GenerateCheckNonObjectOrLoaded(masm, &miss, r0, r1);
  __ Ret();

  // Global object access: Check access rights.
  __ bind(&global);
  __ CheckAccessGlobalProxy(r0, r1, &miss);
  __ b(&probe);

  // Cache miss: Restore receiver from stack and jump to runtime.
  __ bind(&miss);
  Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
}
488
489
void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // Unconditional miss: tail call the LoadIC miss handler in the runtime.
  Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
}
493
494
void LoadIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
  // Slow path for load ICs: tail call the runtime entry |f| with
  // (receiver, name) as arguments.
  //
  // ----------- S t a t e -------------
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  // Push the name and a copy of the receiver as runtime-call arguments.
  __ ldr(r3, MemOperand(sp, 0));
  __ stm(db_w, sp, r2.bit() | r3.bit());

  // Perform tail call to the entry.
  __ TailCallRuntime(f, 2);
}
508
509
510 // TODO(181): Implement map patching once loop nesting is tracked on the
511 // ARM platform so we can generate inlined fast-case code loads in
512 // loops.
void LoadIC::ClearInlinedVersion(Address address) {}  // No inlined loads on ARM yet; nothing to clear.
bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) {
  // Inlined fast-case loads are not yet generated on ARM, so there is
  // never an inline site to patch.
  return false;
}
517
void KeyedLoadIC::ClearInlinedVersion(Address address) {}  // No inlined keyed loads on ARM yet; nothing to clear.
bool KeyedLoadIC::PatchInlinedLoad(Address address, Object* map) {
  // Inlined keyed loads are not yet generated on ARM, so there is never
  // an inline site to patch.
  return false;
}
522
void KeyedStoreIC::ClearInlinedVersion(Address address) {}  // No inlined keyed stores on ARM yet; nothing to clear.
void KeyedStoreIC::RestoreInlinedVersion(Address address) {}  // No inlined keyed stores on ARM yet; nothing to restore.
bool KeyedStoreIC::PatchInlinedStore(Address address, Object* map) {
  // Inlined keyed stores are not yet generated on ARM, so there is never
  // an inline site to patch.
  return false;
}
528
529
530 Object* KeyedLoadIC_Miss(Arguments args);
531
532
void KeyedLoadIC::GenerateMiss(MacroAssembler* masm) {
  // Unconditional miss: tail call the KeyedLoadIC miss handler.
  Generate(masm, ExternalReference(IC_Utility(kKeyedLoadIC_Miss)));
}
536
537
void KeyedLoadIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
  // Slow path for keyed loads: tail call the runtime entry |f| with
  // (receiver, key) as arguments.
  //
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[4]  : receiver

  // Load key (r2) and receiver (r3) and push copies of both.
  __ ldm(ia, sp, r2.bit() | r3.bit());
  __ stm(db_w, sp, r2.bit() | r3.bit());

  __ TailCallRuntime(f, 2);
}
548
549
void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  // Generic keyed load: handles smi keys on fast-elements JS objects
  // inline; everything else (non-smi keys, dictionaries, holes, JSValue
  // wrappers) is punted to Runtime::kGetProperty.
  //
  // ---------- S t a t e --------------
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[4]  : receiver
  Label slow, fast;

  // Get the key and receiver object from the stack.
  __ ldm(ia, sp, r0.bit() | r1.bit());
  // Check that the key is a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(ne, &slow);
  // Untag the key.
  __ mov(r0, Operand(r0, ASR, kSmiTagSize));
  // Check that the object isn't a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(eq, &slow);

  // Get the map of the receiver.
  __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks.  We need
  // to check this explicitly since this generic stub does not perform
  // map checks.
  __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &slow);
  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object,
  // we enter the runtime system to make sure that indexing into string
  // objects work as intended.
  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  __ cmp(r2, Operand(JS_OBJECT_TYPE));
  __ b(lt, &slow);

  // Get the elements array of the object.
  __ ldr(r1, FieldMemOperand(r1, JSObject::kElementsOffset));
  // Check that the object is in fast mode (not dictionary): the elements
  // map must be the fixed-array map.
  __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
  __ cmp(r3, ip);
  __ b(ne, &slow);
  // Check that the key (index) is within bounds.
  __ ldr(r3, FieldMemOperand(r1, Array::kLengthOffset));
  __ cmp(r0, Operand(r3));
  __ b(lo, &fast);

  // Slow case: Push extra copies of the arguments (2).
  __ bind(&slow);
  __ IncrementCounter(&Counters::keyed_load_generic_slow, 1, r0, r1);
  // Reload key and receiver (clobbered by IncrementCounter) and push
  // copies of both as runtime-call arguments.
  __ ldm(ia, sp, r0.bit() | r1.bit());
  __ stm(db_w, sp, r0.bit() | r1.bit());
  // Do tail-call to runtime routine.
  __ TailCallRuntime(ExternalReference(Runtime::kGetProperty), 2);

  // Fast case: Do the load.
  __ bind(&fast);
  __ add(r3, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ ldr(r0, MemOperand(r3, r0, LSL, kPointerSizeLog2));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(r0, ip);
  // In case the loaded value is the_hole we have to consult GetProperty
  // to ensure the prototype chain is searched.
  __ b(eq, &slow);

  __ Ret();
}
616
617
void KeyedStoreIC::Generate(MacroAssembler* masm,
                            const ExternalReference& f) {
  // Slow path for keyed stores: tail call the runtime entry |f| with
  // (receiver, key, value) as arguments.
  //
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[1]  : receiver

  // Load key (r2) and receiver (r3); push value plus copies of both.
  __ ldm(ia, sp, r2.bit() | r3.bit());
  __ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit());

  __ TailCallRuntime(f, 3);
}
631
632
void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
  // Generic keyed store: handles smi keys on fast-elements JS objects and
  // JS arrays inline, including the array-growth-by-one case
  // (array[array.length] = v).  Everything else goes to
  // Runtime::kSetProperty.
  //
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[1]  : receiver
  Label slow, fast, array, extra, exit;
  // Get the key and the object from the stack.
  __ ldm(ia, sp, r1.bit() | r3.bit());  // r1 = key, r3 = receiver
  // Check that the key is a smi.
  __ tst(r1, Operand(kSmiTagMask));
  __ b(ne, &slow);
  // Check that the object isn't a smi.
  __ tst(r3, Operand(kSmiTagMask));
  __ b(eq, &slow);
  // Get the map of the object.
  __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks.  We need
  // to do this because this generic stub does not perform map checks.
  __ ldrb(ip, FieldMemOperand(r2, Map::kBitFieldOffset));
  __ tst(ip, Operand(1 << Map::kIsAccessCheckNeeded));
  __ b(ne, &slow);
  // Check if the object is a JS array or not.
  __ ldrb(r2, FieldMemOperand(r2, Map::kInstanceTypeOffset));
  __ cmp(r2, Operand(JS_ARRAY_TYPE));
  // r1 == key.
  __ b(eq, &array);
  // Check that the object is some kind of JS object.
  __ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE));
  __ b(lt, &slow);


  // Object case: Check key against length in the elements array.
  __ ldr(r3, FieldMemOperand(r3, JSObject::kElementsOffset));
  // Check that the object is in fast mode (not dictionary): the elements
  // map must be the fixed-array map.
  __ ldr(r2, FieldMemOperand(r3, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
  __ cmp(r2, ip);
  __ b(ne, &slow);
  // Untag the key (for checking against untagged length in the fixed array).
  __ mov(r1, Operand(r1, ASR, kSmiTagSize));
  // Compute address to store into and check array bounds.
  __ add(r2, r3, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2));
  __ ldr(ip, FieldMemOperand(r3, FixedArray::kLengthOffset));
  __ cmp(r1, Operand(ip));
  __ b(lo, &fast);


  // Slow case: Push extra copies of the arguments (3).
  __ bind(&slow);
  __ ldm(ia, sp, r1.bit() | r3.bit());  // r0 == value, r1 == key, r3 == object
  __ stm(db_w, sp, r0.bit() | r1.bit() | r3.bit());
  // Do tail-call to runtime routine.
  __ TailCallRuntime(ExternalReference(Runtime::kSetProperty), 3);

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  // Reached from the array case below with the flags still set by the
  // cmp(key, array.length) there: ne means key > length, i.e. a hole.
  // r0 == value, r1 == key, r2 == elements, r3 == object
  __ bind(&extra);
  __ b(ne, &slow);  // do not leave holes in the array
  __ mov(r1, Operand(r1, ASR, kSmiTagSize));  // untag
  // Check that the backing store has spare capacity for one more element.
  __ ldr(ip, FieldMemOperand(r2, Array::kLengthOffset));
  __ cmp(r1, Operand(ip));
  __ b(hs, &slow);
  // Bump the array length (as a smi) to key + 1.
  __ mov(r1, Operand(r1, LSL, kSmiTagSize));  // restore tag
  __ add(r1, r1, Operand(1 << kSmiTagSize));  // and increment
  __ str(r1, FieldMemOperand(r3, JSArray::kLengthOffset));
  __ mov(r3, Operand(r2));
  // NOTE: Computing the address to store into must take the fact
  // that the key has been incremented into account.
  int displacement = FixedArray::kHeaderSize - kHeapObjectTag -
      ((1 << kSmiTagSize) * 2);
  __ add(r2, r2, Operand(displacement));
  __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));
  __ b(&fast);


  // Array case: Get the length and the elements array from the JS
  // array. Check that the array is in fast mode; if it is the
  // length is always a smi.
  // r0 == value, r3 == object
  __ bind(&array);
  __ ldr(r2, FieldMemOperand(r3, JSObject::kElementsOffset));
  __ ldr(r1, FieldMemOperand(r2, HeapObject::kMapOffset));
  __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
  __ cmp(r1, ip);
  __ b(ne, &slow);

  // Check the key against the length in the array, compute the
  // address to store into and fall through to fast case.
  __ ldr(r1, MemOperand(sp));  // restore key
  // r0 == value, r1 == key, r2 == elements, r3 == object.
  __ ldr(ip, FieldMemOperand(r3, JSArray::kLengthOffset));
  __ cmp(r1, Operand(ip));
  __ b(hs, &extra);  // key >= length: maybe grow the array by one
  __ mov(r3, Operand(r2));
  __ add(r2, r2, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ add(r2, r2, Operand(r1, LSL, kPointerSizeLog2 - kSmiTagSize));


  // Fast case: Do the store.
  // r0 == value, r2 == address to store into, r3 == elements
  __ bind(&fast);
  __ str(r0, MemOperand(r2));
  // Skip write barrier if the written value is a smi.
  __ tst(r0, Operand(kSmiTagMask));
  __ b(eq, &exit);
  // Update write barrier for the elements array address.
  __ sub(r1, r2, Operand(r3));
  __ RecordWrite(r3, r1, r2);

  __ bind(&exit);
  __ Ret();
}
749
750
void KeyedStoreIC::GenerateExtendStorage(MacroAssembler* masm) {
  // Keyed store that needs the receiver's backing store to grow: forward
  // (receiver, key, value) to the shared ExtendStorage runtime entry.
  //
  // ---------- S t a t e --------------
  //  -- r0     : value
  //  -- lr     : return address
  //  -- sp[0]  : key
  //  -- sp[1]  : receiver
  // ----------- S t a t e -------------

  // Load key (r2) and receiver (r3); push value plus copies of both.
  __ ldm(ia, sp, r2.bit() | r3.bit());
  __ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit());

  // Perform tail call to the entry.
  __ TailCallRuntime(
      ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3);
}
766
767
void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
  // Megamorphic store site: probe the stub cache for a monomorphic store
  // stub matching the receiver's map and the property name; fall back to
  // the runtime miss handler on failure.
  //
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  // Get the receiver from the stack and probe the stub cache.
  __ ldr(r1, MemOperand(sp));
  Code::Flags flags = Code::ComputeFlags(Code::STORE_IC,
                                         NOT_IN_LOOP,
                                         MONOMORPHIC);
  StubCache::GenerateProbe(masm, flags, r1, r2, r3, no_reg);

  // Cache miss: Jump to runtime.
  Generate(masm, ExternalReference(IC_Utility(kStoreIC_Miss)));
}
786
787
void StoreIC::GenerateExtendStorage(MacroAssembler* masm) {
  // Named store that needs the receiver's storage to grow: forward
  // (receiver, name, value) to the shared ExtendStorage runtime entry.
  //
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  __ ldr(r3, MemOperand(sp));  // copy receiver
  __ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit());

  // Perform tail call to the entry.
  __ TailCallRuntime(
      ExternalReference(IC_Utility(kSharedStoreIC_ExtendStorage)), 3);
}
803
804
void StoreIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
  // Slow path for named stores: tail call the runtime entry |f| with
  // (receiver, name, value) as arguments.
  //
  // ----------- S t a t e -------------
  //  -- r0    : value
  //  -- r2    : name
  //  -- lr    : return address
  //  -- [sp]  : receiver
  // -----------------------------------

  __ ldr(r3, MemOperand(sp));  // copy receiver
  __ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit());

  // Perform tail call to the entry.
  __ TailCallRuntime(f, 3);
}
819
820
821 #undef __
822
823
824 } } // namespace v8::internal
825