// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


#include "v8.h"

#if defined(V8_TARGET_ARCH_MIPS)

#include "codegen.h"
#include "code-stubs.h"
#include "ic-inl.h"
#include "runtime.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {


// ----------------------------------------------------------------------------
// Static IC stub generators.
//

#define __ ACCESS_MASM(masm)
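
// All generated code below is emitted through the `__` shorthand, which
// expands to ACCESS_MASM(masm) and forwards each mnemonic to the
// MacroAssembler.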


static void GenerateGlobalInstanceTypeCheck(MacroAssembler* masm,
                                            Register type,
                                            Label* global_object) {
  // Register usage:
  //   type: holds the receiver instance type on entry.
  __ Branch(global_object, eq, type, Operand(JS_GLOBAL_OBJECT_TYPE));
  __ Branch(global_object, eq, type, Operand(JS_BUILTINS_OBJECT_TYPE));
  __ Branch(global_object, eq, type, Operand(JS_GLOBAL_PROXY_TYPE));
}


// Generated code falls through if the receiver is a regular non-global
// JS object with slow properties and no interceptors.
static void GenerateStringDictionaryReceiverCheck(MacroAssembler* masm,
                                                  Register receiver,
                                                  Register elements,
                                                  Register scratch0,
                                                  Register scratch1,
                                                  Label* miss) {
  // Register usage:
  //   receiver: holds the receiver on entry and is unchanged.
  //   elements: holds the property dictionary on fall through.
  // Scratch registers:
  //   scratch0: used to hold the receiver map.
  //   scratch1: used to hold the receiver instance type, receiver bit mask
  //     and elements map.

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, miss);

  // Check that the receiver is a valid JS object.
  __ GetObjectType(receiver, scratch0, scratch1);
  __ Branch(miss, lt, scratch1, Operand(FIRST_SPEC_OBJECT_TYPE));

  // If this assert fails, we have to check upper bound too.
  STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);

  GenerateGlobalInstanceTypeCheck(masm, scratch1, miss);

  // Check that the object does not require access checks and has no
  // named interceptor.
  __ lbu(scratch1, FieldMemOperand(scratch0, Map::kBitFieldOffset));
  __ And(scratch1, scratch1, Operand((1 << Map::kIsAccessCheckNeeded) |
                                     (1 << Map::kHasNamedInterceptor)));
  __ Branch(miss, ne, scratch1, Operand(zero_reg));

  __ lw(elements, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  __ lw(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
  __ LoadRoot(scratch0, Heap::kHashTableMapRootIndex);
  __ Branch(miss, ne, scratch1, Operand(scratch0));
}


// Helper function used from LoadIC/CallIC GenerateNormal.
//
// elements: Property dictionary. It is not clobbered if a jump to the miss
//           label is done.
// name:     Property name. It is not clobbered if a jump to the miss label
//           is done.
// result:   Register for the result. It is only updated if a jump to the
//           miss label is not done. It can be the same as elements or name,
//           in which case that register is clobbered when the miss label is
//           not taken.
// The two scratch registers need to be different from elements, name and
// result.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
// The address returned from GenerateStringDictionaryProbes() in scratch2
// is used.
static void GenerateDictionaryLoad(MacroAssembler* masm,
                                   Label* miss,
                                   Register elements,
                                   Register name,
                                   Register result,
                                   Register scratch1,
                                   Register scratch2) {
  // Main use of the scratch registers.
  // scratch1: Used as temporary and to hold the capacity of the property
  //           dictionary.
  // scratch2: Used as temporary.
  Label done;

  // Probe the dictionary.
  StringDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                     miss,
                                                     &done,
                                                     elements,
                                                     name,
                                                     scratch1,
                                                     scratch2);

  // If probing finds an entry, check that the value is a normal
  // property.
  __ bind(&done);  // scratch2 == elements + 4 * index.
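  // A dictionary entry is a (key, value, details) triple, so the details
  // word sits two pointers past the entry start and the value one pointer
  // past it.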
  const int kElementsStartOffset = StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
  __ lw(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
  __ And(at,
         scratch1,
         Operand(PropertyDetails::TypeField::kMask << kSmiTagSize));
  __ Branch(miss, ne, at, Operand(zero_reg));

  // Get the value at the masked, scaled index and return.
  __ lw(result,
        FieldMemOperand(scratch2, kElementsStartOffset + 1 * kPointerSize));
}


// Helper function used from StoreIC::GenerateNormal.
//
// elements: Property dictionary. It is not clobbered if a jump to the miss
//           label is done.
// name:     Property name. It is not clobbered if a jump to the miss label
//           is done.
// value:    The value to store.
// The two scratch registers need to be different from elements, name and
// value.
// The generated code assumes that the receiver has slow properties,
// is not a global object and does not have interceptors.
// The address returned from GenerateStringDictionaryProbes() in scratch2
// is used.
static void GenerateDictionaryStore(MacroAssembler* masm,
                                    Label* miss,
                                    Register elements,
                                    Register name,
                                    Register value,
                                    Register scratch1,
                                    Register scratch2) {
  // Main use of the scratch registers.
  // scratch1: Used as temporary and to hold the capacity of the property
  //           dictionary.
  // scratch2: Used as temporary.
  Label done;

  // Probe the dictionary.
  StringDictionaryLookupStub::GeneratePositiveLookup(masm,
                                                     miss,
                                                     &done,
                                                     elements,
                                                     name,
                                                     scratch1,
                                                     scratch2);

  // If probing finds an entry in the dictionary, check that the value
  // is a normal property that is not read only.
  __ bind(&done);  // scratch2 == elements + 4 * index.
  const int kElementsStartOffset = StringDictionary::kHeaderSize +
      StringDictionary::kElementsStartIndex * kPointerSize;
  const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
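  // The details word is stored as a smi, hence the kSmiTagSize shift in the
  // mask below; a single test then rejects entries that are not normal
  // fields as well as read-only ones.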
  const int kTypeAndReadOnlyMask =
      (PropertyDetails::TypeField::kMask |
       PropertyDetails::AttributesField::encode(READ_ONLY)) << kSmiTagSize;
  __ lw(scratch1, FieldMemOperand(scratch2, kDetailsOffset));
  __ And(at, scratch1, Operand(kTypeAndReadOnlyMask));
  __ Branch(miss, ne, at, Operand(zero_reg));

  // Store the value at the masked, scaled index and return.
  const int kValueOffset = kElementsStartOffset + kPointerSize;
  __ Addu(scratch2, scratch2, Operand(kValueOffset - kHeapObjectTag));
  __ sw(value, MemOperand(scratch2));

  // Update the write barrier. Make sure not to clobber the value.
  __ mov(scratch1, value);
  __ RecordWrite(
      elements, scratch2, scratch1, kRAHasNotBeenSaved, kDontSaveFPRegs);
}


void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  //  -- a0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  StubCompiler::GenerateLoadArrayLength(masm, a0, a3, &miss);
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}


void LoadIC::GenerateStringLength(MacroAssembler* masm,
                                  bool support_wrappers) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  //  -- a0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  StubCompiler::GenerateLoadStringLength(masm, a0, a1, a3, &miss,
                                         support_wrappers);
  // Cache miss: Jump to runtime.
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}


void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  //  -- a0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  StubCompiler::GenerateLoadFunctionPrototype(masm, a0, a1, a3, &miss);
  __ bind(&miss);
  StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}


// Checks the receiver for special cases (value type, slow case bits).
// Falls through for regular JS object.
static void GenerateKeyedLoadReceiverCheck(MacroAssembler* masm,
                                           Register receiver,
                                           Register map,
                                           Register scratch,
                                           int interceptor_bit,
                                           Label* slow) {
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, slow);
  // Get the map of the receiver.
  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Check bit field.
  __ lbu(scratch, FieldMemOperand(map, Map::kBitFieldOffset));
  __ And(at, scratch, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
  __ Branch(slow, ne, at, Operand(zero_reg));
  // Check that the object is some kind of JS object EXCEPT JS Value type.
  // In the case that the object is a value-wrapper object, we enter the
  // runtime system to make sure that indexing into string objects works
  // as intended.
  ASSERT(JS_OBJECT_TYPE > JS_VALUE_TYPE);
  __ lbu(scratch, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Branch(slow, lt, scratch, Operand(JS_OBJECT_TYPE));
}


// Loads an indexed element from a fast case array.
// If not_fast_array is NULL, doesn't perform the elements map check.
static void GenerateFastArrayLoad(MacroAssembler* masm,
                                  Register receiver,
                                  Register key,
                                  Register elements,
                                  Register scratch1,
                                  Register scratch2,
                                  Register result,
                                  Label* not_fast_array,
                                  Label* out_of_range) {
  // Register use:
  //
  // receiver - holds the receiver on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // key      - holds the smi key on entry.
  //            Unchanged unless 'result' is the same register.
  //
  // elements - holds the elements of the receiver on exit.
  //
  // result   - holds the result on exit if the load succeeded.
  //            Allowed to be the same as 'receiver' or 'key'.
  //            Unchanged on bailout so 'receiver' and 'key' can be safely
  //            used by further computation.
  //
  // Scratch registers:
  //
  // scratch1 - used to hold elements map and elements length.
  //            Holds the elements map if not_fast_array branch is taken.
  //
  // scratch2 - used to hold the loaded value.
  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  if (not_fast_array != NULL) {
    // Check that the object is in fast mode (not dictionary).
    __ lw(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
    __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
    __ Branch(not_fast_array, ne, scratch1, Operand(at));
  } else {
    __ AssertFastElements(elements);
  }

  // Check that the key (index) is within bounds.
  __ lw(scratch1, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ Branch(out_of_range, hs, key, Operand(scratch1));

  // Fast case: Do the load.
  __ Addu(scratch1, elements,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  // The key is a smi.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize < kPointerSizeLog2);
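  // A smi is the untagged value shifted left by kSmiTagSize, so shifting the
  // key left by (kPointerSizeLog2 - kSmiTagSize) turns it directly into a
  // byte offset into the elements array.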
  __ sll(at, key, kPointerSizeLog2 - kSmiTagSize);
  __ addu(at, at, scratch1);
  __ lw(scratch2, MemOperand(at));

  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  // In case the loaded value is the_hole we have to consult GetProperty
  // to ensure the prototype chain is searched.
  __ Branch(out_of_range, eq, scratch2, Operand(at));
  __ mov(result, scratch2);
}


// Checks whether a key is an array index string or a symbol string.
// Falls through if the key is a symbol.
static void GenerateKeyStringCheck(MacroAssembler* masm,
                                   Register key,
                                   Register map,
                                   Register hash,
                                   Label* index_string,
                                   Label* not_symbol) {
  // The key is not a smi.
  // Is it a string?
  __ GetObjectType(key, map, hash);
  __ Branch(not_symbol, ge, hash, Operand(FIRST_NONSTRING_TYPE));

  // Is the string an array index, with cached numeric value?
  __ lw(hash, FieldMemOperand(key, String::kHashFieldOffset));
  __ And(at, hash, Operand(String::kContainsCachedArrayIndexMask));
  __ Branch(index_string, eq, at, Operand(zero_reg));

  // Is the string a symbol?
  // map: key map
  __ lbu(hash, FieldMemOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kSymbolTag != 0);
  __ And(at, hash, Operand(kIsSymbolMask));
  __ Branch(not_symbol, eq, at, Operand(zero_reg));
}


// Defined in ic.cc.
Object* CallIC_Miss(Arguments args);

// The generated code does not accept smi keys.
// The generated code falls through if both probes miss.
void CallICBase::GenerateMonomorphicCacheProbe(MacroAssembler* masm,
                                               int argc,
                                               Code::Kind kind,
                                               Code::ExtraICState extra_state) {
  // ----------- S t a t e -------------
  //  -- a1    : receiver
  //  -- a2    : name
  // -----------------------------------
  Label number, non_number, non_string, boolean, probe, miss;

  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(kind,
                                         MONOMORPHIC,
                                         extra_state,
                                         NORMAL,
                                         argc);
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, a1, a2, a3, t0, t1, t2);

  // If the stub cache probing failed, the receiver might be a value.
  // For value objects, the cache is keyed on the map of the prototype
  // object of the corresponding JSValue, so that is the map we need to
  // probe with.
  //
  // Check for number.
  __ JumpIfSmi(a1, &number, t1);
  __ GetObjectType(a1, a3, a3);
  __ Branch(&non_number, ne, a3, Operand(HEAP_NUMBER_TYPE));
  __ bind(&number);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::NUMBER_FUNCTION_INDEX, a1);
  __ Branch(&probe);

  // Check for string.
  __ bind(&non_number);
  __ Branch(&non_string, Ugreater_equal, a3, Operand(FIRST_NONSTRING_TYPE));
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::STRING_FUNCTION_INDEX, a1);
  __ Branch(&probe);

  // Check for boolean.
  __ bind(&non_string);
  __ LoadRoot(t0, Heap::kTrueValueRootIndex);
  __ Branch(&boolean, eq, a1, Operand(t0));
  __ LoadRoot(t1, Heap::kFalseValueRootIndex);
  __ Branch(&miss, ne, a1, Operand(t1));
  __ bind(&boolean);
  StubCompiler::GenerateLoadGlobalFunctionPrototype(
      masm, Context::BOOLEAN_FUNCTION_INDEX, a1);

  // Probe the stub cache for the value object.
  __ bind(&probe);
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, a1, a2, a3, t0, t1, t2);

  __ bind(&miss);
}


static void GenerateFunctionTailCall(MacroAssembler* masm,
                                     int argc,
                                     Label* miss,
                                     Register scratch) {
  // a1: function

  // Check that the value isn't a smi.
  __ JumpIfSmi(a1, miss);

  // Check that the value is a JSFunction.
  __ GetObjectType(a1, scratch, scratch);
  __ Branch(miss, ne, scratch, Operand(JS_FUNCTION_TYPE));

  // Invoke the function.
  ParameterCount actual(argc);
  __ InvokeFunction(a1, actual, JUMP_FUNCTION,
                    NullCallWrapper(), CALL_AS_METHOD);
}


void CallICBase::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  Label miss;

  // Get the receiver of the function from the stack into a1.
  __ lw(a1, MemOperand(sp, argc * kPointerSize));

  GenerateStringDictionaryReceiverCheck(masm, a1, a0, a3, t0, &miss);

  // a0: elements
  // Search the dictionary - put result in register a1.
  GenerateDictionaryLoad(masm, &miss, a0, a2, a1, a3, t0);

  GenerateFunctionTailCall(masm, argc, &miss, t0);

  // Cache miss: Jump to runtime.
  __ bind(&miss);
}


void CallICBase::GenerateMiss(MacroAssembler* masm,
                              int argc,
                              IC::UtilityId id,
                              Code::ExtraICState extra_state) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  if (id == IC::kCallIC_Miss) {
    __ IncrementCounter(isolate->counters()->call_miss(), 1, a3, t0);
  } else {
    __ IncrementCounter(isolate->counters()->keyed_call_miss(), 1, a3, t0);
  }

  // Get the receiver of the function from the stack.
  __ lw(a3, MemOperand(sp, argc * kPointerSize));

  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Push the receiver and the name of the function.
    __ Push(a3, a2);

    // Call the entry.
    __ PrepareCEntryArgs(2);
    __ PrepareCEntryFunction(ExternalReference(IC_Utility(id), isolate));

    CEntryStub stub(1);
    __ CallStub(&stub);

    // Move result to a1 and leave the internal frame.
    __ mov(a1, v0);
  }

  // Check if the receiver is a global object of some sort.
  // This can happen only for regular CallIC but not KeyedCallIC.
  if (id == IC::kCallIC_Miss) {
    Label invoke, global;
    __ lw(a2, MemOperand(sp, argc * kPointerSize));
    __ JumpIfSmi(a2, &invoke);
    __ GetObjectType(a2, a3, a3);
    __ Branch(&global, eq, a3, Operand(JS_GLOBAL_OBJECT_TYPE));
    __ Branch(&invoke, ne, a3, Operand(JS_BUILTINS_OBJECT_TYPE));

    // Patch the receiver on the stack.
    __ bind(&global);
    __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalReceiverOffset));
    __ sw(a2, MemOperand(sp, argc * kPointerSize));
    __ bind(&invoke);
  }
  // Invoke the function.
  CallKind call_kind = CallICBase::Contextual::decode(extra_state)
      ? CALL_AS_FUNCTION
      : CALL_AS_METHOD;
  ParameterCount actual(argc);
  __ InvokeFunction(a1,
                    actual,
                    JUMP_FUNCTION,
                    NullCallWrapper(),
                    call_kind);
}


void CallIC::GenerateMegamorphic(MacroAssembler* masm,
                                 int argc,
                                 Code::ExtraICState extra_ic_state) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------

  // Get the receiver of the function from the stack into a1.
  __ lw(a1, MemOperand(sp, argc * kPointerSize));
  GenerateMonomorphicCacheProbe(masm, argc, Code::CALL_IC, extra_ic_state);
  GenerateMiss(masm, argc, extra_ic_state);
}


void KeyedCallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------

  // Get the receiver of the function from the stack into a1.
  __ lw(a1, MemOperand(sp, argc * kPointerSize));

  Label do_call, slow_call, slow_load, slow_reload_receiver;
  Label check_number_dictionary, check_string, lookup_monomorphic_cache;
  Label index_smi, index_string;

  // Check that the key is a smi.
  __ JumpIfNotSmi(a2, &check_string);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, a1, a0, a3, Map::kHasIndexedInterceptor, &slow_call);

  GenerateFastArrayLoad(
      masm, a1, a2, t0, a3, a0, a1, &check_number_dictionary, &slow_load);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1, a0, a3);

  __ bind(&do_call);
  // receiver in a1 is not used after this point.
  // a2: key
  // a1: function

  GenerateFunctionTailCall(masm, argc, &slow_call, a0);

  __ bind(&check_number_dictionary);
  // a2: key
  // a3: elements map
  // t0: elements pointer
  // Check whether the elements array is a number dictionary.
  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
  __ Branch(&slow_load, ne, a3, Operand(at));
  __ sra(a0, a2, kSmiTagSize);
  // a0: untagged index
  __ LoadFromNumberDictionary(&slow_load, t0, a2, a1, a0, a3, t1);
  __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1, a0, a3);
  __ jmp(&do_call);

  __ bind(&slow_load);
  // This branch is taken when calling KeyedCallIC_Miss is neither required
  // nor beneficial.
  __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1, a0, a3);
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ push(a2);  // Save the key.
    __ Push(a1, a2);  // Pass the receiver and the key.
    __ CallRuntime(Runtime::kKeyedGetProperty, 2);
    __ pop(a2);  // Restore the key.
  }
  __ mov(a1, v0);
  __ jmp(&do_call);

  __ bind(&check_string);
  GenerateKeyStringCheck(masm, a2, a0, a3, &index_string, &slow_call);

  // The key is known to be a symbol.
  // If the receiver is a regular JS object with slow properties then do
  // a quick inline probe of the receiver's dictionary.
  // Otherwise do the monomorphic cache probe.
  GenerateKeyedLoadReceiverCheck(
      masm, a1, a0, a3, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);

  __ lw(a0, FieldMemOperand(a1, JSObject::kPropertiesOffset));
  __ lw(a3, FieldMemOperand(a0, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
  __ Branch(&lookup_monomorphic_cache, ne, a3, Operand(at));

  GenerateDictionaryLoad(masm, &slow_load, a0, a2, a1, a3, t0);
  __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1, a0, a3);
  __ jmp(&do_call);

  __ bind(&lookup_monomorphic_cache);
  __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1, a0, a3);
  GenerateMonomorphicCacheProbe(masm,
                                argc,
                                Code::KEYED_CALL_IC,
                                Code::kNoExtraICState);
  // Fall through on miss.

  __ bind(&slow_call);
  // This branch is taken if:
  // - the receiver requires boxing or access check,
  // - the key is neither smi nor symbol,
  // - the value loaded is not a function,
  // - there is hope that the runtime will create a monomorphic call stub
  //   that will get fetched next time.
  __ IncrementCounter(counters->keyed_call_generic_slow(), 1, a0, a3);
  GenerateMiss(masm, argc);

  __ bind(&index_string);
  __ IndexFromHash(a3, a2);
  // Now jump to the place where smi keys are handled.
  __ jmp(&index_smi);
}


void KeyedCallIC::GenerateNormal(MacroAssembler* masm, int argc) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------

  // Check if the name is a string.
  Label miss;
  __ JumpIfSmi(a2, &miss);
  __ IsObjectJSStringType(a2, a0, &miss);

  CallICBase::GenerateNormal(masm, argc);
  __ bind(&miss);
  GenerateMiss(masm, argc);
}


// Defined in ic.cc.
Object* LoadIC_Miss(Arguments args);

void LoadIC::GenerateMegamorphic(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  //  -- a0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------

  // Probe the stub cache.
  Code::Flags flags = Code::ComputeFlags(Code::LOAD_IC, MONOMORPHIC);
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, a0, a2, a3, t0, t1, t2);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}


void LoadIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  //  -- a0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Label miss;

  GenerateStringDictionaryReceiverCheck(masm, a0, a1, a3, t0, &miss);

  // a1: elements
  GenerateDictionaryLoad(masm, &miss, a1, a2, v0, a3, t0);
  __ Ret();

  // Cache miss: Jump to runtime.
  __ bind(&miss);
  GenerateMiss(masm);
}


void LoadIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  //  -- a0    : receiver
  //  -- sp[0] : receiver
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  __ IncrementCounter(isolate->counters()->load_miss(), 1, a3, t0);

  __ mov(a3, a0);
  __ Push(a3, a2);

  // Perform tail call to the entry.
  ExternalReference ref = ExternalReference(IC_Utility(kLoadIC_Miss), isolate);
  __ TailCallExternalReference(ref, 2, 1);
}


static MemOperand GenerateMappedArgumentsLookup(MacroAssembler* masm,
                                                Register object,
                                                Register key,
                                                Register scratch1,
                                                Register scratch2,
                                                Register scratch3,
                                                Label* unmapped_case,
                                                Label* slow_case) {
  // Check that the receiver is a JSObject. Because of the map check
  // later, we do not need to check for interceptors or whether it
  // requires access checks.
  __ JumpIfSmi(object, slow_case);
  // Check that the object is some kind of JSObject.
  __ GetObjectType(object, scratch1, scratch2);
  __ Branch(slow_case, lt, scratch2, Operand(FIRST_JS_RECEIVER_TYPE));

  // Check that the key is a positive smi.
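  // (The mask relies on kSmiTag == 0: the low bit catches a non-smi, the
  // high bit a negative smi.)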
  __ And(scratch1, key, Operand(0x80000001));
  __ Branch(slow_case, ne, scratch1, Operand(zero_reg));

  // Load the elements into scratch1 and check its map.
  __ lw(scratch1, FieldMemOperand(object, JSObject::kElementsOffset));
  __ CheckMap(scratch1,
              scratch2,
              Heap::kNonStrictArgumentsElementsMapRootIndex,
              slow_case,
              DONT_DO_SMI_CHECK);
  // Check if element is in the range of mapped arguments. If not, jump
  // to the unmapped lookup with the parameter map in scratch1.
  __ lw(scratch2, FieldMemOperand(scratch1, FixedArray::kLengthOffset));
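  // The first two slots of the parameter map hold the context and the
  // arguments backing store, so only length - 2 entries map parameters.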
  __ Subu(scratch2, scratch2, Operand(Smi::FromInt(2)));
  __ Branch(unmapped_case, Ugreater_equal, key, Operand(scratch2));

  // Load element index and check whether it is the hole.
  const int kOffset =
      FixedArray::kHeaderSize + 2 * kPointerSize - kHeapObjectTag;

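  // The key is a smi (the value shifted left by one), so scaling it by
  // kPointerSize / 2 yields value * kPointerSize without untagging first.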
  __ li(scratch3, Operand(kPointerSize >> 1));
  __ Mul(scratch3, key, scratch3);
  __ Addu(scratch3, scratch3, Operand(kOffset));

  __ Addu(scratch2, scratch1, scratch3);
  __ lw(scratch2, MemOperand(scratch2));
  __ LoadRoot(scratch3, Heap::kTheHoleValueRootIndex);
  __ Branch(unmapped_case, eq, scratch2, Operand(scratch3));

  // Load value from context and return it. We can reuse scratch1 because
  // we do not jump to the unmapped lookup (which requires the parameter
  // map in scratch1).
  __ lw(scratch1, FieldMemOperand(scratch1, FixedArray::kHeaderSize));
  __ li(scratch3, Operand(kPointerSize >> 1));
  __ Mul(scratch3, scratch2, scratch3);
  __ Addu(scratch3, scratch3, Operand(Context::kHeaderSize - kHeapObjectTag));
  __ Addu(scratch2, scratch1, scratch3);
  return MemOperand(scratch2);
}


static MemOperand GenerateUnmappedArgumentsLookup(MacroAssembler* masm,
                                                  Register key,
                                                  Register parameter_map,
                                                  Register scratch,
                                                  Label* slow_case) {
  // Element is in arguments backing store, which is referenced by the
  // second element of the parameter_map. The parameter_map register
  // must be loaded with the parameter map of the arguments object and is
  // overwritten.
  const int kBackingStoreOffset = FixedArray::kHeaderSize + kPointerSize;
  Register backing_store = parameter_map;
  __ lw(backing_store, FieldMemOperand(parameter_map, kBackingStoreOffset));
  __ CheckMap(backing_store,
              scratch,
              Heap::kFixedArrayMapRootIndex,
              slow_case,
              DONT_DO_SMI_CHECK);
  __ lw(scratch, FieldMemOperand(backing_store, FixedArray::kLengthOffset));
  __ Branch(slow_case, Ugreater_equal, key, Operand(scratch));
  __ li(scratch, Operand(kPointerSize >> 1));
  __ Mul(scratch, key, scratch);
  __ Addu(scratch,
          scratch,
          Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ Addu(scratch, backing_store, scratch);
  return MemOperand(scratch);
}


void KeyedLoadIC::GenerateNonStrictArguments(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- ra     : return address
  //  -- a0     : key
  //  -- a1     : receiver
  // -----------------------------------
  Label slow, notin;
  MemOperand mapped_location =
      GenerateMappedArgumentsLookup(masm, a1, a0, a2, a3, t0, &notin, &slow);
  __ Ret(USE_DELAY_SLOT);
  __ lw(v0, mapped_location);  // In delay slot.
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in a2.
  MemOperand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, a0, a2, a3, &slow);
  __ lw(a2, unmapped_location);
  __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
  __ Branch(&slow, eq, a2, Operand(a3));
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a2);  // In delay slot.
  __ bind(&slow);
  GenerateMiss(masm, false);
}


void KeyedStoreIC::GenerateNonStrictArguments(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- a0     : value
  //  -- a1     : key
  //  -- a2     : receiver
  //  -- ra     : return address
  // -----------------------------------
  Label slow, notin;
  // Store address is returned in register (of MemOperand) mapped_location.
  MemOperand mapped_location =
      GenerateMappedArgumentsLookup(masm, a2, a1, a3, t0, t1, &notin, &slow);
  __ sw(a0, mapped_location);
  __ mov(t5, a0);
  ASSERT_EQ(mapped_location.offset(), 0);
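  // The lookup helper folded the whole offset into the base register, so
  // mapped_location.rm() alone addresses the slot and can be handed to
  // RecordWrite as the slot address below.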
  __ RecordWrite(a3, mapped_location.rm(), t5,
                 kRAHasNotBeenSaved, kDontSaveFPRegs);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);  // (In delay slot) return the value stored in v0.
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in a3.
  // Store address is returned in register (of MemOperand) unmapped_location.
  MemOperand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, a1, a3, t0, &slow);
  __ sw(a0, unmapped_location);
  __ mov(t5, a0);
  ASSERT_EQ(unmapped_location.offset(), 0);
  __ RecordWrite(a3, unmapped_location.rm(), t5,
                 kRAHasNotBeenSaved, kDontSaveFPRegs);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, a0);  // (In delay slot) return the value stored in v0.
  __ bind(&slow);
  GenerateMiss(masm, false);
}


void KeyedCallIC::GenerateNonStrictArguments(MacroAssembler* masm,
                                             int argc) {
  // ----------- S t a t e -------------
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  Label slow, notin;
  // Load receiver.
  __ lw(a1, MemOperand(sp, argc * kPointerSize));
  MemOperand mapped_location =
      GenerateMappedArgumentsLookup(masm, a1, a2, a3, t0, t1, &notin, &slow);
  __ lw(a1, mapped_location);
  GenerateFunctionTailCall(masm, argc, &slow, a3);
  __ bind(&notin);
  // The unmapped lookup expects that the parameter map is in a3.
  MemOperand unmapped_location =
      GenerateUnmappedArgumentsLookup(masm, a2, a3, t0, &slow);
  __ lw(a1, unmapped_location);
  __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
  __ Branch(&slow, eq, a1, Operand(a3));
  GenerateFunctionTailCall(masm, argc, &slow, a3);
  __ bind(&slow);
  GenerateMiss(masm, argc);
}


Object* KeyedLoadIC_Miss(Arguments args);


void KeyedLoadIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
  // ---------- S t a t e --------------
  //  -- ra     : return address
  //  -- a0     : key
  //  -- a1     : receiver
  // -----------------------------------
  Isolate* isolate = masm->isolate();

  __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, a3, t0);

  __ Push(a1, a0);

  // Perform tail call to the entry.
  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedLoadIC_MissForceGeneric), isolate)
      : ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);

  __ TailCallExternalReference(ref, 2, 1);
}


void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- ra     : return address
  //  -- a0     : key
  //  -- a1     : receiver
  // -----------------------------------

  __ Push(a1, a0);

  __ TailCallRuntime(Runtime::kKeyedGetProperty, 2, 1);
}


void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- ra     : return address
  //  -- a0     : key
  //  -- a1     : receiver
  // -----------------------------------
  Label slow, check_string, index_smi, index_string, property_array_property;
  Label probe_dictionary, check_number_dictionary;

  Register key = a0;
  Register receiver = a1;

  Isolate* isolate = masm->isolate();

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &check_string);
  __ bind(&index_smi);
  // Now the key is known to be a smi. This place is also jumped to from below
  // where a numeric string is converted to a smi.

  GenerateKeyedLoadReceiverCheck(
      masm, receiver, a2, a3, Map::kHasIndexedInterceptor, &slow);

  // Check the receiver's map to see if it has fast elements.
  __ CheckFastElements(a2, a3, &check_number_dictionary);

  GenerateFastArrayLoad(
      masm, receiver, key, t0, a3, a2, v0, NULL, &slow);

  __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, a2, a3);
  __ Ret();

  __ bind(&check_number_dictionary);
  __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ lw(a3, FieldMemOperand(t0, JSObject::kMapOffset));

  // Check whether the elements array is a number dictionary.
  // a0: key
  // a3: elements map
  // t0: elements
  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
  __ Branch(&slow, ne, a3, Operand(at));
  __ sra(a2, a0, kSmiTagSize);
  __ LoadFromNumberDictionary(&slow, t0, a0, v0, a2, a3, t1);
  __ Ret();

  // Slow case, key and receiver still in a0 and a1.
  __ bind(&slow);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(),
                      1,
                      a2,
                      a3);
  GenerateRuntimeGetProperty(masm);

  __ bind(&check_string);
  GenerateKeyStringCheck(masm, key, a2, a3, &index_string, &slow);

  GenerateKeyedLoadReceiverCheck(
      masm, receiver, a2, a3, Map::kHasNamedInterceptor, &slow);

  // If the receiver is a fast-case object, check the keyed lookup
  // cache. Otherwise probe the dictionary.
  __ lw(a3, FieldMemOperand(a1, JSObject::kPropertiesOffset));
  __ lw(t0, FieldMemOperand(a3, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
  __ Branch(&probe_dictionary, eq, t0, Operand(at));

  // Load the map of the receiver, compute the keyed lookup cache hash
  // based on 32 bits of the map pointer and the string hash.
  __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ sra(a3, a2, KeyedLookupCache::kMapHashShift);
  __ lw(t0, FieldMemOperand(a0, String::kHashFieldOffset));
  __ sra(at, t0, String::kHashShift);
  __ xor_(a3, a3, at);
  int mask = KeyedLookupCache::kCapacityMask & KeyedLookupCache::kHashMask;
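  // Together the two masks keep the index within the cache and round it
  // down to the start of a bucket.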
  __ And(a3, a3, Operand(mask));

  // Load the key (consisting of map and symbol) from the cache and
  // check for match.
  Label load_in_object_property;
  static const int kEntriesPerBucket = KeyedLookupCache::kEntriesPerBucket;
  Label hit_on_nth_entry[kEntriesPerBucket];
  ExternalReference cache_keys =
      ExternalReference::keyed_lookup_cache_keys(isolate);
  __ li(t0, Operand(cache_keys));
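  // Each cache entry is a (map, symbol) pair, i.e. two pointers wide, so
  // the index is scaled by 2 * kPointerSize.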
  __ sll(at, a3, kPointerSizeLog2 + 1);
  __ addu(t0, t0, at);

  for (int i = 0; i < kEntriesPerBucket - 1; i++) {
    Label try_next_entry;
    __ lw(t1, MemOperand(t0, kPointerSize * i * 2));
    __ Branch(&try_next_entry, ne, a2, Operand(t1));
    __ lw(t1, MemOperand(t0, kPointerSize * (i * 2 + 1)));
    __ Branch(&hit_on_nth_entry[i], eq, a0, Operand(t1));
    __ bind(&try_next_entry);
  }

  __ lw(t1, MemOperand(t0, kPointerSize * (kEntriesPerBucket - 1) * 2));
  __ Branch(&slow, ne, a2, Operand(t1));
  __ lw(t1, MemOperand(t0, kPointerSize * ((kEntriesPerBucket - 1) * 2 + 1)));
  __ Branch(&slow, ne, a0, Operand(t1));

  // Get field offset.
  // a0     : key
  // a1     : receiver
  // a2     : receiver's map
  // a3     : lookup cache index
  ExternalReference cache_field_offsets =
      ExternalReference::keyed_lookup_cache_field_offsets(isolate);

  // Hit on nth entry.
  for (int i = kEntriesPerBucket - 1; i >= 0; i--) {
    __ bind(&hit_on_nth_entry[i]);
    __ li(t0, Operand(cache_field_offsets));
    __ sll(at, a3, kPointerSizeLog2);
    __ addu(at, t0, at);
    __ lw(t1, MemOperand(at, kPointerSize * i));
    __ lbu(t2, FieldMemOperand(a2, Map::kInObjectPropertiesOffset));
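    // The cached field offset counts in-object properties first: after
    // subtracting the in-object count, a negative index denotes an
    // in-object field and a non-negative one an entry in the property
    // array.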
    __ Subu(t1, t1, t2);
    __ Branch(&property_array_property, ge, t1, Operand(zero_reg));
    if (i != 0) {
      __ Branch(&load_in_object_property);
    }
  }

  // Load in-object property.
  __ bind(&load_in_object_property);
  __ lbu(t2, FieldMemOperand(a2, Map::kInstanceSizeOffset));
  __ addu(t2, t2, t1);  // Index from start of object.
  __ Subu(a1, a1, Operand(kHeapObjectTag));  // Remove the heap tag.
  __ sll(at, t2, kPointerSizeLog2);
  __ addu(at, a1, at);
  __ lw(v0, MemOperand(at));
  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
                      1,
                      a2,
                      a3);
  __ Ret();

  // Load property array property.
  __ bind(&property_array_property);
  __ lw(a1, FieldMemOperand(a1, JSObject::kPropertiesOffset));
  __ Addu(a1, a1, FixedArray::kHeaderSize - kHeapObjectTag);
  __ sll(t0, t1, kPointerSizeLog2);
  __ Addu(t0, t0, a1);
  __ lw(v0, MemOperand(t0));
  __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
                      1,
                      a2,
                      a3);
  __ Ret();

  // Do a quick inline probe of the receiver's dictionary, if it
  // exists.
  __ bind(&probe_dictionary);
  // a1: receiver
  // a0: key
  // a3: elements
  __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));
  __ lbu(a2, FieldMemOperand(a2, Map::kInstanceTypeOffset));
  GenerateGlobalInstanceTypeCheck(masm, a2, &slow);
  // Load the property to v0.
  GenerateDictionaryLoad(masm, &slow, a3, a0, v0, a2, t0);
  __ IncrementCounter(isolate->counters()->keyed_load_generic_symbol(),
                      1,
                      a2,
                      a3);
  __ Ret();

  __ bind(&index_string);
  __ IndexFromHash(a3, key);
  // Now jump to the place where smi keys are handled.
  __ Branch(&index_smi);
}


void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- ra     : return address
  //  -- a0     : key (index)
  //  -- a1     : receiver
  // -----------------------------------
  Label miss;

  Register receiver = a1;
  Register index = a0;
  Register scratch = a3;
  Register result = v0;

  StringCharAtGenerator char_at_generator(receiver,
                                          index,
                                          scratch,
                                          result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX);
  char_at_generator.GenerateFast(masm);
  __ Ret();

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);

  __ bind(&miss);
  GenerateMiss(masm, false);
}


void KeyedStoreIC::GenerateRuntimeSetProperty(MacroAssembler* masm,
                                              StrictModeFlag strict_mode) {
  // ---------- S t a t e --------------
  //  -- a0     : value
  //  -- a1     : key
  //  -- a2     : receiver
  //  -- ra     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(a2, a1, a0);
  __ li(a1, Operand(Smi::FromInt(NONE)));          // PropertyAttributes.
  __ li(a0, Operand(Smi::FromInt(strict_mode)));   // Strict mode.
  __ Push(a1, a0);

  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}


void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm,
                                   StrictModeFlag strict_mode) {
  // ---------- S t a t e --------------
  //  -- a0     : value
  //  -- a1     : key
  //  -- a2     : receiver
  //  -- ra     : return address
  // -----------------------------------
  Label slow, array, extra, check_if_double_array;
  Label fast_object_with_map_check, fast_object_without_map_check;
  Label fast_double_with_map_check, fast_double_without_map_check;
  Label transition_smi_elements, finish_object_store, non_double_value;
  Label transition_double_elements;

  // Register usage.
  Register value = a0;
  Register key = a1;
  Register receiver = a2;
  Register receiver_map = a3;
  Register elements_map = t2;
  Register elements = t3;  // Elements array of the receiver.
  // t0 and t1 are used as general scratch registers.

  // Check that the key is a smi.
  __ JumpIfNotSmi(key, &slow);
  // Check that the object isn't a smi.
  __ JumpIfSmi(receiver, &slow);
  // Get the map of the object.
  __ lw(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  // Check that the receiver does not require access checks.  We need
  // to do this because this generic stub does not perform map checks.
  __ lbu(t0, FieldMemOperand(receiver_map, Map::kBitFieldOffset));
  __ And(t0, t0, Operand(1 << Map::kIsAccessCheckNeeded));
  __ Branch(&slow, ne, t0, Operand(zero_reg));
  // Check if the object is a JS array or not.
  __ lbu(t0, FieldMemOperand(receiver_map, Map::kInstanceTypeOffset));
  __ Branch(&array, eq, t0, Operand(JS_ARRAY_TYPE));
  // Check that the object is some kind of JSObject.
  __ Branch(&slow, lt, t0, Operand(FIRST_JS_OBJECT_TYPE));

  // Object case: Check key against length in the elements array.
  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  // Check array bounds. Both the key and the length of FixedArray are smis.
  __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ Branch(&fast_object_with_map_check, lo, key, Operand(t0));

  // Slow case, handle jump to runtime.
  __ bind(&slow);
  // Entry registers are intact.
  // a0: value.
  // a1: key.
  // a2: receiver.
  GenerateRuntimeSetProperty(masm, strict_mode);

  // Extra capacity case: Check if there is extra capacity to
  // perform the store and update the length. Used for adding one
  // element to the array by writing to array[array.length].
  __ bind(&extra);
  // Only support writing to array[array.length]; the key was already
  // known to be >= the array length, so it must be exactly equal.
  __ Branch(&slow, ne, key, Operand(t0));
  // Check for room in the elements backing store.
  // Both the key and the length of FixedArray are smis.
  __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
  __ Branch(&slow, hs, key, Operand(t0));
  __ lw(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
  __ Branch(
      &check_if_double_array, ne, elements_map, Heap::kFixedArrayMapRootIndex);

  // Calculate key + 1 as smi.
  STATIC_ASSERT(kSmiTag == 0);
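  // With kSmiTag == 0, adding Smi::FromInt(1) increments the key while
  // keeping it a valid smi; no untagging is needed.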
1263   __ Addu(t0, key, Operand(Smi::FromInt(1)));
1264   __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1265   __ Branch(&fast_object_without_map_check);
1266 
1267   __ bind(&check_if_double_array);
1268   __ Branch(&slow, ne, elements_map, Heap::kFixedDoubleArrayMapRootIndex);
1269   // Add 1 to key, and go to common element store code for doubles.
1270   STATIC_ASSERT(kSmiTag == 0);
1271   __ Addu(t0, key, Operand(Smi::FromInt(1)));
1272   __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1273   __ jmp(&fast_double_without_map_check);
1274 
1275   // Array case: Get the length and the elements array from the JS
1276   // array. Check that the array is in fast mode (and writable); if it
1277   // is the length is always a smi.
1278   __ bind(&array);
1279   __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
1280 
1281   // Check the key against the length in the array.
1282   __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1283   __ Branch(&extra, hs, key, Operand(t0));
1284   // Fall through to fast case.
1285 
1286   __ bind(&fast_object_with_map_check);
1287   Register scratch_value = t0;
1288   Register address = t1;
1289   __ lw(elements_map, FieldMemOperand(elements, HeapObject::kMapOffset));
1290   __ Branch(&fast_double_with_map_check,
1291             ne,
1292             elements_map,
1293             Heap::kFixedArrayMapRootIndex);
1294   __ bind(&fast_object_without_map_check);
1295   // Smi stores don't require further checks.
1296   Label non_smi_value;
1297   __ JumpIfNotSmi(value, &non_smi_value);
1298   // It's irrelevant whether array is smi-only or not when writing a smi.
1299   __ Addu(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
1300   __ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize);
1301   __ Addu(address, address, scratch_value);
1302   __ sw(value, MemOperand(address));
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, value);

  __ bind(&non_smi_value);
  // Escape to elements kind transition case.
  __ CheckFastObjectElements(receiver_map, scratch_value,
                             &transition_smi_elements);
  // Fast elements array, store the value to the elements backing store.
  __ bind(&finish_object_store);
  __ Addu(address, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
  __ sll(scratch_value, key, kPointerSizeLog2 - kSmiTagSize);
  __ Addu(address, address, scratch_value);
  __ sw(value, MemOperand(address));
  // Update write barrier for the elements array address.
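  // The value is known to be a heap object here (the smi case returned
  // above), so RecordWrite may skip its own smi check (OMIT_SMI_CHECK).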
  __ mov(v0, value);  // Preserve the value which is returned.
  __ RecordWrite(elements,
                 address,
                 value,
                 kRAHasNotBeenSaved,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 OMIT_SMI_CHECK);
  __ Ret();

  __ bind(&fast_double_with_map_check);
  // Check for fast double array case. If this fails, call through to the
  // runtime.
  __ Branch(&slow, ne, elements_map, Heap::kFixedDoubleArrayMapRootIndex);
  __ bind(&fast_double_without_map_check);
  __ StoreNumberToDoubleElements(value,
                                 key,
                                 receiver,
                                 elements,
                                 a3,
                                 t0,
                                 t1,
                                 t2,
                                 &transition_double_elements);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, value);

  __ bind(&transition_smi_elements);
  // Transition the array appropriately depending on the value type.
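  // Possible transitions out of FAST_SMI_ONLY_ELEMENTS:
  //   -> FAST_DOUBLE_ELEMENTS  if the value is a HeapNumber,
  //   -> FAST_ELEMENTS         for any other heap object.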
  __ lw(t0, FieldMemOperand(value, HeapObject::kMapOffset));
  __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
  __ Branch(&non_double_value, ne, t0, Operand(at));

  // Value is a double. Transition FAST_SMI_ONLY_ELEMENTS ->
  // FAST_DOUBLE_ELEMENTS and complete the store.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
                                         FAST_DOUBLE_ELEMENTS,
                                         receiver_map,
                                         t0,
                                         &slow);
  ASSERT(receiver_map.is(a3));  // Transition code expects map in a3.
  ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &slow);
  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&fast_double_without_map_check);

  __ bind(&non_double_value);
  // Value is not a double: FAST_SMI_ONLY_ELEMENTS -> FAST_ELEMENTS.
  __ LoadTransitionedArrayMapConditional(FAST_SMI_ONLY_ELEMENTS,
                                         FAST_ELEMENTS,
                                         receiver_map,
                                         t0,
                                         &slow);
  ASSERT(receiver_map.is(a3));  // Transition code expects map in a3.
  ElementsTransitionGenerator::GenerateSmiOnlyToObject(masm);
  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);

  __ bind(&transition_double_elements);
  // Elements are FAST_DOUBLE_ELEMENTS, but the value is an Object that is not
  // a HeapNumber. Make sure that the receiver is an Array with FAST_ELEMENTS
  // and transition the array from FAST_DOUBLE_ELEMENTS to FAST_ELEMENTS.
  __ LoadTransitionedArrayMapConditional(FAST_DOUBLE_ELEMENTS,
                                         FAST_ELEMENTS,
                                         receiver_map,
                                         t0,
                                         &slow);
  ASSERT(receiver_map.is(a3));  // Transition code expects map in a3.
  ElementsTransitionGenerator::GenerateDoubleToObject(masm, &slow);
  __ lw(elements, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ jmp(&finish_object_store);
}


void KeyedLoadIC::GenerateIndexedInterceptor(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- ra     : return address
  //  -- a0     : key
  //  -- a1     : receiver
  // -----------------------------------
  Label slow;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(a1, &slow);

  // Check that the key is an array index, that is, a Uint32.
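  // A valid array index is a non-negative smi, so testing the smi tag bit
  // and the sign bit together suffices.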
  __ And(t0, a0, Operand(kSmiTagMask | kSmiSignMask));
  __ Branch(&slow, ne, t0, Operand(zero_reg));

  // Get the map of the receiver.
  __ lw(a2, FieldMemOperand(a1, HeapObject::kMapOffset));

  // Check that it has an indexed interceptor and that access checks
  // are not enabled for this object.
  __ lbu(a3, FieldMemOperand(a2, Map::kBitFieldOffset));
  __ And(a3, a3, Operand(kSlowCaseBitFieldMask));
  __ Branch(&slow, ne, a3, Operand(1 << Map::kHasIndexedInterceptor));
  // Everything is fine, call runtime.
  __ Push(a1, a0);  // Receiver, key.

  // Perform tail call to the entry.
  __ TailCallExternalReference(ExternalReference(
       IC_Utility(kKeyedLoadPropertyWithInterceptor), masm->isolate()), 2, 1);

  __ bind(&slow);
  GenerateMiss(masm, false);
}


void KeyedStoreIC::GenerateMiss(MacroAssembler* masm, bool force_generic) {
  // ---------- S t a t e --------------
  //  -- a0     : value
  //  -- a1     : key
  //  -- a2     : receiver
  //  -- ra     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  __ Push(a2, a1, a0);

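  // force_generic asks the runtime to rewrite this IC directly to the
  // generic stub instead of recording an ordinary miss.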
  ExternalReference ref = force_generic
      ? ExternalReference(IC_Utility(kKeyedStoreIC_MissForceGeneric),
                          masm->isolate())
      : ExternalReference(IC_Utility(kKeyedStoreIC_Miss), masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}


void KeyedStoreIC::GenerateSlow(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- a0     : value
  //  -- a1     : key
  //  -- a2     : receiver
  //  -- ra     : return address
  // -----------------------------------

  // Push receiver, key and value for runtime call.
  // We can't use MultiPush as the order of the registers is important.
  __ Push(a2, a1, a0);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  ExternalReference ref =
      ExternalReference(IC_Utility(kKeyedStoreIC_Slow), masm->isolate());

  __ TailCallExternalReference(ref, 3, 1);
}


void KeyedStoreIC::GenerateTransitionElementsSmiToDouble(MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- a2     : receiver
  //  -- a3     : target map
  //  -- ra     : return address
  // -----------------------------------
  // Must return the modified receiver in v0.
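  // When --trace-elements-transitions is on, skip the inline fast path and
  // fall through to the runtime so the transition can be logged.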
  if (!FLAG_trace_elements_transitions) {
    Label fail;
    ElementsTransitionGenerator::GenerateSmiOnlyToDouble(masm, &fail);
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a2);
    __ bind(&fail);
  }

  __ push(a2);
  __ TailCallRuntime(Runtime::kTransitionElementsSmiToDouble, 1, 1);
}


void KeyedStoreIC::GenerateTransitionElementsDoubleToObject(
    MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- a2     : receiver
  //  -- a3     : target map
  //  -- ra     : return address
  // -----------------------------------
  // Must return the modified receiver in v0.
  if (!FLAG_trace_elements_transitions) {
    Label fail;
    ElementsTransitionGenerator::GenerateDoubleToObject(masm, &fail);
    __ Ret(USE_DELAY_SLOT);
    __ mov(v0, a2);
    __ bind(&fail);
  }

  __ push(a2);
  __ TailCallRuntime(Runtime::kTransitionElementsDoubleToObject, 1, 1);
}


void StoreIC::GenerateMegamorphic(MacroAssembler* masm,
                                  StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- a0    : value
  //  -- a1    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------

  // The receiver is already in a1; probe the stub cache.
  Code::Flags flags =
      Code::ComputeFlags(Code::STORE_IC, MONOMORPHIC, strict_mode);
  Isolate::Current()->stub_cache()->GenerateProbe(
      masm, flags, a1, a2, a3, t0, t1, t2);

  // Cache miss: Jump to runtime.
  GenerateMiss(masm);
}


void StoreIC::GenerateMiss(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0    : value
  //  -- a1    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------

  __ Push(a1, a2, a0);
  // Perform tail call to the entry.
  ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_Miss),
                                            masm->isolate());
  __ TailCallExternalReference(ref, 3, 1);
}


void StoreIC::GenerateArrayLength(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0    : value
  //  -- a1    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  //
  // This accepts as a receiver anything JSArray::SetElementsLength accepts
  // (currently anything except external arrays, which means anything with
  // elements of FixedArray type). The value must be a number, but only smis
  // are accepted, as they are the most common case.
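  //
  // In JS terms this handles, for example:
  //   var a = [1, 2, 3];
  //   a.length = 1;  // value is a smi; the array is truncated.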

  Label miss;

  Register receiver = a1;
  Register value = a0;
  Register scratch = a3;

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the object is a JS array.
  __ GetObjectType(receiver, scratch, scratch);
  __ Branch(&miss, ne, scratch, Operand(JS_ARRAY_TYPE));

  // Check that elements are FixedArray.
  // We rely on StoreIC_ArrayLength below to deal with all types of
  // fast elements (including COW).
  __ lw(scratch, FieldMemOperand(receiver, JSArray::kElementsOffset));
  __ GetObjectType(scratch, scratch, scratch);
  __ Branch(&miss, ne, scratch, Operand(FIXED_ARRAY_TYPE));

  // Check that the array has fast properties, otherwise the length
  // property might have been redefined.
  __ lw(scratch, FieldMemOperand(receiver, JSArray::kPropertiesOffset));
  __ lw(scratch, FieldMemOperand(scratch, FixedArray::kMapOffset));
  __ LoadRoot(at, Heap::kHashTableMapRootIndex);
  __ Branch(&miss, eq, scratch, Operand(at));

  // Check that value is a smi.
  __ JumpIfNotSmi(value, &miss);

  // Prepare tail call to StoreIC_ArrayLength.
  __ Push(receiver, value);

  ExternalReference ref = ExternalReference(IC_Utility(kStoreIC_ArrayLength),
                                            masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);

  __ bind(&miss);

  GenerateMiss(masm);
}


void StoreIC::GenerateNormal(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- a0    : value
  //  -- a1    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  Label miss;

  GenerateStringDictionaryReceiverCheck(masm, a1, a3, t0, t1, &miss);

  GenerateDictionaryStore(masm, &miss, a3, a2, a0, t0, t1);
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->store_normal_hit(), 1, t0, t1);
  __ Ret();

  __ bind(&miss);
  __ IncrementCounter(counters->store_normal_miss(), 1, t0, t1);
  GenerateMiss(masm);
}


void StoreIC::GenerateGlobalProxy(MacroAssembler* masm,
                                  StrictModeFlag strict_mode) {
  // ----------- S t a t e -------------
  //  -- a0    : value
  //  -- a1    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------

  __ Push(a1, a2, a0);

  __ li(a1, Operand(Smi::FromInt(NONE)));  // PropertyAttributes.
  __ li(a0, Operand(Smi::FromInt(strict_mode)));
  __ Push(a1, a0);
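  // The stack now holds the five arguments expected by Runtime::kSetProperty:
  // receiver, name, value, PropertyAttributes and the strict mode flag.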

  // Do tail-call to runtime routine.
  __ TailCallRuntime(Runtime::kSetProperty, 5, 1);
}


#undef __


Condition CompareIC::ComputeCondition(Token::Value op) {
  switch (op) {
    case Token::EQ_STRICT:
    case Token::EQ:
      return eq;
    case Token::LT:
      return lt;
    case Token::GT:
      return gt;
    case Token::LTE:
      return le;
    case Token::GTE:
      return ge;
    default:
      UNREACHABLE();
      return kNoCondition;
  }
}


void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
  HandleScope scope;
  Handle<Code> rewritten;
  State previous_state = GetState();
  State state = TargetState(previous_state, false, x, y);
  if (state == GENERIC) {
    CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, a1, a0);
    rewritten = stub.GetCode();
  } else {
    ICCompareStub stub(op_, state);
    if (state == KNOWN_OBJECTS) {
      stub.set_known_map(Handle<Map>(Handle<JSObject>::cast(x)->map()));
    }
    rewritten = stub.GetCode();
  }
  set_target(*rewritten);

#ifdef DEBUG
  if (FLAG_trace_ic) {
    PrintF("[CompareIC (%s->%s)#%s]\n",
           GetStateName(previous_state),
           GetStateName(state),
           Token::Name(op_));
  }
#endif

  // Activate inlined smi code.
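  // Compare sites are compiled with an inactive smi check (andi at, rx, 0);
  // PatchInlinedSmiCode below rewrites it so the inlined smi path is taken
  // from now on.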
  if (previous_state == UNINITIALIZED) {
    PatchInlinedSmiCode(address());
  }
}


void PatchInlinedSmiCode(Address address) {
  Address andi_instruction_address =
      address + Assembler::kCallTargetAddressOffset;

  // If the instruction following the call is not an andi at, rx, #yyy,
  // nothing was inlined.
  Instr instr = Assembler::instr_at(andi_instruction_address);
  if (!(Assembler::IsAndImmediate(instr) &&
        Assembler::GetRt(instr) == (uint32_t)zero_reg.code())) {
    return;
  }

  // The delta to the start of the map check instruction and the
  // condition code used at the patched jump.
  int delta = Assembler::GetImmediate16(instr);
  delta += Assembler::GetRs(instr) * kImm16Mask;
  // If the delta is 0 the instruction is andi at, zero_reg, #0, which also
  // signals that nothing was inlined.
  if (delta == 0) {
    return;
  }

#ifdef DEBUG
  if (FLAG_trace_ic) {
    PrintF("[  patching ic at %p, andi=%p, delta=%d\n",
           address, andi_instruction_address, delta);
  }
#endif

  Address patch_address =
      andi_instruction_address - delta * Instruction::kInstrSize;
  Instr instr_at_patch = Assembler::instr_at(patch_address);
  Instr branch_instr =
      Assembler::instr_at(patch_address + Instruction::kInstrSize);
  ASSERT(Assembler::IsAndImmediate(instr_at_patch));
  ASSERT_EQ(0, Assembler::GetImmediate16(instr_at_patch));
  ASSERT(Assembler::IsBranch(branch_instr));
  if (Assembler::IsBeq(branch_instr)) {
    // This is patching a "jump if not smi" site to be active.
    // Changing:
    //   andi at, rx, 0
    //   Branch <target>, eq, at, Operand(zero_reg)
    // to:
    //   andi at, rx, #kSmiTagMask
    //   Branch <target>, ne, at, Operand(zero_reg)
    CodePatcher patcher(patch_address, 2);
    Register reg = Register::from_code(Assembler::GetRs(instr_at_patch));
    patcher.masm()->andi(at, reg, kSmiTagMask);
    patcher.ChangeBranchCondition(ne);
  } else {
    ASSERT(Assembler::IsBne(branch_instr));
    // This is patching a "jump if smi" site to be active.
    // Changing:
    //   andi at, rx, 0
    //   Branch <target>, ne, at, Operand(zero_reg)
    // to:
    //   andi at, rx, #kSmiTagMask
    //   Branch <target>, eq, at, Operand(zero_reg)
    CodePatcher patcher(patch_address, 2);
    Register reg = Register::from_code(Assembler::GetRs(instr_at_patch));
    patcher.masm()->andi(at, reg, kSmiTagMask);
    patcher.ChangeBranchCondition(eq);
  }
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS