1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 //     * Redistributions of source code must retain the above copyright
7 //       notice, this list of conditions and the following disclaimer.
8 //     * Redistributions in binary form must reproduce the above
9 //       copyright notice, this list of conditions and the following
10 //       disclaimer in the documentation and/or other materials provided
11 //       with the distribution.
12 //     * Neither the name of Google Inc. nor the names of its
13 //       contributors may be used to endorse or promote products derived
14 //       from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 
28 #include "v8.h"
29 
30 #if V8_TARGET_ARCH_MIPS
31 
32 #include "ic-inl.h"
33 #include "codegen.h"
34 #include "stub-cache.h"
35 
36 namespace v8 {
37 namespace internal {
38 
39 #define __ ACCESS_MASM(masm)
40 
41 
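// Probe one table of the stub cache (primary or secondary).  Each entry
// occupies three pointer-sized words (name key, code object, receiver map);
// the ASSERTs below rely on the relative layout of those fields.  On a hit
// the cached code is entered directly, on a miss execution falls through
// at |miss|.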
42 static void ProbeTable(Isolate* isolate,
43                        MacroAssembler* masm,
44                        Code::Flags flags,
45                        StubCache::Table table,
46                        Register receiver,
47                        Register name,
48                        // Number of the cache entry, not scaled.
49                        Register offset,
50                        Register scratch,
51                        Register scratch2,
52                        Register offset_scratch) {
53   ExternalReference key_offset(isolate->stub_cache()->key_reference(table));
54   ExternalReference value_offset(isolate->stub_cache()->value_reference(table));
55   ExternalReference map_offset(isolate->stub_cache()->map_reference(table));
56 
57   uint32_t key_off_addr = reinterpret_cast<uint32_t>(key_offset.address());
58   uint32_t value_off_addr = reinterpret_cast<uint32_t>(value_offset.address());
59   uint32_t map_off_addr = reinterpret_cast<uint32_t>(map_offset.address());
60 
61   // Check the relative positions of the address fields.
62   ASSERT(value_off_addr > key_off_addr);
63   ASSERT((value_off_addr - key_off_addr) % 4 == 0);
64   ASSERT((value_off_addr - key_off_addr) < (256 * 4));
65   ASSERT(map_off_addr > key_off_addr);
66   ASSERT((map_off_addr - key_off_addr) % 4 == 0);
67   ASSERT((map_off_addr - key_off_addr) < (256 * 4));
68 
69   Label miss;
70   Register base_addr = scratch;
71   scratch = no_reg;
72 
73   // Multiply by 3 because there are 3 fields per entry (name, code, map).
74   __ sll(offset_scratch, offset, 1);
75   __ Addu(offset_scratch, offset_scratch, offset);
76 
77   // Calculate the base address of the entry.
78   __ li(base_addr, Operand(key_offset));
79   __ sll(at, offset_scratch, kPointerSizeLog2);
80   __ Addu(base_addr, base_addr, at);
81 
82   // Check that the key in the entry matches the name.
83   __ lw(at, MemOperand(base_addr, 0));
84   __ Branch(&miss, ne, name, Operand(at));
85 
86   // Check the map matches.
87   __ lw(at, MemOperand(base_addr, map_off_addr - key_off_addr));
88   __ lw(scratch2, FieldMemOperand(receiver, HeapObject::kMapOffset));
89   __ Branch(&miss, ne, at, Operand(scratch2));
90 
91   // Get the code entry from the cache.
92   Register code = scratch2;
93   scratch2 = no_reg;
94   __ lw(code, MemOperand(base_addr, value_off_addr - key_off_addr));
95 
96   // Check that the flags match what we're looking for.
97   Register flags_reg = base_addr;
98   base_addr = no_reg;
99   __ lw(flags_reg, FieldMemOperand(code, Code::kFlagsOffset));
100   __ And(flags_reg, flags_reg, Operand(~Code::kFlagsNotUsedInLookup));
101   __ Branch(&miss, ne, flags_reg, Operand(flags));
102 
103 #ifdef DEBUG
104     if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
105       __ jmp(&miss);
106     } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
107       __ jmp(&miss);
108     }
109 #endif
110 
111   // Jump to the first instruction in the code stub.
112   __ Addu(at, code, Operand(Code::kHeaderSize - kHeapObjectTag));
113   __ Jump(at);
114 
115   // Miss: fall through.
116   __ bind(&miss);
117 }
118 
119 
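// Perform a negative lookup for |name| on the receiver, i.e. check that the
// receiver's property dictionary cannot contain the property.  The receiver
// must be a JSObject in dictionary mode without a named interceptor and
// without access checks; otherwise we jump to |miss_label|.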
120 void StubCompiler::GenerateDictionaryNegativeLookup(MacroAssembler* masm,
121                                                     Label* miss_label,
122                                                     Register receiver,
123                                                     Handle<Name> name,
124                                                     Register scratch0,
125                                                     Register scratch1) {
126   ASSERT(name->IsUniqueName());
127   ASSERT(!receiver.is(scratch0));
128   Counters* counters = masm->isolate()->counters();
129   __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
130   __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
131 
132   Label done;
133 
134   const int kInterceptorOrAccessCheckNeededMask =
135       (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);
136 
137   // Bail out if the receiver has a named interceptor or requires access checks.
138   Register map = scratch1;
139   __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
140   __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
141   __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
142   __ Branch(miss_label, ne, scratch0, Operand(zero_reg));
143 
144   // Check that receiver is a JSObject.
145   __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
146   __ Branch(miss_label, lt, scratch0, Operand(FIRST_SPEC_OBJECT_TYPE));
147 
148   // Load properties array.
149   Register properties = scratch0;
150   __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
151   // Check that the properties array is a dictionary.
152   __ lw(map, FieldMemOperand(properties, HeapObject::kMapOffset));
153   Register tmp = properties;
154   __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
155   __ Branch(miss_label, ne, map, Operand(tmp));
156 
157   // Restore the temporarily used register.
158   __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
159 
160 
161   NameDictionaryLookupStub::GenerateNegativeLookup(masm,
162                                                    miss_label,
163                                                    &done,
164                                                    receiver,
165                                                    properties,
166                                                    name,
167                                                    scratch1);
168   __ bind(&done);
169   __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
170 }
171 
172 
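// Probe the primary and then the secondary table of the stub cache for a
// code stub with the given |flags| matching the receiver map and name.
// If both probes miss, execution falls through at |miss| so the caller can
// handle the miss in the runtime.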
173 void StubCache::GenerateProbe(MacroAssembler* masm,
174                               Code::Flags flags,
175                               Register receiver,
176                               Register name,
177                               Register scratch,
178                               Register extra,
179                               Register extra2,
180                               Register extra3) {
181   Isolate* isolate = masm->isolate();
182   Label miss;
183 
184   // Make sure that code is valid. The multiplying code relies on the
185   // entry size being 12.
186   ASSERT(sizeof(Entry) == 12);
187 
188   // Make sure the flags do not name a specific type.
189   ASSERT(Code::ExtractTypeFromFlags(flags) == 0);
190 
191   // Make sure that there are no register conflicts.
192   ASSERT(!scratch.is(receiver));
193   ASSERT(!scratch.is(name));
194   ASSERT(!extra.is(receiver));
195   ASSERT(!extra.is(name));
196   ASSERT(!extra.is(scratch));
197   ASSERT(!extra2.is(receiver));
198   ASSERT(!extra2.is(name));
199   ASSERT(!extra2.is(scratch));
200   ASSERT(!extra2.is(extra));
201 
202   // Check register validity.
203   ASSERT(!scratch.is(no_reg));
204   ASSERT(!extra.is(no_reg));
205   ASSERT(!extra2.is(no_reg));
206   ASSERT(!extra3.is(no_reg));
207 
208   Counters* counters = masm->isolate()->counters();
209   __ IncrementCounter(counters->megamorphic_stub_cache_probes(), 1,
210                       extra2, extra3);
211 
212   // Check that the receiver isn't a smi.
213   __ JumpIfSmi(receiver, &miss);
214 
215   // Get the map of the receiver and compute the hash.
216   __ lw(scratch, FieldMemOperand(name, Name::kHashFieldOffset));
217   __ lw(at, FieldMemOperand(receiver, HeapObject::kMapOffset));
218   __ Addu(scratch, scratch, at);
219   uint32_t mask = kPrimaryTableSize - 1;
220   // We shift out the last two bits because they are not part of the hash and
221   // they are always 01 for maps.
222   __ srl(scratch, scratch, kHeapObjectTagSize);
223   __ Xor(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask));
224   __ And(scratch, scratch, Operand(mask));
225 
226   // Probe the primary table.
227   ProbeTable(isolate,
228              masm,
229              flags,
230              kPrimary,
231              receiver,
232              name,
233              scratch,
234              extra,
235              extra2,
236              extra3);
237 
238   // Primary miss: Compute hash for secondary probe.
239   __ srl(at, name, kHeapObjectTagSize);
240   __ Subu(scratch, scratch, at);
241   uint32_t mask2 = kSecondaryTableSize - 1;
242   __ Addu(scratch, scratch, Operand((flags >> kHeapObjectTagSize) & mask2));
243   __ And(scratch, scratch, Operand(mask2));
244 
245   // Probe the secondary table.
246   ProbeTable(isolate,
247              masm,
248              flags,
249              kSecondary,
250              receiver,
251              name,
252              scratch,
253              extra,
254              extra2,
255              extra3);
256 
257   // Cache miss: Fall-through and let caller handle the miss by
258   // entering the runtime system.
259   __ bind(&miss);
260   __ IncrementCounter(counters->megamorphic_stub_cache_misses(), 1,
261                       extra2, extra3);
262 }
263 
264 
265 void StubCompiler::GenerateLoadGlobalFunctionPrototype(MacroAssembler* masm,
266                                                        int index,
267                                                        Register prototype) {
268   // Load the global or builtins object from the current context.
269   __ lw(prototype,
270         MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
271   // Load the native context from the global or builtins object.
272   __ lw(prototype,
273          FieldMemOperand(prototype, GlobalObject::kNativeContextOffset));
274   // Load the function from the native context.
275   __ lw(prototype, MemOperand(prototype, Context::SlotOffset(index)));
276   // Load the initial map.  The global functions all have initial maps.
277   __ lw(prototype,
278          FieldMemOperand(prototype, JSFunction::kPrototypeOrInitialMapOffset));
279   // Load the prototype from the initial map.
280   __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
281 }
282 
283 
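// Like GenerateLoadGlobalFunctionPrototype, but embeds the function's
// initial map as a constant in the code.  A check against the current
// global object guards against running the stub in a different native
// context.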
284 void StubCompiler::GenerateDirectLoadGlobalFunctionPrototype(
285     MacroAssembler* masm,
286     int index,
287     Register prototype,
288     Label* miss) {
289   Isolate* isolate = masm->isolate();
290   // Check we're still in the same context.
291   __ lw(prototype,
292         MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
293   ASSERT(!prototype.is(at));
294   __ li(at, isolate->global_object());
295   __ Branch(miss, ne, prototype, Operand(at));
296   // Get the global function with the given index.
297   Handle<JSFunction> function(
298       JSFunction::cast(isolate->native_context()->get(index)));
299   // Load its initial map. The global functions all have initial maps.
300   __ li(prototype, Handle<Map>(function->initial_map()));
301   // Load the prototype from the initial map.
302   __ lw(prototype, FieldMemOperand(prototype, Map::kPrototypeOffset));
303 }
304 
305 
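// Load a fast-mode property into |dst|.  In-object properties are read
// straight from the object; otherwise the properties backing store
// (a FixedArray) is loaded first and the field is read from there.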
306 void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
307                                             Register dst,
308                                             Register src,
309                                             bool inobject,
310                                             int index,
311                                             Representation representation) {
312   ASSERT(!FLAG_track_double_fields || !representation.IsDouble());
313   int offset = index * kPointerSize;
314   if (!inobject) {
315     // Calculate the offset into the properties array.
316     offset = offset + FixedArray::kHeaderSize;
317     __ lw(dst, FieldMemOperand(src, JSObject::kPropertiesOffset));
318     src = dst;
319   }
320   __ lw(dst, FieldMemOperand(src, offset));
321 }
322 
323 
324 void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm,
325                                            Register receiver,
326                                            Register scratch,
327                                            Label* miss_label) {
328   // Check that the receiver isn't a smi.
329   __ JumpIfSmi(receiver, miss_label);
330 
331   // Check that the object is a JS array.
332   __ GetObjectType(receiver, scratch, scratch);
333   __ Branch(miss_label, ne, scratch, Operand(JS_ARRAY_TYPE));
334 
335   // Load length directly from the JS array.
336   __ Ret(USE_DELAY_SLOT);
337   __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
338 }
339 
340 
341 // Generate code to check if an object is a string.  If the object is a
342 // heap object, its map's instance type is left in the scratch1 register.
343 // If this is not needed, scratch1 and scratch2 may be the same register.
344 static void GenerateStringCheck(MacroAssembler* masm,
345                                 Register receiver,
346                                 Register scratch1,
347                                 Register scratch2,
348                                 Label* smi,
349                                 Label* non_string_object) {
350   // Check that the receiver isn't a smi.
351   __ JumpIfSmi(receiver, smi, t0);
352 
353   // Check that the object is a string.
354   __ lw(scratch1, FieldMemOperand(receiver, HeapObject::kMapOffset));
355   __ lbu(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
356   __ And(scratch2, scratch1, Operand(kIsNotStringMask));
357   // The cast is to resolve the overload for the argument of 0x0.
358   __ Branch(non_string_object,
359             ne,
360             scratch2,
361             Operand(static_cast<int32_t>(kStringTag)));
362 }
363 
364 
365 // Generate code to load the length from a string object and return the length.
366 // If the receiver object is not a string or a wrapped string object the
367 // execution continues at the miss label. The register containing the
368 // receiver is potentially clobbered.
369 void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm,
370                                             Register receiver,
371                                             Register scratch1,
372                                             Register scratch2,
373                                             Label* miss) {
374   Label check_wrapper;
375 
376   // Check if the object is a string leaving the instance type in the
377   // scratch1 register.
378   GenerateStringCheck(masm, receiver, scratch1, scratch2, miss, &check_wrapper);
379 
380   // Load length directly from the string.
381   __ Ret(USE_DELAY_SLOT);
382   __ lw(v0, FieldMemOperand(receiver, String::kLengthOffset));
383 
384   // Check if the object is a JSValue wrapper.
385   __ bind(&check_wrapper);
386   __ Branch(miss, ne, scratch1, Operand(JS_VALUE_TYPE));
387 
388   // Unwrap the value and check if the wrapped value is a string.
389   __ lw(scratch1, FieldMemOperand(receiver, JSValue::kValueOffset));
390   GenerateStringCheck(masm, scratch1, scratch2, scratch2, miss, miss);
391   __ Ret(USE_DELAY_SLOT);
392   __ lw(v0, FieldMemOperand(scratch1, String::kLengthOffset));
393 }
394 
395 
396 void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm,
397                                                  Register receiver,
398                                                  Register scratch1,
399                                                  Register scratch2,
400                                                  Label* miss_label) {
401   __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
402   __ Ret(USE_DELAY_SLOT);
403   __ mov(v0, scratch1);
404 }
405 
406 
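// Guard against |name| having been added to the global object after the
// stub was compiled: load its property cell and jump to |miss| unless the
// cell still holds the hole value.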
407 void StubCompiler::GenerateCheckPropertyCell(MacroAssembler* masm,
408                                              Handle<JSGlobalObject> global,
409                                              Handle<Name> name,
410                                              Register scratch,
411                                              Label* miss) {
412   Handle<Cell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
413   ASSERT(cell->value()->IsTheHole());
414   __ li(scratch, Operand(cell));
415   __ lw(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
416   __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
417   __ Branch(miss, ne, scratch, Operand(at));
418 }
419 
420 
421 void StoreStubCompiler::GenerateNegativeHolderLookup(
422     MacroAssembler* masm,
423     Handle<JSObject> holder,
424     Register holder_reg,
425     Handle<Name> name,
426     Label* miss) {
427   if (holder->IsJSGlobalObject()) {
428     GenerateCheckPropertyCell(
429         masm, Handle<JSGlobalObject>::cast(holder), name, scratch1(), miss);
430   } else if (!holder->HasFastProperties() && !holder->IsJSGlobalProxy()) {
431     GenerateDictionaryNegativeLookup(
432         masm, miss, holder_reg, name, scratch1(), scratch2());
433   }
434 }
435 
436 
437 // Generate StoreTransition code, value is passed in a0 register.
438 // After executing generated code, the receiver_reg and name_reg
439 // may be clobbered.
440 void StoreStubCompiler::GenerateStoreTransition(MacroAssembler* masm,
441                                                 Handle<JSObject> object,
442                                                 LookupResult* lookup,
443                                                 Handle<Map> transition,
444                                                 Handle<Name> name,
445                                                 Register receiver_reg,
446                                                 Register storage_reg,
447                                                 Register value_reg,
448                                                 Register scratch1,
449                                                 Register scratch2,
450                                                 Register scratch3,
451                                                 Label* miss_label,
452                                                 Label* slow) {
453   // a0 : value.
454   Label exit;
455 
456   int descriptor = transition->LastAdded();
457   DescriptorArray* descriptors = transition->instance_descriptors();
458   PropertyDetails details = descriptors->GetDetails(descriptor);
459   Representation representation = details.representation();
460   ASSERT(!representation.IsNone());
461 
462   if (details.type() == CONSTANT) {
463     Handle<Object> constant(descriptors->GetValue(descriptor), masm->isolate());
464     __ li(scratch1, constant);
465     __ Branch(miss_label, ne, value_reg, Operand(scratch1));
466   } else if (FLAG_track_fields && representation.IsSmi()) {
467     __ JumpIfNotSmi(value_reg, miss_label);
468   } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
469     __ JumpIfSmi(value_reg, miss_label);
470   } else if (FLAG_track_double_fields && representation.IsDouble()) {
471     Label do_store, heap_number;
472     __ LoadRoot(scratch3, Heap::kHeapNumberMapRootIndex);
473     __ AllocateHeapNumber(storage_reg, scratch1, scratch2, scratch3, slow);
474 
475     __ JumpIfNotSmi(value_reg, &heap_number);
476     __ SmiUntag(scratch1, value_reg);
477     __ mtc1(scratch1, f6);
478     __ cvt_d_w(f4, f6);
479     __ jmp(&do_store);
480 
481     __ bind(&heap_number);
482     __ CheckMap(value_reg, scratch1, Heap::kHeapNumberMapRootIndex,
483                 miss_label, DONT_DO_SMI_CHECK);
484     __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
485 
486     __ bind(&do_store);
487     __ sdc1(f4, FieldMemOperand(storage_reg, HeapNumber::kValueOffset));
488   }
489 
490   // Stub never generated for non-global objects that require access
491   // checks.
492   ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
493 
494   // Perform map transition for the receiver if necessary.
495   if (details.type() == FIELD &&
496       object->map()->unused_property_fields() == 0) {
497     // The properties must be extended before we can store the value.
498     // We jump to a runtime call that extends the properties array.
499     __ push(receiver_reg);
500     __ li(a2, Operand(transition));
501     __ Push(a2, a0);
502     __ TailCallExternalReference(
503            ExternalReference(IC_Utility(IC::kSharedStoreIC_ExtendStorage),
504                              masm->isolate()),
505            3, 1);
506     return;
507   }
508 
509   // Update the map of the object.
510   __ li(scratch1, Operand(transition));
511   __ sw(scratch1, FieldMemOperand(receiver_reg, HeapObject::kMapOffset));
512 
513   // Update the write barrier for the map field.
514   __ RecordWriteField(receiver_reg,
515                       HeapObject::kMapOffset,
516                       scratch1,
517                       scratch2,
518                       kRAHasNotBeenSaved,
519                       kDontSaveFPRegs,
520                       OMIT_REMEMBERED_SET,
521                       OMIT_SMI_CHECK);
522 
523   if (details.type() == CONSTANT) {
524     ASSERT(value_reg.is(a0));
525     __ Ret(USE_DELAY_SLOT);
526     __ mov(v0, a0);
527     return;
528   }
529 
530   int index = transition->instance_descriptors()->GetFieldIndex(
531       transition->LastAdded());
532 
533   // Adjust for the number of properties stored in the object. Even in the
534   // face of a transition we can use the old map here because the size of the
535   // object and the number of in-object properties are not going to change.
536   index -= object->map()->inobject_properties();
537 
538   // TODO(verwaest): Share this code as a code stub.
539   SmiCheck smi_check = representation.IsTagged()
540       ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
541   if (index < 0) {
542     // Set the property straight into the object.
543     int offset = object->map()->instance_size() + (index * kPointerSize);
544     if (FLAG_track_double_fields && representation.IsDouble()) {
545       __ sw(storage_reg, FieldMemOperand(receiver_reg, offset));
546     } else {
547       __ sw(value_reg, FieldMemOperand(receiver_reg, offset));
548     }
549 
550     if (!FLAG_track_fields || !representation.IsSmi()) {
551       // Update the write barrier for the array address.
552       if (!FLAG_track_double_fields || !representation.IsDouble()) {
553         __ mov(storage_reg, value_reg);
554       }
555       __ RecordWriteField(receiver_reg,
556                           offset,
557                           storage_reg,
558                           scratch1,
559                           kRAHasNotBeenSaved,
560                           kDontSaveFPRegs,
561                           EMIT_REMEMBERED_SET,
562                           smi_check);
563     }
564   } else {
565     // Write to the properties array.
566     int offset = index * kPointerSize + FixedArray::kHeaderSize;
567     // Get the properties array
568     __ lw(scratch1,
569           FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
570     if (FLAG_track_double_fields && representation.IsDouble()) {
571       __ sw(storage_reg, FieldMemOperand(scratch1, offset));
572     } else {
573       __ sw(value_reg, FieldMemOperand(scratch1, offset));
574     }
575 
576     if (!FLAG_track_fields || !representation.IsSmi()) {
577       // Update the write barrier for the array address.
578       if (!FLAG_track_double_fields || !representation.IsDouble()) {
579         __ mov(storage_reg, value_reg);
580       }
581       __ RecordWriteField(scratch1,
582                           offset,
583                           storage_reg,
584                           receiver_reg,
585                           kRAHasNotBeenSaved,
586                           kDontSaveFPRegs,
587                           EMIT_REMEMBERED_SET,
588                           smi_check);
589     }
590   }
591 
592   // Return the value (register v0).
593   ASSERT(value_reg.is(a0));
594   __ bind(&exit);
595   __ Ret(USE_DELAY_SLOT);
596   __ mov(v0, a0);
597 }
598 
599 
600 // Generate StoreField code, value is passed in a0 register.
601 // When leaving generated code after success, the receiver_reg and name_reg
602 // may be clobbered.  Upon branch to miss_label, the receiver and name
603 // registers have their original values.
604 void StoreStubCompiler::GenerateStoreField(MacroAssembler* masm,
605                                            Handle<JSObject> object,
606                                            LookupResult* lookup,
607                                            Register receiver_reg,
608                                            Register name_reg,
609                                            Register value_reg,
610                                            Register scratch1,
611                                            Register scratch2,
612                                            Label* miss_label) {
613   // a0 : value
614   Label exit;
615 
616   // Stub never generated for non-global objects that require access
617   // checks.
618   ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
619 
620   int index = lookup->GetFieldIndex().field_index();
621 
622   // Adjust for the number of properties stored in the object. Even in the
623   // face of a transition we can use the old map here because the size of the
624   // object and the number of in-object properties are not going to change.
625   index -= object->map()->inobject_properties();
626 
627   Representation representation = lookup->representation();
628   ASSERT(!representation.IsNone());
629   if (FLAG_track_fields && representation.IsSmi()) {
630     __ JumpIfNotSmi(value_reg, miss_label);
631   } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
632     __ JumpIfSmi(value_reg, miss_label);
633   } else if (FLAG_track_double_fields && representation.IsDouble()) {
634     // Load the double storage.
635     if (index < 0) {
636       int offset = object->map()->instance_size() + (index * kPointerSize);
637       __ lw(scratch1, FieldMemOperand(receiver_reg, offset));
638     } else {
639       __ lw(scratch1,
640             FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
641       int offset = index * kPointerSize + FixedArray::kHeaderSize;
642       __ lw(scratch1, FieldMemOperand(scratch1, offset));
643     }
644 
645     // Store the value into the storage.
646     Label do_store, heap_number;
647     __ JumpIfNotSmi(value_reg, &heap_number);
648     __ SmiUntag(scratch2, value_reg);
649     __ mtc1(scratch2, f6);
650     __ cvt_d_w(f4, f6);
651     __ jmp(&do_store);
652 
653     __ bind(&heap_number);
654     __ CheckMap(value_reg, scratch2, Heap::kHeapNumberMapRootIndex,
655                 miss_label, DONT_DO_SMI_CHECK);
656     __ ldc1(f4, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
657 
658     __ bind(&do_store);
659     __ sdc1(f4, FieldMemOperand(scratch1, HeapNumber::kValueOffset));
660     // Return the value (register v0).
661     ASSERT(value_reg.is(a0));
662     __ Ret(USE_DELAY_SLOT);
663     __ mov(v0, a0);
664     return;
665   }
666 
667   // TODO(verwaest): Share this code as a code stub.
668   SmiCheck smi_check = representation.IsTagged()
669       ? INLINE_SMI_CHECK : OMIT_SMI_CHECK;
670   if (index < 0) {
671     // Set the property straight into the object.
672     int offset = object->map()->instance_size() + (index * kPointerSize);
673     __ sw(value_reg, FieldMemOperand(receiver_reg, offset));
674 
675     if (!FLAG_track_fields || !representation.IsSmi()) {
676       // Skip updating write barrier if storing a smi.
677       __ JumpIfSmi(value_reg, &exit);
678 
679       // Update the write barrier for the array address.
680       // Pass the now unused name_reg as a scratch register.
681       __ mov(name_reg, value_reg);
682       __ RecordWriteField(receiver_reg,
683                           offset,
684                           name_reg,
685                           scratch1,
686                           kRAHasNotBeenSaved,
687                           kDontSaveFPRegs,
688                           EMIT_REMEMBERED_SET,
689                           smi_check);
690     }
691   } else {
692     // Write to the properties array.
693     int offset = index * kPointerSize + FixedArray::kHeaderSize;
694     // Get the properties array.
695     __ lw(scratch1,
696           FieldMemOperand(receiver_reg, JSObject::kPropertiesOffset));
697     __ sw(value_reg, FieldMemOperand(scratch1, offset));
698 
699     if (!FLAG_track_fields || !representation.IsSmi()) {
700       // Skip updating write barrier if storing a smi.
701       __ JumpIfSmi(value_reg, &exit);
702 
703       // Update the write barrier for the array address.
704       // Ok to clobber receiver_reg and name_reg, since we return.
705       __ mov(name_reg, value_reg);
706       __ RecordWriteField(scratch1,
707                           offset,
708                           name_reg,
709                           receiver_reg,
710                           kRAHasNotBeenSaved,
711                           kDontSaveFPRegs,
712                           EMIT_REMEMBERED_SET,
713                           smi_check);
714     }
715   }
716 
717   // Return the value (register v0).
718   ASSERT(value_reg.is(a0));
719   __ bind(&exit);
720   __ Ret(USE_DELAY_SLOT);
721   __ mov(v0, a0);
722 }
723 
724 
725 void StoreStubCompiler::GenerateRestoreName(MacroAssembler* masm,
726                                             Label* label,
727                                             Handle<Name> name) {
728   if (!label->is_unused()) {
729     __ bind(label);
730     __ li(this->name(), Operand(name));
731   }
732 }
733 
734 
735 static void PushInterceptorArguments(MacroAssembler* masm,
736                                      Register receiver,
737                                      Register holder,
738                                      Register name,
739                                      Handle<JSObject> holder_obj) {
740   STATIC_ASSERT(StubCache::kInterceptorArgsNameIndex == 0);
741   STATIC_ASSERT(StubCache::kInterceptorArgsInfoIndex == 1);
742   STATIC_ASSERT(StubCache::kInterceptorArgsThisIndex == 2);
743   STATIC_ASSERT(StubCache::kInterceptorArgsHolderIndex == 3);
744   STATIC_ASSERT(StubCache::kInterceptorArgsLength == 4);
745   __ push(name);
746   Handle<InterceptorInfo> interceptor(holder_obj->GetNamedInterceptor());
747   ASSERT(!masm->isolate()->heap()->InNewSpace(*interceptor));
748   Register scratch = name;
749   __ li(scratch, Operand(interceptor));
750   __ Push(scratch, receiver, holder);
751 }
752 
753 
754 static void CompileCallLoadPropertyWithInterceptor(
755     MacroAssembler* masm,
756     Register receiver,
757     Register holder,
758     Register name,
759     Handle<JSObject> holder_obj,
760     IC::UtilityId id) {
761   PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
762   __ CallExternalReference(
763       ExternalReference(IC_Utility(id), masm->isolate()),
764       StubCache::kInterceptorArgsLength);
765 }
766 
767 
768 static const int kFastApiCallArguments = FunctionCallbackArguments::kArgsLength;
769 
770 // Reserves space for the extra arguments to API function in the
771 // caller's frame.
772 //
773 // These arguments are set by CheckPrototypes and GenerateFastApiDirectCall.
774 static void ReserveSpaceForFastApiCall(MacroAssembler* masm,
775                                        Register scratch) {
776   ASSERT(Smi::FromInt(0) == 0);
777   for (int i = 0; i < kFastApiCallArguments; i++) {
778     __ push(zero_reg);
779   }
780 }
781 
782 
783 // Undoes the effects of ReserveSpaceForFastApiCall.
784 static void FreeSpaceForFastApiCall(MacroAssembler* masm) {
785   __ Drop(kFastApiCallArguments);
786 }
787 
788 
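// Emit the actual call to a simple API function.  The FunctionCallbackInfo
// slots are assumed to be reserved on the stack already (see
// ReserveSpaceForFastApiCall and GenerateFastApiCall), with the holder slot
// filled in by CheckPrototypes.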
789 static void GenerateFastApiDirectCall(MacroAssembler* masm,
790                                       const CallOptimization& optimization,
791                                       int argc,
792                                       bool restore_context) {
793   // ----------- S t a t e -------------
794   //  -- sp[0] - sp[24]     : FunctionCallbackInfo, incl.
795   //                        :  holder (set by CheckPrototypes)
796   //  -- sp[28]             : last JS argument
797   //  -- ...
798   //  -- sp[(argc + 6) * 4] : first JS argument
799   //  -- sp[(argc + 7) * 4] : receiver
800   // -----------------------------------
801   typedef FunctionCallbackArguments FCA;
802   // Save calling context.
803   __ sw(cp, MemOperand(sp, FCA::kContextSaveIndex * kPointerSize));
804   // Get the function and setup the context.
805   Handle<JSFunction> function = optimization.constant_function();
806   __ li(t1, function);
807   __ lw(cp, FieldMemOperand(t1, JSFunction::kContextOffset));
808   __ sw(t1, MemOperand(sp, FCA::kCalleeIndex * kPointerSize));
809 
810   // Construct the FunctionCallbackInfo.
811   Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
812   Handle<Object> call_data(api_call_info->data(), masm->isolate());
813   if (masm->isolate()->heap()->InNewSpace(*call_data)) {
814     __ li(a0, api_call_info);
815     __ lw(t2, FieldMemOperand(a0, CallHandlerInfo::kDataOffset));
816   } else {
817     __ li(t2, call_data);
818   }
819   // Store call data.
820   __ sw(t2, MemOperand(sp, FCA::kDataIndex * kPointerSize));
821   // Store isolate.
822   __ li(t3, Operand(ExternalReference::isolate_address(masm->isolate())));
823   __ sw(t3, MemOperand(sp, FCA::kIsolateIndex * kPointerSize));
824   // Store ReturnValue default and ReturnValue.
825   __ LoadRoot(t1, Heap::kUndefinedValueRootIndex);
826   __ sw(t1, MemOperand(sp, FCA::kReturnValueOffset * kPointerSize));
827   __ sw(t1, MemOperand(sp, FCA::kReturnValueDefaultValueIndex * kPointerSize));
828 
829   // Prepare arguments.
830   __ Move(a2, sp);
831 
832   // Allocate the v8::Arguments structure in the arguments' space since
833   // it's not controlled by GC.
834   const int kApiStackSpace = 4;
835 
836   FrameScope frame_scope(masm, StackFrame::MANUAL);
837   __ EnterExitFrame(false, kApiStackSpace);
838 
839   // a0 = FunctionCallbackInfo&
840   // Arguments is built at sp + 1 (sp is a reserved spot for ra).
841   __ Addu(a0, sp, kPointerSize);
842   // FunctionCallbackInfo::implicit_args_
843   __ sw(a2, MemOperand(a0, 0 * kPointerSize));
844   // FunctionCallbackInfo::values_
845   __ Addu(t0, a2, Operand((kFastApiCallArguments - 1 + argc) * kPointerSize));
846   __ sw(t0, MemOperand(a0, 1 * kPointerSize));
847   // FunctionCallbackInfo::length_ = argc
848   __ li(t0, Operand(argc));
849   __ sw(t0, MemOperand(a0, 2 * kPointerSize));
850   // FunctionCallbackInfo::is_construct_call = 0
851   __ sw(zero_reg, MemOperand(a0, 3 * kPointerSize));
852 
853   const int kStackUnwindSpace = argc + kFastApiCallArguments + 1;
854   Address function_address = v8::ToCData<Address>(api_call_info->callback());
855   ApiFunction fun(function_address);
856   ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
857   ExternalReference ref =
858       ExternalReference(&fun,
859                         type,
860                         masm->isolate());
861   Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
862   ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
863   ApiFunction thunk_fun(thunk_address);
864   ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
865       masm->isolate());
866 
867   AllowExternalCallThatCantCauseGC scope(masm);
868   MemOperand context_restore_operand(
869       fp, (2 + FCA::kContextSaveIndex) * kPointerSize);
870   MemOperand return_value_operand(
871       fp, (2 + FCA::kReturnValueOffset) * kPointerSize);
872 
873   __ CallApiFunctionAndReturn(ref,
874                               function_address,
875                               thunk_ref,
876                               a1,
877                               kStackUnwindSpace,
878                               return_value_operand,
879                               restore_context ?
880                                   &context_restore_operand : NULL);
881 }
882 
883 
884 // Generate call to api function.
885 static void GenerateFastApiCall(MacroAssembler* masm,
886                                 const CallOptimization& optimization,
887                                 Register receiver,
888                                 Register scratch,
889                                 int argc,
890                                 Register* values) {
891   ASSERT(optimization.is_simple_api_call());
892   ASSERT(!receiver.is(scratch));
893 
894   typedef FunctionCallbackArguments FCA;
895   const int stack_space = kFastApiCallArguments + argc + 1;
896   // Assign stack space for the call arguments.
897   __ Subu(sp, sp, Operand(stack_space * kPointerSize));
898   // Write holder to stack frame.
899   __ sw(receiver, MemOperand(sp, FCA::kHolderIndex * kPointerSize));
900   // Write receiver to stack frame.
901   int index = stack_space - 1;
902   __ sw(receiver, MemOperand(sp, index * kPointerSize));
903   // Write the arguments to stack frame.
904   for (int i = 0; i < argc; i++) {
905     ASSERT(!receiver.is(values[i]));
906     ASSERT(!scratch.is(values[i]));
907     __ sw(values[i], MemOperand(sp, index-- * kPointerSize));
908   }
909 
910   GenerateFastApiDirectCall(masm, optimization, argc, true);
911 }
912 
913 
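// Helper for compiling call ICs on objects with a named interceptor.
// Compile() dispatches to CompileCacheable() when the lookup resolved to a
// constant function (possibly using the fast API call path) and to
// CompileRegular() otherwise.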
914 class CallInterceptorCompiler BASE_EMBEDDED {
915  public:
916   CallInterceptorCompiler(CallStubCompiler* stub_compiler,
917                           const ParameterCount& arguments,
918                           Register name,
919                           ExtraICState extra_ic_state)
920       : stub_compiler_(stub_compiler),
921         arguments_(arguments),
922         name_(name),
923         extra_ic_state_(extra_ic_state) {}
924 
925   void Compile(MacroAssembler* masm,
926                Handle<JSObject> object,
927                Handle<JSObject> holder,
928                Handle<Name> name,
929                LookupResult* lookup,
930                Register receiver,
931                Register scratch1,
932                Register scratch2,
933                Register scratch3,
934                Label* miss) {
935     ASSERT(holder->HasNamedInterceptor());
936     ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
937 
938     // Check that the receiver isn't a smi.
939     __ JumpIfSmi(receiver, miss);
940     CallOptimization optimization(lookup);
941     if (optimization.is_constant_call()) {
942       CompileCacheable(masm, object, receiver, scratch1, scratch2, scratch3,
943                        holder, lookup, name, optimization, miss);
944     } else {
945       CompileRegular(masm, object, receiver, scratch1, scratch2, scratch3,
946                      name, holder, miss);
947     }
948   }
949 
950  private:
951   void CompileCacheable(MacroAssembler* masm,
952                         Handle<JSObject> object,
953                         Register receiver,
954                         Register scratch1,
955                         Register scratch2,
956                         Register scratch3,
957                         Handle<JSObject> interceptor_holder,
958                         LookupResult* lookup,
959                         Handle<Name> name,
960                         const CallOptimization& optimization,
961                         Label* miss_label) {
962     ASSERT(optimization.is_constant_call());
963     ASSERT(!lookup->holder()->IsGlobalObject());
964     Counters* counters = masm->isolate()->counters();
965     int depth1 = kInvalidProtoDepth;
966     int depth2 = kInvalidProtoDepth;
967     bool can_do_fast_api_call = false;
968     if (optimization.is_simple_api_call() &&
969           !lookup->holder()->IsGlobalObject()) {
970       depth1 = optimization.GetPrototypeDepthOfExpectedType(
971           object, interceptor_holder);
972       if (depth1 == kInvalidProtoDepth) {
973         depth2 = optimization.GetPrototypeDepthOfExpectedType(
974             interceptor_holder, Handle<JSObject>(lookup->holder()));
975       }
976       can_do_fast_api_call =
977           depth1 != kInvalidProtoDepth || depth2 != kInvalidProtoDepth;
978     }
979 
980     __ IncrementCounter(counters->call_const_interceptor(), 1,
981                         scratch1, scratch2);
982 
983     if (can_do_fast_api_call) {
984       __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
985                           scratch1, scratch2);
986       ReserveSpaceForFastApiCall(masm, scratch1);
987     }
988 
989     // Check that the maps from receiver to interceptor's holder
990     // haven't changed and thus we can invoke interceptor.
991     Label miss_cleanup;
992     Label* miss = can_do_fast_api_call ? &miss_cleanup : miss_label;
993     Register holder =
994         stub_compiler_->CheckPrototypes(
995             IC::CurrentTypeOf(object, masm->isolate()), receiver,
996             interceptor_holder, scratch1, scratch2, scratch3,
997             name, depth1, miss);
998 
999     // Invoke an interceptor and if it provides a value,
1000     // branch to |regular_invoke|.
1001     Label regular_invoke;
1002     LoadWithInterceptor(masm, receiver, holder, interceptor_holder, scratch2,
1003                         &regular_invoke);
1004 
1005     // Interceptor returned nothing for this property.  Try to use cached
1006     // constant function.
1007 
1008     // Check that the maps from interceptor's holder to constant function's
1009     // holder haven't changed and thus we can use cached constant function.
1010     if (*interceptor_holder != lookup->holder()) {
1011       stub_compiler_->CheckPrototypes(
1012           IC::CurrentTypeOf(interceptor_holder, masm->isolate()), holder,
1013           handle(lookup->holder()), scratch1, scratch2, scratch3,
1014           name, depth2, miss);
1015     } else {
1016       // CheckPrototypes has a side effect of fetching a 'holder'
1017       // for API (object which is instanceof for the signature).  It's
1018       // safe to omit it here, as if present, it should be fetched
1019       // by the previous CheckPrototypes.
1020       ASSERT(depth2 == kInvalidProtoDepth);
1021     }
1022 
1023     // Invoke function.
1024     if (can_do_fast_api_call) {
1025       GenerateFastApiDirectCall(
1026           masm, optimization, arguments_.immediate(), false);
1027     } else {
1028       Handle<JSFunction> function = optimization.constant_function();
1029       __ Move(a0, receiver);
1030       stub_compiler_->GenerateJumpFunction(object, function);
1031     }
1032 
1033     // Deferred code for fast API call case---clean preallocated space.
1034     if (can_do_fast_api_call) {
1035       __ bind(&miss_cleanup);
1036       FreeSpaceForFastApiCall(masm);
1037       __ Branch(miss_label);
1038     }
1039 
1040     // Invoke a regular function.
1041     __ bind(&regular_invoke);
1042     if (can_do_fast_api_call) {
1043       FreeSpaceForFastApiCall(masm);
1044     }
1045   }
1046 
1047   void CompileRegular(MacroAssembler* masm,
1048                       Handle<JSObject> object,
1049                       Register receiver,
1050                       Register scratch1,
1051                       Register scratch2,
1052                       Register scratch3,
1053                       Handle<Name> name,
1054                       Handle<JSObject> interceptor_holder,
1055                       Label* miss_label) {
1056     Register holder =
1057         stub_compiler_->CheckPrototypes(
1058             IC::CurrentTypeOf(object, masm->isolate()), receiver,
1059             interceptor_holder, scratch1, scratch2, scratch3, name, miss_label);
1060 
1061     // Call a runtime function to load the interceptor property.
1062     FrameScope scope(masm, StackFrame::INTERNAL);
1063     // Save the name_ register across the call.
1064     __ push(name_);
1065 
1066     CompileCallLoadPropertyWithInterceptor(
1067         masm, receiver, holder, name_, interceptor_holder,
1068         IC::kLoadPropertyWithInterceptorForCall);
1069 
1070     // Restore the name_ register.
1071     __ pop(name_);
1072     // Leave the internal frame.
1073   }
1074 
1075   void LoadWithInterceptor(MacroAssembler* masm,
1076                            Register receiver,
1077                            Register holder,
1078                            Handle<JSObject> holder_obj,
1079                            Register scratch,
1080                            Label* interceptor_succeeded) {
1081     {
1082       FrameScope scope(masm, StackFrame::INTERNAL);
1083 
1084       __ Push(receiver, holder, name_);
1085       CompileCallLoadPropertyWithInterceptor(
1086           masm, receiver, holder, name_, holder_obj,
1087           IC::kLoadPropertyWithInterceptorOnly);
1088       __ pop(name_);
1089       __ pop(holder);
1090       __ pop(receiver);
1091     }
1092     // If interceptor returns no-result sentinel, call the constant function.
1093     __ LoadRoot(scratch, Heap::kNoInterceptorResultSentinelRootIndex);
1094     __ Branch(interceptor_succeeded, ne, v0, Operand(scratch));
1095   }
1096 
1097   CallStubCompiler* stub_compiler_;
1098   const ParameterCount& arguments_;
1099   Register name_;
1100   ExtraICState extra_ic_state_;
1101 };
1102 
1103 
1104 void StubCompiler::GenerateTailCall(MacroAssembler* masm, Handle<Code> code) {
1105   __ Jump(code, RelocInfo::CODE_TARGET);
1106 }
1107 
1108 
1109 #undef __
1110 #define __ ACCESS_MASM(masm())
1111 
1112 
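// Walk the prototype chain from the object in |object_reg| up to |holder|,
// checking the map of each object along the way (or performing a negative
// dictionary lookup for dictionary-mode objects) and jumping to |miss| on
// any mismatch.  Global proxies get an access check and global objects a
// property cell check.  Returns the register holding the holder.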
1113 Register StubCompiler::CheckPrototypes(Handle<Type> type,
1114                                        Register object_reg,
1115                                        Handle<JSObject> holder,
1116                                        Register holder_reg,
1117                                        Register scratch1,
1118                                        Register scratch2,
1119                                        Handle<Name> name,
1120                                        int save_at_depth,
1121                                        Label* miss,
1122                                        PrototypeCheckType check) {
1123   Handle<Map> receiver_map(IC::TypeToMap(*type, isolate()));
1124   // Make sure that the type feedback oracle harvests the receiver map.
1125   // TODO(svenpanne) Remove this hack when all ICs are reworked.
1126   __ li(scratch1, Operand(receiver_map));
1127 
1128   // Make sure there's no overlap between holder and object registers.
1129   ASSERT(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
1130   ASSERT(!scratch2.is(object_reg) && !scratch2.is(holder_reg)
1131          && !scratch2.is(scratch1));
1132 
1133   // Keep track of the current object in register reg.
1134   Register reg = object_reg;
1135   int depth = 0;
1136 
1137   typedef FunctionCallbackArguments FCA;
1138   if (save_at_depth == depth) {
1139     __ sw(reg, MemOperand(sp, FCA::kHolderIndex * kPointerSize));
1140   }
1141 
1142   Handle<JSObject> current = Handle<JSObject>::null();
1143   if (type->IsConstant()) current = Handle<JSObject>::cast(type->AsConstant());
1144   Handle<JSObject> prototype = Handle<JSObject>::null();
1145   Handle<Map> current_map = receiver_map;
1146   Handle<Map> holder_map(holder->map());
1147   // Traverse the prototype chain and check the maps in the prototype chain for
1148   // fast and global objects or do negative lookup for normal objects.
1149   while (!current_map.is_identical_to(holder_map)) {
1150     ++depth;
1151 
1152     // Only global objects and objects that do not require access
1153     // checks are allowed in stubs.
1154     ASSERT(current_map->IsJSGlobalProxyMap() ||
1155            !current_map->is_access_check_needed());
1156 
1157     prototype = handle(JSObject::cast(current_map->prototype()));
1158     if (current_map->is_dictionary_map() &&
1159         !current_map->IsJSGlobalObjectMap() &&
1160         !current_map->IsJSGlobalProxyMap()) {
1161       if (!name->IsUniqueName()) {
1162         ASSERT(name->IsString());
1163         name = factory()->InternalizeString(Handle<String>::cast(name));
1164       }
1165       ASSERT(current.is_null() ||
1166              current->property_dictionary()->FindEntry(*name) ==
1167              NameDictionary::kNotFound);
1168 
1169       GenerateDictionaryNegativeLookup(masm(), miss, reg, name,
1170                                        scratch1, scratch2);
1171 
1172       __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
1173       reg = holder_reg;  // From now on the object will be in holder_reg.
1174       __ lw(reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
1175     } else {
1176       Register map_reg = scratch1;
1177       if (depth != 1 || check == CHECK_ALL_MAPS) {
1178         // CheckMap implicitly loads the map of |reg| into |map_reg|.
1179         __ CheckMap(reg, map_reg, current_map, miss, DONT_DO_SMI_CHECK);
1180       } else {
1181         __ lw(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
1182       }
1183 
1184       // Check access rights to the global object.  This has to happen after
1185       // the map check so that we know that the object is actually a global
1186       // object.
1187       if (current_map->IsJSGlobalProxyMap()) {
1188         __ CheckAccessGlobalProxy(reg, scratch2, miss);
1189       } else if (current_map->IsJSGlobalObjectMap()) {
1190         GenerateCheckPropertyCell(
1191             masm(), Handle<JSGlobalObject>::cast(current), name,
1192             scratch2, miss);
1193       }
1194 
1195       reg = holder_reg;  // From now on the object will be in holder_reg.
1196 
1197       if (heap()->InNewSpace(*prototype)) {
1198         // The prototype is in new space; we cannot store a reference to it
1199         // in the code.  Load it from the map.
1200         __ lw(reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
1201       } else {
1202         // The prototype is in old space; load it directly.
1203         __ li(reg, Operand(prototype));
1204       }
1205     }
1206 
1207     if (save_at_depth == depth) {
1208       __ sw(reg, MemOperand(sp, FCA::kHolderIndex * kPointerSize));
1209     }
1210 
1211     // Go to the next object in the prototype chain.
1212     current = prototype;
1213     current_map = handle(current->map());
1214   }
1215 
1216   // Log the check depth.
1217   LOG(isolate(), IntEvent("check-maps-depth", depth + 1));
1218 
1219   if (depth != 0 || check == CHECK_ALL_MAPS) {
1220     // Check the holder map.
1221     __ CheckMap(reg, scratch1, current_map, miss, DONT_DO_SMI_CHECK);
1222   }
1223 
1224   // Perform security check for access to the global object.
1225   ASSERT(current_map->IsJSGlobalProxyMap() ||
1226          !current_map->is_access_check_needed());
1227   if (current_map->IsJSGlobalProxyMap()) {
1228     __ CheckAccessGlobalProxy(reg, scratch1, miss);
1229   }
1230 
1231   // Return the register containing the holder.
1232   return reg;
1233 }
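// Informally, for a chain receiver -> proto -> holder, the walk above emits
// checks along these lines (an illustration, not generated code):
//
//   if (map(receiver) != expected_map_0) goto miss;
//   if (map(proto)    != expected_map_1) goto miss;  // or a negative
//                                                    // dictionary lookup
//   // reg now holds the holder; its map is (re)checked after the loop.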
1234 
1235 
1236 void LoadStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
1237   if (!miss->is_unused()) {
1238     Label success;
1239     __ Branch(&success);
1240     __ bind(miss);
1241     TailCallBuiltin(masm(), MissBuiltin(kind()));
1242     __ bind(&success);
1243   }
1244 }
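// In effect the footer emits:
//
//   b success            ; fast path falls through to the handler tail
//   miss:
//   <tail call to the IC miss builtin>
//   success:
//
// so any branch bound to |miss| funnels into the generic runtime path.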
1245 
1246 
1247 void StoreStubCompiler::HandlerFrontendFooter(Handle<Name> name, Label* miss) {
1248   if (!miss->is_unused()) {
1249     Label success;
1250     __ Branch(&success);
1251     GenerateRestoreName(masm(), miss, name);
1252     TailCallBuiltin(masm(), MissBuiltin(kind()));
1253     __ bind(&success);
1254   }
1255 }
1256 
1257 
1258 Register LoadStubCompiler::CallbackHandlerFrontend(
1259     Handle<Type> type,
1260     Register object_reg,
1261     Handle<JSObject> holder,
1262     Handle<Name> name,
1263     Handle<Object> callback) {
1264   Label miss;
1265 
1266   Register reg = HandlerFrontendHeader(type, object_reg, holder, name, &miss);
1267 
1268   if (!holder->HasFastProperties() && !holder->IsJSGlobalObject()) {
1269     ASSERT(!reg.is(scratch2()));
1270     ASSERT(!reg.is(scratch3()));
1271     ASSERT(!reg.is(scratch4()));
1272 
1273     // Load the properties dictionary.
1274     Register dictionary = scratch4();
1275     __ lw(dictionary, FieldMemOperand(reg, JSObject::kPropertiesOffset));
1276 
1277     // Probe the dictionary.
1278     Label probe_done;
1279     NameDictionaryLookupStub::GeneratePositiveLookup(masm(),
1280                                                      &miss,
1281                                                      &probe_done,
1282                                                      dictionary,
1283                                                      this->name(),
1284                                                      scratch2(),
1285                                                      scratch3());
1286     __ bind(&probe_done);
1287 
1288     // If probing finds an entry in the dictionary, scratch3 contains the
1289     // pointer into the dictionary. Check that the value is the callback.
1290     Register pointer = scratch3();
1291     const int kElementsStartOffset = NameDictionary::kHeaderSize +
1292         NameDictionary::kElementsStartIndex * kPointerSize;
1293     const int kValueOffset = kElementsStartOffset + kPointerSize;
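    // Dictionary entries are (key, value, details) triples stored from
    // kElementsStartIndex onwards, so relative to the probe result the key
    // lives at kElementsStartOffset and the value one pointer after it,
    // which is what kValueOffset computes.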
1294     __ lw(scratch2(), FieldMemOperand(pointer, kValueOffset));
1295     __ Branch(&miss, ne, scratch2(), Operand(callback));
1296   }
1297 
1298   HandlerFrontendFooter(name, &miss);
1299   return reg;
1300 }
1301 
1302 
1303 void LoadStubCompiler::GenerateLoadField(Register reg,
1304                                          Handle<JSObject> holder,
1305                                          PropertyIndex field,
1306                                          Representation representation) {
1307   if (!reg.is(receiver())) __ mov(receiver(), reg);
1308   if (kind() == Code::LOAD_IC) {
1309     LoadFieldStub stub(field.is_inobject(holder),
1310                        field.translate(holder),
1311                        representation);
1312     GenerateTailCall(masm(), stub.GetCode(isolate()));
1313   } else {
1314     KeyedLoadFieldStub stub(field.is_inobject(holder),
1315                             field.translate(holder),
1316                             representation);
1317     GenerateTailCall(masm(), stub.GetCode(isolate()));
1318   }
1319 }
1320 
1321 
1322 void LoadStubCompiler::GenerateLoadConstant(Handle<Object> value) {
1323   // Return the constant value.
1324   __ li(v0, value);
1325   __ Ret();
1326 }
1327 
1328 
1329 void LoadStubCompiler::GenerateLoadCallback(
1330     const CallOptimization& call_optimization) {
1331   GenerateFastApiCall(
1332       masm(), call_optimization, receiver(), scratch3(), 0, NULL);
1333 }
1334 
1335 
1336 void LoadStubCompiler::GenerateLoadCallback(
1337     Register reg,
1338     Handle<ExecutableAccessorInfo> callback) {
1339   // Build the AccessorInfo::args_ list on the stack and push the property
1340   // name below the exit frame, so the GC is aware of these slots.
1341   STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
1342   STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
1343   STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
1344   STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
1345   STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
1346   STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
1347   STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
1348   ASSERT(!scratch2().is(reg));
1349   ASSERT(!scratch3().is(reg));
1350   ASSERT(!scratch4().is(reg));
1351   __ push(receiver());
1352   if (heap()->InNewSpace(callback->data())) {
1353     __ li(scratch3(), callback);
1354     __ lw(scratch3(), FieldMemOperand(scratch3(),
1355                                       ExecutableAccessorInfo::kDataOffset));
1356   } else {
1357     __ li(scratch3(), Handle<Object>(callback->data(), isolate()));
1358   }
1359   __ Subu(sp, sp, 6 * kPointerSize);
1360   __ sw(scratch3(), MemOperand(sp, 5 * kPointerSize));
1361   __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex);
1362   __ sw(scratch3(), MemOperand(sp, 4 * kPointerSize));
1363   __ sw(scratch3(), MemOperand(sp, 3 * kPointerSize));
1364   __ li(scratch4(),
1365         Operand(ExternalReference::isolate_address(isolate())));
1366   __ sw(scratch4(), MemOperand(sp, 2 * kPointerSize));
1367   __ sw(reg, MemOperand(sp, 1 * kPointerSize));
1368   __ sw(name(), MemOperand(sp, 0 * kPointerSize));
1369   __ Addu(scratch2(), sp, 1 * kPointerSize);
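  // Stack layout at this point, derived from the stores above (one slot per
  // kPointerSize; indices are PropertyCallbackArguments indices):
  //   sp[6]: receiver             (kThisIndex == 5)
  //   sp[5]: callback data        (kDataIndex == 4)
  //   sp[4]: undefined            (kReturnValueOffset == 3)
  //   sp[3]: undefined            (kReturnValueDefaultValueIndex == 2)
  //   sp[2]: isolate              (kIsolateIndex == 1)
  //   sp[1]: holder               (kHolderIndex == 0) <- scratch2 (args_)
  //   sp[0]: property name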
1370 
1371   __ mov(a2, scratch2());  // Saved in case scratch2 == a1.
1372   __ mov(a0, sp);  // (first argument - a0) = Handle<Name>
1373 
1374   const int kApiStackSpace = 1;
1375   FrameScope frame_scope(masm(), StackFrame::MANUAL);
1376   __ EnterExitFrame(false, kApiStackSpace);
1377 
1378   // Create PropertyAccessorInfo instance on the stack above the exit frame with
1379   // scratch2 (internal::Object** args_) as the data.
1380   __ sw(a2, MemOperand(sp, kPointerSize));
1381   // (second argument - a1) = AccessorInfo&
1382   __ Addu(a1, sp, kPointerSize);
1383 
1384   const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
1385   Address getter_address = v8::ToCData<Address>(callback->getter());
1386   ApiFunction fun(getter_address);
1387   ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
1388   ExternalReference ref = ExternalReference(&fun, type, isolate());
1389 
1390   Address thunk_address = FUNCTION_ADDR(&InvokeAccessorGetterCallback);
1391   ExternalReference::Type thunk_type =
1392       ExternalReference::PROFILING_GETTER_CALL;
1393   ApiFunction thunk_fun(thunk_address);
1394   ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
1395       isolate());
1396   __ CallApiFunctionAndReturn(ref,
1397                               getter_address,
1398                               thunk_ref,
1399                               a2,
1400                               kStackUnwindSpace,
1401                               MemOperand(fp, 6 * kPointerSize),
1402                               NULL);
1403 }
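// CallApiFunctionAndReturn calls getter_address directly in the common case;
// when the CPU profiler is active it is expected to route the call through
// the InvokeAccessorGetterCallback thunk (PROFILING_GETTER_CALL) instead, so
// the getter invocation gets logged. Either way kStackUnwindSpace slots are
// dropped on return.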
1404 
1405 
1406 void LoadStubCompiler::GenerateLoadInterceptor(
1407     Register holder_reg,
1408     Handle<Object> object,
1409     Handle<JSObject> interceptor_holder,
1410     LookupResult* lookup,
1411     Handle<Name> name) {
1412   ASSERT(interceptor_holder->HasNamedInterceptor());
1413   ASSERT(!interceptor_holder->GetNamedInterceptor()->getter()->IsUndefined());
1414 
1415   // So far the most popular follow-ups for interceptor loads are FIELD
1416   // and CALLBACKS, so we inline only those; other cases may be added
1417   // later.
1418   bool compile_followup_inline = false;
1419   if (lookup->IsFound() && lookup->IsCacheable()) {
1420     if (lookup->IsField()) {
1421       compile_followup_inline = true;
1422     } else if (lookup->type() == CALLBACKS &&
1423         lookup->GetCallbackObject()->IsExecutableAccessorInfo()) {
1424       ExecutableAccessorInfo* callback =
1425           ExecutableAccessorInfo::cast(lookup->GetCallbackObject());
1426       compile_followup_inline = callback->getter() != NULL &&
1427           callback->IsCompatibleReceiver(*object);
1428     }
1429   }
1430 
1431   if (compile_followup_inline) {
1432     // Compile the interceptor call, followed by inline code to load the
1433     // property from further up the prototype chain if the call fails.
1434     // Check that the maps haven't changed.
1435     ASSERT(holder_reg.is(receiver()) || holder_reg.is(scratch1()));
1436 
1437     // Preserve the receiver register explicitly whenever it differs from
1438     // the holder and is needed should the interceptor return without a
1439     // result: the CALLBACKS case needs the receiver passed into C++ code,
1440     // and the FIELD case might cause a miss during the prototype check.
1441     bool must_perform_prototype_check = *interceptor_holder != lookup->holder();
1442     bool must_preserve_receiver_reg = !receiver().is(holder_reg) &&
1443         (lookup->type() == CALLBACKS || must_perform_prototype_check);
1444 
1445     // Save necessary data before invoking an interceptor.
1446     // Requires a frame to make GC aware of pushed pointers.
1447     {
1448       FrameScope frame_scope(masm(), StackFrame::INTERNAL);
1449       if (must_preserve_receiver_reg) {
1450         __ Push(receiver(), holder_reg, this->name());
1451       } else {
1452         __ Push(holder_reg, this->name());
1453       }
1454       // Invoke the interceptor.  Note: the map checks from the receiver
1455       // to the interceptor's holder have been compiled before (see the
1456       // caller of this method).
1457       CompileCallLoadPropertyWithInterceptor(
1458           masm(), receiver(), holder_reg, this->name(), interceptor_holder,
1459           IC::kLoadPropertyWithInterceptorOnly);
1460 
1461       // Check if the interceptor provided a value for the property.  If
1462       // so, return immediately.
1463       Label interceptor_failed;
1464       __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
1465       __ Branch(&interceptor_failed, eq, v0, Operand(scratch1()));
1466       frame_scope.GenerateLeaveFrame();
1467       __ Ret();
1468 
1469       __ bind(&interceptor_failed);
1470       __ pop(this->name());
1471       __ pop(holder_reg);
1472       if (must_preserve_receiver_reg) {
1473         __ pop(receiver());
1474       }
1475       // Leave the internal frame.
1476     }
1477     GenerateLoadPostInterceptor(holder_reg, interceptor_holder, name, lookup);
1478   } else {  // !compile_followup_inline
1479     // Call the runtime system to load the interceptor.
1480     // Check that the maps haven't changed.
1481     PushInterceptorArguments(masm(), receiver(), holder_reg,
1482                              this->name(), interceptor_holder);
1483 
1484     ExternalReference ref = ExternalReference(
1485         IC_Utility(IC::kLoadPropertyWithInterceptorForLoad), isolate());
1486     __ TailCallExternalReference(ref, StubCache::kInterceptorArgsLength, 1);
1487   }
1488 }
1489 
1490 
1491 void CallStubCompiler::GenerateNameCheck(Handle<Name> name, Label* miss) {
1492   if (kind_ == Code::KEYED_CALL_IC) {
1493     __ Branch(miss, ne, a2, Operand(name));
1494   }
1495 }
1496 
1497 
1498 void CallStubCompiler::GenerateFunctionCheck(Register function,
1499                                              Register scratch,
1500                                              Label* miss) {
1501   __ JumpIfSmi(function, miss);
1502   __ GetObjectType(function, scratch, scratch);
1503   __ Branch(miss, ne, scratch, Operand(JS_FUNCTION_TYPE));
1504 }
1505 
1506 
1507 void CallStubCompiler::GenerateLoadFunctionFromCell(
1508     Handle<Cell> cell,
1509     Handle<JSFunction> function,
1510     Label* miss) {
1511   // Get the value from the cell.
1512   __ li(a3, Operand(cell));
1513   __ lw(a1, FieldMemOperand(a3, Cell::kValueOffset));
1514 
1515   // Check that the cell contains the same function.
1516   if (heap()->InNewSpace(*function)) {
1517     // We can't embed a pointer to a function in new space so we have
1518     // to verify that the shared function info is unchanged. This has
1519     // the nice side effect that multiple closures based on the same
1520     // function can all use this call IC. Before we load through the
1521     // function, we have to verify that it still is a function.
1522     GenerateFunctionCheck(a1, a3, miss);
1523 
1524     // Check the shared function info. Make sure it hasn't changed.
1525     __ li(a3, Handle<SharedFunctionInfo>(function->shared()));
1526     __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1527     __ Branch(miss, ne, t0, Operand(a3));
1528   } else {
1529     __ Branch(miss, ne, a1, Operand(function));
1530   }
1531 }
1532 
1533 
1534 void CallStubCompiler::GenerateMissBranch() {
1535   Handle<Code> code =
1536       isolate()->stub_cache()->ComputeCallMiss(arguments().immediate(),
1537                                                kind_,
1538                                                extra_state());
1539   __ Jump(code, RelocInfo::CODE_TARGET);
1540 }
1541 
1542 
1543 Handle<Code> CallStubCompiler::CompileCallField(Handle<JSObject> object,
1544                                                 Handle<JSObject> holder,
1545                                                 PropertyIndex index,
1546                                                 Handle<Name> name) {
1547   Label miss;
1548 
1549   Register reg = HandlerFrontendHeader(
1550       object, holder, name, RECEIVER_MAP_CHECK, &miss);
1551   GenerateFastPropertyLoad(masm(), a1, reg, index.is_inobject(holder),
1552                            index.translate(holder), Representation::Tagged());
1553   GenerateJumpFunction(object, a1, &miss);
1554 
1555   HandlerFrontendFooter(&miss);
1556 
1557   // Return the generated code.
1558   return GetCode(Code::FAST, name);
1559 }
1560 
1561 
1562 Handle<Code> CallStubCompiler::CompileArrayCodeCall(
1563     Handle<Object> object,
1564     Handle<JSObject> holder,
1565     Handle<Cell> cell,
1566     Handle<JSFunction> function,
1567     Handle<String> name,
1568     Code::StubType type) {
1569   Label miss;
1570 
1571   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
1572   if (!cell.is_null()) {
1573     ASSERT(cell->value() == *function);
1574     GenerateLoadFunctionFromCell(cell, function, &miss);
1575   }
1576 
1577   Handle<AllocationSite> site = isolate()->factory()->NewAllocationSite();
1578   site->SetElementsKind(GetInitialFastElementsKind());
1579   Handle<Cell> site_feedback_cell = isolate()->factory()->NewCell(site);
1580   const int argc = arguments().immediate();
1581   __ li(a0, Operand(argc));
1582   __ li(a2, Operand(site_feedback_cell));
1583   __ li(a1, Operand(function));
1584 
1585   ArrayConstructorStub stub(isolate());
1586   __ TailCallStub(&stub);
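  // ArrayConstructorStub takes argc in a0, the constructor function in a1
  // and the feedback cell in a2, as set up above; the fresh AllocationSite
  // stored in that cell lets the stub track elements-kind transitions for
  // this call site.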
1587 
1588   HandlerFrontendFooter(&miss);
1589 
1590   // Return the generated code.
1591   return GetCode(type, name);
1592 }
1593 
1594 
1595 Handle<Code> CallStubCompiler::CompileArrayPushCall(
1596     Handle<Object> object,
1597     Handle<JSObject> holder,
1598     Handle<Cell> cell,
1599     Handle<JSFunction> function,
1600     Handle<String> name,
1601     Code::StubType type) {
1602   // If object is not an array or is observed or sealed, bail out to regular
1603   // call.
1604   if (!object->IsJSArray() ||
1605       !cell.is_null() ||
1606       Handle<JSArray>::cast(object)->map()->is_observed() ||
1607       !Handle<JSArray>::cast(object)->map()->is_extensible()) {
1608     return Handle<Code>::null();
1609   }
1610 
1611   Label miss;
1612   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
1613   Register receiver = a0;
1614   Register scratch = a1;
1615 
1616   const int argc = arguments().immediate();
1617 
1618   if (argc == 0) {
1619     // Nothing to do, just return the length.
1620     __ lw(v0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1621     __ DropAndRet(argc + 1);
1622   } else {
1623     Label call_builtin;
1624     if (argc == 1) {  // Otherwise fall through to call the builtin.
1625       Label attempt_to_grow_elements, with_write_barrier, check_double;
1626 
1627       Register elements = t2;
1628       Register end_elements = t1;
1629       // Get the elements array of the object.
1630       __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1631 
1632       // Check that the elements are in fast mode and writable.
1633       __ CheckMap(elements,
1634                   scratch,
1635                   Heap::kFixedArrayMapRootIndex,
1636                   &check_double,
1637                   DONT_DO_SMI_CHECK);
1638 
1639       // Get the array's length into scratch and calculate new length.
1640       __ lw(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
1641       STATIC_ASSERT(kSmiTagSize == 1);
1642       STATIC_ASSERT(kSmiTag == 0);
1643       __ Addu(scratch, scratch, Operand(Smi::FromInt(argc)));
1644 
1645       // Get the elements' length.
1646       __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1647 
1648       // Check if we could survive without allocation.
1649       __ Branch(&attempt_to_grow_elements, gt, scratch, Operand(t0));
1650 
1651       // Check if value is a smi.
1652       __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize));
1653       __ JumpIfNotSmi(t0, &with_write_barrier);
1654 
1655       // Save new length.
1656       __ sw(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
1657 
1658       // Store the value.
1659       // We may need a register containing the address end_elements below,
1660       // so write back the value in end_elements.
1661       __ sll(end_elements, scratch, kPointerSizeLog2 - kSmiTagSize);
1662       __ Addu(end_elements, elements, end_elements);
1663       const int kEndElementsOffset =
1664           FixedArray::kHeaderSize - kHeapObjectTag - argc * kPointerSize;
1665       __ Addu(end_elements, end_elements, kEndElementsOffset);
1666       __ sw(t0, MemOperand(end_elements));
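      // Address arithmetic above, e.g. for argc == 1 and a new length n
      // (scratch holds n as a smi, i.e. n << 1):
      //   end_elements = elements + n * kPointerSize
      //                + FixedArray::kHeaderSize - kHeapObjectTag
      //                - argc * kPointerSize
      // which is the untagged address of element n - 1, the slot written.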
1667 
1668       // Return the new length.
1669       __ mov(v0, scratch);
1670       __ DropAndRet(argc + 1);
1671 
1672       __ bind(&check_double);
1673 
1674       // Check that the elements are in fast mode and writable.
1675       __ CheckMap(elements,
1676                   scratch,
1677                   Heap::kFixedDoubleArrayMapRootIndex,
1678                   &call_builtin,
1679                   DONT_DO_SMI_CHECK);
1680 
1681       // Get the array's length into scratch and calculate new length.
1682       __ lw(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
1683       STATIC_ASSERT(kSmiTagSize == 1);
1684       STATIC_ASSERT(kSmiTag == 0);
1685       __ Addu(scratch, scratch, Operand(Smi::FromInt(argc)));
1686 
1687       // Get the elements' length.
1688       __ lw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1689 
1690       // Check if we could survive without allocation.
1691       __ Branch(&call_builtin, gt, scratch, Operand(t0));
1692 
1693       __ lw(t0, MemOperand(sp, (argc - 1) * kPointerSize));
1694       __ StoreNumberToDoubleElements(
1695           t0, scratch, elements, a3, t1, a2,
1696           &call_builtin, argc * kDoubleSize);
1697 
1698       // Save new length.
1699       __ sw(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
1700 
1701       __ mov(v0, scratch);
1702       __ DropAndRet(argc + 1);
1703 
1704       __ bind(&with_write_barrier);
1705 
1706       __ lw(a3, FieldMemOperand(receiver, HeapObject::kMapOffset));
1707 
1708       if (FLAG_smi_only_arrays && !FLAG_trace_elements_transitions) {
1709         Label fast_object, not_fast_object;
1710         __ CheckFastObjectElements(a3, t3, &not_fast_object);
1711         __ jmp(&fast_object);
1712         // In case of fast smi-only, convert to fast object, otherwise bail out.
1713         __ bind(&not_fast_object);
1714         __ CheckFastSmiElements(a3, t3, &call_builtin);
1715 
1716         __ lw(t3, FieldMemOperand(t0, HeapObject::kMapOffset));
1717         __ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
1718         __ Branch(&call_builtin, eq, t3, Operand(at));
1719         // a0: receiver
1720         // a3: map
1721         Label try_holey_map;
1722         __ LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
1723                                                FAST_ELEMENTS,
1724                                                a3,
1725                                                t3,
1726                                                &try_holey_map);
1727         __ mov(a2, receiver);
1728         ElementsTransitionGenerator::
1729             GenerateMapChangeElementsTransition(masm(),
1730                                                 DONT_TRACK_ALLOCATION_SITE,
1731                                                 NULL);
1732         __ jmp(&fast_object);
1733 
1734         __ bind(&try_holey_map);
1735         __ LoadTransitionedArrayMapConditional(FAST_HOLEY_SMI_ELEMENTS,
1736                                                FAST_HOLEY_ELEMENTS,
1737                                                a3,
1738                                                t3,
1739                                                &call_builtin);
1740         __ mov(a2, receiver);
1741         ElementsTransitionGenerator::
1742             GenerateMapChangeElementsTransition(masm(),
1743                                                 DONT_TRACK_ALLOCATION_SITE,
1744                                                 NULL);
1745         __ bind(&fast_object);
1746       } else {
1747         __ CheckFastObjectElements(a3, a3, &call_builtin);
1748       }
1749 
1750       // Save new length.
1751       __ sw(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
1752 
1753       // Store the value.
1754       // We may need a register containing the address end_elements below,
1755       // so write back the value in end_elements.
1756       __ sll(end_elements, scratch, kPointerSizeLog2 - kSmiTagSize);
1757       __ Addu(end_elements, elements, end_elements);
1758       __ Addu(end_elements, end_elements, kEndElementsOffset);
1759       __ sw(t0, MemOperand(end_elements));
1760 
1761       __ RecordWrite(elements,
1762                      end_elements,
1763                      t0,
1764                      kRAHasNotBeenSaved,
1765                      kDontSaveFPRegs,
1766                      EMIT_REMEMBERED_SET,
1767                      OMIT_SMI_CHECK);
1768       __ mov(v0, scratch);
1769       __ DropAndRet(argc + 1);
1770 
1771       __ bind(&attempt_to_grow_elements);
1772       // scratch: array's length + 1.
1773       // t0: elements' length.
1774 
1775       if (!FLAG_inline_new) {
1776         __ Branch(&call_builtin);
1777       }
1778 
1779       __ lw(a2, MemOperand(sp, (argc - 1) * kPointerSize));
1780       // Growing elements that are SMI-only requires special handling in case
1781       // the new element is non-Smi. For now, delegate to the builtin.
1782       Label no_fast_elements_check;
1783       __ JumpIfSmi(a2, &no_fast_elements_check);
1784       __ lw(t3, FieldMemOperand(receiver, HeapObject::kMapOffset));
1785       __ CheckFastObjectElements(t3, t3, &call_builtin);
1786       __ bind(&no_fast_elements_check);
1787 
1788       ExternalReference new_space_allocation_top =
1789           ExternalReference::new_space_allocation_top_address(isolate());
1790       ExternalReference new_space_allocation_limit =
1791           ExternalReference::new_space_allocation_limit_address(isolate());
1792 
1793       const int kAllocationDelta = 4;
1794       // Load top and check if it is the end of elements.
1795       __ sll(end_elements, scratch, kPointerSizeLog2 - kSmiTagSize);
1796       __ Addu(end_elements, elements, end_elements);
1797       __ Addu(end_elements, end_elements, Operand(kEndElementsOffset));
1798       __ li(t3, Operand(new_space_allocation_top));
1799       __ lw(a3, MemOperand(t3));
1800       __ Branch(&call_builtin, ne, end_elements, Operand(a3));
1801 
1802       __ li(t5, Operand(new_space_allocation_limit));
1803       __ lw(t5, MemOperand(t5));
1804       __ Addu(a3, a3, Operand(kAllocationDelta * kPointerSize));
1805       __ Branch(&call_builtin, hi, a3, Operand(t5));
1806 
1807       // We fit and could grow elements.
1808       // Update new_space_allocation_top.
1809       __ sw(a3, MemOperand(t3));
1810       // Push the argument.
1811       __ sw(a2, MemOperand(end_elements));
1812       // Fill the rest with holes.
1813       __ LoadRoot(a3, Heap::kTheHoleValueRootIndex);
1814       for (int i = 1; i < kAllocationDelta; i++) {
1815         __ sw(a3, MemOperand(end_elements, i * kPointerSize));
1816       }
1817 
1818       // Update elements' and array's sizes.
1819       __ sw(scratch, FieldMemOperand(receiver, JSArray::kLengthOffset));
1820       __ Addu(t0, t0, Operand(Smi::FromInt(kAllocationDelta)));
1821       __ sw(t0, FieldMemOperand(elements, FixedArray::kLengthOffset));
1822 
1823       // Elements are in new space, so write barrier is not required.
1824       __ mov(v0, scratch);
1825       __ DropAndRet(argc + 1);
1826     }
1827     __ bind(&call_builtin);
1828     __ TailCallExternalReference(
1829         ExternalReference(Builtins::c_ArrayPush, isolate()), argc + 1, 1);
1830   }
1831 
1832   HandlerFrontendFooter(&miss);
1833 
1834   // Return the generated code.
1835   return GetCode(type, name);
1836 }
1837 
1838 
1839 Handle<Code> CallStubCompiler::CompileArrayPopCall(
1840     Handle<Object> object,
1841     Handle<JSObject> holder,
1842     Handle<Cell> cell,
1843     Handle<JSFunction> function,
1844     Handle<String> name,
1845     Code::StubType type) {
1846   // If object is not an array or is observed or sealed, bail out to regular
1847   // call.
1848   if (!object->IsJSArray() ||
1849       !cell.is_null() ||
1850       Handle<JSArray>::cast(object)->map()->is_observed() ||
1851       !Handle<JSArray>::cast(object)->map()->is_extensible()) {
1852     return Handle<Code>::null();
1853   }
1854 
1855   Label miss, return_undefined, call_builtin;
1856   Register receiver = a0;
1857   Register scratch = a1;
1858   Register elements = a3;
1859   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
1860 
1861   // Get the elements array of the object.
1862   __ lw(elements, FieldMemOperand(receiver, JSArray::kElementsOffset));
1863 
1864   // Check that the elements are in fast mode and writable.
1865   __ CheckMap(elements,
1866               scratch,
1867               Heap::kFixedArrayMapRootIndex,
1868               &call_builtin,
1869               DONT_DO_SMI_CHECK);
1870 
1871   // Get the array's length into t0 and calculate new length.
1872   __ lw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1873   __ Subu(t0, t0, Operand(Smi::FromInt(1)));
1874   __ Branch(&return_undefined, lt, t0, Operand(zero_reg));
1875 
1876   // Get the last element.
1877   __ LoadRoot(t2, Heap::kTheHoleValueRootIndex);
1878   STATIC_ASSERT(kSmiTagSize == 1);
1879   STATIC_ASSERT(kSmiTag == 0);
1880   // We can't address the last element in one operation. Compute the more
1881   // expensive shift first, and use an offset later on.
1882   __ sll(t1, t0, kPointerSizeLog2 - kSmiTagSize);
1883   __ Addu(elements, elements, t1);
1884   __ lw(scratch, FieldMemOperand(elements, FixedArray::kHeaderSize));
1885   __ Branch(&call_builtin, eq, scratch, Operand(t2));
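  // If the last element is the hole, the actual value may live on the
  // prototype chain, so delegate to the builtin rather than popping here.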
1886 
1887   // Set the array's length.
1888   __ sw(t0, FieldMemOperand(receiver, JSArray::kLengthOffset));
1889 
1890   // Fill with the hole.
1891   __ sw(t2, FieldMemOperand(elements, FixedArray::kHeaderSize));
1892   const int argc = arguments().immediate();
1893   __ mov(v0, scratch);
1894   __ DropAndRet(argc + 1);
1895 
1896   __ bind(&return_undefined);
1897   __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
1898   __ DropAndRet(argc + 1);
1899 
1900   __ bind(&call_builtin);
1901   __ TailCallExternalReference(
1902       ExternalReference(Builtins::c_ArrayPop, isolate()), argc + 1, 1);
1903 
1904   HandlerFrontendFooter(&miss);
1905 
1906   // Return the generated code.
1907   return GetCode(type, name);
1908 }
1909 
1910 
1911 Handle<Code> CallStubCompiler::CompileStringCharCodeAtCall(
1912     Handle<Object> object,
1913     Handle<JSObject> holder,
1914     Handle<Cell> cell,
1915     Handle<JSFunction> function,
1916     Handle<String> name,
1917     Code::StubType type) {
1918   // If object is not a string, bail out to regular call.
1919   if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
1920 
1921   Label miss;
1922   Label name_miss;
1923   Label index_out_of_range;
1924 
1925   Label* index_out_of_range_label = &index_out_of_range;
1926 
1927   if (kind_ == Code::CALL_IC &&
1928       (CallICBase::StringStubState::decode(extra_state()) ==
1929        DEFAULT_STRING_STUB)) {
1930     index_out_of_range_label = &miss;
1931   }
1932 
1933   HandlerFrontendHeader(object, holder, name, STRING_CHECK, &name_miss);
1934 
1935   Register receiver = a0;
1936   Register index = t1;
1937   Register result = a1;
1938   const int argc = arguments().immediate();
1939   __ lw(receiver, MemOperand(sp, argc * kPointerSize));
1940   if (argc > 0) {
1941     __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize));
1942   } else {
1943     __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
1944   }
1945 
1946   StringCharCodeAtGenerator generator(receiver,
1947                                       index,
1948                                       result,
1949                                       &miss,  // When not a string.
1950                                       &miss,  // When not a number.
1951                                       index_out_of_range_label,
1952                                       STRING_INDEX_IS_NUMBER);
1953   generator.GenerateFast(masm());
1954   __ mov(v0, result);
1955   __ DropAndRet(argc + 1);
1956 
1957   StubRuntimeCallHelper call_helper;
1958   generator.GenerateSlow(masm(), call_helper);
1959 
1960   if (index_out_of_range.is_linked()) {
1961     __ bind(&index_out_of_range);
1962     __ LoadRoot(v0, Heap::kNanValueRootIndex);
1963     __ DropAndRet(argc + 1);
1964   }
1965 
1966   __ bind(&miss);
1967   // Restore function name in a2.
1968   __ li(a2, name);
1969   HandlerFrontendFooter(&name_miss);
1970 
1971   // Return the generated code.
1972   return GetCode(type, name);
1973 }
1974 
1975 
1976 Handle<Code> CallStubCompiler::CompileStringCharAtCall(
1977     Handle<Object> object,
1978     Handle<JSObject> holder,
1979     Handle<Cell> cell,
1980     Handle<JSFunction> function,
1981     Handle<String> name,
1982     Code::StubType type) {
1983   // If object is not a string, bail out to regular call.
1984   if (!object->IsString() || !cell.is_null()) return Handle<Code>::null();
1985 
1986   const int argc = arguments().immediate();
1987   Label miss;
1988   Label name_miss;
1989   Label index_out_of_range;
1990   Label* index_out_of_range_label = &index_out_of_range;
1991   if (kind_ == Code::CALL_IC &&
1992       (CallICBase::StringStubState::decode(extra_state()) ==
1993        DEFAULT_STRING_STUB)) {
1994     index_out_of_range_label = &miss;
1995   }
1996 
1997   HandlerFrontendHeader(object, holder, name, STRING_CHECK, &name_miss);
1998 
1999   Register receiver = a0;
2000   Register index = t1;
2001   Register scratch = a3;
2002   Register result = a1;
2003   if (argc > 0) {
2004     __ lw(index, MemOperand(sp, (argc - 1) * kPointerSize));
2005   } else {
2006     __ LoadRoot(index, Heap::kUndefinedValueRootIndex);
2007   }
2008 
2009   StringCharAtGenerator generator(receiver,
2010                                   index,
2011                                   scratch,
2012                                   result,
2013                                   &miss,  // When not a string.
2014                                   &miss,  // When not a number.
2015                                   index_out_of_range_label,
2016                                   STRING_INDEX_IS_NUMBER);
2017   generator.GenerateFast(masm());
2018   __ mov(v0, result);
2019   __ DropAndRet(argc + 1);
2020 
2021   StubRuntimeCallHelper call_helper;
2022   generator.GenerateSlow(masm(), call_helper);
2023 
2024   if (index_out_of_range.is_linked()) {
2025     __ bind(&index_out_of_range);
2026     __ LoadRoot(v0, Heap::kempty_stringRootIndex);
2027     __ DropAndRet(argc + 1);
2028   }
2029 
2030   __ bind(&miss);
2031   // Restore function name in a2.
2032   __ li(a2, name);
2033   HandlerFrontendFooter(&name_miss);
2034 
2035   // Return the generated code.
2036   return GetCode(type, name);
2037 }
2038 
2039 
2040 Handle<Code> CallStubCompiler::CompileStringFromCharCodeCall(
2041     Handle<Object> object,
2042     Handle<JSObject> holder,
2043     Handle<Cell> cell,
2044     Handle<JSFunction> function,
2045     Handle<String> name,
2046     Code::StubType type) {
2047   const int argc = arguments().immediate();
2048 
2049   // If the object is not a JSObject or we got an unexpected number of
2050   // arguments, bail out to the regular call.
2051   if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
2052 
2053   Label miss;
2054   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
2055   if (!cell.is_null()) {
2056     ASSERT(cell->value() == *function);
2057     GenerateLoadFunctionFromCell(cell, function, &miss);
2058   }
2059 
2060   // Load the char code argument.
2061   Register code = a1;
2062   __ lw(code, MemOperand(sp, 0 * kPointerSize));
2063 
2064   // Check that the char code is a smi.
2065   Label slow;
2066   STATIC_ASSERT(kSmiTag == 0);
2067   __ JumpIfNotSmi(code, &slow);
2068 
2069   // Convert the smi code to uint16.
2070   __ And(code, code, Operand(Smi::FromInt(0xffff)));
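  // Smi::FromInt(0xffff) is 0xffff << kSmiTagSize, so the AND keeps the low
  // 16 bits of the smi payload (the tag bit stays 0): e.g. a smi-encoded
  // 0x12345 becomes 0x2345 -- a uint16 truncation without untagging.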
2071 
2072   StringCharFromCodeGenerator generator(code, v0);
2073   generator.GenerateFast(masm());
2074   __ DropAndRet(argc + 1);
2075 
2076   StubRuntimeCallHelper call_helper;
2077   generator.GenerateSlow(masm(), call_helper);
2078 
2079   __ bind(&slow);
2080   // We do not have to patch the receiver because the function makes no use of
2081   // it.
2082   GenerateJumpFunctionIgnoreReceiver(function);
2083 
2084   HandlerFrontendFooter(&miss);
2085 
2086   // Return the generated code.
2087   return GetCode(type, name);
2088 }
2089 
2090 
2091 Handle<Code> CallStubCompiler::CompileMathFloorCall(
2092     Handle<Object> object,
2093     Handle<JSObject> holder,
2094     Handle<Cell> cell,
2095     Handle<JSFunction> function,
2096     Handle<String> name,
2097     Code::StubType type) {
2098   const int argc = arguments().immediate();
2099   // If the object is not a JSObject or we got an unexpected number of
2100   // arguments, bail out to the regular call.
2101   if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
2102 
2103   Label miss, slow;
2104   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
2105   if (!cell.is_null()) {
2106     ASSERT(cell->value() == *function);
2107     GenerateLoadFunctionFromCell(cell, function, &miss);
2108   }
2109 
2110   // Load the (only) argument into v0.
2111   __ lw(v0, MemOperand(sp, 0 * kPointerSize));
2112 
2113   // If the argument is a smi, just return.
2114   STATIC_ASSERT(kSmiTag == 0);
2115   __ SmiTst(v0, t0);
2116   __ DropAndRet(argc + 1, eq, t0, Operand(zero_reg));
2117 
2118   __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
2119 
2120   Label wont_fit_smi, no_fpu_error, restore_fcsr_and_return;
2121 
2122   // With the FPU enabled we use the floor instruction.
2123 
2124   // Load the HeapNumber value.
2125   __ ldc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
2126 
2127   // Backup FCSR.
2128   __ cfc1(a3, FCSR);
2129   // Clearing FCSR clears the exception mask with no side-effects.
2130   __ ctc1(zero_reg, FCSR);
2131   // Convert the argument to an integer.
2132   __ floor_w_d(f0, f0);
2133 
2134   // Start checking for special cases.
2135   // Get the argument exponent and clear the sign bit.
2136   __ lw(t1, FieldMemOperand(v0, HeapNumber::kValueOffset + kPointerSize));
2137   __ And(t2, t1, Operand(~HeapNumber::kSignMask));
2138   __ srl(t2, t2, HeapNumber::kMantissaBitsInTopWord);
2139 
2140   // Retrieve FCSR and check for fpu errors.
2141   __ cfc1(t5, FCSR);
2142   __ And(t5, t5, Operand(kFCSRExceptionFlagMask));
2143   __ Branch(&no_fpu_error, eq, t5, Operand(zero_reg));
2144 
2145   // Check for NaN, Infinity, and -Infinity.
2146   // They are invariant under Math.floor, so just
2147   // return the original argument.
2148   __ Subu(t3, t2, Operand(HeapNumber::kExponentMask
2149         >> HeapNumber::kMantissaBitsInTopWord));
2150   __ Branch(&restore_fcsr_and_return, eq, t3, Operand(zero_reg));
2151   // We had an overflow or underflow in the conversion. Check whether we
2152   // have a big exponent; if it is big enough, the argument is already an
2153   // integer and still in v0.
2154   __ Branch(&restore_fcsr_and_return, ge, t3,
2155       Operand(HeapNumber::kMantissaBits));
2156   __ Branch(&wont_fit_smi);
2157 
2158   __ bind(&no_fpu_error);
2159   // Move the result back to v0.
2160   __ mfc1(v0, f0);
2161   // Check if the result fits into a smi.
2162   __ Addu(a1, v0, Operand(0x40000000));
2163   __ Branch(&wont_fit_smi, lt, a1, Operand(zero_reg));
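  // Adding 0x40000000 makes the result negative exactly when the value is
  // outside [-2^30, 2^30 - 1], the range representable as a smi; e.g.
  // 2^30 + 0x40000000 == 0x80000000, which is negative when read as signed.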
2164   // Tag the result.
2165   STATIC_ASSERT(kSmiTag == 0);
2166   __ sll(v0, v0, kSmiTagSize);
2167 
2168   // Check for -0.
2169   __ Branch(&restore_fcsr_and_return, ne, v0, Operand(zero_reg));
2170   // t1 already holds the HeapNumber exponent.
2171   __ And(t0, t1, Operand(HeapNumber::kSignMask));
2172   // If our HeapNumber is negative it was -0, so reload the heap number
2173   // and return it. Else v0 holds 0, so we can also just return.
2174   __ Branch(&restore_fcsr_and_return, eq, t0, Operand(zero_reg));
2175   __ lw(v0, MemOperand(sp, 0 * kPointerSize));
2176 
2177   __ bind(&restore_fcsr_and_return);
2178   // Restore FCSR and return.
2179   __ ctc1(a3, FCSR);
2180 
2181   __ DropAndRet(argc + 1);
2182 
2183   __ bind(&wont_fit_smi);
2184   // Restore FCSR and fall to slow case.
2185   __ ctc1(a3, FCSR);
2186 
2187   __ bind(&slow);
2188   // We do not have to patch the receiver because the function makes no use of
2189   // it.
2190   GenerateJumpFunctionIgnoreReceiver(function);
2191 
2192   HandlerFrontendFooter(&miss);
2193 
2194   // Return the generated code.
2195   return GetCode(type, name);
2196 }
2197 
2198 
2199 Handle<Code> CallStubCompiler::CompileMathAbsCall(
2200     Handle<Object> object,
2201     Handle<JSObject> holder,
2202     Handle<Cell> cell,
2203     Handle<JSFunction> function,
2204     Handle<String> name,
2205     Code::StubType type) {
2206   const int argc = arguments().immediate();
2207   // If the object is not a JSObject or we got an unexpected number of
2208   // arguments, bail out to the regular call.
2209   if (!object->IsJSObject() || argc != 1) return Handle<Code>::null();
2210 
2211   Label miss;
2212 
2213   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
2214   if (!cell.is_null()) {
2215     ASSERT(cell->value() == *function);
2216     GenerateLoadFunctionFromCell(cell, function, &miss);
2217   }
2218 
2219   // Load the (only) argument into v0.
2220   __ lw(v0, MemOperand(sp, 0 * kPointerSize));
2221 
2222   // Check if the argument is a smi.
2223   Label not_smi;
2224   STATIC_ASSERT(kSmiTag == 0);
2225   __ JumpIfNotSmi(v0, &not_smi);
2226 
2227   // Do bitwise not or do nothing depending on the sign of the
2228   // argument.
2229   __ sra(t0, v0, kBitsPerInt - 1);
2230   __ Xor(a1, v0, t0);
2231 
2232   // Add 1 or do nothing depending on the sign of the argument.
2233   __ Subu(v0, a1, t0);
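  // Branch-free absolute value: t0 = v0 >> 31 (arithmetic) is 0 for
  // non-negative and -1 for negative inputs, so (v0 ^ t0) - t0 leaves
  // non-negative values unchanged and computes ~v0 + 1 (two's-complement
  // negation) otherwise; this works on the smi-tagged value directly.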
2234 
2235   // If the result is still negative, go to the slow case.
2236   // This only happens for the most negative smi.
2237   Label slow;
2238   __ Branch(&slow, lt, v0, Operand(zero_reg));
2239 
2240   // Smi case done.
2241   __ DropAndRet(argc + 1);
2242 
2243   // Check if the argument is a heap number and load its exponent and
2244   // sign.
2245   __ bind(&not_smi);
2246   __ CheckMap(v0, a1, Heap::kHeapNumberMapRootIndex, &slow, DONT_DO_SMI_CHECK);
2247   __ lw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset));
2248 
2249   // Check the sign of the argument. If the argument is positive,
2250   // just return it.
2251   Label negative_sign;
2252   __ And(t0, a1, Operand(HeapNumber::kSignMask));
2253   __ Branch(&negative_sign, ne, t0, Operand(zero_reg));
2254   __ DropAndRet(argc + 1);
2255 
2256   // If the argument is negative, clear the sign, and return a new
2257   // number.
2258   __ bind(&negative_sign);
2259   __ Xor(a1, a1, Operand(HeapNumber::kSignMask));
2260   __ lw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
2261   __ LoadRoot(t2, Heap::kHeapNumberMapRootIndex);
2262   __ AllocateHeapNumber(v0, t0, t1, t2, &slow);
2263   __ sw(a1, FieldMemOperand(v0, HeapNumber::kExponentOffset));
2264   __ sw(a3, FieldMemOperand(v0, HeapNumber::kMantissaOffset));
2265   __ DropAndRet(argc + 1);
2266 
2267   __ bind(&slow);
2268   // We do not have to patch the receiver because the function makes no use of
2269   // it.
2270   GenerateJumpFunctionIgnoreReceiver(function);
2271 
2272   HandlerFrontendFooter(&miss);
2273 
2274   // Return the generated code.
2275   return GetCode(type, name);
2276 }
2277 
2278 
2279 Handle<Code> CallStubCompiler::CompileFastApiCall(
2280     const CallOptimization& optimization,
2281     Handle<Object> object,
2282     Handle<JSObject> holder,
2283     Handle<Cell> cell,
2284     Handle<JSFunction> function,
2285     Handle<String> name) {
2286 
2287   Counters* counters = isolate()->counters();
2288 
2289   ASSERT(optimization.is_simple_api_call());
2290   // Bail out if the object is a global object, as we don't want to
2291   // repatch it to the global receiver.
2292   if (object->IsGlobalObject()) return Handle<Code>::null();
2293   if (!cell.is_null()) return Handle<Code>::null();
2294   if (!object->IsJSObject()) return Handle<Code>::null();
2295   int depth = optimization.GetPrototypeDepthOfExpectedType(
2296       Handle<JSObject>::cast(object), holder);
2297   if (depth == kInvalidProtoDepth) return Handle<Code>::null();
2298 
2299   Label miss, miss_before_stack_reserved;
2300 
2301   GenerateNameCheck(name, &miss_before_stack_reserved);
2302 
2303   // Get the receiver from the stack.
2304   const int argc = arguments().immediate();
2305   __ lw(a1, MemOperand(sp, argc * kPointerSize));
2306 
2307   // Check that the receiver isn't a smi.
2308   __ JumpIfSmi(a1, &miss_before_stack_reserved);
2309 
2310   __ IncrementCounter(counters->call_const(), 1, a0, a3);
2311   __ IncrementCounter(counters->call_const_fast_api(), 1, a0, a3);
2312 
2313   ReserveSpaceForFastApiCall(masm(), a0);
2314 
2315   // Check that the maps haven't changed and find the holder as a side effect.
2316   CheckPrototypes(
2317       IC::CurrentTypeOf(object, isolate()),
2318       a1, holder, a0, a3, t0, name, depth, &miss);
2319 
2320   GenerateFastApiDirectCall(masm(), optimization, argc, false);
2321 
2322   __ bind(&miss);
2323   FreeSpaceForFastApiCall(masm());
2324 
2325   HandlerFrontendFooter(&miss_before_stack_reserved);
2326 
2327   // Return the generated code.
2328   return GetCode(function);
2329 }
2330 
2331 
2332 void StubCompiler::GenerateBooleanCheck(Register object, Label* miss) {
2333   Label success;
2334   // Check that the object is a boolean.
2335   __ LoadRoot(at, Heap::kTrueValueRootIndex);
2336   __ Branch(&success, eq, object, Operand(at));
2337   __ LoadRoot(at, Heap::kFalseValueRootIndex);
2338   __ Branch(miss, ne, object, Operand(at));
2339   __ bind(&success);
2340 }
2341 
2342 
2343 void CallStubCompiler::PatchGlobalProxy(Handle<Object> object) {
2344   if (object->IsGlobalObject()) {
2345     const int argc = arguments().immediate();
2346     const int receiver_offset = argc * kPointerSize;
2347     __ lw(a3, FieldMemOperand(a0, GlobalObject::kGlobalReceiverOffset));
2348     __ sw(a3, MemOperand(sp, receiver_offset));
2349   }
2350 }
2351 
2352 
2353 Register CallStubCompiler::HandlerFrontendHeader(Handle<Object> object,
2354                                                  Handle<JSObject> holder,
2355                                                  Handle<Name> name,
2356                                                  CheckType check,
2357                                                  Label* miss) {
2358   // ----------- S t a t e -------------
2359   //  -- a2    : name
2360   //  -- ra    : return address
2361   // -----------------------------------
2362   GenerateNameCheck(name, miss);
2363 
2364   Register reg = a0;
2365 
2366   // Get the receiver from the stack.
2367   const int argc = arguments().immediate();
2368   const int receiver_offset = argc * kPointerSize;
2369   __ lw(a0, MemOperand(sp, receiver_offset));
2370 
2371   // Check that the receiver isn't a smi.
2372   if (check != NUMBER_CHECK) {
2373     __ JumpIfSmi(a0, miss);
2374   }
2375 
2376   // Make sure that it's okay not to patch the on-stack receiver
2377   // unless we're doing a receiver map check.
2378   ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
2379   switch (check) {
2380     case RECEIVER_MAP_CHECK:
2381       __ IncrementCounter(isolate()->counters()->call_const(), 1, a1, a3);
2382 
2383       // Check that the maps haven't changed.
2384       reg = CheckPrototypes(
2385           IC::CurrentTypeOf(object, isolate()),
2386           reg, holder, a1, a3, t0, name, miss);
2387       break;
2388 
2389     case STRING_CHECK: {
2390       // Check that the object is a string.
2391       __ GetObjectType(reg, a3, a3);
2392       __ Branch(miss, Ugreater_equal, a3, Operand(FIRST_NONSTRING_TYPE));
2393       // Check that the maps starting from the prototype haven't changed.
2394       GenerateDirectLoadGlobalFunctionPrototype(
2395           masm(), Context::STRING_FUNCTION_INDEX, a1, miss);
2396       break;
2397     }
2398     case SYMBOL_CHECK: {
2399       // Check that the object is a symbol.
2400       __ GetObjectType(reg, a1, a3);
2401       __ Branch(miss, ne, a3, Operand(SYMBOL_TYPE));
2402       // Check that the maps starting from the prototype haven't changed.
2403       GenerateDirectLoadGlobalFunctionPrototype(
2404           masm(), Context::SYMBOL_FUNCTION_INDEX, a1, miss);
2405       break;
2406     }
2407     case NUMBER_CHECK: {
2408       Label fast;
2409       // Check that the object is a smi or a heap number.
2410       __ JumpIfSmi(reg, &fast);
2411       __ GetObjectType(reg, a3, a3);
2412       __ Branch(miss, ne, a3, Operand(HEAP_NUMBER_TYPE));
2413       __ bind(&fast);
2414       // Check that the maps starting from the prototype haven't changed.
2415       GenerateDirectLoadGlobalFunctionPrototype(
2416           masm(), Context::NUMBER_FUNCTION_INDEX, a1, miss);
2417       break;
2418     }
2419     case BOOLEAN_CHECK: {
2420       GenerateBooleanCheck(reg, miss);
2421 
2422       // Check that the maps starting from the prototype haven't changed.
2423       GenerateDirectLoadGlobalFunctionPrototype(
2424           masm(), Context::BOOLEAN_FUNCTION_INDEX, a1, miss);
2425       break;
2426     }
2427   }
2428 
2429   if (check != RECEIVER_MAP_CHECK) {
2430     Handle<Object> prototype(object->GetPrototype(isolate()), isolate());
2431     reg = CheckPrototypes(
2432         IC::CurrentTypeOf(prototype, isolate()),
2433         a1, holder, a1, a3, t0, name, miss);
2434   }
2435 
2436   return reg;
2437 }
2438 
2439 
2440 void CallStubCompiler::GenerateJumpFunction(Handle<Object> object,
2441                                             Register function,
2442                                             Label* miss) {
2443   ASSERT(function.is(a1));
2444   // Check that the function really is a function.
2445   GenerateFunctionCheck(function, a3, miss);
2446   PatchGlobalProxy(object);
2447   // Invoke the function.
2448   __ InvokeFunction(a1, arguments(), JUMP_FUNCTION,
2449                     NullCallWrapper(), call_kind());
2450 }
2451 
2452 
2453 Handle<Code> CallStubCompiler::CompileCallInterceptor(Handle<JSObject> object,
2454                                                       Handle<JSObject> holder,
2455                                                       Handle<Name> name) {
2456   Label miss;
2457 
2458   GenerateNameCheck(name, &miss);
2459 
2460   // Get the number of arguments.
2461   const int argc = arguments().immediate();
2462   LookupResult lookup(isolate());
2463   LookupPostInterceptor(holder, name, &lookup);
2464 
2465   // Get the receiver from the stack.
2466   __ lw(a1, MemOperand(sp, argc * kPointerSize));
2467 
2468   CallInterceptorCompiler compiler(this, arguments(), a2, extra_state());
2469   compiler.Compile(masm(), object, holder, name, &lookup, a1, a3, t0, a0,
2470                    &miss);
2471 
2472   // Move returned value, the function to call, to a1.
2473   __ mov(a1, v0);
2474   // Restore receiver.
2475   __ lw(a0, MemOperand(sp, argc * kPointerSize));
2476 
2477   GenerateJumpFunction(object, a1, &miss);
2478 
2479   HandlerFrontendFooter(&miss);
2480 
2481   // Return the generated code.
2482   return GetCode(Code::FAST, name);
2483 }
2484 
2485 
2486 Handle<Code> CallStubCompiler::CompileCallGlobal(
2487     Handle<JSObject> object,
2488     Handle<GlobalObject> holder,
2489     Handle<PropertyCell> cell,
2490     Handle<JSFunction> function,
2491     Handle<Name> name) {
2492   if (HasCustomCallGenerator(function)) {
2493     Handle<Code> code = CompileCustomCall(
2494         object, holder, cell, function, Handle<String>::cast(name),
2495         Code::NORMAL);
2496     // A null handle means bail out to the regular compiler code below.
2497     if (!code.is_null()) return code;
2498   }
2499 
2500   Label miss;
2501   HandlerFrontendHeader(object, holder, name, RECEIVER_MAP_CHECK, &miss);
2502   // This potentially loads a closure that matches the shared function
2503   // info of the function, rather than the function itself.
2504   GenerateLoadFunctionFromCell(cell, function, &miss);
2505   Counters* counters = isolate()->counters();
2506   __ IncrementCounter(counters->call_global_inline(), 1, a3, t0);
2507   GenerateJumpFunction(object, a1, function);
2508   HandlerFrontendFooter(&miss);
2509 
2510   // Return the generated code.
2511   return GetCode(Code::NORMAL, name);
2512 }
2513 
2514 
2515 Handle<Code> StoreStubCompiler::CompileStoreCallback(
2516     Handle<JSObject> object,
2517     Handle<JSObject> holder,
2518     Handle<Name> name,
2519     Handle<ExecutableAccessorInfo> callback) {
2520   HandlerFrontend(IC::CurrentTypeOf(object, isolate()),
2521                   receiver(), holder, name);
2522 
2523   // The stub is never generated for non-global objects that require access
2524   // checks.
2525   ASSERT(holder->IsJSGlobalProxy() || !holder->IsAccessCheckNeeded());
2526 
2527   __ push(receiver());  // Receiver.
2528   __ li(at, Operand(callback));  // Callback info.
2529   __ push(at);
2530   __ li(at, Operand(name));
2531   __ Push(at, value());
2532 
2533   // Do tail-call to the runtime system.
2534   ExternalReference store_callback_property =
2535       ExternalReference(IC_Utility(IC::kStoreCallbackProperty), isolate());
2536   __ TailCallExternalReference(store_callback_property, 4, 1);
2537 
2538   // Return the generated code.
2539   return GetCode(kind(), Code::FAST, name);
2540 }
2541 
2542 
2543 Handle<Code> StoreStubCompiler::CompileStoreCallback(
2544     Handle<JSObject> object,
2545     Handle<JSObject> holder,
2546     Handle<Name> name,
2547     const CallOptimization& call_optimization) {
2548   HandlerFrontend(IC::CurrentTypeOf(object, isolate()),
2549                   receiver(), holder, name);
2550 
2551   Register values[] = { value() };
2552   GenerateFastApiCall(
2553       masm(), call_optimization, receiver(), scratch3(), 1, values);
2554 
2555   // Return the generated code.
2556   return GetCode(kind(), Code::FAST, name);
2557 }
2558 
2559 
2560 #undef __
2561 #define __ ACCESS_MASM(masm)
2562 
2563 
2564 void StoreStubCompiler::GenerateStoreViaSetter(
2565     MacroAssembler* masm,
2566     Handle<JSFunction> setter) {
  // ----------- S t a t e -------------
  //  -- a0    : value
  //  -- a1    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save the value register so we can restore it later.
    __ push(a0);

    if (!setter.is_null()) {
      // Call the JavaScript setter with receiver and value on the stack.
      __ push(a1);
      __ push(a0);
      ParameterCount actual(1);
      ParameterCount expected(setter);
      __ InvokeFunction(setter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ pop(v0);
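    // The setter call consumed the receiver/value pair pushed above, so the
    // top of the stack is again the value saved on entry.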

    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> StoreStubCompiler::CompileStoreInterceptor(
    Handle<JSObject> object,
    Handle<Name> name) {
  Label miss;

  // Check that the map of the object hasn't changed.
  __ CheckMap(receiver(), scratch1(), Handle<Map>(object->map()), &miss,
              DO_SMI_CHECK);
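  // Note: DO_SMI_CHECK makes CheckMap route Smi receivers to the miss label
  // as well.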

  // Perform global security token check if needed.
  if (object->IsJSGlobalProxy()) {
    __ CheckAccessGlobalProxy(receiver(), scratch1(), &miss);
  }

  // Stub is never generated for non-global objects that require access
  // checks.
  ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());

  __ Push(receiver(), this->name(), value());
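  // Receiver, name and value are the three arguments expected by
  // kStoreInterceptorProperty below.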

  // Do tail-call to the runtime system.
  ExternalReference store_ic_property =
      ExternalReference(IC_Utility(IC::kStoreInterceptorProperty), isolate());
  __ TailCallExternalReference(store_ic_property, 3, 1);

  // Handle store cache miss.
  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Handle<Code> LoadStubCompiler::CompileLoadNonexistent(Handle<Type> type,
                                                      Handle<JSObject> last,
                                                      Handle<Name> name) {
  NonexistentHandlerFrontend(type, last, name);

  // Return undefined if the maps of the full prototype chain are still the
  // same.
  __ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
  __ Ret();

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Register* LoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { a0, a2, a3, a1, t0, t1 };
  return registers;
}


Register* KeyedLoadStubCompiler::registers() {
  // receiver, name, scratch1, scratch2, scratch3, scratch4.
  static Register registers[] = { a1, a0, a2, a3, t0, t1 };
  return registers;
}


Register* StoreStubCompiler::registers() {
  // receiver, name, value, scratch1, scratch2, scratch3.
  static Register registers[] = { a1, a2, a0, a3, t0, t1 };
  return registers;
}


Register* KeyedStoreStubCompiler::registers() {
  // receiver, name, value, scratch1, scratch2, scratch3.
  static Register registers[] = { a2, a1, a0, a3, t0, t1 };
  return registers;
}

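// The orderings above encode the MIPS IC calling convention; for example, a
// store stub receives the receiver in a1, the name in a2 and the value in
// a0, matching the register state comments in the stubs in this file.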

void KeyedLoadStubCompiler::GenerateNameCheck(Handle<Name> name,
                                              Register name_reg,
                                              Label* miss) {
  __ Branch(miss, ne, name_reg, Operand(name));
}


void KeyedStoreStubCompiler::GenerateNameCheck(Handle<Name> name,
                                               Register name_reg,
                                               Label* miss) {
  __ Branch(miss, ne, name_reg, Operand(name));
}


#undef __
#define __ ACCESS_MASM(masm)


void LoadStubCompiler::GenerateLoadViaGetter(MacroAssembler* masm,
                                             Register receiver,
                                             Handle<JSFunction> getter) {
  // ----------- S t a t e -------------
  //  -- a0    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (!getter.is_null()) {
      // Call the JavaScript getter with the receiver on the stack.
      __ push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(getter);
      __ InvokeFunction(getter, expected, actual,
                        CALL_FUNCTION, NullCallWrapper(), CALL_AS_METHOD);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}


#undef __
#define __ ACCESS_MASM(masm())


Handle<Code> LoadStubCompiler::CompileLoadGlobal(
    Handle<Type> type,
    Handle<GlobalObject> global,
    Handle<PropertyCell> cell,
    Handle<Name> name,
    bool is_dont_delete) {
  Label miss;

  HandlerFrontendHeader(type, receiver(), global, name, &miss);

  // Get the value from the cell.
  __ li(a3, Operand(cell));
  __ lw(t0, FieldMemOperand(a3, Cell::kValueOffset));

  // Check for deleted property if the property can actually be deleted.
  if (!is_dont_delete) {
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&miss, eq, t0, Operand(at));
  }

  HandlerFrontendFooter(name, &miss);

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);
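  // The mov below executes in the branch delay slot of the return, so the
  // cell value in t0 reaches v0 before control leaves the stub.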
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, t0);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}


Handle<Code> BaseLoadStoreStubCompiler::CompilePolymorphicIC(
    TypeHandleList* types,
    CodeHandleList* handlers,
    Handle<Name> name,
    Code::StubType type,
    IcCheckType check) {
  Label miss;

  if (check == PROPERTY) {
    GenerateNameCheck(name, this->name(), &miss);
  }

  Label number_case;
  Register match = scratch1();
  Label* smi_target = IncludesNumberType(types) ? &number_case : &miss;
  __ JumpIfSmi(receiver(), smi_target, match);  // Reg match is 0 if Smi.

  Register map_reg = scratch2();

  int receiver_count = types->length();
  int number_of_handled_maps = 0;
  __ lw(map_reg, FieldMemOperand(receiver(), HeapObject::kMapOffset));
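  // map_reg now caches the receiver's map for all of the compares below.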
  for (int current = 0; current < receiver_count; ++current) {
    Handle<Type> type = types->at(current);
    Handle<Map> map = IC::TypeToMap(*type, isolate());
    if (!map->is_deprecated()) {
      number_of_handled_maps++;
      // Check map and tail call if there's a match.
      // The compare is separated from the branch to give the JumpIfSmi()
      // above a landing path.
      __ Subu(match, map_reg, Operand(map));
      if (type->Is(Type::Number())) {
        ASSERT(!number_case.is_unused());
        __ bind(&number_case);
      }
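      // A Smi receiver arrives at &number_case with match still 0 (set by
      // the JumpIfSmi above), so the eq-to-zero jump below also dispatches
      // Smis to the Number handler.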
      __ Jump(handlers->at(current), RelocInfo::CODE_TARGET,
          eq, match, Operand(zero_reg));
    }
  }
  ASSERT(number_of_handled_maps != 0);

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  InlineCacheState state =
      number_of_handled_maps > 1 ? POLYMORPHIC : MONOMORPHIC;
  return GetICCode(kind(), type, name, state);
}


Handle<Code> KeyedStoreStubCompiler::CompileStorePolymorphic(
    MapHandleList* receiver_maps,
    CodeHandleList* handler_stubs,
    MapHandleList* transitioned_maps) {
  Label miss;
  __ JumpIfSmi(receiver(), &miss);

  int receiver_count = receiver_maps->length();
  __ lw(scratch1(), FieldMemOperand(receiver(), HeapObject::kMapOffset));
  for (int i = 0; i < receiver_count; ++i) {
    if (transitioned_maps->at(i).is_null()) {
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET, eq,
          scratch1(), Operand(receiver_maps->at(i)));
    } else {
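      // A non-null entry in transitioned_maps means this store changes the
      // receiver's elements kind; the handler expects its target map in
      // transition_map().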
      Label next_map;
      __ Branch(&next_map, ne, scratch1(), Operand(receiver_maps->at(i)));
      __ li(transition_map(), Operand(transitioned_maps->at(i)));
      __ Jump(handler_stubs->at(i), RelocInfo::CODE_TARGET);
      __ bind(&next_map);
    }
  }

  __ bind(&miss);
  TailCallBuiltin(masm(), MissBuiltin(kind()));

  // Return the generated code.
  return GetICCode(
      kind(), Code::NORMAL, factory()->empty_string(), POLYMORPHIC);
}


#undef __
#define __ ACCESS_MASM(masm)


void KeyedLoadStubCompiler::GenerateLoadDictionaryElement(
    MacroAssembler* masm) {
  // ---------- S t a t e --------------
  //  -- ra     : return address
  //  -- a0     : key
  //  -- a1     : receiver
  // -----------------------------------
  Label slow, miss;

  Register key = a0;
  Register receiver = a1;

  __ JumpIfNotSmi(key, &miss);
  __ lw(t0, FieldMemOperand(receiver, JSObject::kElementsOffset));
  __ sra(a2, a0, kSmiTagSize);
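  // a2 now holds the untagged integer key used to probe the number
  // dictionary.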
  __ LoadFromNumberDictionary(&slow, t0, a0, v0, a2, a3, t1);
  __ Ret();

  // Slow case, key and receiver still in a0 and a1.
  __ bind(&slow);
  __ IncrementCounter(
      masm->isolate()->counters()->keyed_load_external_array_slow(),
      1, a2, a3);
  // Entry registers are intact.
  // ---------- S t a t e --------------
  //  -- ra     : return address
  //  -- a0     : key
  //  -- a1     : receiver
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Slow);

  // Miss case, call the runtime.
  __ bind(&miss);

  // ---------- S t a t e --------------
  //  -- ra     : return address
  //  -- a0     : key
  //  -- a1     : receiver
  // -----------------------------------
  TailCallBuiltin(masm, Builtins::kKeyedLoadIC_Miss);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_MIPS