// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS

#include "src/ic/handler-compiler.h"

#include "src/api-arguments.h"
#include "src/field-type.h"
#include "src/ic/call-optimization.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- a0    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save context register
    __ push(cp);

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      // Call the JavaScript getter with the receiver on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ lw(scratch,
              FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ push(receiver);
      __ LoadAccessor(a1, holder, accessor_index, ACCESSOR_GETTER);
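      // The CallFunction builtin expects the callee in a1 and the argument
      // count in a0; a JavaScript getter is invoked with zero arguments.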
      __ li(a0, Operand(0));
      __ Call(masm->isolate()->builtins()->CallFunction(
                  ConvertReceiverMode::kNotNullOrUndefined),
              RelocInfo::CODE_TARGET);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ pop(cp);
  }
  __ Ret();
}


void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- ra    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save context and value registers, so we can restore them later.
    __ Push(cp, value());

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      DCHECK(!value().is(scratch));
      // Call the JavaScript setter with receiver and value on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ lw(scratch,
              FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ Push(receiver, value());
      __ LoadAccessor(a1, holder, accessor_index, ACCESSOR_SETTER);
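      // The setter is likewise called through the CallFunction builtin; it
      // receives a single argument (the value to store), so a0 is set to 1.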
      __ li(a0, Operand(1));
      __ Call(masm->isolate()->builtins()->CallFunction(
                  ConvertReceiverMode::kNotNullOrUndefined),
              RelocInfo::CODE_TARGET);
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    // Restore context register.
    __ Pop(cp, v0);
  }
  __ Ret();
}


void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                Register slot) {
  MacroAssembler* masm = this->masm();
  __ Push(vector, slot);
}


void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
  MacroAssembler* masm = this->masm();
  __ Pop(vector, slot);
}


void PropertyHandlerCompiler::DiscardVectorAndSlot() {
  MacroAssembler* masm = this->masm();
  // Remove vector and slot.
  __ Addu(sp, sp, Operand(2 * kPointerSize));
}


void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
    MacroAssembler* masm, Label* miss_label, Register receiver,
    Handle<Name> name, Register scratch0, Register scratch1) {
  DCHECK(name->IsUniqueName());
  DCHECK(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ Branch(miss_label, ne, scratch0, Operand(zero_reg));

  // Check that receiver is a JSObject.
  __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Branch(miss_label, lt, scratch0, Operand(FIRST_JS_RECEIVER_TYPE));

  // Load properties array.
  Register properties = scratch0;
  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ lw(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ Branch(miss_label, ne, map, Operand(tmp));

  // Restore the temporarily used register.
  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));


  NameDictionaryLookupStub::GenerateNegativeLookup(
      masm, miss_label, &done, receiver, properties, name, scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}


void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register result, Label* miss) {
  __ LoadNativeContextSlot(index, result);
  // Load its initial map. The global functions all have initial maps.
  __ lw(result,
        FieldMemOperand(result, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ lw(result, FieldMemOperand(result, Map::kPrototypeOffset));
}


void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(
    MacroAssembler* masm, Register receiver, Register scratch1,
    Register scratch2, Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, scratch1);
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
void PropertyHandlerCompiler::GenerateCheckPropertyCell(
    MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
    Register scratch, Label* miss) {
  Handle<PropertyCell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
  Isolate* isolate = masm->isolate();
  DCHECK(cell->value()->IsTheHole(isolate));
  Handle<WeakCell> weak_cell = isolate->factory()->NewWeakCell(cell);
  __ LoadWeakValue(scratch, weak_cell, miss);
  __ lw(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  __ Branch(miss, ne, scratch, Operand(at));
}


static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
                                     Register holder, Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 1);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 2);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 3);
  __ Push(name, receiver, holder);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm, Register receiver, Register holder, Register name,
    Handle<JSObject> holder_obj, Runtime::FunctionId id) {
  DCHECK(NamedLoadHandlerCompiler::kInterceptorArgsLength ==
         Runtime::FunctionForId(id)->nargs);
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallRuntime(id);
}


// Generate call to api function.
void PropertyHandlerCompiler::GenerateApiAccessorCall(
    MacroAssembler* masm, const CallOptimization& optimization,
    Handle<Map> receiver_map, Register receiver, Register scratch_in,
    bool is_store, Register store_parameter, Register accessor_holder,
    int accessor_index) {
  DCHECK(!accessor_holder.is(scratch_in));
  DCHECK(!receiver.is(scratch_in));
  __ push(receiver);
  // Write the arguments to stack frame.
  if (is_store) {
    DCHECK(!receiver.is(store_parameter));
    DCHECK(!scratch_in.is(store_parameter));
    __ push(store_parameter);
  }
  DCHECK(optimization.is_simple_api_call());

  // Abi for CallApiCallbackStub.
  Register callee = a0;
  Register data = t0;
  Register holder = a2;
  Register api_function_address = a1;

  // Put callee in place.
  __ LoadAccessor(callee, accessor_holder, accessor_index,
                  is_store ? ACCESSOR_SETTER : ACCESSOR_GETTER);

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  int holder_depth = 0;
  optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup,
                                          &holder_depth);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ lw(holder, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ lw(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      for (int i = 1; i < holder_depth; i++) {
        __ lw(holder, FieldMemOperand(holder, HeapObject::kMapOffset));
        __ lw(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      }
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  bool call_data_undefined = false;
  // Put call data in place.
  if (api_call_info->data()->IsUndefined(isolate)) {
    call_data_undefined = true;
    __ LoadRoot(data, Heap::kUndefinedValueRootIndex);
  } else {
    if (optimization.is_constant_call()) {
      __ lw(data,
            FieldMemOperand(callee, JSFunction::kSharedFunctionInfoOffset));
      __ lw(data,
            FieldMemOperand(data, SharedFunctionInfo::kFunctionDataOffset));
      __ lw(data, FieldMemOperand(data, FunctionTemplateInfo::kCallCodeOffset));
    } else {
      __ lw(data,
            FieldMemOperand(callee, FunctionTemplateInfo::kCallCodeOffset));
    }
    __ lw(data, FieldMemOperand(data, CallHandlerInfo::kDataOffset));
  }

  if (api_call_info->fast_handler()->IsCode()) {
    // Just tail call into the fast handler if present.
    __ Jump(handle(Code::cast(api_call_info->fast_handler())),
            RelocInfo::CODE_TARGET);
    return;
  }
  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
  ExternalReference ref = ExternalReference(&fun, type, masm->isolate());
  __ li(api_function_address, Operand(ref));

  // Jump to stub.
  CallApiCallbackStub stub(isolate, is_store, call_data_undefined,
                           !optimization.is_constant_call());
  __ TailCallStub(&stub);
}


static void StoreIC_PushArgs(MacroAssembler* masm) {
  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
          StoreDescriptor::ValueRegister(),
          VectorStoreICDescriptor::SlotRegister(),
          VectorStoreICDescriptor::VectorRegister());
}


void NamedStoreHandlerCompiler::GenerateSlow(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kStoreIC_Slow);
}


void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
}


#undef __
#define __ ACCESS_MASM(masm())


void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
                                                    Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ li(this->name(), Operand(name));
  }
}


void NamedStoreHandlerCompiler::GenerateRestoreName(Handle<Name> name) {
  __ li(this->name(), Operand(name));
}


void NamedStoreHandlerCompiler::RearrangeVectorAndSlot(
    Register current_map, Register destination_map) {
  DCHECK(false);  // Not implemented.
}


void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                   Register map_reg,
                                                   Register scratch,
                                                   Label* miss) {
  Handle<WeakCell> cell = Map::WeakCellForMap(transition);
  DCHECK(!map_reg.is(scratch));
  __ LoadWeakValue(map_reg, cell, miss);
  if (transition->CanBeDeprecated()) {
    __ lw(scratch, FieldMemOperand(map_reg, Map::kBitField3Offset));
    __ And(at, scratch, Operand(Map::Deprecated::kMask));
    __ Branch(miss, ne, at, Operand(zero_reg));
  }
}


void NamedStoreHandlerCompiler::GenerateConstantCheck(Register map_reg,
                                                      int descriptor,
                                                      Register value_reg,
                                                      Register scratch,
                                                      Label* miss_label) {
  DCHECK(!map_reg.is(scratch));
  DCHECK(!map_reg.is(value_reg));
  DCHECK(!value_reg.is(scratch));
  __ LoadInstanceDescriptors(map_reg, scratch);
  __ lw(scratch,
        FieldMemOperand(scratch, DescriptorArray::GetValueOffset(descriptor)));
  __ Branch(miss_label, ne, value_reg, Operand(scratch));
}

void NamedStoreHandlerCompiler::GenerateFieldTypeChecks(FieldType* field_type,
                                                        Register value_reg,
                                                        Label* miss_label) {
  Register map_reg = scratch1();
  Register scratch = scratch2();
  DCHECK(!value_reg.is(map_reg));
  DCHECK(!value_reg.is(scratch));
  __ JumpIfSmi(value_reg, miss_label);
  if (field_type->IsClass()) {
    __ lw(map_reg, FieldMemOperand(value_reg, HeapObject::kMapOffset));
    // Compare map directly within the Branch() functions.
    __ GetWeakValue(scratch, Map::WeakCellForMap(field_type->AsClass()));
    __ Branch(miss_label, ne, map_reg, Operand(scratch));
  }
}


Register PropertyHandlerCompiler::CheckPrototypes(
    Register object_reg, Register holder_reg, Register scratch1,
    Register scratch2, Handle<Name> name, Label* miss, PrototypeCheckType check,
    ReturnHolder return_what) {
  Handle<Map> receiver_map = map();

  // Make sure there's no overlap between holder and object registers.
  DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
         !scratch2.is(scratch1));

  Handle<Cell> validity_cell =
      Map::GetOrCreatePrototypeChainValidityCell(receiver_map, isolate());
  if (!validity_cell.is_null()) {
    DCHECK_EQ(Smi::FromInt(Map::kPrototypeChainValid), validity_cell->value());
    __ li(scratch1, Operand(validity_cell));
    __ lw(scratch1, FieldMemOperand(scratch1, Cell::kValueOffset));
    __ Branch(miss, ne, scratch1,
              Operand(Smi::FromInt(Map::kPrototypeChainValid)));
  }

  // The prototype chain of primitives (and their JSValue wrappers) depends
  // on the native context, which can't be guarded by validity cells.
  // |object_reg| holds the native context specific prototype in this case;
  // we need to check its map.
  if (check == CHECK_ALL_MAPS) {
    __ lw(scratch1, FieldMemOperand(object_reg, HeapObject::kMapOffset));
    Handle<WeakCell> cell = Map::WeakCellForMap(receiver_map);
    __ GetWeakValue(scratch2, cell);
    __ Branch(miss, ne, scratch1, Operand(scratch2));
  }

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (receiver_map->IsJSGlobalObjectMap()) {
    current = isolate()->global_object();
  }

  // Check access rights to the global object.  This has to happen after
  // the map check so that we know that the object is actually a global
  // object.
  // This allows us to install generated handlers for accesses to the
  // global proxy (as opposed to using slow ICs). See corresponding code
  // in LookupForRead().
  if (receiver_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch2, miss);
  }

  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder()->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    DCHECK(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->IsJSGlobalObjectMap()) {
      GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
                                name, scratch2, miss);
    } else if (current_map->is_dictionary_map()) {
      DCHECK(!current_map->IsJSGlobalProxyMap());  // Proxy maps are fast.
      if (!name->IsUniqueName()) {
        DCHECK(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      DCHECK(current.is_null() ||
             current->property_dictionary()->FindEntry(name) ==
                 NameDictionary::kNotFound);

      if (depth > 1) {
        // TODO(jkummerow): Cache and re-use weak cell.
        __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
      }
      GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
                                       scratch2);
    }

    reg = holder_reg;  // From now on the object will be in holder_reg.
    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  DCHECK(!current_map->IsJSGlobalProxyMap());

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  bool return_holder = return_what == RETURN_HOLDER;
  if (return_holder && depth != 0) {
    __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
  }

  // Return the register containing the holder.
  return return_holder ? reg : no_reg;
}


void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ Branch(&success);
    __ bind(miss);
    if (IC::ICUseVector(kind())) {
      DCHECK(kind() == Code::LOAD_IC);
      PopVectorAndSlot();
    }
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ Branch(&success);
    GenerateRestoreName(miss, name);
    if (IC::ICUseVector(kind())) PopVectorAndSlot();
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ li(v0, value);
  __ Ret();
}


void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup(
    LookupIterator* it, Register holder_reg) {
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined(isolate()));

  // Compile the interceptor call, followed by inline code to load the
  // property from further up the prototype chain if the call fails.
  // Check that the maps haven't changed.
  DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

  // Preserve the receiver register explicitly whenever it is different from the
  // holder and it is needed should the interceptor return without any result.
  // The ACCESSOR case needs the receiver to be passed into C++ code, the FIELD
  // case might cause a miss during the prototype check.
  bool must_perform_prototype_check =
      !holder().is_identical_to(it->GetHolder<JSObject>());
  bool must_preserve_receiver_reg =
      !receiver().is(holder_reg) &&
      (it->state() == LookupIterator::ACCESSOR || must_perform_prototype_check);

  // Save necessary data before invoking an interceptor.
  // Requires a frame to make GC aware of pushed pointers.
  {
    FrameScope frame_scope(masm(), StackFrame::INTERNAL);
    if (must_preserve_receiver_reg) {
      __ Push(receiver(), holder_reg, this->name());
    } else {
      __ Push(holder_reg, this->name());
    }
    InterceptorVectorSlotPush(holder_reg);
    // Invoke an interceptor.  Note: map checks from receiver to
    // interceptor's holder has been compiled before (see a caller
    // of this method).
    CompileCallLoadPropertyWithInterceptor(
        masm(), receiver(), holder_reg, this->name(), holder(),
        Runtime::kLoadPropertyWithInterceptorOnly);

    // Check if interceptor provided a value for property.  If it's
    // the case, return immediately.
    Label interceptor_failed;
    __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
    __ Branch(&interceptor_failed, eq, v0, Operand(scratch1()));
    frame_scope.GenerateLeaveFrame();
    __ Ret();

    __ bind(&interceptor_failed);
    InterceptorVectorSlotPop(holder_reg);
    if (must_preserve_receiver_reg) {
      __ Pop(receiver(), holder_reg, this->name());
    } else {
      __ Pop(holder_reg, this->name());
    }
    // Leave the internal frame.
  }

  GenerateLoadPostInterceptor(it, holder_reg);
}


void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) {
  // Call the runtime system to load the interceptor.
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined(isolate()));
  PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(),
                           holder());

  __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
}


Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
    Handle<JSObject> object, Handle<Name> name, Handle<AccessorInfo> callback,
    LanguageMode language_mode) {
  Register holder_reg = Frontend(name);

  __ Push(receiver(), holder_reg);  // Receiver.
  // If the callback cannot leak, then push the callback directly,
  // otherwise wrap it in a weak cell.
  if (callback->data()->IsUndefined(isolate()) || callback->data()->IsSmi()) {
    __ li(at, Operand(callback));
  } else {
    Handle<WeakCell> cell = isolate()->factory()->NewWeakCell(callback);
    __ li(at, Operand(cell));
  }
  __ push(at);
  __ li(at, Operand(name));
  __ Push(at, value());
  __ Push(Smi::FromInt(language_mode));

  // Do tail-call to the runtime system.
  __ TailCallRuntime(Runtime::kStoreCallbackProperty);

  // Return the generated code.
  return GetCode(kind(), name);
}


Register NamedStoreHandlerCompiler::value() {
  return StoreDescriptor::ValueRegister();
}


Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
    Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
  Label miss;
  if (IC::ICUseVector(kind())) {
    PushVectorAndSlot();
  }

  FrontendHeader(receiver(), name, &miss, DONT_RETURN_ANYTHING);

  // Get the value from the cell.
  Register result = StoreDescriptor::ValueRegister();
  Handle<WeakCell> weak_cell = factory()->NewWeakCell(cell);
  __ LoadWeakValue(result, weak_cell, &miss);
  __ lw(result, FieldMemOperand(result, PropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (is_configurable) {
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&miss, eq, result, Operand(at));
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->ic_named_load_global_stub(), 1, a1, a3);
  if (IC::ICUseVector(kind())) {
    DiscardVectorAndSlot();
  }
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, result);

  FrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), name);
}


#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS