// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/ic/call-optimization.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)


void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- r0    : receiver
  //  -- r2    : name
  //  -- lr    : return address
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      // Call the JavaScript getter with the receiver on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ ldr(scratch,
               FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(expected_arguments);
      __ LoadAccessor(r1, holder, accessor_index, ACCESSOR_GETTER);
      __ InvokeFunction(r1, expected, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}


void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  // -----------------------------------
  {
    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);

    // Save value register, so we can restore it later.
    __ push(value());

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      DCHECK(!value().is(scratch));
      // Call the JavaScript setter with receiver and value on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ ldr(scratch,
               FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ Push(receiver, value());
      ParameterCount actual(1);
      ParameterCount expected(expected_arguments);
      __ LoadAccessor(r1, holder, accessor_index, ACCESSOR_SETTER);
      __ InvokeFunction(r1, expected, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ pop(r0);

    // Restore context register.
    __ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}


void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                Register slot) {
  MacroAssembler* masm = this->masm();
  __ push(vector);
  __ push(slot);
}


void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
  MacroAssembler* masm = this->masm();
  __ pop(slot);
  __ pop(vector);
}


void PropertyHandlerCompiler::DiscardVectorAndSlot() {
  MacroAssembler* masm = this->masm();
  // Remove vector and slot.
  __ add(sp, sp, Operand(2 * kPointerSize));
}


void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
    MacroAssembler* masm, Label* miss_label, Register receiver,
    Handle<Name> name, Register scratch0, Register scratch1) {
  DCHECK(name->IsUniqueName());
  DCHECK(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ tst(scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ b(ne, miss_label);

  // Check that receiver is a JSObject.
  __ ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ cmp(scratch0, Operand(FIRST_JS_RECEIVER_TYPE));
  __ b(lt, miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ cmp(map, tmp);
  __ b(ne, miss_label);

  // Restore the temporarily used register.
  __ ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

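  // Probe the dictionary-mode properties backing store for |name|: this jumps
  // to |miss_label| if the property may be present and to |done| once it is
  // known to be absent.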
  NameDictionaryLookupStub::GenerateNegativeLookup(
      masm, miss_label, &done, receiver, properties, name, scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}


void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register result, Label* miss) {
  __ LoadNativeContextSlot(index, result);
  // Load its initial map. The global functions all have initial maps.
  __ ldr(result,
         FieldMemOperand(result, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
}


void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(
    MacroAssembler* masm, Register receiver, Register scratch1,
    Register scratch2, Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ mov(r0, scratch1);
  __ Ret();
}


// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
void PropertyHandlerCompiler::GenerateCheckPropertyCell(
    MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
    Register scratch, Label* miss) {
  Handle<PropertyCell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
  DCHECK(cell->value()->IsTheHole());
  Handle<WeakCell> weak_cell = masm->isolate()->factory()->NewWeakCell(cell);
  __ LoadWeakValue(scratch, weak_cell, miss);
  __ ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
  __ cmp(scratch, ip);
  __ b(ne, miss);
}


static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
                                     Register holder, Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 1);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 2);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 3);
  __ push(name);
  __ push(receiver);
  __ push(holder);
}


static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm, Register receiver, Register holder, Register name,
    Handle<JSObject> holder_obj, Runtime::FunctionId id) {
  DCHECK(NamedLoadHandlerCompiler::kInterceptorArgsLength ==
         Runtime::FunctionForId(id)->nargs);
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallRuntime(id);
}


// Generate call to api function.
void PropertyHandlerCompiler::GenerateApiAccessorCall(
    MacroAssembler* masm, const CallOptimization& optimization,
    Handle<Map> receiver_map, Register receiver, Register scratch_in,
    bool is_store, Register store_parameter, Register accessor_holder,
    int accessor_index) {
  DCHECK(!accessor_holder.is(scratch_in));
  DCHECK(!receiver.is(scratch_in));
  __ push(receiver);
  // Write the arguments to stack frame.
  if (is_store) {
    DCHECK(!receiver.is(store_parameter));
    DCHECK(!scratch_in.is(store_parameter));
    __ push(store_parameter);
  }
  DCHECK(optimization.is_simple_api_call());

  // Abi for CallApiFunctionStub.
  Register callee = r0;
  Register data = r4;
  Register holder = r2;
  Register api_function_address = r1;

  // Put callee in place.
  __ LoadAccessor(callee, accessor_holder, accessor_index,
                  is_store ? ACCESSOR_SETTER : ACCESSOR_GETTER);

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  int holder_depth = 0;
  optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup,
                                          &holder_depth);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
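      // The holder sits |holder_depth| steps up the prototype chain: load the
      // receiver's map, then follow map->prototype links until the holder is
      // reached.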
      __ ldr(holder, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ ldr(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      for (int i = 1; i < holder_depth; i++) {
        __ ldr(holder, FieldMemOperand(holder, HeapObject::kMapOffset));
        __ ldr(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      }
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  bool call_data_undefined = false;
  // Put call data in place.
  if (api_call_info->data()->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(data, Heap::kUndefinedValueRootIndex);
  } else {
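    // Dig the call data out of the callee:
    // JSFunction -> SharedFunctionInfo -> FunctionTemplateInfo ->
    // CallHandlerInfo -> data.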
    __ ldr(data,
           FieldMemOperand(callee, JSFunction::kSharedFunctionInfoOffset));
    __ ldr(data,
           FieldMemOperand(data, SharedFunctionInfo::kFunctionDataOffset));
    __ ldr(data, FieldMemOperand(data, FunctionTemplateInfo::kCallCodeOffset));
    __ ldr(data, FieldMemOperand(data, CallHandlerInfo::kDataOffset));
  }

  if (api_call_info->fast_handler()->IsCode()) {
    // Just tail call into the fast handler if present.
    __ Jump(handle(Code::cast(api_call_info->fast_handler())),
            RelocInfo::CODE_TARGET);
    return;
  }

  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
  ExternalReference ref = ExternalReference(&fun, type, masm->isolate());
  __ mov(api_function_address, Operand(ref));

  // Jump to stub.
  CallApiAccessorStub stub(isolate, is_store, call_data_undefined);
  __ TailCallStub(&stub);
}


static void StoreIC_PushArgs(MacroAssembler* masm) {
  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
          StoreDescriptor::ValueRegister(),
          VectorStoreICDescriptor::SlotRegister(),
          VectorStoreICDescriptor::VectorRegister());
}


void NamedStoreHandlerCompiler::GenerateSlow(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kStoreIC_Slow);
}


void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
}


#undef __
#define __ ACCESS_MASM(masm())
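// From here on, the __ macro emits through the compiler's own masm() accessor
// instead of an explicit |masm| parameter.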


void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
                                                    Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ mov(this->name(), Operand(name));
  }
}


void NamedStoreHandlerCompiler::GenerateRestoreName(Handle<Name> name) {
  __ mov(this->name(), Operand(name));
}


void NamedStoreHandlerCompiler::RearrangeVectorAndSlot(
    Register current_map, Register destination_map) {
  DCHECK(false);  // Not implemented.
}


void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                   Register map_reg,
                                                   Register scratch,
                                                   Label* miss) {
  Handle<WeakCell> cell = Map::WeakCellForMap(transition);
  DCHECK(!map_reg.is(scratch));
  __ LoadWeakValue(map_reg, cell, miss);
  if (transition->CanBeDeprecated()) {
    __ ldr(scratch, FieldMemOperand(map_reg, Map::kBitField3Offset));
    __ tst(scratch, Operand(Map::Deprecated::kMask));
    __ b(ne, miss);
  }
}


void NamedStoreHandlerCompiler::GenerateConstantCheck(Register map_reg,
                                                      int descriptor,
                                                      Register value_reg,
                                                      Register scratch,
                                                      Label* miss_label) {
  DCHECK(!map_reg.is(scratch));
  DCHECK(!map_reg.is(value_reg));
  DCHECK(!value_reg.is(scratch));
  __ LoadInstanceDescriptors(map_reg, scratch);
  __ ldr(scratch,
         FieldMemOperand(scratch, DescriptorArray::GetValueOffset(descriptor)));
  __ cmp(value_reg, scratch);
  __ b(ne, miss_label);
}


void NamedStoreHandlerCompiler::GenerateFieldTypeChecks(HeapType* field_type,
                                                        Register value_reg,
                                                        Label* miss_label) {
  Register map_reg = scratch1();
  Register scratch = scratch2();
  DCHECK(!value_reg.is(map_reg));
  DCHECK(!value_reg.is(scratch));
  __ JumpIfSmi(value_reg, miss_label);
  HeapType::Iterator<Map> it = field_type->Classes();
  if (!it.Done()) {
    __ ldr(map_reg, FieldMemOperand(value_reg, HeapObject::kMapOffset));
    Label do_store;
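    // Compare the value's map against each allowed field class: matching any
    // class ends up at |do_store|, while failing the final comparison jumps
    // to |miss_label|.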
    while (true) {
      __ CmpWeakValue(map_reg, Map::WeakCellForMap(it.Current()), scratch);
      it.Advance();
      if (it.Done()) {
        __ b(ne, miss_label);
        break;
      }
      __ b(eq, &do_store);
    }
    __ bind(&do_store);
  }
}


Register PropertyHandlerCompiler::CheckPrototypes(
    Register object_reg, Register holder_reg, Register scratch1,
    Register scratch2, Handle<Name> name, Label* miss, PrototypeCheckType check,
    ReturnHolder return_what) {
  Handle<Map> receiver_map = map();

  // Make sure there's no overlap between holder and object registers.
  DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
         !scratch2.is(scratch1));

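  // With prototype chain checks eliminated, a single validity cell guards the
  // receiver map's whole prototype chain; if it no longer holds
  // kPrototypeChainValid, the handler must miss.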
  if (FLAG_eliminate_prototype_chain_checks) {
    Handle<Cell> validity_cell =
        Map::GetOrCreatePrototypeChainValidityCell(receiver_map, isolate());
    if (!validity_cell.is_null()) {
      DCHECK_EQ(Smi::FromInt(Map::kPrototypeChainValid),
                validity_cell->value());
      __ mov(scratch1, Operand(validity_cell));
      __ ldr(scratch1, FieldMemOperand(scratch1, Cell::kValueOffset));
      __ cmp(scratch1, Operand(Smi::FromInt(Map::kPrototypeChainValid)));
      __ b(ne, miss);
    }

    // The prototype chain of primitives (and their JSValue wrappers) depends
    // on the native context, which can't be guarded by validity cells.
    // |object_reg| holds the native context specific prototype in this case;
    // we need to check its map.
    if (check == CHECK_ALL_MAPS) {
      __ ldr(scratch1, FieldMemOperand(object_reg, HeapObject::kMapOffset));
      Handle<WeakCell> cell = Map::WeakCellForMap(receiver_map);
      __ CmpWeakValue(scratch1, cell, scratch2);
      __ b(ne, miss);
    }
  }

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (receiver_map->IsJSGlobalObjectMap()) {
    current = isolate()->global_object();
  }

  // Check access rights to the global object.  This has to happen after
  // the map check so that we know that the object is actually a global
  // object.
  // This allows us to install generated handlers for accesses to the
  // global proxy (as opposed to using slow ICs). See corresponding code
  // in LookupForRead().
  if (receiver_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch2, miss);
  }

  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder()->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    DCHECK(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap()) {
      DCHECK(!current_map->IsJSGlobalProxyMap());  // Proxy maps are fast.
      if (!name->IsUniqueName()) {
        DCHECK(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      DCHECK(current.is_null() ||
             current->property_dictionary()->FindEntry(name) ==
                 NameDictionary::kNotFound);

      if (FLAG_eliminate_prototype_chain_checks && depth > 1) {
        // TODO(jkummerow): Cache and re-use weak cell.
        __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
      }
      GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
                                       scratch2);
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
        __ ldr(holder_reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
      }
    } else {
      Register map_reg = scratch1;
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ ldr(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
      }
      if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
                                  name, scratch2, miss);
      } else if (!FLAG_eliminate_prototype_chain_checks &&
                 (depth != 1 || check == CHECK_ALL_MAPS)) {
        Handle<WeakCell> cell = Map::WeakCellForMap(current_map);
        __ CmpWeakValue(map_reg, cell, scratch2);
        __ b(ne, miss);
      }
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ ldr(holder_reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
      }
    }

    reg = holder_reg;  // From now on the object will be in holder_reg.
    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  DCHECK(!current_map->IsJSGlobalProxyMap());

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (!FLAG_eliminate_prototype_chain_checks &&
      (depth != 0 || check == CHECK_ALL_MAPS)) {
    // Check the holder map.
    __ ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
    Handle<WeakCell> cell = Map::WeakCellForMap(current_map);
    __ CmpWeakValue(scratch1, cell, scratch2);
    __ b(ne, miss);
  }

  bool return_holder = return_what == RETURN_HOLDER;
  if (FLAG_eliminate_prototype_chain_checks && return_holder && depth != 0) {
    __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
  }

  // Return the register containing the holder.
  return return_holder ? reg : no_reg;
}


void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ b(&success);
    __ bind(miss);
    if (IC::ICUseVector(kind())) {
      DCHECK(kind() == Code::LOAD_IC);
      PopVectorAndSlot();
    }
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ b(&success);
    GenerateRestoreName(miss, name);
    if (IC::ICUseVector(kind())) PopVectorAndSlot();
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ Move(r0, value);
  __ Ret();
}


void NamedLoadHandlerCompiler::GenerateLoadCallback(
    Register reg, Handle<ExecutableAccessorInfo> callback) {
  // Build AccessorInfo::args_ list on the stack and push property name below
  // the exit frame to make GC aware of them and store pointers to them.
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
  DCHECK(!scratch2().is(reg));
  DCHECK(!scratch3().is(reg));
  DCHECK(!scratch4().is(reg));
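  // Push the PropertyCallbackArguments fields in reverse index order
  // (kThisIndex first), so that sp ends up pointing at the kHolderIndex slot
  // and can be used as args_ below.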
  __ push(receiver());
  // Push data from ExecutableAccessorInfo.
  Handle<Object> data(callback->data(), isolate());
  if (data->IsUndefined() || data->IsSmi()) {
    __ Move(scratch3(), data);
  } else {
    Handle<WeakCell> cell =
        isolate()->factory()->NewWeakCell(Handle<HeapObject>::cast(data));
    // The callback is alive if this instruction is executed,
    // so the weak cell is not cleared and points to data.
    __ GetWeakValue(scratch3(), cell);
  }
  __ push(scratch3());
  __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex);
  __ mov(scratch4(), scratch3());
  __ Push(scratch3(), scratch4());
  __ mov(scratch4(), Operand(ExternalReference::isolate_address(isolate())));
  __ Push(scratch4(), reg);
  __ mov(scratch2(), sp);  // scratch2 = PropertyAccessorInfo::args_
  __ push(name());

  // Abi for CallApiGetter
  Register getter_address_reg = ApiGetterDescriptor::function_address();

  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
  ExternalReference ref = ExternalReference(&fun, type, isolate());
  __ mov(getter_address_reg, Operand(ref));

  CallApiGetterStub stub(isolate());
  __ TailCallStub(&stub);
}


void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup(
    LookupIterator* it, Register holder_reg) {
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());

  // Compile the interceptor call, followed by inline code to load the
  // property from further up the prototype chain if the call fails.
  // Check that the maps haven't changed.
  DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

  // Preserve the receiver register explicitly whenever it is different from the
  // holder and it is needed should the interceptor return without any result.
  // The ACCESSOR case needs the receiver to be passed into C++ code; the FIELD
  // case might cause a miss during the prototype check.
  bool must_perform_prototype_check =
      !holder().is_identical_to(it->GetHolder<JSObject>());
  bool must_preserve_receiver_reg =
      !receiver().is(holder_reg) &&
      (it->state() == LookupIterator::ACCESSOR || must_perform_prototype_check);

  // Save necessary data before invoking an interceptor.
  // Requires a frame to make GC aware of pushed pointers.
  {
    FrameAndConstantPoolScope frame_scope(masm(), StackFrame::INTERNAL);
    if (must_preserve_receiver_reg) {
      __ Push(receiver(), holder_reg, this->name());
    } else {
      __ Push(holder_reg, this->name());
    }
    InterceptorVectorSlotPush(holder_reg);
    // Invoke an interceptor.  Note: map checks from the receiver to the
    // interceptor's holder have been compiled before (see a caller of this
    // method).
    CompileCallLoadPropertyWithInterceptor(
        masm(), receiver(), holder_reg, this->name(), holder(),
        Runtime::kLoadPropertyWithInterceptorOnly);

    // Check whether the interceptor provided a value for the property.  If it
    // did, return immediately.
    Label interceptor_failed;
    __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
    __ cmp(r0, scratch1());
    __ b(eq, &interceptor_failed);
    frame_scope.GenerateLeaveFrame();
    __ Ret();

    __ bind(&interceptor_failed);
    InterceptorVectorSlotPop(holder_reg);
    __ pop(this->name());
    __ pop(holder_reg);
    if (must_preserve_receiver_reg) {
      __ pop(receiver());
    }
    // Leave the internal frame.
  }

  GenerateLoadPostInterceptor(it, holder_reg);
}


void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) {
  // Call the runtime system to load the interceptor.
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
  PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(),
                           holder());

  __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
}


Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
    Handle<JSObject> object, Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  Register holder_reg = Frontend(name);

  __ push(receiver());  // receiver
  __ push(holder_reg);

  // If the callback cannot leak, then push the callback directly,
  // otherwise wrap it in a weak cell.
  if (callback->data()->IsUndefined() || callback->data()->IsSmi()) {
    __ mov(ip, Operand(callback));
  } else {
    Handle<WeakCell> cell = isolate()->factory()->NewWeakCell(callback);
    __ mov(ip, Operand(cell));
  }
  __ push(ip);
  __ mov(ip, Operand(name));
  __ Push(ip, value());

  // Do tail-call to the runtime system.
  __ TailCallRuntime(Runtime::kStoreCallbackProperty);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Handle<Code> NamedStoreHandlerCompiler::CompileStoreInterceptor(
    Handle<Name> name) {
  __ Push(receiver(), this->name(), value());

  // Do tail-call to the runtime system.
  __ TailCallRuntime(Runtime::kStorePropertyWithInterceptor);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Register NamedStoreHandlerCompiler::value() {
  return StoreDescriptor::ValueRegister();
}


Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
    Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
  Label miss;
  if (IC::ICUseVector(kind())) {
    PushVectorAndSlot();
  }
  FrontendHeader(receiver(), name, &miss, DONT_RETURN_ANYTHING);

  // Get the value from the cell.
  Register result = StoreDescriptor::ValueRegister();
  Handle<WeakCell> weak_cell = factory()->NewWeakCell(cell);
  __ LoadWeakValue(result, weak_cell, &miss);
  __ ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (is_configurable) {
    __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
    __ cmp(result, ip);
    __ b(eq, &miss);
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3);
  if (IC::ICUseVector(kind())) {
    DiscardVectorAndSlot();
  }
  __ Ret();

  FrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}


#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM