1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #if V8_TARGET_ARCH_ARM64
6
7 #include "src/ic/call-optimization.h"
8 #include "src/ic/handler-compiler.h"
9 #include "src/ic/ic.h"
10 #include "src/isolate-inl.h"
11
12 namespace v8 {
13 namespace internal {
14
15 #define __ ACCESS_MASM(masm)
16
// Saves the type-feedback vector and slot registers on the stack so the
// handler can clobber them; restored later via PopVectorAndSlot (which pops
// in the reverse order) or dropped via DiscardVectorAndSlot.
void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                Register slot) {
  MacroAssembler* masm = this->masm();
  __ Push(vector);
  __ Push(slot);
}
23
24
// Restores the slot and vector registers pushed by PushVectorAndSlot.
// Pop order mirrors the push order (slot was pushed last, so it is popped
// first).
void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
  MacroAssembler* masm = this->masm();
  __ Pop(slot);
  __ Pop(vector);
}
30
31
// Drops the two stack slots occupied by the saved vector and slot without
// restoring them into registers (used on paths that no longer need them).
void PropertyHandlerCompiler::DiscardVectorAndSlot() {
  MacroAssembler* masm = this->masm();
  // Remove vector and slot.
  __ Drop(2);
}
37
38
// Emits code proving that |name| is NOT present on |receiver|'s
// dictionary-mode property backing store; jumps to |miss_label| whenever
// absence cannot be guaranteed. Clobbers scratch0 and scratch1.
void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
    MacroAssembler* masm, Label* miss_label, Register receiver,
    Handle<Name> name, Register scratch0, Register scratch1) {
  DCHECK(!AreAliased(receiver, scratch0, scratch1));
  DCHECK(name->IsUniqueName());
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  // Pessimistically count a miss too; decremented below on success.
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ Ldr(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ Ldrb(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ Tst(scratch0, kInterceptorOrAccessCheckNeededMask);
  __ B(ne, miss_label);

  // Check that receiver is a JSObject.
  __ Ldrb(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Cmp(scratch0, FIRST_JS_RECEIVER_TYPE);
  __ B(lt, miss_label);

  // Load properties array.
  Register properties = scratch0;
  __ Ldr(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary: its map must be the
  // hash-table map (fast-mode properties would take the miss path).
  __ Ldr(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  __ JumpIfNotRoot(map, Heap::kHashTableMapRootIndex, miss_label);

  // Probe the dictionary; jumps to |miss_label| if the name IS found,
  // falls through to |done| when provably absent.
  NameDictionaryLookupStub::GenerateNegativeLookup(
      masm, miss_label, &done, receiver, properties, name, scratch1);
  __ Bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}
77
78
// Loads into |result| the prototype object of the global (native-context)
// function stored at native context slot |index|. |miss| is currently
// unused on this path; the initial-map load is unconditional.
void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register result, Label* miss) {
  __ LoadNativeContextSlot(index, result);
  // Load its initial map. The global functions all have initial maps.
  __ Ldr(result,
         FieldMemOperand(result, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ Ldr(result, FieldMemOperand(result, Map::kPrototypeOffset));
}
88
89
// Loads the "prototype" property of the JSFunction in |receiver| and
// returns it in x0 (the load-IC result register). Jumps to |miss_label|
// if the receiver has no usable prototype.
void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(
    MacroAssembler* masm, Register receiver, Register scratch1,
    Register scratch2, Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  // TryGetFunctionPrototype can't put the result directly in x0 because the
  // 3 inputs registers can't alias and we call this function from
  // LoadIC::GenerateFunctionPrototype, where receiver is x0. So we explicitly
  // move the result in x0.
  __ Mov(x0, scratch1);
  __ Ret();
}
101
102
103 // Generate code to check that a global property cell is empty. Create
104 // the property cell at compilation time if no cell exists for the
105 // property.
// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
// Jumps to |miss| if the cell's weak reference was cleared or the cell
// now holds a real value (i.e. the property came into existence).
// Clobbers |scratch|.
void PropertyHandlerCompiler::GenerateCheckPropertyCell(
    MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
    Register scratch, Label* miss) {
  Handle<PropertyCell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
  // At compile time the cell must be empty (hole), otherwise this negative
  // check would be wrong from the start.
  DCHECK(cell->value()->IsTheHole());
  Handle<WeakCell> weak_cell = masm->isolate()->factory()->NewWeakCell(cell);
  __ LoadWeakValue(scratch, weak_cell, miss);
  __ Ldr(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  __ JumpIfNotRoot(scratch, Heap::kTheHoleValueRootIndex, miss);
}
116
117
// Pushes the argument list expected by the interceptor runtime entries
// (name, receiver, holder). The STATIC_ASSERTs pin the stack layout to the
// kInterceptorArgs* indices declared on NamedLoadHandlerCompiler.
static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
                                     Register holder, Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 1);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 2);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 3);

  __ Push(name, receiver, holder);
}
128
129
// Pushes the interceptor arguments and calls the runtime function |id|
// (one of the LoadPropertyWithInterceptor* entries). The DCHECK verifies
// the runtime function consumes exactly the pushed argument count.
static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm, Register receiver, Register holder, Register name,
    Handle<JSObject> holder_obj, Runtime::FunctionId id) {
  DCHECK(NamedLoadHandlerCompiler::kInterceptorArgsLength ==
         Runtime::FunctionForId(id)->nargs);
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallRuntime(id);
}
138
139
140 // Generate call to api function.
// Generate call to api function.
// Emits a tail-call to a simple API accessor (getter or setter, selected by
// |is_store|). Pushes the receiver (and, for stores, the value in
// |store_parameter|), materializes the CallApiAccessorStub ABI registers
// (callee/data/holder/api_function_address), then tail-calls either the
// accessor's fast handler code (if present) or CallApiAccessorStub.
void PropertyHandlerCompiler::GenerateApiAccessorCall(
    MacroAssembler* masm, const CallOptimization& optimization,
    Handle<Map> receiver_map, Register receiver, Register scratch,
    bool is_store, Register store_parameter, Register accessor_holder,
    int accessor_index) {
  DCHECK(!AreAliased(accessor_holder, scratch));
  DCHECK(!AreAliased(receiver, scratch));

  // Batch the pushes so receiver (and value, for stores) go out in a single
  // stack adjustment.
  MacroAssembler::PushPopQueue queue(masm);
  queue.Queue(receiver);
  // Write the arguments to the stack frame.
  if (is_store) {
    DCHECK(!receiver.is(store_parameter));
    DCHECK(!scratch.is(store_parameter));
    queue.Queue(store_parameter);
  }
  queue.PushQueued();

  DCHECK(optimization.is_simple_api_call());

  // Abi for CallApiFunctionStub.
  Register callee = x0;
  Register data = x4;
  Register holder = x2;
  Register api_function_address = x1;

  // Put callee in place.
  __ LoadAccessor(callee, accessor_holder, accessor_index,
                  is_store ? ACCESSOR_SETTER : ACCESSOR_GETTER);

  // Put holder in place: either the receiver itself, or an object found by
  // walking |holder_depth| steps up the receiver's prototype chain.
  CallOptimization::HolderLookup holder_lookup;
  int holder_depth = 0;
  optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup,
                                          &holder_depth);
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Mov(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ Ldr(holder, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ Ldr(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      // First hop done above, so the loop starts at i = 1.
      for (int i = 1; i < holder_depth; i++) {
        __ Ldr(holder, FieldMemOperand(holder, HeapObject::kMapOffset));
        __ Ldr(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      }
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  bool call_data_undefined = false;
  // Put call data in place. If the embedder supplied no data, pass
  // undefined; otherwise dig the CallHandlerInfo data out of the callee's
  // function template at runtime.
  if (api_call_info->data()->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(data, Heap::kUndefinedValueRootIndex);
  } else {
    __ Ldr(data,
           FieldMemOperand(callee, JSFunction::kSharedFunctionInfoOffset));
    __ Ldr(data,
           FieldMemOperand(data, SharedFunctionInfo::kFunctionDataOffset));
    __ Ldr(data, FieldMemOperand(data, FunctionTemplateInfo::kCallCodeOffset));
    __ Ldr(data, FieldMemOperand(data, CallHandlerInfo::kDataOffset));
  }

  if (api_call_info->fast_handler()->IsCode()) {
    // Just tail call into the fast handler if present.
    __ Jump(handle(Code::cast(api_call_info->fast_handler())),
            RelocInfo::CODE_TARGET);
    return;
  }

  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference ref = ExternalReference(
      &fun, ExternalReference::DIRECT_API_CALL, masm->isolate());
  __ Mov(api_function_address, ref);

  // Jump to stub.
  CallApiAccessorStub stub(isolate, is_store, call_data_undefined);
  __ TailCallStub(&stub);
}
227
228
// Emits the trampoline that invokes a JavaScript setter (accessor_index >= 0)
// or, when accessor_index < 0, records the deopt continuation point for a
// setter stub. Always returns the stored value (not the setter's return
// value) in x0, per JS assignment semantics.
void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- lr    : return address
  // -----------------------------------
  Label miss;
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save value register, so we can restore it later.
    __ Push(value());

    if (accessor_index >= 0) {
      DCHECK(!AreAliased(holder, scratch));
      DCHECK(!AreAliased(receiver, scratch));
      DCHECK(!AreAliased(value(), scratch));
      // Call the JavaScript setter with receiver and value on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ Ldr(scratch,
               FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ Push(receiver, value());
      ParameterCount actual(1);
      ParameterCount expected(expected_arguments);
      __ LoadAccessor(x1, holder, accessor_index, ACCESSOR_SETTER);
      __ InvokeFunction(x1, expected, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ Pop(x0);

    // Restore context register.
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}
273
274
// Emits the trampoline that invokes a JavaScript getter (accessor_index >= 0)
// with the receiver as the only stack argument, or, when accessor_index < 0,
// records the deopt continuation point for a getter stub. The getter's
// return value is the result (left in x0 by the callee).
void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (accessor_index >= 0) {
      DCHECK(!AreAliased(holder, scratch));
      DCHECK(!AreAliased(receiver, scratch));
      // Call the JavaScript getter with the receiver on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ Ldr(scratch,
               FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ Push(receiver);
      ParameterCount actual(0);
      ParameterCount expected(expected_arguments);
      __ LoadAccessor(x1, holder, accessor_index, ACCESSOR_GETTER);
      __ InvokeFunction(x1, expected, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ Ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}
308
309
// Pushes the five store-IC descriptor registers (receiver, name, value,
// slot, vector) in the order expected by the StoreIC/KeyedStoreIC slow-path
// runtime entries.
static void StoreIC_PushArgs(MacroAssembler* masm) {
  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
          StoreDescriptor::ValueRegister(),
          VectorStoreICDescriptor::SlotRegister(),
          VectorStoreICDescriptor::VectorRegister());
}
316
317
// Slow path for named stores: forwards all store-IC arguments to the
// Runtime::kStoreIC_Slow entry via tail call.
void NamedStoreHandlerCompiler::GenerateSlow(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kStoreIC_Slow);
}
325
326
// Slow path for keyed (element) stores: forwards all store-IC arguments to
// the Runtime::kKeyedStoreIC_Slow entry via tail call.
void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
  ASM_LOCATION("ElementHandlerCompiler::GenerateStoreSlow");
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
}
335
336
337 #undef __
338 #define __ ACCESS_MASM(masm())
339
340
// Compiles a handler that loads a global property out of |cell| (held via a
// weak cell so a collected cell forces a miss). If |is_configurable|, also
// misses when the cell holds the hole (property was deleted).
Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
    Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
  Label miss;
  if (IC::ICUseVector(kind())) {
    PushVectorAndSlot();
  }
  FrontendHeader(receiver(), name, &miss, DONT_RETURN_ANYTHING);

  // Get the value from the cell.
  // NOTE(review): result deliberately reuses the store descriptor's value
  // register here — presumably a register known to be free for load handlers;
  // confirm against the descriptor definitions.
  Register result = StoreDescriptor::ValueRegister();
  Handle<WeakCell> weak_cell = factory()->NewWeakCell(cell);
  __ LoadWeakValue(result, weak_cell, &miss);
  __ Ldr(result, FieldMemOperand(result, PropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (is_configurable) {
    __ JumpIfRoot(result, Heap::kTheHoleValueRootIndex, &miss);
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, x1, x3);
  if (IC::ICUseVector(kind())) {
    DiscardVectorAndSlot();
  }
  __ Ret();

  FrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}
372
373
CompileStoreInterceptor(Handle<Name> name)374 Handle<Code> NamedStoreHandlerCompiler::CompileStoreInterceptor(
375 Handle<Name> name) {
376 Label miss;
377
378 ASM_LOCATION("NamedStoreHandlerCompiler::CompileStoreInterceptor");
379
380 __ Push(receiver(), this->name(), value());
381
382 // Do tail-call to the runtime system.
383 __ TailCallRuntime(Runtime::kStorePropertyWithInterceptor);
384
385 // Return the generated code.
386 return GetCode(kind(), Code::FAST, name);
387 }
388
389
// The register holding the value being stored, as defined by the platform's
// store descriptor.
Register NamedStoreHandlerCompiler::value() {
  return StoreDescriptor::ValueRegister();
}
393
394
// Binds |label| (if any branch targets it) and reloads |name| into the
// name register — used on miss paths where the name register may have been
// clobbered (e.g. by a map transition).
void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
                                                    Handle<Name> name) {
  if (!label->is_unused()) {
    __ Bind(label);
    __ Mov(this->name(), Operand(name));
  }
}
402
403
// Unconditionally reloads |name| into the name register.
void NamedStoreHandlerCompiler::GenerateRestoreName(Handle<Name> name) {
  __ Mov(this->name(), Operand(name));
}
407
408
// Not implemented on ARM64; asserts if ever reached.
void NamedStoreHandlerCompiler::RearrangeVectorAndSlot(
    Register current_map, Register destination_map) {
  DCHECK(false);  // Not implemented.
}
413
414
// Loads the |transition| target map (via its weak cell) into |map_reg|,
// jumping to |miss| if the weak cell was cleared or — when the map can be
// deprecated — if the Deprecated bit in bit field 3 is set.
void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                   Register map_reg,
                                                   Register scratch,
                                                   Label* miss) {
  Handle<WeakCell> cell = Map::WeakCellForMap(transition);
  DCHECK(!map_reg.is(scratch));
  __ LoadWeakValue(map_reg, cell, miss);
  if (transition->CanBeDeprecated()) {
    __ Ldrsw(scratch, FieldMemOperand(map_reg, Map::kBitField3Offset));
    __ TestAndBranchIfAnySet(scratch, Map::Deprecated::kMask, miss);
  }
}
427
428
// Verifies that |value_reg| equals the constant recorded at |descriptor| in
// the map's descriptor array; jumps to |miss_label| on mismatch.
// Clobbers |scratch|.
void NamedStoreHandlerCompiler::GenerateConstantCheck(Register map_reg,
                                                      int descriptor,
                                                      Register value_reg,
                                                      Register scratch,
                                                      Label* miss_label) {
  DCHECK(!map_reg.is(scratch));
  DCHECK(!map_reg.is(value_reg));
  DCHECK(!value_reg.is(scratch));
  __ LoadInstanceDescriptors(map_reg, scratch);
  __ Ldr(scratch,
         FieldMemOperand(scratch, DescriptorArray::GetValueOffset(descriptor)));
  __ Cmp(value_reg, scratch);
  __ B(ne, miss_label);
}
443
444
// Checks that |value_reg| holds a heap object whose map is one of the maps
// in |field_type|'s class list; jumps to |miss_label| otherwise. Smis always
// miss. If the class list is empty, only the smi check is emitted.
void NamedStoreHandlerCompiler::GenerateFieldTypeChecks(HeapType* field_type,
                                                        Register value_reg,
                                                        Label* miss_label) {
  Register map_reg = scratch1();
  Register scratch = scratch2();
  DCHECK(!value_reg.is(map_reg));
  DCHECK(!value_reg.is(scratch));
  __ JumpIfSmi(value_reg, miss_label);
  HeapType::Iterator<Map> it = field_type->Classes();
  if (!it.Done()) {
    __ Ldr(map_reg, FieldMemOperand(value_reg, HeapObject::kMapOffset));
    Label do_store;
    // Compare against each allowed map: equality short-circuits to do_store;
    // inequality on the LAST candidate falls through to the miss.
    while (true) {
      __ CmpWeakValue(map_reg, Map::WeakCellForMap(it.Current()), scratch);
      it.Advance();
      if (it.Done()) {
        __ B(ne, miss_label);
        break;
      }
      __ B(eq, &do_store);
    }
    __ Bind(&do_store);
  }
}
469
470
// Walks the prototype chain from the receiver's map to holder()'s map,
// emitting whatever checks are needed to guarantee the chain shape has not
// changed: validity-cell check (when FLAG_eliminate_prototype_chain_checks),
// per-map weak-cell comparisons, dictionary negative lookups, global
// property-cell emptiness checks, and a global-proxy access check. On any
// failed check control transfers to |miss|. Returns the register holding
// the holder when |return_what| == RETURN_HOLDER, otherwise no_reg.
// Clobbers scratch1 and scratch2; holder_reg is progressively overwritten
// with each prototype in turn.
Register PropertyHandlerCompiler::CheckPrototypes(
    Register object_reg, Register holder_reg, Register scratch1,
    Register scratch2, Handle<Name> name, Label* miss, PrototypeCheckType check,
    ReturnHolder return_what) {
  Handle<Map> receiver_map = map();

  // object_reg and holder_reg registers can alias.
  DCHECK(!AreAliased(object_reg, scratch1, scratch2));
  DCHECK(!AreAliased(holder_reg, scratch1, scratch2));

  if (FLAG_eliminate_prototype_chain_checks) {
    // A single validity cell guards the whole chain: if any map on the chain
    // changes, the cell's value is invalidated and we take the miss.
    Handle<Cell> validity_cell =
        Map::GetOrCreatePrototypeChainValidityCell(receiver_map, isolate());
    if (!validity_cell.is_null()) {
      DCHECK_EQ(Smi::FromInt(Map::kPrototypeChainValid),
                validity_cell->value());
      __ Mov(scratch1, Operand(validity_cell));
      __ Ldr(scratch1, FieldMemOperand(scratch1, Cell::kValueOffset));
      __ Cmp(scratch1, Operand(Smi::FromInt(Map::kPrototypeChainValid)));
      __ B(ne, miss);
    }

    // The prototype chain of primitives (and their JSValue wrappers) depends
    // on the native context, which can't be guarded by validity cells.
    // |object_reg| holds the native context specific prototype in this case;
    // we need to check its map.
    if (check == CHECK_ALL_MAPS) {
      __ Ldr(scratch1, FieldMemOperand(object_reg, HeapObject::kMapOffset));
      Handle<WeakCell> cell = Map::WeakCellForMap(receiver_map);
      __ CmpWeakValue(scratch1, cell, scratch2);
      __ B(ne, miss);
    }
  }

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (receiver_map->IsJSGlobalObjectMap()) {
    current = isolate()->global_object();
  }

  // Check access rights to the global object.  This has to happen after
  // the map check so that we know that the object is actually a global
  // object.
  // This allows us to install generated handlers for accesses to the
  // global proxy (as opposed to using slow ICs). See corresponding code
  // in LookupForRead().
  if (receiver_map->IsJSGlobalProxyMap()) {
    UseScratchRegisterScope temps(masm());
    __ CheckAccessGlobalProxy(reg, scratch2, temps.AcquireX(), miss);
  }

  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder()->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    DCHECK(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap()) {
      DCHECK(!current_map->IsJSGlobalProxyMap());  // Proxy maps are fast.
      if (!name->IsUniqueName()) {
        DCHECK(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      // The compile-time dictionary must not already contain the name,
      // otherwise a negative lookup handler would be wrong.
      DCHECK(current.is_null() || (current->property_dictionary()->FindEntry(
                                       name) == NameDictionary::kNotFound));

      if (FLAG_eliminate_prototype_chain_checks && depth > 1) {
        // TODO(jkummerow): Cache and re-use weak cell.
        __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
      }
      GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
                                       scratch2);

      if (!FLAG_eliminate_prototype_chain_checks) {
        __ Ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
        __ Ldr(holder_reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
      }
    } else {
      Register map_reg = scratch1;
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ Ldr(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
      }
      if (current_map->IsJSGlobalObjectMap()) {
        // Globals are checked by proving the property cell is still empty
        // rather than by comparing maps.
        GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
                                  name, scratch2, miss);
      } else if (!FLAG_eliminate_prototype_chain_checks &&
                 (depth != 1 || check == CHECK_ALL_MAPS)) {
        Handle<WeakCell> cell = Map::WeakCellForMap(current_map);
        __ CmpWeakValue(map_reg, cell, scratch2);
        __ B(ne, miss);
      }
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ Ldr(holder_reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
      }
    }

    reg = holder_reg;  // From now on the object will be in holder_reg.
    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  DCHECK(!current_map->IsJSGlobalProxyMap());

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (!FLAG_eliminate_prototype_chain_checks &&
      (depth != 0 || check == CHECK_ALL_MAPS)) {
    // Check the holder map.
    __ Ldr(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
    Handle<WeakCell> cell = Map::WeakCellForMap(current_map);
    __ CmpWeakValue(scratch1, cell, scratch2);
    __ B(ne, miss);
  }

  bool return_holder = return_what == RETURN_HOLDER;
  if (FLAG_eliminate_prototype_chain_checks && return_holder && depth != 0) {
    // Without per-hop map checks, |reg| was never loaded with the holder;
    // materialize it from a weak cell (miss if collected).
    __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
  }

  // Return the register containing the holder.
  return return_holder ? reg : no_reg;
}
607
608
// Emits the shared load-handler miss epilogue: if |miss| was ever branched
// to, bind it, restore the pushed vector/slot (for vector-based ICs) and
// tail-call the load-IC miss builtin; fall-through code skips over it.
void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    // Jump over the miss handler on the success path.
    __ B(&success);

    __ Bind(miss);
    if (IC::ICUseVector(kind())) {
      DCHECK(kind() == Code::LOAD_IC);
      PopVectorAndSlot();
    }
    TailCallBuiltin(masm(), MissBuiltin(kind()));

    __ Bind(&success);
  }
}
624
625
// Emits the shared store-handler miss epilogue: binds |miss| (via
// GenerateRestoreName, which also reloads the name register), restores the
// vector/slot when vector ICs are in use, and tail-calls the miss builtin.
void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    // Jump over the miss handler on the success path.
    __ B(&success);

    GenerateRestoreName(miss, name);
    if (IC::ICUseVector(kind())) PopVectorAndSlot();
    TailCallBuiltin(masm(), MissBuiltin(kind()));

    __ Bind(&success);
  }
}
638
639
// Loads the compile-time constant |value| into x0 (the load result
// register) and returns.
void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ LoadObject(x0, value);
  __ Ret();
}
645
646
// Emits a tail call into CallApiGetterStub for a native accessor |callback|.
// Builds the PropertyCallbackArguments block on the stack (holder, isolate,
// two return-value slots, data, receiver — see STATIC_ASSERTs) plus the
// property name, and passes the getter's C entry point in x2.
void NamedLoadHandlerCompiler::GenerateLoadCallback(
    Register reg, Handle<ExecutableAccessorInfo> callback) {
  DCHECK(!AreAliased(scratch2(), scratch3(), scratch4(), reg));

  // Build ExecutableAccessorInfo::args_ list on the stack and push property
  // name below the exit frame to make GC aware of them and store pointers to
  // them.
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);

  __ Push(receiver());

  // Materialize the callback's data: inline if it can't move (undefined or
  // smi), otherwise through a weak cell.
  Handle<Object> data(callback->data(), isolate());
  if (data->IsUndefined() || data->IsSmi()) {
    __ Mov(scratch3(), Operand(data));
  } else {
    Handle<WeakCell> cell =
        isolate()->factory()->NewWeakCell(Handle<HeapObject>::cast(data));
    // The callback is alive if this instruction is executed,
    // so the weak cell is not cleared and points to data.
    __ GetWeakValue(scratch3(), cell);
  }
  __ LoadRoot(scratch4(), Heap::kUndefinedValueRootIndex);
  __ Mov(scratch2(), Operand(ExternalReference::isolate_address(isolate())));
  __ Push(scratch3(), scratch4(), scratch4(), scratch2(), reg, name());

  Register args_addr = scratch2();
  __ Add(args_addr, __ StackPointer(), kPointerSize);

  // Stack at this point:
  //              sp[40] callback data
  //              sp[32] undefined
  //              sp[24] undefined
  //              sp[16] isolate
  // args_addr -> sp[8]  reg
  //              sp[0]  name

  // Abi for CallApiGetter.
  Register getter_address_reg = x2;

  // Set up the call.
  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
  ExternalReference ref = ExternalReference(&fun, type, isolate());
  __ Mov(getter_address_reg, ref);

  CallApiGetterStub stub(isolate());
  __ TailCallStub(&stub);
}
702
703
// Emits the interceptor fast path followed by inline fallback code: first
// calls Runtime::kLoadPropertyWithInterceptorOnly inside an internal frame;
// if the interceptor yields a value (anything but the no-result sentinel),
// returns it immediately; otherwise restores the saved registers and falls
// through to GenerateLoadPostInterceptor for the regular lookup.
void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup(
    LookupIterator* it, Register holder_reg) {
  DCHECK(!AreAliased(receiver(), this->name(), scratch1(), scratch2(),
                     scratch3()));
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());

  // Compile the interceptor call, followed by inline code to load the
  // property from further up the prototype chain if the call fails.
  // Check that the maps haven't changed.
  DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

  // Preserve the receiver register explicitly whenever it is different from the
  // holder and it is needed should the interceptor return without any result.
  // The ACCESSOR case needs the receiver to be passed into C++ code, the FIELD
  // case might cause a miss during the prototype check.
  bool must_perform_prototype_check =
      !holder().is_identical_to(it->GetHolder<JSObject>());
  bool must_preserve_receiver_reg =
      !receiver().is(holder_reg) &&
      (it->state() == LookupIterator::ACCESSOR || must_perform_prototype_check);

  // Save necessary data before invoking an interceptor.
  // Requires a frame to make GC aware of pushed pointers.
  {
    FrameScope frame_scope(masm(), StackFrame::INTERNAL);
    if (must_preserve_receiver_reg) {
      __ Push(receiver(), holder_reg, this->name());
    } else {
      __ Push(holder_reg, this->name());
    }
    InterceptorVectorSlotPush(holder_reg);
    // Invoke an interceptor.  Note: map checks from receiver to
    // interceptor's holder has been compiled before (see a caller
    // of this method.)
    CompileCallLoadPropertyWithInterceptor(
        masm(), receiver(), holder_reg, this->name(), holder(),
        Runtime::kLoadPropertyWithInterceptorOnly);

    // Check if interceptor provided a value for property.  If it's
    // the case, return immediately.
    Label interceptor_failed;
    __ JumpIfRoot(x0, Heap::kNoInterceptorResultSentinelRootIndex,
                  &interceptor_failed);
    frame_scope.GenerateLeaveFrame();
    __ Ret();

    __ Bind(&interceptor_failed);
    InterceptorVectorSlotPop(holder_reg);
    // Pops mirror the pushes above, in reverse order.
    if (must_preserve_receiver_reg) {
      __ Pop(this->name(), holder_reg, receiver());
    } else {
      __ Pop(this->name(), holder_reg);
    }
    // Leave the internal frame.
  }

  GenerateLoadPostInterceptor(it, holder_reg);
}
763
764
// Unconditional interceptor load: pushes the interceptor arguments and
// tail-calls Runtime::kLoadPropertyWithInterceptor (no inline followup).
void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) {
  // Call the runtime system to load the interceptor.
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
  PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(),
                           holder());

  __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
}
774
775
// Compiles a handler that stores through a native accessor: runs the
// frontend (prototype checks), then pushes receiver, holder, the callback
// (directly, or via a weak cell when it could be collected), the name and
// the value, and tail-calls Runtime::kStoreCallbackProperty.
Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
    Handle<JSObject> object, Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  ASM_LOCATION("NamedStoreHandlerCompiler::CompileStoreCallback");
  Register holder_reg = Frontend(name);

  // Stub never generated for non-global objects that require access checks.
  DCHECK(holder()->IsJSGlobalProxy() || !holder()->IsAccessCheckNeeded());

  // receiver() and holder_reg can alias.
  DCHECK(!AreAliased(receiver(), scratch1(), scratch2(), value()));
  DCHECK(!AreAliased(holder_reg, scratch1(), scratch2(), value()));
  // If the callback cannot leak, then push the callback directly,
  // otherwise wrap it in a weak cell.
  if (callback->data()->IsUndefined() || callback->data()->IsSmi()) {
    __ Mov(scratch1(), Operand(callback));
  } else {
    Handle<WeakCell> cell = isolate()->factory()->NewWeakCell(callback);
    __ Mov(scratch1(), Operand(cell));
  }
  __ Mov(scratch2(), Operand(name));
  __ Push(receiver(), holder_reg, scratch1(), scratch2(), value());

  // Do tail-call to the runtime system.
  __ TailCallRuntime(Runtime::kStoreCallbackProperty);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}
805
806
807 #undef __
808 } // namespace internal
809 } // namespace v8
810
#endif  // V8_TARGET_ARCH_ARM64
812