// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_MIPS

#include "src/ic/call-optimization.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/isolate-inl.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void NamedLoadHandlerCompiler::GenerateLoadViaGetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- a0    : receiver
  //  -- a2    : name
  //  -- ra    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      // Call the JavaScript getter with the receiver on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ lw(scratch,
              FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ push(receiver);
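      // A getter takes no explicit arguments, so the actual count is zero;
      // InvokeFunction adapts to the getter's declared arity
      // (expected_arguments) if it differs.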
      ParameterCount actual(0);
      ParameterCount expected(expected_arguments);
      __ LoadAccessor(a1, holder, accessor_index, ACCESSOR_GETTER);
      __ InvokeFunction(a1, expected, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetGetterStubDeoptPCOffset(masm->pc_offset());
    }

    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}

void NamedStoreHandlerCompiler::GenerateStoreViaSetter(
    MacroAssembler* masm, Handle<Map> map, Register receiver, Register holder,
    int accessor_index, int expected_arguments, Register scratch) {
  // ----------- S t a t e -------------
  //  -- ra    : return address
  // -----------------------------------
  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Save value register, so we can restore it later.
    __ push(value());

    if (accessor_index >= 0) {
      DCHECK(!holder.is(scratch));
      DCHECK(!receiver.is(scratch));
      DCHECK(!value().is(scratch));
      // Call the JavaScript setter with receiver and value on the stack.
      if (map->IsJSGlobalObjectMap()) {
        // Swap in the global receiver.
        __ lw(scratch,
              FieldMemOperand(receiver, JSGlobalObject::kGlobalProxyOffset));
        receiver = scratch;
      }
      __ Push(receiver, value());
      ParameterCount actual(1);
      ParameterCount expected(expected_arguments);
      __ LoadAccessor(a1, holder, accessor_index, ACCESSOR_SETTER);
      __ InvokeFunction(a1, expected, actual, CALL_FUNCTION,
                        CheckDebugStepCallWrapper());
    } else {
      // If we generate a global code snippet for deoptimization only, remember
      // the place to continue after deoptimization.
      masm->isolate()->heap()->SetSetterStubDeoptPCOffset(masm->pc_offset());
    }

    // We have to return the passed value, not the return value of the setter.
    __ pop(v0);

    // Restore context register.
    __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
  }
  __ Ret();
}

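// Helpers for saving, restoring and discarding the type feedback vector and
// slot index that vector-based ICs pass to their handlers.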
void PropertyHandlerCompiler::PushVectorAndSlot(Register vector,
                                                Register slot) {
  MacroAssembler* masm = this->masm();
  __ Push(vector, slot);
}


void PropertyHandlerCompiler::PopVectorAndSlot(Register vector, Register slot) {
  MacroAssembler* masm = this->masm();
  __ Pop(vector, slot);
}


void PropertyHandlerCompiler::DiscardVectorAndSlot() {
  MacroAssembler* masm = this->masm();
  // Remove vector and slot.
  __ Addu(sp, sp, Operand(2 * kPointerSize));
}

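// Probe the receiver's property dictionary to prove that |name| is not
// present. Jumps to |miss_label| if the receiver has a named interceptor,
// needs an access check, is not a JS receiver, does not use dictionary
// properties, or might contain the name.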
void PropertyHandlerCompiler::GenerateDictionaryNegativeLookup(
    MacroAssembler* masm, Label* miss_label, Register receiver,
    Handle<Name> name, Register scratch0, Register scratch1) {
  DCHECK(name->IsUniqueName());
  DCHECK(!receiver.is(scratch0));
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
  __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);

  Label done;

  const int kInterceptorOrAccessCheckNeededMask =
      (1 << Map::kHasNamedInterceptor) | (1 << Map::kIsAccessCheckNeeded);

  // Bail out if the receiver has a named interceptor or requires access checks.
  Register map = scratch1;
  __ lw(map, FieldMemOperand(receiver, HeapObject::kMapOffset));
  __ lbu(scratch0, FieldMemOperand(map, Map::kBitFieldOffset));
  __ And(scratch0, scratch0, Operand(kInterceptorOrAccessCheckNeededMask));
  __ Branch(miss_label, ne, scratch0, Operand(zero_reg));

  // Check that receiver is a JSObject.
  __ lbu(scratch0, FieldMemOperand(map, Map::kInstanceTypeOffset));
  __ Branch(miss_label, lt, scratch0, Operand(FIRST_JS_RECEIVER_TYPE));

  // Load properties array.
  Register properties = scratch0;
  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
  // Check that the properties array is a dictionary.
  __ lw(map, FieldMemOperand(properties, HeapObject::kMapOffset));
  Register tmp = properties;
  __ LoadRoot(tmp, Heap::kHashTableMapRootIndex);
  __ Branch(miss_label, ne, map, Operand(tmp));

  // Restore the temporarily used register.
  __ lw(properties, FieldMemOperand(receiver, JSObject::kPropertiesOffset));

  NameDictionaryLookupStub::GenerateNegativeLookup(
      masm, miss_label, &done, receiver, properties, name, scratch1);
  __ bind(&done);
  __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}

void NamedLoadHandlerCompiler::GenerateDirectLoadGlobalFunctionPrototype(
    MacroAssembler* masm, int index, Register result, Label* miss) {
  __ LoadNativeContextSlot(index, result);
  // Load its initial map. The global functions all have initial maps.
  __ lw(result,
        FieldMemOperand(result, JSFunction::kPrototypeOrInitialMapOffset));
  // Load the prototype from the initial map.
  __ lw(result, FieldMemOperand(result, Map::kPrototypeOffset));
}


void NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(
    MacroAssembler* masm, Register receiver, Register scratch1,
    Register scratch2, Label* miss_label) {
  __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label);
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, scratch1);
}

// Generate code to check that a global property cell is empty. Create
// the property cell at compilation time if no cell exists for the
// property.
void PropertyHandlerCompiler::GenerateCheckPropertyCell(
    MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name,
    Register scratch, Label* miss) {
  Handle<PropertyCell> cell = JSGlobalObject::EnsurePropertyCell(global, name);
  DCHECK(cell->value()->IsTheHole());
  Handle<WeakCell> weak_cell = masm->isolate()->factory()->NewWeakCell(cell);
  __ LoadWeakValue(scratch, weak_cell, miss);
  __ lw(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
  __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
  __ Branch(miss, ne, scratch, Operand(at));
}

static void PushInterceptorArguments(MacroAssembler* masm, Register receiver,
                                     Register holder, Register name,
                                     Handle<JSObject> holder_obj) {
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsNameIndex == 0);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsThisIndex == 1);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsHolderIndex == 2);
  STATIC_ASSERT(NamedLoadHandlerCompiler::kInterceptorArgsLength == 3);
  __ Push(name, receiver, holder);
}

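// Push the three interceptor arguments (name, receiver, holder) and call the
// runtime function |id|, which must take exactly that many arguments.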
static void CompileCallLoadPropertyWithInterceptor(
    MacroAssembler* masm, Register receiver, Register holder, Register name,
    Handle<JSObject> holder_obj, Runtime::FunctionId id) {
  DCHECK(NamedLoadHandlerCompiler::kInterceptorArgsLength ==
         Runtime::FunctionForId(id)->nargs);
  PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
  __ CallRuntime(id);
}


// Generate a call to an API accessor function.
void PropertyHandlerCompiler::GenerateApiAccessorCall(
    MacroAssembler* masm, const CallOptimization& optimization,
    Handle<Map> receiver_map, Register receiver, Register scratch_in,
    bool is_store, Register store_parameter, Register accessor_holder,
    int accessor_index) {
  DCHECK(!accessor_holder.is(scratch_in));
  DCHECK(!receiver.is(scratch_in));
  __ push(receiver);
  // Write the arguments to stack frame.
  if (is_store) {
    DCHECK(!receiver.is(store_parameter));
    DCHECK(!scratch_in.is(store_parameter));
    __ push(store_parameter);
  }
  DCHECK(optimization.is_simple_api_call());

  // Abi for CallApiFunctionStub.
  Register callee = a0;
  Register data = t0;
  Register holder = a2;
  Register api_function_address = a1;

  // Put callee in place.
  __ LoadAccessor(callee, accessor_holder, accessor_index,
                  is_store ? ACCESSOR_SETTER : ACCESSOR_GETTER);

  // Put holder in place.
  CallOptimization::HolderLookup holder_lookup;
  int holder_depth = 0;
  optimization.LookupHolderOfExpectedType(receiver_map, &holder_lookup,
                                          &holder_depth);
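  // The expected receiver type is either the receiver itself or an object
  // |holder_depth| steps up the receiver's prototype chain; walk the chain to
  // materialize the holder.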
  switch (holder_lookup) {
    case CallOptimization::kHolderIsReceiver:
      __ Move(holder, receiver);
      break;
    case CallOptimization::kHolderFound:
      __ lw(holder, FieldMemOperand(receiver, HeapObject::kMapOffset));
      __ lw(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      for (int i = 1; i < holder_depth; i++) {
        __ lw(holder, FieldMemOperand(holder, HeapObject::kMapOffset));
        __ lw(holder, FieldMemOperand(holder, Map::kPrototypeOffset));
      }
      break;
    case CallOptimization::kHolderNotFound:
      UNREACHABLE();
      break;
  }

  Isolate* isolate = masm->isolate();
  Handle<CallHandlerInfo> api_call_info = optimization.api_call_info();
  bool call_data_undefined = false;
  // Put call data in place.
  if (api_call_info->data()->IsUndefined()) {
    call_data_undefined = true;
    __ LoadRoot(data, Heap::kUndefinedValueRootIndex);
  } else {
    __ lw(data, FieldMemOperand(callee, JSFunction::kSharedFunctionInfoOffset));
    __ lw(data, FieldMemOperand(data, SharedFunctionInfo::kFunctionDataOffset));
    __ lw(data, FieldMemOperand(data, FunctionTemplateInfo::kCallCodeOffset));
    __ lw(data, FieldMemOperand(data, CallHandlerInfo::kDataOffset));
  }

  if (api_call_info->fast_handler()->IsCode()) {
    // Just tail call into the fast handler if present.
    __ Jump(handle(Code::cast(api_call_info->fast_handler())),
            RelocInfo::CODE_TARGET);
    return;
  }
  // Put api_function_address in place.
  Address function_address = v8::ToCData<Address>(api_call_info->callback());
  ApiFunction fun(function_address);
  ExternalReference::Type type = ExternalReference::DIRECT_API_CALL;
  ExternalReference ref = ExternalReference(&fun, type, masm->isolate());
  __ li(api_function_address, Operand(ref));

  // Jump to stub.
  CallApiAccessorStub stub(isolate, is_store, call_data_undefined);
  __ TailCallStub(&stub);
}

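// Push the arguments expected by the store runtime entries below: receiver,
// name, value, slot and vector.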
static void StoreIC_PushArgs(MacroAssembler* masm) {
  __ Push(StoreDescriptor::ReceiverRegister(), StoreDescriptor::NameRegister(),
          StoreDescriptor::ValueRegister(),
          VectorStoreICDescriptor::SlotRegister(),
          VectorStoreICDescriptor::VectorRegister());
}


void NamedStoreHandlerCompiler::GenerateSlow(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kStoreIC_Slow);
}


void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) {
  StoreIC_PushArgs(masm);

  // The slow case calls into the runtime to complete the store without causing
  // an IC miss that would otherwise cause a transition to the generic stub.
  __ TailCallRuntime(Runtime::kKeyedStoreIC_Slow);
}


#undef __
#define __ ACCESS_MASM(masm())

void NamedStoreHandlerCompiler::GenerateRestoreName(Label* label,
                                                    Handle<Name> name) {
  if (!label->is_unused()) {
    __ bind(label);
    __ li(this->name(), Operand(name));
  }
}


void NamedStoreHandlerCompiler::GenerateRestoreName(Handle<Name> name) {
  __ li(this->name(), Operand(name));
}


void NamedStoreHandlerCompiler::RearrangeVectorAndSlot(
    Register current_map, Register destination_map) {
  DCHECK(false);  // Not implemented.
}

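// Load the transition target map from its weak cell into |map_reg|, jumping
// to |miss| if the cell has been cleared or the map has since been deprecated.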
void NamedStoreHandlerCompiler::GenerateRestoreMap(Handle<Map> transition,
                                                   Register map_reg,
                                                   Register scratch,
                                                   Label* miss) {
  Handle<WeakCell> cell = Map::WeakCellForMap(transition);
  DCHECK(!map_reg.is(scratch));
  __ LoadWeakValue(map_reg, cell, miss);
  if (transition->CanBeDeprecated()) {
    __ lw(scratch, FieldMemOperand(map_reg, Map::kBitField3Offset));
    __ And(at, scratch, Operand(Map::Deprecated::kMask));
    __ Branch(miss, ne, at, Operand(zero_reg));
  }
}


void NamedStoreHandlerCompiler::GenerateConstantCheck(Register map_reg,
                                                      int descriptor,
                                                      Register value_reg,
                                                      Register scratch,
                                                      Label* miss_label) {
  DCHECK(!map_reg.is(scratch));
  DCHECK(!map_reg.is(value_reg));
  DCHECK(!value_reg.is(scratch));
  __ LoadInstanceDescriptors(map_reg, scratch);
  __ lw(scratch,
        FieldMemOperand(scratch, DescriptorArray::GetValueOffset(descriptor)));
  __ Branch(miss_label, ne, value_reg, Operand(scratch));
}

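// Type-check a value being stored into a field: Smis always jump to
// |miss_label|; if |field_type| lists concrete classes, the value's map must
// match one of them, otherwise any heap object is accepted.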
void NamedStoreHandlerCompiler::GenerateFieldTypeChecks(HeapType* field_type,
                                                        Register value_reg,
                                                        Label* miss_label) {
  Register map_reg = scratch1();
  Register scratch = scratch2();
  DCHECK(!value_reg.is(map_reg));
  DCHECK(!value_reg.is(scratch));
  __ JumpIfSmi(value_reg, miss_label);
  HeapType::Iterator<Map> it = field_type->Classes();
  if (!it.Done()) {
    __ lw(map_reg, FieldMemOperand(value_reg, HeapObject::kMapOffset));
    Label do_store;
    while (true) {
      // Compare map directly within the Branch() functions.
      __ GetWeakValue(scratch, Map::WeakCellForMap(it.Current()));
      it.Advance();
      if (it.Done()) {
        __ Branch(miss_label, ne, map_reg, Operand(scratch));
        break;
      }
      __ Branch(&do_store, eq, map_reg, Operand(scratch));
    }
    __ bind(&do_store);
  }
}

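// Walk the prototype chain from the receiver (in |object_reg|) to the holder,
// verifying at each step that the chain is still in the expected shape: maps
// are checked for fast and global objects, and a negative dictionary lookup
// is generated for normal (dictionary-mode) objects. Falls through with the
// holder in the returned register, or jumps to |miss|.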
Register PropertyHandlerCompiler::CheckPrototypes(
    Register object_reg, Register holder_reg, Register scratch1,
    Register scratch2, Handle<Name> name, Label* miss, PrototypeCheckType check,
    ReturnHolder return_what) {
  Handle<Map> receiver_map = map();

  // Make sure there's no overlap between holder and object registers.
  DCHECK(!scratch1.is(object_reg) && !scratch1.is(holder_reg));
  DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) &&
         !scratch2.is(scratch1));

  if (FLAG_eliminate_prototype_chain_checks) {
    Handle<Cell> validity_cell =
        Map::GetOrCreatePrototypeChainValidityCell(receiver_map, isolate());
    if (!validity_cell.is_null()) {
      DCHECK_EQ(Smi::FromInt(Map::kPrototypeChainValid),
                validity_cell->value());
      __ li(scratch1, Operand(validity_cell));
      __ lw(scratch1, FieldMemOperand(scratch1, Cell::kValueOffset));
      __ Branch(miss, ne, scratch1,
                Operand(Smi::FromInt(Map::kPrototypeChainValid)));
    }

    // The prototype chain of primitives (and their JSValue wrappers) depends
    // on the native context, which can't be guarded by validity cells.
    // |object_reg| holds the native context specific prototype in this case;
    // we need to check its map.
    if (check == CHECK_ALL_MAPS) {
      __ lw(scratch1, FieldMemOperand(object_reg, HeapObject::kMapOffset));
      Handle<WeakCell> cell = Map::WeakCellForMap(receiver_map);
      __ GetWeakValue(scratch2, cell);
      __ Branch(miss, ne, scratch1, Operand(scratch2));
    }
  }

  // Keep track of the current object in register reg.
  Register reg = object_reg;
  int depth = 0;

  Handle<JSObject> current = Handle<JSObject>::null();
  if (receiver_map->IsJSGlobalObjectMap()) {
    current = isolate()->global_object();
  }

  // Check access rights to the global object. This has to happen after
  // the map check so that we know that the object is actually a global
  // object.
  // This allows us to install generated handlers for accesses to the
  // global proxy (as opposed to using slow ICs). See corresponding code
  // in LookupForRead().
  if (receiver_map->IsJSGlobalProxyMap()) {
    __ CheckAccessGlobalProxy(reg, scratch2, miss);
  }

  Handle<JSObject> prototype = Handle<JSObject>::null();
  Handle<Map> current_map = receiver_map;
  Handle<Map> holder_map(holder()->map());
  // Traverse the prototype chain and check the maps in the prototype chain for
  // fast and global objects or do negative lookup for normal objects.
  while (!current_map.is_identical_to(holder_map)) {
    ++depth;

    // Only global objects and objects that do not require access
    // checks are allowed in stubs.
    DCHECK(current_map->IsJSGlobalProxyMap() ||
           !current_map->is_access_check_needed());

    prototype = handle(JSObject::cast(current_map->prototype()));
    if (current_map->is_dictionary_map() &&
        !current_map->IsJSGlobalObjectMap()) {
      DCHECK(!current_map->IsJSGlobalProxyMap());  // Proxy maps are fast.
      if (!name->IsUniqueName()) {
        DCHECK(name->IsString());
        name = factory()->InternalizeString(Handle<String>::cast(name));
      }
      DCHECK(current.is_null() ||
             current->property_dictionary()->FindEntry(name) ==
                 NameDictionary::kNotFound);

      if (FLAG_eliminate_prototype_chain_checks && depth > 1) {
        // TODO(jkummerow): Cache and re-use weak cell.
        __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
      }
      GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1,
                                       scratch2);
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
        __ lw(holder_reg, FieldMemOperand(scratch1, Map::kPrototypeOffset));
      }
    } else {
      Register map_reg = scratch1;
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ lw(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
      }
      if (current_map->IsJSGlobalObjectMap()) {
        GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current),
                                  name, scratch2, miss);
      } else if (!FLAG_eliminate_prototype_chain_checks &&
                 (depth != 1 || check == CHECK_ALL_MAPS)) {
        Handle<WeakCell> cell = Map::WeakCellForMap(current_map);
        __ GetWeakValue(scratch2, cell);
        __ Branch(miss, ne, scratch2, Operand(map_reg));
      }
      if (!FLAG_eliminate_prototype_chain_checks) {
        __ lw(holder_reg, FieldMemOperand(map_reg, Map::kPrototypeOffset));
      }
    }

    reg = holder_reg;  // From now on the object will be in holder_reg.
    // Go to the next object in the prototype chain.
    current = prototype;
    current_map = handle(current->map());
  }

  DCHECK(!current_map->IsJSGlobalProxyMap());

  // Log the check depth.
  LOG(isolate(), IntEvent("check-maps-depth", depth + 1));

  if (!FLAG_eliminate_prototype_chain_checks &&
      (depth != 0 || check == CHECK_ALL_MAPS)) {
    // Check the holder map.
    __ lw(scratch1, FieldMemOperand(reg, HeapObject::kMapOffset));
    Handle<WeakCell> cell = Map::WeakCellForMap(current_map);
    __ GetWeakValue(scratch2, cell);
    __ Branch(miss, ne, scratch2, Operand(scratch1));
  }

  bool return_holder = return_what == RETURN_HOLDER;
  if (FLAG_eliminate_prototype_chain_checks && return_holder && depth != 0) {
    __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss);
  }

  // Return the register containing the holder.
  return return_holder ? reg : no_reg;
}

void NamedLoadHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ Branch(&success);
    __ bind(miss);
    if (IC::ICUseVector(kind())) {
      DCHECK(kind() == Code::LOAD_IC);
      PopVectorAndSlot();
    }
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}


void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) {
  if (!miss->is_unused()) {
    Label success;
    __ Branch(&success);
    GenerateRestoreName(miss, name);
    if (IC::ICUseVector(kind())) PopVectorAndSlot();
    TailCallBuiltin(masm(), MissBuiltin(kind()));
    __ bind(&success);
  }
}

void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) {
  // Return the constant value.
  __ li(v0, value);
  __ Ret();
}

void NamedLoadHandlerCompiler::GenerateLoadCallback(
    Register reg, Handle<ExecutableAccessorInfo> callback) {
  // Build AccessorInfo::args_ list on the stack and push property name below
  // the exit frame to make GC aware of them and store pointers to them.
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 6);
  DCHECK(!scratch2().is(reg));
  DCHECK(!scratch3().is(reg));
  DCHECK(!scratch4().is(reg));
  __ push(receiver());
  Handle<Object> data(callback->data(), isolate());
  if (data->IsUndefined() || data->IsSmi()) {
    __ li(scratch3(), data);
  } else {
    Handle<WeakCell> cell =
        isolate()->factory()->NewWeakCell(Handle<HeapObject>::cast(data));
    // The callback is alive if this instruction is executed,
    // so the weak cell is not cleared and points to data.
    __ GetWeakValue(scratch3(), cell);
  }
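  // The stores below lay out PropertyCallbackArguments::args_ above the
  // property name (the receiver pushed above is the kThisIndex entry):
  //   sp[5 * kPointerSize]: data
  //   sp[4 * kPointerSize]: return value
  //   sp[3 * kPointerSize]: return value default
  //   sp[2 * kPointerSize]: isolate
  //   sp[1 * kPointerSize]: holder (reg)
  //   sp[0 * kPointerSize]: name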
  __ Subu(sp, sp, 6 * kPointerSize);
  __ sw(scratch3(), MemOperand(sp, 5 * kPointerSize));
  __ LoadRoot(scratch3(), Heap::kUndefinedValueRootIndex);
  __ sw(scratch3(), MemOperand(sp, 4 * kPointerSize));
  __ sw(scratch3(), MemOperand(sp, 3 * kPointerSize));
  __ li(scratch4(), Operand(ExternalReference::isolate_address(isolate())));
  __ sw(scratch4(), MemOperand(sp, 2 * kPointerSize));
  __ sw(reg, MemOperand(sp, 1 * kPointerSize));
  __ sw(name(), MemOperand(sp, 0 * kPointerSize));
  __ Addu(scratch2(), sp, 1 * kPointerSize);

  __ mov(a2, scratch2());  // Saved in case scratch2 == a1.
  // Abi for CallApiGetter.
  Register getter_address_reg = ApiGetterDescriptor::function_address();

  Address getter_address = v8::ToCData<Address>(callback->getter());
  ApiFunction fun(getter_address);
  ExternalReference::Type type = ExternalReference::DIRECT_GETTER_CALL;
  ExternalReference ref = ExternalReference(&fun, type, isolate());
  __ li(getter_address_reg, Operand(ref));

  CallApiGetterStub stub(isolate());
  __ TailCallStub(&stub);
}

void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup(
    LookupIterator* it, Register holder_reg) {
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());

  // Compile the interceptor call, followed by inline code to load the
  // property from further up the prototype chain if the call fails.
  // Check that the maps haven't changed.
  DCHECK(holder_reg.is(receiver()) || holder_reg.is(scratch1()));

  // Preserve the receiver register explicitly whenever it is different from
  // the holder and it is needed should the interceptor return without any
  // result. The ACCESSOR case needs the receiver to be passed into C++ code,
  // the FIELD case might cause a miss during the prototype check.
  bool must_perform_prototype_check =
      !holder().is_identical_to(it->GetHolder<JSObject>());
  bool must_preserve_receiver_reg =
      !receiver().is(holder_reg) &&
      (it->state() == LookupIterator::ACCESSOR || must_perform_prototype_check);

  // Save necessary data before invoking an interceptor.
  // Requires a frame to make GC aware of pushed pointers.
  {
    FrameScope frame_scope(masm(), StackFrame::INTERNAL);
    if (must_preserve_receiver_reg) {
      __ Push(receiver(), holder_reg, this->name());
    } else {
      __ Push(holder_reg, this->name());
    }
    InterceptorVectorSlotPush(holder_reg);
    // Invoke the interceptor. Note: the map checks from the receiver to the
    // interceptor's holder have been compiled before (see a caller of this
    // method).
    CompileCallLoadPropertyWithInterceptor(
        masm(), receiver(), holder_reg, this->name(), holder(),
        Runtime::kLoadPropertyWithInterceptorOnly);

    // Check whether the interceptor provided a value for the property. If it
    // did, return immediately.
    Label interceptor_failed;
    __ LoadRoot(scratch1(), Heap::kNoInterceptorResultSentinelRootIndex);
    __ Branch(&interceptor_failed, eq, v0, Operand(scratch1()));
    frame_scope.GenerateLeaveFrame();
    __ Ret();

    __ bind(&interceptor_failed);
    InterceptorVectorSlotPop(holder_reg);
    if (must_preserve_receiver_reg) {
      __ Pop(receiver(), holder_reg, this->name());
    } else {
      __ Pop(holder_reg, this->name());
    }
    // Leave the internal frame.
  }

  GenerateLoadPostInterceptor(it, holder_reg);
}

void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) {
  // Call the runtime system to load the interceptor.
  DCHECK(holder()->HasNamedInterceptor());
  DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined());
  PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(),
                           holder());

  __ TailCallRuntime(Runtime::kLoadPropertyWithInterceptor);
}

Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback(
    Handle<JSObject> object, Handle<Name> name,
    Handle<ExecutableAccessorInfo> callback) {
  Register holder_reg = Frontend(name);

  __ Push(receiver(), holder_reg);  // Receiver.
  // If the callback cannot leak, then push the callback directly,
  // otherwise wrap it in a weak cell.
  if (callback->data()->IsUndefined() || callback->data()->IsSmi()) {
    __ li(at, Operand(callback));
  } else {
    Handle<WeakCell> cell = isolate()->factory()->NewWeakCell(callback);
    __ li(at, Operand(cell));
  }
  __ push(at);
  __ li(at, Operand(name));
  __ Push(at, value());

  // Do tail-call to the runtime system.
  __ TailCallRuntime(Runtime::kStoreCallbackProperty);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Handle<Code> NamedStoreHandlerCompiler::CompileStoreInterceptor(
    Handle<Name> name) {
  __ Push(receiver(), this->name(), value());

  // Do tail-call to the runtime system.
  __ TailCallRuntime(Runtime::kStorePropertyWithInterceptor);

  // Return the generated code.
  return GetCode(kind(), Code::FAST, name);
}


Register NamedStoreHandlerCompiler::value() {
  return StoreDescriptor::ValueRegister();
}

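// Compile a handler that loads a global property directly from its
// PropertyCell. For configurable properties the handler also checks for the
// hole value (a deleted property) and misses in that case.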
Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal(
    Handle<PropertyCell> cell, Handle<Name> name, bool is_configurable) {
  Label miss;
  if (IC::ICUseVector(kind())) {
    PushVectorAndSlot();
  }

  FrontendHeader(receiver(), name, &miss, DONT_RETURN_ANYTHING);

  // Get the value from the cell.
  Register result = StoreDescriptor::ValueRegister();
  Handle<WeakCell> weak_cell = factory()->NewWeakCell(cell);
  __ LoadWeakValue(result, weak_cell, &miss);
  __ lw(result, FieldMemOperand(result, PropertyCell::kValueOffset));

  // Check for deleted property if property can actually be deleted.
  if (is_configurable) {
    __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
    __ Branch(&miss, eq, result, Operand(at));
  }

  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->named_load_global_stub(), 1, a1, a3);
  if (IC::ICUseVector(kind())) {
    DiscardVectorAndSlot();
  }
  __ Ret(USE_DELAY_SLOT);
  __ mov(v0, result);

  FrontendFooter(name, &miss);

  // Return the generated code.
  return GetCode(kind(), Code::NORMAL, name);
}


#undef __
}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_MIPS