1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5
6
7 #include "src/v8.h"
8
9 #if V8_TARGET_ARCH_MIPS
10
11 #include "src/codegen.h"
12 #include "src/debug.h"
13 #include "src/deoptimizer.h"
14 #include "src/full-codegen.h"
15 #include "src/runtime.h"
16
17
18 namespace v8 {
19 namespace internal {
20
21
22 #define __ ACCESS_MASM(masm)
23
24
25 void Builtins::Generate_Adaptor(MacroAssembler* masm,
26 CFunctionId id,
27 BuiltinExtraArguments extra_args) {
28 // ----------- S t a t e -------------
29 // -- a0 : number of arguments excluding receiver
30 // -- a1 : called function (only guaranteed when
31 // -- extra_args requires it)
32 // -- cp : context
33 // -- sp[0] : last argument
34 // -- ...
35 // -- sp[4 * (argc - 1)] : first argument
36 // -- sp[4 * argc] : receiver
37 // -----------------------------------
38
39 // Insert extra arguments.
40 int num_extra_args = 0;
41 if (extra_args == NEEDS_CALLED_FUNCTION) {
42 num_extra_args = 1;
43 __ push(a1);
44 } else {
45 DCHECK(extra_args == NO_EXTRA_ARGUMENTS);
46 }
47
48 // JumpToExternalReference expects s0 to contain the number of arguments
49 // including the receiver and the extra arguments.
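// Below, s1 is set to the size of those arguments in bytes, excluding the receiver slot.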
50 __ Addu(s0, a0, num_extra_args + 1);
51 __ sll(s1, s0, kPointerSizeLog2);
52 __ Subu(s1, s1, kPointerSize);
53 __ JumpToExternalReference(ExternalReference(id, masm->isolate()));
54 }
55
56
57 // Load the built-in InternalArray function from the current context.
58 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
59 Register result) {
60 // Load the native context.
61
62 __ lw(result,
63 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
64 __ lw(result,
65 FieldMemOperand(result, GlobalObject::kNativeContextOffset));
66 // Load the InternalArray function from the native context.
67 __ lw(result,
68 MemOperand(result,
69 Context::SlotOffset(
70 Context::INTERNAL_ARRAY_FUNCTION_INDEX)));
71 }
72
73
74 // Load the built-in Array function from the current context.
75 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
76 // Load the native context.
77
78 __ lw(result,
79 MemOperand(cp, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
80 __ lw(result,
81 FieldMemOperand(result, GlobalObject::kNativeContextOffset));
82 // Load the Array function from the native context.
83 __ lw(result,
84 MemOperand(result,
85 Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
86 }
87
88
89 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
90 // ----------- S t a t e -------------
91 // -- a0 : number of arguments
92 // -- ra : return address
93 // -- sp[...]: constructor arguments
94 // -----------------------------------
95 Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
96
97 // Get the InternalArray function.
98 GenerateLoadInternalArrayFunction(masm, a1);
99
100 if (FLAG_debug_code) {
101 // Initial map for the builtin InternalArray functions should be maps.
102 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
103 __ SmiTst(a2, t0);
104 __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
105 t0, Operand(zero_reg));
106 __ GetObjectType(a2, a3, t0);
107 __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
108 t0, Operand(MAP_TYPE));
109 }
110
111 // Run the native code for the InternalArray function called as a normal
112 // function.
113 // Tail call a stub.
114 InternalArrayConstructorStub stub(masm->isolate());
115 __ TailCallStub(&stub);
116 }
117
118
119 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
120 // ----------- S t a t e -------------
121 // -- a0 : number of arguments
122 // -- ra : return address
123 // -- sp[...]: constructor arguments
124 // -----------------------------------
125 Label generic_array_code;
126
127 // Get the Array function.
128 GenerateLoadArrayFunction(masm, a1);
129
130 if (FLAG_debug_code) {
131 // Initial map for the builtin Array functions should be maps.
132 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
133 __ SmiTst(a2, t0);
134 __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
135 t0, Operand(zero_reg));
136 __ GetObjectType(a2, a3, t0);
137 __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
138 t0, Operand(MAP_TYPE));
139 }
140
141 // Run the native code for the Array function called as a normal function.
142 // Tail call a stub.
143 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
144 ArrayConstructorStub stub(masm->isolate());
145 __ TailCallStub(&stub);
146 }
147
148
149 void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
150 // ----------- S t a t e -------------
151 // -- a0 : number of arguments
152 // -- a1 : constructor function
153 // -- ra : return address
154 // -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
155 // -- sp[argc * 4] : receiver
156 // -----------------------------------
157 Counters* counters = masm->isolate()->counters();
158 __ IncrementCounter(counters->string_ctor_calls(), 1, a2, a3);
159
160 Register function = a1;
161 if (FLAG_debug_code) {
162 __ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, a2);
163 __ Assert(eq, kUnexpectedStringFunction, function, Operand(a2));
164 }
165
166 // Load the first argument into a0 and get rid of the rest.
167 Label no_arguments;
168 __ Branch(&no_arguments, eq, a0, Operand(zero_reg));
169 // First arg = sp[(argc - 1) * 4].
170 __ Subu(a0, a0, Operand(1));
171 __ sll(a0, a0, kPointerSizeLog2);
172 __ Addu(sp, a0, sp);
173 __ lw(a0, MemOperand(sp));
174 // sp now points to args[0]; drop args[0] and the receiver.
175 __ Drop(2);
176
177 Register argument = a2;
178 Label not_cached, argument_is_string;
179 __ LookupNumberStringCache(a0, // Input.
180 argument, // Result.
181 a3, // Scratch.
182 t0, // Scratch.
183 t1, // Scratch.
184 &not_cached);
185 __ IncrementCounter(counters->string_ctor_cached_number(), 1, a3, t0);
186 __ bind(&argument_is_string);
187
188 // ----------- S t a t e -------------
189 // -- a2 : argument converted to string
190 // -- a1 : constructor function
191 // -- ra : return address
192 // -----------------------------------
193
194 Label gc_required;
195 __ Allocate(JSValue::kSize,
196 v0, // Result.
197 a3, // Scratch.
198 t0, // Scratch.
199 &gc_required,
200 TAG_OBJECT);
201
202 // Initialising the String Object.
203 Register map = a3;
204 __ LoadGlobalFunctionInitialMap(function, map, t0);
205 if (FLAG_debug_code) {
206 __ lbu(t0, FieldMemOperand(map, Map::kInstanceSizeOffset));
207 __ Assert(eq, kUnexpectedStringWrapperInstanceSize,
208 t0, Operand(JSValue::kSize >> kPointerSizeLog2));
209 __ lbu(t0, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
210 __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper,
211 t0, Operand(zero_reg));
212 }
213 __ sw(map, FieldMemOperand(v0, HeapObject::kMapOffset));
214
215 __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
216 __ sw(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset));
217 __ sw(a3, FieldMemOperand(v0, JSObject::kElementsOffset));
218
219 __ sw(argument, FieldMemOperand(v0, JSValue::kValueOffset));
220
221 // Ensure the object is fully initialized.
222 STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
223
224 __ Ret();
225
226 // The argument was not found in the number to string cache. Check
227 // if it's a string already before calling the conversion builtin.
228 Label convert_argument;
229 __ bind(&not_cached);
230 __ JumpIfSmi(a0, &convert_argument);
231
232 // Is it a String?
233 __ lw(a2, FieldMemOperand(a0, HeapObject::kMapOffset));
234 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
235 STATIC_ASSERT(kNotStringTag != 0);
236 __ And(t0, a3, Operand(kIsNotStringMask));
237 __ Branch(&convert_argument, ne, t0, Operand(zero_reg));
238 __ mov(argument, a0);
239 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
240 __ Branch(&argument_is_string);
241
242 // Invoke the conversion builtin and put the result into a2.
243 __ bind(&convert_argument);
244 __ push(function); // Preserve the function.
245 __ IncrementCounter(counters->string_ctor_conversions(), 1, a3, t0);
246 {
247 FrameScope scope(masm, StackFrame::INTERNAL);
248 __ push(a0);
249 __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
250 }
251 __ pop(function);
252 __ mov(argument, v0);
253 __ Branch(&argument_is_string);
254
255 // Load the empty string into a2, remove the receiver from the
256 // stack, and jump back to the case where the argument is a string.
257 __ bind(&no_arguments);
258 __ LoadRoot(argument, Heap::kempty_stringRootIndex);
259 __ Drop(1);
260 __ Branch(&argument_is_string);
261
262 // At this point the argument is already a string. Call runtime to
263 // create a string wrapper.
264 __ bind(&gc_required);
265 __ IncrementCounter(counters->string_ctor_gc_required(), 1, a3, t0);
266 {
267 FrameScope scope(masm, StackFrame::INTERNAL);
268 __ push(argument);
269 __ CallRuntime(Runtime::kNewStringWrapper, 1);
270 }
271 __ Ret();
272 }
273
274
275 static void CallRuntimePassFunction(
276 MacroAssembler* masm, Runtime::FunctionId function_id) {
277 FrameScope scope(masm, StackFrame::INTERNAL);
278 // Push a copy of the function onto the stack.
279 // Push the function as the parameter for the runtime call.
280 __ Push(a1, a1);
281
282 __ CallRuntime(function_id, 1);
283 // Restore the function.
284 __ Pop(a1);
285 }
286
287
288 static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
289 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
290 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCodeOffset));
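// a2 now holds the shared function's code object; compute its entry address and tail call it.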
291 __ Addu(at, a2, Operand(Code::kHeaderSize - kHeapObjectTag));
292 __ Jump(at);
293 }
294
295
296 static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
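// v0 holds the Code object returned by the preceding runtime call; compute its entry address and tail call it.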
297 __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
298 __ Jump(at);
299 }
300
301
302 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
303 // Checking whether the queued function is ready for install is optional,
304 // since we come across interrupts and stack checks elsewhere. However,
305 // not checking may delay installing ready functions, and always checking
306 // would be quite expensive. A good compromise is to first check against
307 // stack limit as a cue for an interrupt signal.
308 Label ok;
309 __ LoadRoot(t0, Heap::kStackLimitRootIndex);
310 __ Branch(&ok, hs, sp, Operand(t0));
311
312 CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
313 GenerateTailCallToReturnedCode(masm);
314
315 __ bind(&ok);
316 GenerateTailCallToSharedCode(masm);
317 }
318
319
320 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
321 bool is_api_function,
322 bool create_memento) {
323 // ----------- S t a t e -------------
324 // -- a0 : number of arguments
325 // -- a1 : constructor function
326 // -- a2 : allocation site or undefined
327 // -- ra : return address
328 // -- sp[...]: constructor arguments
329 // -----------------------------------
330
331 // Should never create mementos for api functions.
332 DCHECK(!is_api_function || !create_memento);
333
334 Isolate* isolate = masm->isolate();
335
336 // ----------- S t a t e -------------
337 // -- a0 : number of arguments
338 // -- a1 : constructor function
339 // -- ra : return address
340 // -- sp[...]: constructor arguments
341 // -----------------------------------
342
343 // Enter a construct frame.
344 {
345 FrameScope scope(masm, StackFrame::CONSTRUCT);
346
347 if (create_memento) {
348 __ AssertUndefinedOrAllocationSite(a2, a3);
349 __ push(a2);
350 }
351
352 // Preserve the two incoming parameters on the stack.
353 __ sll(a0, a0, kSmiTagSize); // Tag arguments count.
354 __ MultiPushReversed(a0.bit() | a1.bit());
355
356 Label rt_call, allocated;
357 // Try to allocate the object without transitioning into C code. If any of
358 // the preconditions is not met, the code bails out to the runtime call.
359 if (FLAG_inline_new) {
360 Label undo_allocation;
361 ExternalReference debug_step_in_fp =
362 ExternalReference::debug_step_in_fp_address(isolate);
363 __ li(a2, Operand(debug_step_in_fp));
364 __ lw(a2, MemOperand(a2));
365 __ Branch(&rt_call, ne, a2, Operand(zero_reg));
366
367 // Load the initial map and verify that it is in fact a map.
368 // a1: constructor function
369 __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
370 __ JumpIfSmi(a2, &rt_call);
371 __ GetObjectType(a2, a3, t4);
372 __ Branch(&rt_call, ne, t4, Operand(MAP_TYPE));
373
374 // Check that the constructor is not constructing a JSFunction (see
375 // comments in Runtime_NewObject in runtime.cc); in that case the
376 // initial map's instance type would be JS_FUNCTION_TYPE.
377 // a1: constructor function
378 // a2: initial map
379 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceTypeOffset));
380 __ Branch(&rt_call, eq, a3, Operand(JS_FUNCTION_TYPE));
381
382 if (!is_api_function) {
383 Label allocate;
384 MemOperand bit_field3 = FieldMemOperand(a2, Map::kBitField3Offset);
385 // Check if slack tracking is enabled.
386 __ lw(t0, bit_field3);
387 __ DecodeField<Map::ConstructionCount>(t2, t0);
388 __ Branch(&allocate, eq, t2, Operand(JSFunction::kNoSlackTracking));
389 // Decrease generous allocation count.
390 __ Subu(t0, t0, Operand(1 << Map::ConstructionCount::kShift));
391 __ Branch(USE_DELAY_SLOT,
392 &allocate, ne, t2, Operand(JSFunction::kFinishSlackTracking));
393 __ sw(t0, bit_field3); // In delay slot.
394
395 __ Push(a1, a2, a1); // a1 = Constructor.
396 __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
397
398 __ Pop(a1, a2);
399 // Slack tracking counter is kNoSlackTracking after runtime call.
400 DCHECK(JSFunction::kNoSlackTracking == 0);
401 __ mov(t2, zero_reg);
402
403 __ bind(&allocate);
404 }
405
406 // Now allocate the JSObject on the heap.
407 // a1: constructor function
408 // a2: initial map
409 __ lbu(a3, FieldMemOperand(a2, Map::kInstanceSizeOffset));
410 if (create_memento) {
411 __ Addu(a3, a3, Operand(AllocationMemento::kSize / kPointerSize));
412 }
413
414 __ Allocate(a3, t4, t5, t6, &rt_call, SIZE_IN_WORDS);
415
416 // Allocated the JSObject, now initialize the fields. Map is set to
417 // initial map and properties and elements are set to empty fixed array.
418 // a1: constructor function
419 // a2: initial map
420 // a3: object size (not including memento if create_memento)
421 // t4: JSObject (not tagged)
422 __ LoadRoot(t6, Heap::kEmptyFixedArrayRootIndex);
423 __ mov(t5, t4);
424 __ sw(a2, MemOperand(t5, JSObject::kMapOffset));
425 __ sw(t6, MemOperand(t5, JSObject::kPropertiesOffset));
426 __ sw(t6, MemOperand(t5, JSObject::kElementsOffset));
427 __ Addu(t5, t5, Operand(3*kPointerSize));
428 DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
429 DCHECK_EQ(1 * kPointerSize, JSObject::kPropertiesOffset);
430 DCHECK_EQ(2 * kPointerSize, JSObject::kElementsOffset);
431
432 // Fill all the in-object properties with appropriate filler.
433 // a1: constructor function
434 // a2: initial map
435 // a3: object size (in words, including memento if create_memento)
436 // t4: JSObject (not tagged)
437 // t5: First in-object property of JSObject (not tagged)
438 // t2: slack tracking counter (non-API function case)
439 DCHECK_EQ(3 * kPointerSize, JSObject::kHeaderSize);
440
441 // Use t7 to hold undefined, which is used in several places below.
442 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
443
444 if (!is_api_function) {
445 Label no_inobject_slack_tracking;
446
447 // Check if slack tracking is enabled.
448 __ Branch(&no_inobject_slack_tracking,
449 eq, t2, Operand(JSFunction::kNoSlackTracking));
450
451 // Allocate object with a slack.
452 __ lbu(a0, FieldMemOperand(a2, Map::kPreAllocatedPropertyFieldsOffset));
453 __ sll(at, a0, kPointerSizeLog2);
454 __ addu(a0, t5, at);
455 // a0: offset of first field after pre-allocated fields
456 if (FLAG_debug_code) {
457 __ sll(at, a3, kPointerSizeLog2);
458 __ Addu(t6, t4, Operand(at)); // End of object.
459 __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields,
460 a0, Operand(t6));
461 }
462 __ InitializeFieldsWithFiller(t5, a0, t7);
463 // To allow for truncation.
464 __ LoadRoot(t7, Heap::kOnePointerFillerMapRootIndex);
465 // Fill the remaining fields with one pointer filler map.
466
467 __ bind(&no_inobject_slack_tracking);
468 }
469
470 if (create_memento) {
471 __ Subu(a0, a3, Operand(AllocationMemento::kSize / kPointerSize));
472 __ sll(a0, a0, kPointerSizeLog2);
473 __ Addu(a0, t4, Operand(a0)); // End of object.
474 __ InitializeFieldsWithFiller(t5, a0, t7);
475
476 // Fill in memento fields.
477 // t5: points to the allocated but uninitialized memento.
478 __ LoadRoot(t7, Heap::kAllocationMementoMapRootIndex);
479 DCHECK_EQ(0 * kPointerSize, AllocationMemento::kMapOffset);
480 __ sw(t7, MemOperand(t5));
481 __ Addu(t5, t5, kPointerSize);
482 // Load the AllocationSite.
483 __ lw(t7, MemOperand(sp, 2 * kPointerSize));
484 DCHECK_EQ(1 * kPointerSize, AllocationMemento::kAllocationSiteOffset);
485 __ sw(t7, MemOperand(t5));
486 __ Addu(t5, t5, kPointerSize);
487 } else {
488 __ sll(at, a3, kPointerSizeLog2);
489 __ Addu(a0, t4, Operand(at)); // End of object.
490 __ InitializeFieldsWithFiller(t5, a0, t7);
491 }
492
493 // Add the object tag to make the JSObject real, so that we can continue
494 // and jump into the continuation code at any time from now on. Any
495 // failures need to undo the allocation, so that the heap is in a
496 // consistent state and verifiable.
497 __ Addu(t4, t4, Operand(kHeapObjectTag));
498
499 // Check if a non-empty properties array is needed. Continue with
500 // allocated object if not; fall through to runtime call if it is.
501 // a1: constructor function
502 // t4: JSObject
503 // t5: start of next object (not tagged)
504 __ lbu(a3, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
505 // The "instance sizes" field contains both pre-allocated property fields
506 // and in-object properties.
507 __ lbu(t6, FieldMemOperand(a2, Map::kPreAllocatedPropertyFieldsOffset));
508 __ Addu(a3, a3, Operand(t6));
509 __ lbu(t6, FieldMemOperand(a2, Map::kInObjectPropertiesOffset));
510 __ subu(a3, a3, t6);
511
512 // Done if no extra properties are to be allocated.
513 __ Branch(&allocated, eq, a3, Operand(zero_reg));
514 __ Assert(greater_equal, kPropertyAllocationCountFailed,
515 a3, Operand(zero_reg));
516
517 // Scale the number of elements by pointer size and add the header for
518 // FixedArrays to the start of the next object calculation from above.
519 // a1: constructor
520 // a3: number of elements in properties array
521 // t4: JSObject
522 // t5: start of next object
523 __ Addu(a0, a3, Operand(FixedArray::kHeaderSize / kPointerSize));
524 __ Allocate(
525 a0,
526 t5,
527 t6,
528 a2,
529 &undo_allocation,
530 static_cast<AllocationFlags>(RESULT_CONTAINS_TOP | SIZE_IN_WORDS));
531
532 // Initialize the FixedArray.
533 // a1: constructor
534 // a3: number of elements in properties array (untagged)
535 // t4: JSObject
536 // t5: start of next object
537 __ LoadRoot(t6, Heap::kFixedArrayMapRootIndex);
538 __ mov(a2, t5);
539 __ sw(t6, MemOperand(a2, JSObject::kMapOffset));
540 __ sll(a0, a3, kSmiTagSize);
541 __ sw(a0, MemOperand(a2, FixedArray::kLengthOffset));
542 __ Addu(a2, a2, Operand(2 * kPointerSize));
543
544 DCHECK_EQ(0 * kPointerSize, JSObject::kMapOffset);
545 DCHECK_EQ(1 * kPointerSize, FixedArray::kLengthOffset);
546
547 // Initialize the fields to undefined.
548 // a1: constructor
549 // a2: First element of FixedArray (not tagged)
550 // a3: number of elements in properties array
551 // t4: JSObject
552 // t5: FixedArray (not tagged)
553 __ sll(t3, a3, kPointerSizeLog2);
554 __ addu(t6, a2, t3); // End of object.
555 DCHECK_EQ(2 * kPointerSize, FixedArray::kHeaderSize);
556 { Label loop, entry;
557 if (!is_api_function || create_memento) {
558 __ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
559 } else if (FLAG_debug_code) {
560 __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
561 __ Assert(eq, kUndefinedValueNotLoaded, t7, Operand(t2));
562 }
563 __ jmp(&entry);
564 __ bind(&loop);
565 __ sw(t7, MemOperand(a2));
566 __ addiu(a2, a2, kPointerSize);
567 __ bind(&entry);
568 __ Branch(&loop, less, a2, Operand(t6));
569 }
570
571 // Store the initialized FixedArray into the properties field of
572 // the JSObject.
573 // a1: constructor function
574 // t4: JSObject
575 // t5: FixedArray (not tagged)
576 __ Addu(t5, t5, Operand(kHeapObjectTag)); // Add the heap tag.
577 __ sw(t5, FieldMemOperand(t4, JSObject::kPropertiesOffset));
578
579 // Continue with JSObject being successfully allocated.
580 // a1: constructor function
581 // t4: JSObject
582 __ jmp(&allocated);
583
584 // Undo the setting of the new top so that the heap is verifiable. For
585 // example, the map's unused properties potentially do not match the
586 // allocated object's unused properties.
587 // t4: JSObject (previous new top)
588 __ bind(&undo_allocation);
589 __ UndoAllocationInNewSpace(t4, t5);
590 }
591
592 // Allocate the new receiver object using the runtime call.
593 // a1: constructor function
594 __ bind(&rt_call);
595 if (create_memento) {
596 // Get the cell or allocation site.
597 __ lw(a2, MemOperand(sp, 2 * kPointerSize));
598 __ push(a2);
599 }
600
601 __ push(a1); // Argument for Runtime_NewObject.
602 if (create_memento) {
603 __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
604 } else {
605 __ CallRuntime(Runtime::kNewObject, 1);
606 }
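// The runtime call returns the new object in v0; keep it in t4 to match the inline-allocation path.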
607 __ mov(t4, v0);
608
609 // If we ended up using the runtime, and we want a memento, then the
610 // runtime call made it for us, and we must not increment the memento
611 // create count here.
612 Label count_incremented;
613 if (create_memento) {
614 __ jmp(&count_incremented);
615 }
616
617 // Receiver for constructor call allocated.
618 // t4: JSObject
619 __ bind(&allocated);
620
621 if (create_memento) {
622 __ lw(a2, MemOperand(sp, kPointerSize * 2));
623 __ LoadRoot(t5, Heap::kUndefinedValueRootIndex);
624 __ Branch(&count_incremented, eq, a2, Operand(t5));
625 // a2 is an AllocationSite. We are creating a memento from it, so we
626 // need to increment the memento create count.
627 __ lw(a3, FieldMemOperand(a2,
628 AllocationSite::kPretenureCreateCountOffset));
629 __ Addu(a3, a3, Operand(Smi::FromInt(1)));
630 __ sw(a3, FieldMemOperand(a2,
631 AllocationSite::kPretenureCreateCountOffset));
632 __ bind(&count_incremented);
633 }
634
635 __ Push(t4, t4);
636
637 // Reload the number of arguments from the stack.
638 // sp[0]: receiver
639 // sp[1]: receiver
640 // sp[2]: constructor function
641 // sp[3]: number of arguments (smi-tagged)
642 __ lw(a1, MemOperand(sp, 2 * kPointerSize));
643 __ lw(a3, MemOperand(sp, 3 * kPointerSize));
644
645 // Set up pointer to last argument.
646 __ Addu(a2, fp, Operand(StandardFrameConstants::kCallerSPOffset));
647
648 // Set up number of arguments for function call below.
649 __ srl(a0, a3, kSmiTagSize);
650
651 // Copy arguments and receiver to the expression stack.
652 // a0: number of arguments
653 // a1: constructor function
654 // a2: address of last argument (caller sp)
655 // a3: number of arguments (smi-tagged)
656 // sp[0]: receiver
657 // sp[1]: receiver
658 // sp[2]: constructor function
659 // sp[3]: number of arguments (smi-tagged)
660 Label loop, entry;
661 __ jmp(&entry);
662 __ bind(&loop);
663 __ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize);
664 __ Addu(t0, a2, Operand(t0));
665 __ lw(t1, MemOperand(t0));
666 __ push(t1);
667 __ bind(&entry);
668 __ Addu(a3, a3, Operand(-2));
669 __ Branch(&loop, greater_equal, a3, Operand(zero_reg));
670
671 // Call the function.
672 // a0: number of arguments
673 // a1: constructor function
674 if (is_api_function) {
675 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
676 Handle<Code> code =
677 masm->isolate()->builtins()->HandleApiCallConstruct();
678 __ Call(code, RelocInfo::CODE_TARGET);
679 } else {
680 ParameterCount actual(a0);
681 __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
682 }
683
684 // Store offset of return address for deoptimizer.
685 if (!is_api_function) {
686 masm->isolate()->heap()->SetConstructStubDeoptPCOffset(masm->pc_offset());
687 }
688
689 // Restore context from the frame.
690 __ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
691
692 // If the result is an object (in the ECMA sense), we should get rid
693 // of the receiver and use the result; see ECMA-262 section 13.2.2-7
694 // on page 74.
695 Label use_receiver, exit;
696
697 // If the result is a smi, it is *not* an object in the ECMA sense.
698 // v0: result
699 // sp[0]: receiver (newly allocated object)
700 // sp[1]: constructor function
701 // sp[2]: number of arguments (smi-tagged)
702 __ JumpIfSmi(v0, &use_receiver);
703
704 // If the type of the result (stored in its map) is less than
705 // FIRST_SPEC_OBJECT_TYPE, it is not an object in the ECMA sense.
706 __ GetObjectType(v0, a1, a3);
707 __ Branch(&exit, greater_equal, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
708
709 // Throw away the result of the constructor invocation and use the
710 // on-stack receiver as the result.
711 __ bind(&use_receiver);
712 __ lw(v0, MemOperand(sp));
713
714 // Remove receiver from the stack, remove caller arguments, and
715 // return.
716 __ bind(&exit);
717 // v0: result
718 // sp[0]: receiver (newly allocated object)
719 // sp[1]: constructor function
720 // sp[2]: number of arguments (smi-tagged)
721 __ lw(a1, MemOperand(sp, 2 * kPointerSize));
722
723 // Leave construct frame.
724 }
725
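// a1 holds the smi-tagged argument count (kSmiTagSize == 1); scale it to a byte
// offset to drop the arguments, then drop the receiver slot.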
726 __ sll(t0, a1, kPointerSizeLog2 - 1);
727 __ Addu(sp, sp, t0);
728 __ Addu(sp, sp, kPointerSize);
729 __ IncrementCounter(isolate->counters()->constructed_objects(), 1, a1, a2);
730 __ Ret();
731 }
732
733
734 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
735 Generate_JSConstructStubHelper(masm, false, FLAG_pretenuring_call_new);
736 }
737
738
739 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
740 Generate_JSConstructStubHelper(masm, true, false);
741 }
742
743
744 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
745 bool is_construct) {
746 // Called from JSEntryStub::GenerateBody
747
748 // ----------- S t a t e -------------
749 // -- a0: code entry
750 // -- a1: function
751 // -- a2: receiver_pointer
752 // -- a3: argc
753 // -- s0: argv
754 // -----------------------------------
755 ProfileEntryHookStub::MaybeCallEntryHook(masm);
756
757 // Clear the context before we push it when entering the JS frame.
758 __ mov(cp, zero_reg);
759
760 // Enter an internal frame.
761 {
762 FrameScope scope(masm, StackFrame::INTERNAL);
763
764 // Set up the context from the function argument.
765 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
766
767 // Push the function and the receiver onto the stack.
768 __ Push(a1, a2);
769
770 // Copy arguments to the stack in a loop.
771 // a3: argc
772 // s0: argv, i.e. points to first arg
773 Label loop, entry;
774 __ sll(t0, a3, kPointerSizeLog2);
775 __ addu(t2, s0, t0);
776 __ b(&entry);
777 __ nop(); // Branch delay slot nop.
778 // t2 points past last arg.
779 __ bind(&loop);
780 __ lw(t0, MemOperand(s0)); // Read next parameter.
781 __ addiu(s0, s0, kPointerSize);
782 __ lw(t0, MemOperand(t0)); // Dereference handle.
783 __ push(t0); // Push parameter.
784 __ bind(&entry);
785 __ Branch(&loop, ne, s0, Operand(t2));
786
787 // Initialize all JavaScript callee-saved registers, since they will be seen
788 // by the garbage collector as part of handlers.
789 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
790 __ mov(s1, t0);
791 __ mov(s2, t0);
792 __ mov(s3, t0);
793 __ mov(s4, t0);
794 __ mov(s5, t0);
795 // s6 holds the root address. Do not clobber.
796 // s7 is cp. Do not init.
797
798 // Invoke the code and pass argc as a0.
799 __ mov(a0, a3);
800 if (is_construct) {
801 // No type feedback cell is available
802 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
803 CallConstructStub stub(masm->isolate(), NO_CALL_CONSTRUCTOR_FLAGS);
804 __ CallStub(&stub);
805 } else {
806 ParameterCount actual(a0);
807 __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
808 }
809
810 // Leave internal frame.
811 }
812
813 __ Jump(ra);
814 }
815
816
817 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
818 Generate_JSEntryTrampolineHelper(masm, false);
819 }
820
821
822 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
823 Generate_JSEntryTrampolineHelper(masm, true);
824 }
825
826
827 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
828 CallRuntimePassFunction(masm, Runtime::kCompileLazy);
829 GenerateTailCallToReturnedCode(masm);
830 }
831
832
833 static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
834 FrameScope scope(masm, StackFrame::INTERNAL);
835 // Push a copy of the function onto the stack.
836 // Push function as parameter to the runtime call.
837 __ Push(a1, a1);
838 // Whether to compile in a background thread.
839 __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
840
841 __ CallRuntime(Runtime::kCompileOptimized, 2);
842 // Restore the function.
843 __ Pop(a1);
844 }
845
846
847 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
848 CallCompileOptimized(masm, false);
849 GenerateTailCallToReturnedCode(masm);
850 }
851
852
853 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
854 CallCompileOptimized(masm, true);
855 GenerateTailCallToReturnedCode(masm);
856 }
857
858
859
860 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
861 // For now, we are relying on the fact that make_code_young doesn't do any
862 // garbage collection which allows us to save/restore the registers without
863 // worrying about which of them contain pointers. We also don't build an
864 // internal frame to make the code faster, since we shouldn't have to do stack
865 // crawls in MakeCodeYoung. This seems a bit fragile.
866
867 // Set a0 to point to the head of the PlatformCodeAge sequence.
868 __ Subu(a0, a0,
869 Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
870
871 // The following registers must be saved and restored when calling through to
872 // the runtime:
873 // a0 - contains return address (beginning of patch sequence)
874 // a1 - isolate
875 RegList saved_regs =
876 (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
877 FrameScope scope(masm, StackFrame::MANUAL);
878 __ MultiPush(saved_regs);
879 __ PrepareCallCFunction(2, 0, a2);
880 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
881 __ CallCFunction(
882 ExternalReference::get_make_code_young_function(masm->isolate()), 2);
883 __ MultiPop(saved_regs);
884 __ Jump(a0);
885 }
886
887 #define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
888 void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
889 MacroAssembler* masm) { \
890 GenerateMakeCodeYoungAgainCommon(masm); \
891 } \
892 void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
893 MacroAssembler* masm) { \
894 GenerateMakeCodeYoungAgainCommon(masm); \
895 }
896 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
897 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
898
899
900 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
901 // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
902 // that make_code_young doesn't do any garbage collection which allows us to
903 // save/restore the registers without worrying about which of them contain
904 // pointers.
905
906 // Set a0 to point to the head of the PlatformCodeAge sequence.
907 __ Subu(a0, a0,
908 Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
909
910 // The following registers must be saved and restored when calling through to
911 // the runtime:
912 // a0 - contains return address (beginning of patch sequence)
913 // a1 - isolate
914 RegList saved_regs =
915 (a0.bit() | a1.bit() | ra.bit() | fp.bit()) & ~sp.bit();
916 FrameScope scope(masm, StackFrame::MANUAL);
917 __ MultiPush(saved_regs);
918 __ PrepareCallCFunction(2, 0, a2);
919 __ li(a1, Operand(ExternalReference::isolate_address(masm->isolate())));
920 __ CallCFunction(
921 ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
922 2);
923 __ MultiPop(saved_regs);
924
925 // Perform prologue operations usually performed by the young code stub.
926 __ Push(ra, fp, cp, a1);
927 __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp));
928
929 // Jump to point after the code-age stub.
930 __ Addu(a0, a0, Operand(kNoCodeAgeSequenceLength));
931 __ Jump(a0);
932 }
933
934
935 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
936 GenerateMakeCodeYoungAgainCommon(masm);
937 }
938
939
940 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
941 SaveFPRegsMode save_doubles) {
942 {
943 FrameScope scope(masm, StackFrame::INTERNAL);
944
945 // Preserve registers across notification, this is important for compiled
946 // stubs that tail call the runtime on deopts passing their parameters in
947 // registers.
948 __ MultiPush(kJSCallerSaved | kCalleeSaved);
949 // Notify the runtime system of the stub failure.
950 __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
951 __ MultiPop(kJSCallerSaved | kCalleeSaved);
952 }
953
954 __ Addu(sp, sp, Operand(kPointerSize)); // Ignore state
955 __ Jump(ra); // Jump to miss handler
956 }
957
958
959 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
960 Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
961 }
962
963
964 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
965 Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
966 }
967
968
969 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
970 Deoptimizer::BailoutType type) {
971 {
972 FrameScope scope(masm, StackFrame::INTERNAL);
973 // Pass the deoptimization type to the runtime system.
974 __ li(a0, Operand(Smi::FromInt(static_cast<int>(type))));
975 __ push(a0);
976 __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
977 }
978
979 // Get the full codegen state from the stack and untag it -> t2.
980 __ lw(t2, MemOperand(sp, 0 * kPointerSize));
981 __ SmiUntag(t2);
982 // Switch on the state.
983 Label with_tos_register, unknown_state;
984 __ Branch(&with_tos_register,
985 ne, t2, Operand(FullCodeGenerator::NO_REGISTERS));
986 __ Ret(USE_DELAY_SLOT);
987 // Safe to fill delay slot: Addu will emit one instruction.
988 __ Addu(sp, sp, Operand(1 * kPointerSize)); // Remove state.
989
990 __ bind(&with_tos_register);
991 __ lw(v0, MemOperand(sp, 1 * kPointerSize));
992 __ Branch(&unknown_state, ne, t2, Operand(FullCodeGenerator::TOS_REG));
993
994 __ Ret(USE_DELAY_SLOT);
995 // Safe to fill delay slot: Addu will emit one instruction.
996 __ Addu(sp, sp, Operand(2 * kPointerSize)); // Remove state.
997
998 __ bind(&unknown_state);
999 __ stop("no cases left");
1000 }
1001
1002
1003 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
1004 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
1005 }
1006
1007
1008 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
1009 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
1010 }
1011
1012
1013 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
1014 Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
1015 }
1016
1017
1018 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
1019 // Lookup the function in the JavaScript frame.
1020 __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
1021 {
1022 FrameScope scope(masm, StackFrame::INTERNAL);
1023 // Pass function as argument.
1024 __ push(a0);
1025 __ CallRuntime(Runtime::kCompileForOnStackReplacement, 1);
1026 }
1027
1028 // If the code object is null, just return to the unoptimized code.
1029 __ Ret(eq, v0, Operand(Smi::FromInt(0)));
1030
1031 // Load deoptimization data from the code object.
1032 // <deopt_data> = <code>[#deoptimization_data_offset]
1033 __ lw(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));
1034
1035 // Load the OSR entrypoint offset from the deoptimization data.
1036 // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
1037 __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
1038 DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
1039 __ SmiUntag(a1);
1040
1041 // Compute the target address = code_obj + header_size + osr_offset
1042 // <entry_addr> = <code_obj> + #header_size + <osr_offset>
1043 __ addu(v0, v0, a1);
1044 __ addiu(ra, v0, Code::kHeaderSize - kHeapObjectTag);
1045
1046 // And "return" to the OSR entry point of the function.
1047 __ Ret();
1048 }
1049
1050
1051 void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
1052 // We check the stack limit as an indicator that recompilation might be done.
1053 Label ok;
1054 __ LoadRoot(at, Heap::kStackLimitRootIndex);
1055 __ Branch(&ok, hs, sp, Operand(at));
1056 {
1057 FrameScope scope(masm, StackFrame::INTERNAL);
1058 __ CallRuntime(Runtime::kStackGuard, 0);
1059 }
1060 __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
1061 RelocInfo::CODE_TARGET);
1062
1063 __ bind(&ok);
1064 __ Ret();
1065 }
1066
1067
1068 void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
1069 // 1. Make sure we have at least one argument.
1070 // a0: actual number of arguments
1071 { Label done;
1072 __ Branch(&done, ne, a0, Operand(zero_reg));
1073 __ LoadRoot(t2, Heap::kUndefinedValueRootIndex);
1074 __ push(t2);
1075 __ Addu(a0, a0, Operand(1));
1076 __ bind(&done);
1077 }
1078
1079 // 2. Get the function to call (passed as receiver) from the stack, check
1080 // if it is a function.
1081 // a0: actual number of arguments
1082 Label slow, non_function;
1083 __ sll(at, a0, kPointerSizeLog2);
1084 __ addu(at, sp, at);
1085 __ lw(a1, MemOperand(at));
1086 __ JumpIfSmi(a1, &non_function);
1087 __ GetObjectType(a1, a2, a2);
1088 __ Branch(&slow, ne, a2, Operand(JS_FUNCTION_TYPE));
1089
1090 // 3a. Patch the first argument if necessary when calling a function.
1091 // a0: actual number of arguments
1092 // a1: function
1093 Label shift_arguments;
1094 __ li(t0, Operand(0, RelocInfo::NONE32)); // Indicate regular JS_FUNCTION.
1095 { Label convert_to_object, use_global_proxy, patch_receiver;
1096 // Change context eagerly in case we need the global receiver.
1097 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
1098
1099 // Do not transform the receiver for strict mode functions.
1100 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1101 __ lw(a3, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
1102 __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
1103 kSmiTagSize)));
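// The compiler hints field is a smi, hence the extra kSmiTagSize in the bit shift.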
1104 __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));
1105
1106 // Do not transform the receiver for native (Compilerhints already in a3).
1107 __ And(t3, a3, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1108 __ Branch(&shift_arguments, ne, t3, Operand(zero_reg));
1109
1110 // Compute the receiver in sloppy mode.
1111 // Load first argument in a2. a2 = -kPointerSize(sp + n_args << 2).
1112 __ sll(at, a0, kPointerSizeLog2);
1113 __ addu(a2, sp, at);
1114 __ lw(a2, MemOperand(a2, -kPointerSize));
1115 // a0: actual number of arguments
1116 // a1: function
1117 // a2: first argument
1118 __ JumpIfSmi(a2, &convert_to_object, t2);
1119
1120 __ LoadRoot(a3, Heap::kUndefinedValueRootIndex);
1121 __ Branch(&use_global_proxy, eq, a2, Operand(a3));
1122 __ LoadRoot(a3, Heap::kNullValueRootIndex);
1123 __ Branch(&use_global_proxy, eq, a2, Operand(a3));
1124
1125 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1126 __ GetObjectType(a2, a3, a3);
1127 __ Branch(&shift_arguments, ge, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
1128
1129 __ bind(&convert_to_object);
1130 // Enter an internal frame in order to preserve argument count.
1131 {
1132 FrameScope scope(masm, StackFrame::INTERNAL);
1133 __ sll(a0, a0, kSmiTagSize); // Smi tagged.
1134 __ Push(a0, a2);
1135 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1136 __ mov(a2, v0);
1137
1138 __ pop(a0);
1139 __ sra(a0, a0, kSmiTagSize); // Un-tag.
1140 // Leave internal frame.
1141 }
1142
1143 // Restore the function to a1, and the flag to t0.
1144 __ sll(at, a0, kPointerSizeLog2);
1145 __ addu(at, sp, at);
1146 __ lw(a1, MemOperand(at));
1147 __ Branch(USE_DELAY_SLOT, &patch_receiver);
1148 __ li(t0, Operand(0, RelocInfo::NONE32)); // In delay slot.
1149
1150 __ bind(&use_global_proxy);
1151 __ lw(a2, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
1152 __ lw(a2, FieldMemOperand(a2, GlobalObject::kGlobalProxyOffset));
1153
1154 __ bind(&patch_receiver);
1155 __ sll(at, a0, kPointerSizeLog2);
1156 __ addu(a3, sp, at);
1157 __ sw(a2, MemOperand(a3, -kPointerSize));
1158
1159 __ Branch(&shift_arguments);
1160 }
1161
1162 // 3b. Check for function proxy.
1163 __ bind(&slow);
1164 __ li(t0, Operand(1, RelocInfo::NONE32)); // Indicate function proxy.
1165 __ Branch(&shift_arguments, eq, a2, Operand(JS_FUNCTION_PROXY_TYPE));
1166
1167 __ bind(&non_function);
1168 __ li(t0, Operand(2, RelocInfo::NONE32)); // Indicate non-function.
1169
1170 // 3c. Patch the first argument when calling a non-function. The
1171 // CALL_NON_FUNCTION builtin expects the non-function callee as
1172 // receiver, so overwrite the first argument which will ultimately
1173 // become the receiver.
1174 // a0: actual number of arguments
1175 // a1: function
1176 // t0: call type (0: JS function, 1: function proxy, 2: non-function)
1177 __ sll(at, a0, kPointerSizeLog2);
1178 __ addu(a2, sp, at);
1179 __ sw(a1, MemOperand(a2, -kPointerSize));
1180
1181 // 4. Shift arguments and return address one slot down on the stack
1182 // (overwriting the original receiver). Adjust argument count to make
1183 // the original first argument the new receiver.
1184 // a0: actual number of arguments
1185 // a1: function
1186 // t0: call type (0: JS function, 1: function proxy, 2: non-function)
1187 __ bind(&shift_arguments);
1188 { Label loop;
1189 // Calculate the copy start address (destination). Copy end address is sp.
1190 __ sll(at, a0, kPointerSizeLog2);
1191 __ addu(a2, sp, at);
1192
1193 __ bind(&loop);
1194 __ lw(at, MemOperand(a2, -kPointerSize));
1195 __ sw(at, MemOperand(a2));
1196 __ Subu(a2, a2, Operand(kPointerSize));
1197 __ Branch(&loop, ne, a2, Operand(sp));
1198 // Adjust the actual number of arguments and remove the top element
1199 // (which is a copy of the last argument).
1200 __ Subu(a0, a0, Operand(1));
1201 __ Pop();
1202 }
1203
1204 // 5a. Call non-function via tail call to CALL_NON_FUNCTION builtin,
1205 // or a function proxy via CALL_FUNCTION_PROXY.
1206 // a0: actual number of arguments
1207 // a1: function
1208 // t0: call type (0: JS function, 1: function proxy, 2: non-function)
1209 { Label function, non_proxy;
1210 __ Branch(&function, eq, t0, Operand(zero_reg));
1211 // Expected number of arguments is 0 for CALL_NON_FUNCTION.
1212 __ mov(a2, zero_reg);
1213 __ Branch(&non_proxy, ne, t0, Operand(1));
1214
1215 __ push(a1); // Re-add proxy object as additional argument.
1216 __ Addu(a0, a0, Operand(1));
1217 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
1218 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1219 RelocInfo::CODE_TARGET);
1220
1221 __ bind(&non_proxy);
1222 __ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
1223 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1224 RelocInfo::CODE_TARGET);
1225 __ bind(&function);
1226 }
1227
1228 // 5b. Get the code to call from the function and check that the number of
1229 // expected arguments matches what we're providing. If so, jump
1230 // (tail-call) to the code in register a3 without checking arguments.
1231 // a0: actual number of arguments
1232 // a1: function
1233 __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1234 __ lw(a2,
1235 FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
1236 __ sra(a2, a2, kSmiTagSize);
1237 // Check formal and actual parameter counts.
1238 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1239 RelocInfo::CODE_TARGET, ne, a2, Operand(a0));
1240
1241 __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
1242 ParameterCount expected(0);
1243 __ InvokeCode(a3, expected, expected, JUMP_FUNCTION, NullCallWrapper());
1244 }
1245
1246
1247 void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
1248 const int kIndexOffset =
1249 StandardFrameConstants::kExpressionsOffset - (2 * kPointerSize);
1250 const int kLimitOffset =
1251 StandardFrameConstants::kExpressionsOffset - (1 * kPointerSize);
1252 const int kArgsOffset = 2 * kPointerSize;
1253 const int kRecvOffset = 3 * kPointerSize;
1254 const int kFunctionOffset = 4 * kPointerSize;
1255
1256 {
1257 FrameScope frame_scope(masm, StackFrame::INTERNAL);
1258 __ lw(a0, MemOperand(fp, kFunctionOffset)); // Get the function.
1259 __ push(a0);
1260 __ lw(a0, MemOperand(fp, kArgsOffset)); // Get the args array.
1261 __ push(a0);
1262 // Returns (in v0) number of arguments to copy to stack as Smi.
1263 __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
1264
1265 // Check the stack for overflow. We are not trying to catch
1266 // interruptions (e.g. debug break and preemption) here, so the "real stack
1267 // limit" is checked.
1268 Label okay;
1269 __ LoadRoot(a2, Heap::kRealStackLimitRootIndex);
1270 // Make a2 the space we have left. The stack might already be overflowed
1271 // here which will cause a2 to become negative.
1272 __ subu(a2, sp, a2);
1273 // Check if the arguments will overflow the stack.
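// v0 is the smi-tagged argument count; convert it to a byte size for the comparison.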
1274 __ sll(t3, v0, kPointerSizeLog2 - kSmiTagSize);
1275 __ Branch(&okay, gt, a2, Operand(t3)); // Signed comparison.
1276
1277 // Out of stack space.
1278 __ lw(a1, MemOperand(fp, kFunctionOffset));
1279 __ Push(a1, v0);
1280 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
1281 // End of stack check.
1282
1283 // Push current limit and index.
1284 __ bind(&okay);
1285 __ mov(a1, zero_reg);
1286 __ Push(v0, a1); // Limit and initial index.
1287
1288 // Get the receiver.
1289 __ lw(a0, MemOperand(fp, kRecvOffset));
1290
1291 // Check that the function is a JS function (otherwise it must be a proxy).
1292 Label push_receiver;
1293 __ lw(a1, MemOperand(fp, kFunctionOffset));
1294 __ GetObjectType(a1, a2, a2);
1295 __ Branch(&push_receiver, ne, a2, Operand(JS_FUNCTION_TYPE));
1296
1297 // Change context eagerly to get the right global object if necessary.
1298 __ lw(cp, FieldMemOperand(a1, JSFunction::kContextOffset));
1299 // Load the shared function info while the function is still in a1.
1300 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
1301
1302 // Compute the receiver.
1303 // Do not transform the receiver for strict mode functions.
1304 Label call_to_object, use_global_proxy;
1305 __ lw(a2, FieldMemOperand(a2, SharedFunctionInfo::kCompilerHintsOffset));
1306 __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
1307 kSmiTagSize)));
1308 __ Branch(&push_receiver, ne, t3, Operand(zero_reg));
1309
1310 // Do not transform the receiver for native (Compilerhints already in a2).
1311 __ And(t3, a2, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
1312 __ Branch(&push_receiver, ne, t3, Operand(zero_reg));
1313
1314 // Compute the receiver in sloppy mode.
1315 __ JumpIfSmi(a0, &call_to_object);
1316 __ LoadRoot(a1, Heap::kNullValueRootIndex);
1317 __ Branch(&use_global_proxy, eq, a0, Operand(a1));
1318 __ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
1319 __ Branch(&use_global_proxy, eq, a0, Operand(a2));
1320
1321 // Check if the receiver is already a JavaScript object.
1322 // a0: receiver
1323 STATIC_ASSERT(LAST_SPEC_OBJECT_TYPE == LAST_TYPE);
1324 __ GetObjectType(a0, a1, a1);
1325 __ Branch(&push_receiver, ge, a1, Operand(FIRST_SPEC_OBJECT_TYPE));
1326
1327 // Convert the receiver to a regular object.
1328 // a0: receiver
1329 __ bind(&call_to_object);
1330 __ push(a0);
1331 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1332 __ mov(a0, v0); // Put object in a0 to match other paths to push_receiver.
1333 __ Branch(&push_receiver);
1334
1335 __ bind(&use_global_proxy);
1336 __ lw(a0, ContextOperand(cp, Context::GLOBAL_OBJECT_INDEX));
1337 __ lw(a0, FieldMemOperand(a0, GlobalObject::kGlobalProxyOffset));
1338
1339 // Push the receiver.
1340 // a0: receiver
1341 __ bind(&push_receiver);
1342 __ push(a0);
1343
1344 // Copy all arguments from the array to the stack.
1345 Label entry, loop;
1346 __ lw(a0, MemOperand(fp, kIndexOffset));
1347 __ Branch(&entry);
1348
1349 // Load the current argument from the arguments array and push it to the
1350 // stack.
1351 // a0: current argument index
1352 __ bind(&loop);
1353 __ lw(a1, MemOperand(fp, kArgsOffset));
1354 __ Push(a1, a0);
1355
1356 // Call the runtime to access the property in the arguments array.
1357 __ CallRuntime(Runtime::kGetProperty, 2);
1358 __ push(v0);
1359
1360 // Advance the current argument index (a smi).
1361 __ lw(a0, MemOperand(fp, kIndexOffset));
1362 __ Addu(a0, a0, Operand(1 << kSmiTagSize));
1363 __ sw(a0, MemOperand(fp, kIndexOffset));
1364
1365 // Test if the copy loop has finished copying all the elements from the
1366 // arguments object.
1367 __ bind(&entry);
1368 __ lw(a1, MemOperand(fp, kLimitOffset));
1369 __ Branch(&loop, ne, a0, Operand(a1));
1370
1371 // Call the function.
1372 Label call_proxy;
1373 ParameterCount actual(a0);
1374 __ sra(a0, a0, kSmiTagSize);
1375 __ lw(a1, MemOperand(fp, kFunctionOffset));
1376 __ GetObjectType(a1, a2, a2);
1377 __ Branch(&call_proxy, ne, a2, Operand(JS_FUNCTION_TYPE));
1378
1379 __ InvokeFunction(a1, actual, CALL_FUNCTION, NullCallWrapper());
1380
1381 frame_scope.GenerateLeaveFrame();
1382 __ Ret(USE_DELAY_SLOT);
1383 __ Addu(sp, sp, Operand(3 * kPointerSize)); // In delay slot.
1384
1385 // Call the function proxy.
1386 __ bind(&call_proxy);
1387 __ push(a1); // Add function proxy as last argument.
1388 __ Addu(a0, a0, Operand(1));
1389 __ li(a2, Operand(0, RelocInfo::NONE32));
1390 __ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
1391 __ Call(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
1392 RelocInfo::CODE_TARGET);
1393 // Tear down the internal frame and remove function, receiver and args.
1394 }
1395
1396 __ Ret(USE_DELAY_SLOT);
1397 __ Addu(sp, sp, Operand(3 * kPointerSize)); // In delay slot.
1398 }
1399
1400
1401 static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
1402 Label* stack_overflow) {
1403 // ----------- S t a t e -------------
1404 // -- a0 : actual number of arguments
1405 // -- a1 : function (passed through to callee)
1406 // -- a2 : expected number of arguments
1407 // -----------------------------------
1408 // Check the stack for overflow. We are not trying to catch
1409 // interruptions (e.g. debug break and preemption) here, so the "real stack
1410 // limit" is checked.
1411 __ LoadRoot(t1, Heap::kRealStackLimitRootIndex);
1412 // Make t1 the space we have left. The stack might already be overflowed
1413 // here which will cause t1 to become negative.
1414 __ subu(t1, sp, t1);
1415 // Check if the arguments will overflow the stack.
1416 __ sll(at, a2, kPointerSizeLog2);
1417 // Signed comparison.
1418 __ Branch(stack_overflow, le, t1, Operand(at));
1419 }
1420
1421
1422 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
1423 __ sll(a0, a0, kSmiTagSize);
1424 __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
1425 __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
1426 __ Addu(fp, sp,
1427 Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
1428 }
1429
1430
1431 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
1432 // ----------- S t a t e -------------
1433 // -- v0 : result being passed through
1434 // -----------------------------------
1435 // Get the number of arguments passed (as a smi), tear down the frame and
1436 // then tear down the parameters.
1437 __ lw(a1, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
1438 kPointerSize)));
1439 __ mov(sp, fp);
1440 __ MultiPop(fp.bit() | ra.bit());
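// Convert the smi-tagged count in a1 to a byte offset and drop the arguments.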
1441 __ sll(t0, a1, kPointerSizeLog2 - kSmiTagSize);
1442 __ Addu(sp, sp, t0);
1443 // Adjust for the receiver.
1444 __ Addu(sp, sp, Operand(kPointerSize));
1445 }
1446
1447
1448 void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
1449 // State setup as expected by MacroAssembler::InvokePrologue.
1450 // ----------- S t a t e -------------
1451 // -- a0: actual arguments count
1452 // -- a1: function (passed through to callee)
1453 // -- a2: expected arguments count
1454 // -----------------------------------
1455
1456 Label stack_overflow;
1457 ArgumentAdaptorStackCheck(masm, &stack_overflow);
1458 Label invoke, dont_adapt_arguments;
1459
1460 Label enough, too_few;
1461 __ lw(a3, FieldMemOperand(a1, JSFunction::kCodeEntryOffset));
1462 __ Branch(&dont_adapt_arguments, eq,
1463 a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
1464 // We use Uless as the number of arguments should always be greater than 0.
1465 __ Branch(&too_few, Uless, a0, Operand(a2));
1466
1467 { // Enough parameters: actual >= expected.
1468 // a0: actual number of arguments as a smi
1469 // a1: function
1470 // a2: expected number of arguments
1471 // a3: code entry to call
1472 __ bind(&enough);
1473 EnterArgumentsAdaptorFrame(masm);
1474
1475 // Calculate copy start address into a0 and copy end address into a2.
1476 __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
1477 __ Addu(a0, fp, a0);
1478 // Adjust for return address and receiver.
1479 __ Addu(a0, a0, Operand(2 * kPointerSize));
1480 // Compute copy end address.
1481 __ sll(a2, a2, kPointerSizeLog2);
1482 __ subu(a2, a0, a2);
1483
1484 // Copy the arguments (including the receiver) to the new stack frame.
1485 // a0: copy start address
1486 // a1: function
1487 // a2: copy end address
1488 // a3: code entry to call
1489
1490 Label copy;
1491 __ bind(&copy);
1492 __ lw(t0, MemOperand(a0));
1493 __ push(t0);
1494 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(a2));
1495 __ addiu(a0, a0, -kPointerSize); // In delay slot.
1496
1497 __ jmp(&invoke);
1498 }
1499
1500 { // Too few parameters: Actual < expected.
1501 __ bind(&too_few);
1502 EnterArgumentsAdaptorFrame(masm);
1503
1504 // Calculate copy start address into a0; the copy end address is fp.
1505 // a0: actual number of arguments as a smi
1506 // a1: function
1507 // a2: expected number of arguments
1508 // a3: code entry to call
1509 __ sll(a0, a0, kPointerSizeLog2 - kSmiTagSize);
1510 __ Addu(a0, fp, a0);
1511 // Adjust for return address and receiver.
1512 __ Addu(a0, a0, Operand(2 * kPointerSize));
1513 // Compute copy end address. Also adjust for return address.
1514 __ Addu(t3, fp, kPointerSize);
1515
1516 // Copy the arguments (including the receiver) to the new stack frame.
1517 // a0: copy start address
1518 // a1: function
1519 // a2: expected number of arguments
1520 // a3: code entry to call
1521 // t3: copy end address
1522 Label copy;
1523 __ bind(&copy);
1524 __ lw(t0, MemOperand(a0)); // Adjusted above for return addr and receiver.
1525 __ Subu(sp, sp, kPointerSize);
1526 __ Subu(a0, a0, kPointerSize);
1527 __ Branch(USE_DELAY_SLOT, &copy, ne, a0, Operand(t3));
1528 __ sw(t0, MemOperand(sp)); // In the delay slot.
1529
1530 // Fill the remaining expected arguments with undefined.
1531 // a1: function
1532 // a2: expected number of arguments
1533 // a3: code entry to call
1534 __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
1535 __ sll(t2, a2, kPointerSizeLog2);
1536 __ Subu(a2, fp, Operand(t2));
1537 // Adjust for frame.
1538 __ Subu(a2, a2, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
1539 2 * kPointerSize));
1540
1541 Label fill;
1542 __ bind(&fill);
1543 __ Subu(sp, sp, kPointerSize);
1544 __ Branch(USE_DELAY_SLOT, &fill, ne, sp, Operand(a2));
1545 __ sw(t0, MemOperand(sp));
1546 }
1547
1548 // Call the entry point.
1549 __ bind(&invoke);
1550
1551 __ Call(a3);
1552
1553 // Store offset of return address for deoptimizer.
1554 masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
1555
1556 // Exit frame and return.
1557 LeaveArgumentsAdaptorFrame(masm);
1558 __ Ret();
1559
1560
1561 // -------------------------------------------
1562 // Don't adapt arguments.
1563 // -------------------------------------------
1564 __ bind(&dont_adapt_arguments);
1565 __ Jump(a3);
1566
1567 __ bind(&stack_overflow);
1568 {
1569 FrameScope frame(masm, StackFrame::MANUAL);
1570 EnterArgumentsAdaptorFrame(masm);
1571 __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
1572 __ break_(0xCC);
1573 }
1574 }
1575
1576
1577 #undef __
1578
1579 } } // namespace v8::internal
1580
1581 #endif // V8_TARGET_ARCH_MIPS
1582